Schema (⌀ marks nullable columns; min/max are observed string lengths or numeric values):

| column | dtype | min | max |
|---|---|---|---|
| hexsha | string (length) | 40 | 40 |
| size | int64 | 4 | 1.02M |
| ext | string (8 classes) | | |
| lang | string (1 class) | | |
| max_stars_repo_path | string (length) | 4 | 209 |
| max_stars_repo_name | string (length) | 5 | 121 |
| max_stars_repo_head_hexsha | string (length) | 40 | 40 |
| max_stars_repo_licenses | list (length) | 1 | 10 |
| max_stars_count ⌀ | int64 | 1 | 191k |
| max_stars_repo_stars_event_min_datetime ⌀ | string (length) | 24 | 24 |
| max_stars_repo_stars_event_max_datetime ⌀ | string (length) | 24 | 24 |
| max_issues_repo_path | string (length) | 4 | 209 |
| max_issues_repo_name | string (length) | 5 | 121 |
| max_issues_repo_head_hexsha | string (length) | 40 | 40 |
| max_issues_repo_licenses | list (length) | 1 | 10 |
| max_issues_count ⌀ | int64 | 1 | 67k |
| max_issues_repo_issues_event_min_datetime ⌀ | string (length) | 24 | 24 |
| max_issues_repo_issues_event_max_datetime ⌀ | string (length) | 24 | 24 |
| max_forks_repo_path | string (length) | 4 | 209 |
| max_forks_repo_name | string (length) | 5 | 121 |
| max_forks_repo_head_hexsha | string (length) | 40 | 40 |
| max_forks_repo_licenses | list (length) | 1 | 10 |
| max_forks_count ⌀ | int64 | 1 | 105k |
| max_forks_repo_forks_event_min_datetime ⌀ | string (length) | 24 | 24 |
| max_forks_repo_forks_event_max_datetime ⌀ | string (length) | 24 | 24 |
| content | string (length) | 4 | 1.02M |
| avg_line_length | float64 | 1.07 | 66.1k |
| max_line_length | int64 | 4 | 266k |
| alphanum_fraction | float64 | 0.01 | 1 |
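The rows below follow this schema. A minimal loading sketch with the Hugging Face `datasets` library (the dataset id is a placeholder, not the actual source of this dump):

from datasets import load_dataset

# Placeholder dataset id; substitute the real source of this dump.
ds = load_dataset("some-org/python-source-dump", split="train", streaming=True)

# Mirror the schema above: keep small, starred Python files.
for row in ds:
    if row["ext"] == "py" and row["size"] < 1000 and (row["max_stars_count"] or 0) >= 5:
        print(row["max_stars_repo_name"], row["max_stars_repo_path"])
        break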
hexsha f9e45c36491a00295de9370b55f71068fbf9553b | size 448 | ext py | lang Python
max_stars: repo gauravssnl/Data-Structures-and-Algorithms @ 1c335c72ce514d4f95090241bbd6edf01a1141a8, path python/Data Structures and Algorithms in Python Book/recursion/linear_sum.py, licenses ["MIT"], count 7, events 2020-05-10T09:57:23.000Z → 2021-03-27T11:55:07.000Z
max_issues: same repo/head/path, licenses ["MIT"], count null, events null
max_forks: same repo/head/path, licenses ["MIT"], count 3, events 2021-03-27T03:42:57.000Z → 2021-08-09T12:03:41.000Z
content:
# For n inputs, this function makes n+1 calls
# Complexity: O(n)
def linear_sum(S, n):
    """Computing the sum of a sequence recursively, by adding the last number to the sum of the first n-1 numbers"""
if n == 0:
return 0
else:
print("linear_sum({}, {}) + S[n-1]: {}".format(S, n-1, S[n-1]))
return linear_sum(S, n-1) + S[n-1]
if __name__ == "__main__":
s = list(range(1, 11))
print(linear_sum(s, len(s)))
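# For s = list(range(1, 11)) this prints one trace line per recursive call, then 55.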
avg_line_length 34.461538 | max_line_length 116 | alphanum_fraction 0.589286
hexsha 758e148f20a3fce55bb2aff040978440eca17515 | size 295 | ext py | lang Python
max_stars: repo creditease-natrix/natrix @ 8b97efdc9287645ea6b99dcf3a99fbe3f6ba6862, path benchmark/backends/command_dispatcher/channels/base.py, licenses ["MIT"], count 3, events 2019-06-28T02:25:10.000Z → 2019-12-16T08:50:08.000Z
max_issues: same repo/head/path, licenses ["MIT"], count 3, events 2020-02-12T00:17:22.000Z → 2021-06-10T21:29:11.000Z
max_forks: same repo/head/path, licenses ["MIT"], count 1, events 2019-06-22T06:04:59.000Z → 2019-06-22T06:04:59.000Z
content:
# -*- coding: utf-8 -*-
"""
"""
class DispatchClient(object):
    def __init__(self):
        super(DispatchClient, self).__init__()
def __enter__(self):
pass
def __exit__(self, exc_type, exc_val, exc_tb):
pass
def subscribe(self, *args, **kwargs):
pass
avg_line_length 14.75 | max_line_length 50 | alphanum_fraction 0.572881
hexsha c08c2fc88465e6b5637e8fe10ac9dabe9ea01d0b | size 2,328 | ext py | lang Python
max_stars: repo RSabet/wxGlade @ 8b62eb8397308e60977857455b2765727b1b940f, path tests/casefiles/Tool_Menu_EventBinding_Phoenix.py, licenses ["MIT"], count 225, events 2018-03-26T11:23:22.000Z → 2022-03-24T09:44:08.000Z
max_issues: same repo/head/path, licenses ["MIT"], count 403, events 2018-01-03T19:47:28.000Z → 2018-03-23T17:43:39.000Z
max_forks: repo DietmarSchwertberger/wxGlade @ 8e78cdc509d458cc896d47315e19f3daa6c09213, same path, licenses ["MIT"], count 47, events 2018-04-08T16:48:38.000Z → 2021-12-21T20:08:44.000Z
content:
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
#
# generated by wxGlade
#
import wx
# begin wxGlade: dependencies
# end wxGlade
# begin wxGlade: extracode
# end wxGlade
class MyFrame(wx.Frame):
def __init__(self, *args, **kwds):
# begin wxGlade: MyFrame.__init__
kwds["style"] = kwds.get("style", 0) | wx.DEFAULT_FRAME_STYLE
wx.Frame.__init__(self, *args, **kwds)
self.SetSize((400, 300))
self.SetTitle("frame")
# Menu Bar
self.frame_menubar = wx.MenuBar()
wxglade_tmp_menu = wx.Menu()
self.frame_menubar.item1 = wxglade_tmp_menu.Append(wx.ID_ANY, "My Menu Item 1", "")
self.Bind(wx.EVT_MENU, self.on_menu_item1, self.frame_menubar.item1)
item = wxglade_tmp_menu.Append(wx.ID_ANY, "My Menu Item 1", "without attribute name")
self.Bind(wx.EVT_MENU, self.on_menu_item2, item)
self.frame_menubar.Append(wxglade_tmp_menu, "Menu 1")
self.SetMenuBar(self.frame_menubar)
# Menu Bar end
# Tool Bar
self.frame_toolbar = wx.ToolBar(self, -1)
tool = self.frame_toolbar.AddTool(wx.ID_ANY, "My Tool", wx.Bitmap("..\\..\\icons\\button.png", wx.BITMAP_TYPE_ANY), wx.NullBitmap, wx.ITEM_NORMAL, "", "")
self.Bind(wx.EVT_TOOL, self.on_my_tool, id=tool.GetId())
self.SetToolBar(self.frame_toolbar)
self.frame_toolbar.Realize()
# Tool Bar end
sizer_1 = wx.BoxSizer(wx.VERTICAL)
sizer_1.Add((0, 0), 0, 0, 0)
self.SetSizer(sizer_1)
self.Layout()
# end wxGlade
def on_menu_item1(self, event): # wxGlade: MyFrame.<event_handler>
print("Event handler 'on_menu_item1' not implemented!")
event.Skip()
def on_menu_item2(self, event): # wxGlade: MyFrame.<event_handler>
print("Event handler 'on_menu_item2' not implemented!")
event.Skip()
def on_my_tool(self, event): # wxGlade: MyFrame.<event_handler>
print("Event handler 'on_my_tool' not implemented!")
event.Skip()
# end of class MyFrame
class MyApp(wx.App):
def OnInit(self):
self.frame = MyFrame(None, wx.ID_ANY, "")
self.SetTopWindow(self.frame)
self.frame.Show()
return True
# end of class MyApp
if __name__ == "__main__":
app = MyApp(0)
app.MainLoop()
avg_line_length 29.468354 | max_line_length 162 | alphanum_fraction 0.631014
hexsha fac168f10f3c88436db00c2b99047bdf1a1e8652 | size 7,742 | ext py | lang Python
max_stars: repo chrimaho/MDSI_ADSI_FEB21_AT1 @ 79e854dbe6a3ed5a2eb1b27ae74071a0159f2b44, path src/models/predict_model.py, licenses ["MIT"], count null, events null
max_issues: same repo/head/path, licenses ["MIT"], count null, events null
max_forks: same repo/head/path, licenses ["MIT"], count 1, events 2021-01-28T10:41:21.000Z → 2021-01-28T10:41:21.000Z
content:
import numpy as np
import pandas as pd
from datetime import datetime
# Define reusable function for easy random searching
def easy_random_search \
( estimator
, search_space:dict
, feat_trn:np.real
, targ_trn:np.real
, feat_val:np.real
, targ_val:np.real
, df_metrics:pd.DataFrame
, n_iter:int=100
, cv:int=5
, random_state:int=123
, check_best_params:bool=True
, dump_model:bool=True
, dump_location:str="./models/Chris/"
, dump_name:str=datetime.now().strftime("%Y-%m-%d %H:%M:%S")
, print_all:bool=True
, print_matrix:bool=True
, print_plot:bool=True
, print_df:bool=True
):
"""
Quickly and easily re-run the Random Search algorithm to find the optimal parameters and see the model results.
Args:
estimator (estimator): An estimator to be used for training. Must be instantiated!
search_space (dict) : The search space to be checked. The keys must be valid hyperparameters in the `estimator` object.
feat_trn (np.real) : The features to be used for training.
targ_trn (np.real) : The target values to be used for training.
feat_val (np.real) : The features to be used for validation.
targ_val (np.real) : The target values to be used for validation.
df_metrics (pd.DataFrame) : The data frame to be updated to contain the model metrics.
n_iter (int, optional) : Number of times the Search Space is to be checked. Defaults to 100.
cv (int, optional) : Number of cross-validations to be run per iteration. Defaults to 5.
random_state (int, optional) : The random state to be used for the `cv` splitting. Defaults to 123.
check_best_params (bool, optional) : Whether or not to print the best params from the search space after training. Defaults to True.
dump_model (bool, optional) : Whether or not to dump the model after training. Defaults to True.
dump_location (str, optional) : The location where the model should be dumped to. Defaults to "./models/Chris/".
dump_name (str, optional) : The file name of the model once dumped. Defaults to datetime.now().strftime("%Y-%m-%d %H:%M:%S").
print_all (bool, optional) : Whether or not to print all the results & metrics. Defaults to True.
print_matrix (bool, optional) : Whether or not to print the confusion matrix. Defaults to True.
print_plot (bool, optional) : Whether or not to print the ROC plot. Defaults to True.
print_df (bool, optional) : Whether or not to print the dataframe with the results from all models for all metrics. Defaults to True.
Raises:
        AssertionError: If any parameter fails its type or attribute checks.
Returns:
        estimator: The re-trained model, using the best params from the search space.
        df_metrics: The metrics data frame updated with this model's results.
"""
# Imports
from sklearn.model_selection import RandomizedSearchCV
from src.utils.misc import all_in
from src.utils.performance import TicToc
from src.models.performance import save_reg_perf
import numpy as np
from xgboost.sklearn import XGBModel
from sklearn.metrics import make_scorer, roc_auc_score
import os
from joblib import dump
# Instantiate timer
t = TicToc()
# Assertions
# assert "base_estimator" in estimator.__dict__.keys()
# assert "sklearn" in estimator.__module__.split(".")[0]
assert isinstance(search_space, dict), \
"`search_space` must be type `dict`."
assert all_in(search_space.keys(), estimator.__dict__.keys()), \
"All keys in `search_space` must be valid parameters in `estimator`."
for param in ["feat_trn", "targ_trn", "feat_val", "targ_val"]:
        assert isinstance(eval(param), np.ndarray), \
            f"`{param}` must be type `np.ndarray`."
        assert np.all(np.isreal(eval(param))), \
            f"All elements of `{param}` must be Real numbers."
    assert len(feat_trn)==len(targ_trn), \
        "Length of `feat_trn` must be same as `targ_trn`."
assert len(feat_val)==len(targ_val), \
"Length of `feat_val` must be same as `targ_val`."
for param in ["n_iter", "cv", "random_state"]:
        assert isinstance(eval(param), int), \
            f"`{param}` must be type `int`."
        assert eval(param) > 0, \
            f"`{param}` must be a positive integer."
for param in ["check_best_params", "dump_model", "print_all", "print_matrix", "print_plot", "print_df"]:
        assert isinstance(eval(param), bool), \
            f"`{param}` must be type `bool`."
for param in ["dump_location", "dump_name"]:
        assert isinstance(eval(param), str), \
            f"`{param}` must be type `str`."
    assert os.path.isdir(dump_location), \
        "`dump_location` must be a valid directory."
# Instantiate trainer
clf = RandomizedSearchCV \
( estimator=estimator
, param_distributions=search_space
, n_iter=n_iter
, scoring={"auc": make_scorer(roc_auc_score, needs_proba=True)}
, cv=cv
, refit="auc"
, random_state=random_state
, return_train_score=True
)
# Search for results
t.tic()
if isinstance(estimator, XGBModel):
res = clf.fit(feat_trn, targ_trn, eval_metric="auc")
else:
res = clf.fit(feat_trn, targ_trn)
t.toc()
# Check best params
if check_best_params:
print("Best score: {}".format(res.best_score_))
print("Best params: {}".format(res.best_params_))
# Update params
estimator = estimator.set_params(**res.best_params_)
# Refit
if isinstance(estimator, XGBModel):
estimator.fit(feat_trn, targ_trn, eval_metric="auc")
else:
estimator.fit(feat_trn, targ_trn)
# Predict
pred_trn = estimator.predict(feat_trn)
pred_prob_trn = estimator.predict_proba(feat_trn)
pred_val = estimator.predict(feat_val)
pred_prob_val = estimator.predict_proba(feat_val)
# Check performance
df_metrics = save_reg_perf \
( targ=targ_trn
, pred=pred_trn
, pred_prob=pred_prob_trn
, df_metrics=df_metrics
, name=dump_name+" - within bag"
, print_all=False
, print_matrix=print_matrix
, print_plot=print_plot
, print_df=print_df
)
df_metrics = save_reg_perf \
( targ=targ_val
, pred=pred_val
, pred_prob=pred_prob_val
, df_metrics=df_metrics
, name=dump_name+" - out of bag"
, print_all=print_all
, print_matrix=print_matrix
, print_plot=print_plot
, print_df=print_df
)
# Backup
if dump_model:
dump(estimator, dump_location+dump_name+".joblib")
# Return
return estimator, df_metrics
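# Usage sketch (illustrative names; assumes pre-split numpy arrays and an
# existing metrics DataFrame, per the docstring above):
#   from sklearn.ensemble import RandomForestClassifier
#   model, df_metrics = easy_random_search(
#       RandomForestClassifier(),
#       {"n_estimators": [100, 200, 500], "max_depth": [3, 5, None]},
#       feat_trn, targ_trn, feat_val, targ_val, df_metrics,
#       n_iter=20,
#   )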
def fit_predict_print(classifier, name:str, feat_trn, targ_trn, feat_val, targ_val, print_roc:bool=True):
# Imports
from src.utils import assertions as a
from sklearn.metrics import roc_auc_score
# Assertions
assert a.all_str(name)
assert a.all_dataframe_or_series_or_ndarray([feat_trn, targ_trn, feat_val, targ_val])
assert a.all_bool(print_roc)
# Fit classifier
classifier.fit(feat_trn, targ_trn)
# Get predictions
pred_trn = classifier.predict(feat_trn)
pred_val = classifier.predict(feat_val)
prob_val = classifier.predict_proba(feat_val)[:,1]
# Get score
scor_auc = roc_auc_score(targ_val, prob_val)
if print_roc:
print("\nROC:\t", scor_auc, "\n")
return pred_trn, pred_val, prob_val
avg_line_length 39.907216 | max_line_length 150 | alphanum_fraction 0.64557
hexsha 4c38e0580ebfaa9fe1d5519a655cbd8d2e9f48c1 | size 5,666 | ext py | lang Python
max_stars: repo whitepages/horizon @ 47e5d8528d4e0ba22de29a23f675a8c27025130b, path openstack_dashboard/dashboards/project/volumes/snapshots/tables.py, licenses ["Apache-2.0"], count null, events null
max_issues: same repo/head/path, licenses ["Apache-2.0"], count 1, events 2021-03-21T11:48:09.000Z → 2021-03-21T11:48:09.000Z
max_forks: repo isabella232/horizon-2 @ 47e5d8528d4e0ba22de29a23f675a8c27025130b, same path, licenses ["Apache-2.0"], count 1, events 2021-03-21T11:36:49.000Z → 2021-03-21T11:36:49.000Z
content:
# Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.core.urlresolvers import reverse
from django.utils import html
from django.utils.http import urlencode
from django.utils import safestring
from django.utils.translation import ugettext_lazy as _
from django.utils.translation import ungettext_lazy
from horizon import tables
from openstack_dashboard import api
from openstack_dashboard.api import base
from openstack_dashboard.api import cinder
from openstack_dashboard import policy
from openstack_dashboard.dashboards.project.volumes \
.volumes import tables as volume_tables
class LaunchSnapshot(volume_tables.LaunchVolume):
name = "launch_snapshot"
def get_link_url(self, datum):
base_url = reverse(self.url)
vol_id = "%s:snap" % self.table.get_object_id(datum)
params = urlencode({"source_type": "volume_snapshot_id",
"source_id": vol_id})
return "?".join([base_url, params])
def allowed(self, request, snapshot=None):
if snapshot:
if (snapshot._volume and
getattr(snapshot._volume, 'bootable', '') == 'true'):
return snapshot.status == "available"
return False
class DeleteVolumeSnapshot(policy.PolicyTargetMixin, tables.DeleteAction):
@staticmethod
def action_present(count):
return ungettext_lazy(
u"Delete Volume Snapshot",
u"Delete Volume Snapshots",
count
)
@staticmethod
def action_past(count):
return ungettext_lazy(
u"Scheduled deletion of Volume Snapshot",
u"Scheduled deletion of Volume Snapshots",
count
)
policy_rules = (("volume", "volume:delete_snapshot"),)
policy_target_attrs = (("project_id",
'os-extended-snapshot-attributes:project_id'),)
def delete(self, request, obj_id):
api.cinder.volume_snapshot_delete(request, obj_id)
class EditVolumeSnapshot(policy.PolicyTargetMixin, tables.LinkAction):
name = "edit"
verbose_name = _("Edit Snapshot")
url = "horizon:project:volumes:snapshots:update"
classes = ("ajax-modal",)
icon = "pencil"
policy_rules = (("volume", "volume:update_snapshot"),)
policy_target_attrs = (("project_id",
'os-extended-snapshot-attributes:project_id'),)
def allowed(self, request, snapshot=None):
return snapshot.status == "available"
class CreateVolumeFromSnapshot(tables.LinkAction):
name = "create_from_snapshot"
verbose_name = _("Create Volume")
url = "horizon:project:volumes:volumes:create"
classes = ("ajax-modal",)
icon = "camera"
policy_rules = (("volume", "volume:create"),)
def get_link_url(self, datum):
base_url = reverse(self.url)
params = urlencode({"snapshot_id": self.table.get_object_id(datum)})
return "?".join([base_url, params])
def allowed(self, request, volume=None):
if volume and base.is_service_enabled(request, 'volume'):
return volume.status == "available"
return False
class UpdateRow(tables.Row):
ajax = True
def get_data(self, request, snapshot_id):
snapshot = cinder.volume_snapshot_get(request, snapshot_id)
snapshot._volume = cinder.volume_get(request, snapshot.volume_id)
return snapshot
class SnapshotVolumeNameColumn(tables.Column):
def get_raw_data(self, snapshot):
volume = snapshot._volume
if volume:
volume_name = volume.name
volume_name = html.escape(volume_name)
else:
volume_name = _("Unknown")
return safestring.mark_safe(volume_name)
def get_link_url(self, snapshot):
volume = snapshot._volume
if volume:
volume_id = volume.id
return reverse(self.link, args=(volume_id,))
class VolumeSnapshotsFilterAction(tables.FilterAction):
def filter(self, table, snapshots, filter_string):
"""Naive case-insensitive search."""
query = filter_string.lower()
return [snapshot for snapshot in snapshots
if query in snapshot.name.lower()]
class VolumeSnapshotsTable(volume_tables.VolumesTableBase):
name = tables.Column("name",
verbose_name=_("Name"),
link="horizon:project:volumes:snapshots:detail")
volume_name = SnapshotVolumeNameColumn(
"name",
verbose_name=_("Volume Name"),
link="horizon:project:volumes:volumes:detail")
class Meta(object):
name = "volume_snapshots"
verbose_name = _("Volume Snapshots")
pagination_param = 'snapshot_marker'
prev_pagination_param = 'prev_snapshot_marker'
table_actions = (VolumeSnapshotsFilterAction, DeleteVolumeSnapshot,)
row_actions = (CreateVolumeFromSnapshot, LaunchSnapshot,
EditVolumeSnapshot, DeleteVolumeSnapshot)
row_class = UpdateRow
status_columns = ("status",)
permissions = ['openstack.services.volume']
avg_line_length 34.339394 | max_line_length 78 | alphanum_fraction 0.667314
hexsha 061dfe3a1c52800102fb0b2a669aecfe65ebd6bb | size 4,842 | ext py | lang Python
max_stars: repo alfonsoeromero/S2F @ fccb741b15acfdeb02ca0de411eb4b00ae73be85, path graphs/__init__.py, licenses ["MIT"], count 9, events 2019-10-24T18:46:46.000Z → 2022-03-23T13:21:45.000Z
max_issues: same repo/head/path, licenses ["MIT"], count 5, events 2022-01-26T18:00:01.000Z → 2022-02-08T14:09:42.000Z
max_forks: same repo/head/path, licenses ["MIT"], count 2, events 2022-01-27T12:52:32.000Z → 2022-01-29T12:08:26.000Z
content:
import abc
import numpy as np
import pandas as pd
from scipy import sparse
from itertools import combinations
from Utils import FancyApp
class Graph(FancyApp.FancyApp):
__metaclass__ = abc.ABCMeta
@abc.abstractmethod
def compute_graph(self):
"""
Computes protein-protein graph(s)
"""
@abc.abstractmethod
def get_graph(self, **kwargs):
"""
Return the computed graph
:return: scipy matrix or dictionary of scipy matrices
"""
@abc.abstractmethod
def write_graph(self, filename):
"""
Saves the computed graph in text format
:param filename: the path to write the graph
"""
@staticmethod
def assert_lexicographical_order(df, p1='Protein 1', p2='Protein 2'):
"""
Guarantees that lexicographical order is maintained in the
        dataframe so that df[p1] < df[p2]
:param df: The dataframe to modify
:param p1: the name of the min column
:param p2: the name of the max column
:return: None
"""
# 3.- we guarantee the lexicographical order between
# the protein columns, that is,
# that df_col.protein1 < df_col.protein2
min_protein = df[[p1, p2]].min(axis=1)
max_protein = df[[p1, p2]].max(axis=1)
df.loc[:, p1] = min_protein
df.loc[:, p2] = max_protein
@staticmethod
def ij2triangular(rows, cols, n):
"""
Transforms the rows and columns coordinates to a 1 dimensional index
which corresponds to the upper triangle of a n*n matrix.
        This mapping is consistent only for coordinates above the main diagonal.
taken from https://stackoverflow.com/q/27086195/943138
:param rows: np.array of the rows
:param cols: np.array of the columns
:param n: number of nodes in the matrix
:return: np.array indexing in 1 dimension
"""
return (n*(n-1)/2) - (n-rows)*((n-rows)-1)/2 + cols - rows - 1
# return ((cols-1)*n + rows) - ( n*(n-1)/2 - (n-cols)*(n-cols+1)/2 )
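        # Worked example (n = 4, row-major upper triangle):
        #   (0,1)->0, (0,2)->1, (0,3)->2, (1,2)->3, (1,3)->4, (2,3)->5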
@staticmethod
def triangular2ij(indices, n):
"""
Inverse of `ij2triangular`
:param indices: np.array of indices
:param n: number of nodes in the matrix
:return: rows, cols in 2D coordinates
"""
rows = n - 2 - np.floor(np.sqrt(-8 * indices + 4 * n * (n - 1) - 7) /
2.0 - 0.5)
cols = indices + rows + 1 - n * (n - 1) / 2 +\
(n - rows) * ((n - rows) - 1) / 2
return rows, cols
@staticmethod
def to_sparse_vector(x):
"""
Given a (N, N) sparse matrix in COO format, returns a sparse matrix
of dimensions (N*(N-1)/2, 1), which keeps only the upper triangle
above the main diagonal.
This function requires that x is the output of scipy.sparse.triu(m, 1)
:param x: scipy.sparse.coo_matrix with shape (N, N)
:return: scipy.sparse.coo_matrix with shape (N*(N-1)/2, 1)
"""
n = x.shape[0]
cols = np.zeros(x.data.shape[0])
rows = Graph.ij2triangular(x.row, x.col, n)
return sparse.coo_matrix((x.data, (rows, cols)),
shape=(int(n*(n-1)/2), 1))
@staticmethod
def to_sparse_matrix(x):
"""
The inverse of `to_sparse_vector`
:param x: scipy.sparse.coo_matrix with shape (N*(N-1)/2, 1)
:return: scipy.sparse.coo_matrix with shape (N, N)
"""
n = int((1+np.sqrt(8*x.shape[0] + 1))/2)
rows, cols = Graph.triangular2ij(x.row, n)
return sparse.coo_matrix((x.data, (rows, cols)),
shape=(n, n))
@staticmethod
def fill_lower_triangle(x):
"""
Given a sparse matrix with only the upper triangle, fill the
lower triangle and return.
:param x: scipy.sparse.coo_matrix with shape (N, N)
:return: scipy.sparse.coo_matrix with shape (N, N)
"""
return sparse.coo_matrix(
sparse.triu(x, 1) + sparse.triu(x, 1).T
)
@staticmethod
def numpy_to_pandas(adjacency, proteins):
"""transform a numpy array into a pandas DataFrame
Parameters
----------
adjacency : np.ndarray
adjacency matrix
proteins : List[str]
the names of the nodes, it must correspond to the adjacency matrix
Returns
-------
pd.DataFrame
the PPI in pandas format
"""
data = {'p1': [], 'p2': [], 'w': []}
for p1, p2 in combinations(range(len(proteins)), 2):
data['p1'].append(proteins[p1])
data['p2'].append(proteins[p2])
data['w'].append(adjacency[p1, p2])
return pd.DataFrame(data)
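# Round-trip sketch (illustrative; `m` is any (N, N) scipy sparse matrix):
#   upper = sparse.triu(m, 1).tocoo()
#   vec = Graph.to_sparse_vector(upper)   # shape (N*(N-1)/2, 1)
#   back = Graph.to_sparse_matrix(vec)    # shape (N, N), upper triangle only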
avg_line_length 32.496644 | max_line_length 79 | alphanum_fraction 0.566708
hexsha 555d99c23e42b6b405db4c7df19a89a699dc8814 | size 9,725 | ext py | lang Python
max_stars: repo lcirrott/Kratos @ 8406e73e0ad214c4f89df4e75e9b29d0eb4a47ea, path applications/ParticleMechanicsApplication/tests/test_generate_mpm_particle.py, licenses ["BSD-4-Clause"], count 2, events 2019-10-25T09:28:10.000Z → 2019-11-21T12:51:46.000Z
max_issues: same repo/head/path, licenses ["BSD-4-Clause"], count 13, events 2019-10-07T12:06:51.000Z → 2020-02-18T08:48:33.000Z
max_forks: same repo/head/path, licenses ["BSD-4-Clause"], count null, events null
content:
from __future__ import print_function, absolute_import, division
import KratosMultiphysics
import KratosMultiphysics.ParticleMechanicsApplication as KratosParticle
import KratosMultiphysics.KratosUnittest as KratosUnittest
class TestGenerateMPMParticle(KratosUnittest.TestCase):
def _generate_particle_element_and_check(self, current_model, dimension, geometry_element, num_particle, expected_num_particle):
KratosMultiphysics.Logger.GetDefaultOutput().SetSeverity(KratosMultiphysics.Logger.Severity.WARNING)
# Initialize model part
## Material model part definition
material_point_model_part = current_model.CreateModelPart("dummy_name")
material_point_model_part.ProcessInfo.SetValue(KratosMultiphysics.DOMAIN_SIZE, dimension)
## Initial material model part definition
initial_mesh_model_part = current_model.CreateModelPart("Initial_dummy_name")
initial_mesh_model_part.ProcessInfo.SetValue(KratosMultiphysics.DOMAIN_SIZE, dimension)
## Grid model part definition
grid_model_part = current_model.CreateModelPart("Background_Grid")
grid_model_part.ProcessInfo.SetValue(KratosMultiphysics.DOMAIN_SIZE, dimension)
# Create element and nodes for background grids
sub_background = grid_model_part.CreateSubModelPart("test_background")
self._create_nodes(sub_background, dimension, geometry_element)
self._create_elements(sub_background,dimension, geometry_element)
# Create element and nodes for initial meshes
sub_mp = initial_mesh_model_part.CreateSubModelPart("test")
sub_mp.GetProperties()[1].SetValue(KratosParticle.PARTICLES_PER_ELEMENT, num_particle)
self._create_nodes(sub_mp, dimension, geometry_element)
self._create_elements(sub_mp,dimension, geometry_element)
# Generate MP Elements
KratosParticle.GenerateMaterialPointElement(grid_model_part, initial_mesh_model_part, material_point_model_part, False, False)
# Check total number of element
particle_counter = material_point_model_part.NumberOfElements()
self.assertEqual(expected_num_particle,particle_counter)
def _create_nodes(self, initial_mp, dimension, geometry_element):
if geometry_element == "Triangle":
initial_mp.CreateNewNode(1, 0.0, 0.0, 0.0)
initial_mp.CreateNewNode(2, 1.0, 0.0, 0.0)
initial_mp.CreateNewNode(3, 0.0, 1.0, 0.0)
if (dimension == 3):
initial_mp.CreateNewNode(4, 0.0, 0.0, 1.0)
elif geometry_element == "Quadrilateral":
initial_mp.CreateNewNode(1, -0.5, -0.5, 0.0)
initial_mp.CreateNewNode(2, 0.5, -0.5, 0.0)
initial_mp.CreateNewNode(3, 0.5, 0.5, 0.0)
initial_mp.CreateNewNode(4, -0.5, 0.5, 0.0)
if (dimension == 3):
initial_mp.CreateNewNode(5, -0.5, -0.5, 1.0)
initial_mp.CreateNewNode(6, 0.5, -0.5, 1.0)
initial_mp.CreateNewNode(7, 0.5, 0.5, 1.0)
initial_mp.CreateNewNode(8, -0.5, 0.5, 1.0)
def _create_elements(self, initial_mp, dimension, geometry_element):
if geometry_element == "Triangle":
if (dimension == 2):
initial_mp.CreateNewElement("UpdatedLagrangian2D3N", 1, [1,2,3], initial_mp.GetProperties()[1])
if (dimension == 3):
initial_mp.CreateNewElement("UpdatedLagrangian3D4N", 1, [1,2,3,4], initial_mp.GetProperties()[1])
elif geometry_element == "Quadrilateral":
if (dimension == 2):
initial_mp.CreateNewElement("UpdatedLagrangian2D4N", 1, [1,2,3,4], initial_mp.GetProperties()[1])
if (dimension == 3):
initial_mp.CreateNewElement("UpdatedLagrangian3D8N", 1, [1,2,3,4,5,6,7,8], initial_mp.GetProperties()[1])
KratosMultiphysics.VariableUtils().SetFlag(KratosMultiphysics.ACTIVE, True, initial_mp.Elements)
def test_GenerateMPMParticleTriangle2D1P(self):
current_model = KratosMultiphysics.Model()
self._generate_particle_element_and_check(current_model, dimension=2, geometry_element="Triangle", num_particle=1, expected_num_particle=1)
def test_GenerateMPMParticleTriangle2D3P(self):
current_model = KratosMultiphysics.Model()
self._generate_particle_element_and_check(current_model, dimension=2, geometry_element="Triangle", num_particle=3, expected_num_particle=3)
def test_GenerateMPMParticleTriangle2D6P(self):
current_model = KratosMultiphysics.Model()
self._generate_particle_element_and_check(current_model, dimension=2, geometry_element="Triangle", num_particle=6, expected_num_particle=6)
def test_GenerateMPMParticleTriangle2D12P(self):
current_model = KratosMultiphysics.Model()
self._generate_particle_element_and_check(current_model, dimension=2, geometry_element="Triangle", num_particle=12, expected_num_particle=12)
def test_GenerateMPMParticleTriangle2D16P(self):
current_model = KratosMultiphysics.Model()
self._generate_particle_element_and_check(current_model, dimension=2, geometry_element="Triangle", num_particle=16, expected_num_particle=16)
def test_GenerateMPMParticleTriangle2D33P(self):
current_model = KratosMultiphysics.Model()
self._generate_particle_element_and_check(current_model, dimension=2, geometry_element="Triangle", num_particle=33, expected_num_particle=33)
def test_GenerateMPMParticleTriangle2DDefault(self):
current_model = KratosMultiphysics.Model()
self._generate_particle_element_and_check(current_model, dimension=2, geometry_element="Triangle", num_particle=50, expected_num_particle=3)
def test_GenerateMPMParticleTriangle3D1P(self):
current_model = KratosMultiphysics.Model()
self._generate_particle_element_and_check(current_model, dimension=3, geometry_element="Triangle", num_particle=1, expected_num_particle=1)
def test_GenerateMPMParticleTriangle3D4P(self):
current_model = KratosMultiphysics.Model()
self._generate_particle_element_and_check(current_model, dimension=3, geometry_element="Triangle", num_particle=3, expected_num_particle=4)
def test_GenerateMPMParticleTriangle3D14P(self):
current_model = KratosMultiphysics.Model()
self._generate_particle_element_and_check(current_model, dimension=3, geometry_element="Triangle", num_particle=6, expected_num_particle=14)
def test_GenerateMPMParticleTriangle3D24P(self):
current_model = KratosMultiphysics.Model()
self._generate_particle_element_and_check(current_model, dimension=3, geometry_element="Triangle", num_particle=12, expected_num_particle=24)
def test_GenerateMPMParticleTriangle3DDefault(self):
current_model = KratosMultiphysics.Model()
self._generate_particle_element_and_check(current_model, dimension=3, geometry_element="Triangle", num_particle=50, expected_num_particle=4)
def test_GenerateMPMParticleQuadrilateral2D1P(self):
current_model = KratosMultiphysics.Model()
self._generate_particle_element_and_check(current_model, dimension=2, geometry_element="Quadrilateral", num_particle=1, expected_num_particle=1)
def test_GenerateMPMParticleQuadrilateral2D4P(self):
current_model = KratosMultiphysics.Model()
self._generate_particle_element_and_check(current_model, dimension=2, geometry_element="Quadrilateral", num_particle=4, expected_num_particle=4)
def test_GenerateMPMParticleQuadrilateral2D9P(self):
current_model = KratosMultiphysics.Model()
self._generate_particle_element_and_check(current_model, dimension=2, geometry_element="Quadrilateral", num_particle=9, expected_num_particle=9)
def test_GenerateMPMParticleQuadrilateral2D16P(self):
current_model = KratosMultiphysics.Model()
self._generate_particle_element_and_check(current_model, dimension=2, geometry_element="Quadrilateral", num_particle=16, expected_num_particle=16)
def test_GenerateMPMParticleQuadrilateral2DDefault(self):
current_model = KratosMultiphysics.Model()
self._generate_particle_element_and_check(current_model, dimension=2, geometry_element="Quadrilateral", num_particle=50, expected_num_particle=4)
def test_GenerateMPMParticleQuadrilateral3D1P(self):
current_model = KratosMultiphysics.Model()
self._generate_particle_element_and_check(current_model, dimension=3, geometry_element="Quadrilateral", num_particle=1, expected_num_particle=1)
def test_GenerateMPMParticleQuadrilateral3D8P(self):
current_model = KratosMultiphysics.Model()
self._generate_particle_element_and_check(current_model, dimension=3, geometry_element="Quadrilateral", num_particle=4, expected_num_particle=8)
def test_GenerateMPMParticleQuadrilateral3D27P(self):
current_model = KratosMultiphysics.Model()
self._generate_particle_element_and_check(current_model, dimension=3, geometry_element="Quadrilateral", num_particle=9, expected_num_particle=27)
def test_GenerateMPMParticleQuadrilateral3D64P(self):
current_model = KratosMultiphysics.Model()
self._generate_particle_element_and_check(current_model, dimension=3, geometry_element="Quadrilateral", num_particle=16, expected_num_particle=64)
def test_GenerateMPMParticleQuadrilateral3DDefault(self):
current_model = KratosMultiphysics.Model()
self._generate_particle_element_and_check(current_model, dimension=3, geometry_element="Quadrilateral", num_particle=50, expected_num_particle=8)
if __name__ == '__main__':
KratosUnittest.main()
avg_line_length 58.584337 | max_line_length 154 | alphanum_fraction 0.759589
hexsha a5ee1638defc9a4dcfdbd3d4b6d117c0ba5afdc4 | size 3,236 | ext py | lang Python
max_stars: repo Kozphy/django_api_learn @ 9cdbabcdc26438af09ccbd241c0cf9bfaff4138a, path profiles_project/settings.py, licenses ["MIT"], count null, events null
max_issues: same repo/head/path, licenses ["MIT"], count null, events null
max_forks: same repo/head/path, licenses ["MIT"], count null, events null
content:
"""
Django settings for profiles_project project.
Generated by 'django-admin startproject' using Django 2.2.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.2/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.2/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'fzdc8b7572ck2m!7k8q0_c4tzn9aem3z8c1t=e0$kh@dr*xrso'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'rest_framework',
'rest_framework.authtoken',
'profiles_api',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'profiles_project.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'profiles_project.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.2/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/2.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/2.2/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.2/howto/static-files/
STATIC_URL = '/static/'
AUTH_USER_MODEL = 'profiles_api.UserProfile'
avg_line_length 25.68254 | max_line_length 91 | alphanum_fraction 0.702101
hexsha 9c6992441d5e22c85fcc8d4423a286f1e9360d19 | size 11,763 | ext py | lang Python
max_stars: repo ZoeyCheung/Bringing-Old-Photos-Back-to-Life @ 56032e9edbad0ab4b33feb6901b9011f5d813528, path Global/detection_models/networks.py, licenses ["MIT"], count null, events null
max_issues: same repo/head/path, licenses ["MIT"], count null, events null
max_forks: same repo/head/path, licenses ["MIT"], count null, events null
content:
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
import torch
import torch.nn as nn
import torch.nn.functional as F
from detection_models.sync_batchnorm.replicate import DataParallelWithCallback
from detection_models.antialiasing import Downsample
class UNet(nn.Module):
def __init__(
self,
in_channels=3,
out_channels=3,
depth=5,
conv_num=2,
wf=6,
padding=True,
batch_norm=True,
up_mode="upsample",
with_tanh=False,
sync_bn=True,
antialiasing=True,
):
"""
Implementation of
U-Net: Convolutional Networks for Biomedical Image Segmentation
(Ronneberger et al., 2015)
https://arxiv.org/abs/1505.04597
Using the default arguments will yield the exact version used
in the original paper
Args:
in_channels (int): number of input channels
out_channels (int): number of output channels
depth (int): depth of the network
wf (int): number of filters in the first layer is 2**wf
padding (bool): if True, apply padding such that the input shape
is the same as the output.
This may introduce artifacts
batch_norm (bool): Use BatchNorm after layers with an
activation function
up_mode (str): one of 'upconv' or 'upsample'.
'upconv' will use transposed convolutions for
learned upsampling.
'upsample' will use bilinear upsampling.
"""
super().__init__()
assert up_mode in ("upconv", "upsample")
self.padding = padding
self.depth = depth - 1
prev_channels = in_channels
self.first = nn.Sequential(
*[nn.ReflectionPad2d(3), nn.Conv2d(in_channels, 2 ** wf, kernel_size=7), nn.LeakyReLU(0.2, True)]
)
prev_channels = 2 ** wf
self.down_path = nn.ModuleList()
self.down_sample = nn.ModuleList()
for i in range(depth):
if antialiasing and depth > 0:
self.down_sample.append(
nn.Sequential(
*[
nn.ReflectionPad2d(1),
nn.Conv2d(prev_channels, prev_channels, kernel_size=3, stride=1, padding=0),
nn.BatchNorm2d(prev_channels),
nn.LeakyReLU(0.2, True),
Downsample(channels=prev_channels, stride=2),
]
)
)
else:
self.down_sample.append(
nn.Sequential(
*[
nn.ReflectionPad2d(1),
nn.Conv2d(prev_channels, prev_channels, kernel_size=4, stride=2, padding=0),
nn.BatchNorm2d(prev_channels),
nn.LeakyReLU(0.2, True),
]
)
)
self.down_path.append(
UNetConvBlock(conv_num, prev_channels, 2 ** (wf + i + 1), padding, batch_norm)
)
prev_channels = 2 ** (wf + i + 1)
self.up_path = nn.ModuleList()
for i in reversed(range(depth)):
self.up_path.append(
UNetUpBlock(conv_num, prev_channels, 2 ** (wf + i), up_mode, padding, batch_norm)
)
prev_channels = 2 ** (wf + i)
if with_tanh:
self.last = nn.Sequential(
*[nn.ReflectionPad2d(1), nn.Conv2d(prev_channels, out_channels, kernel_size=3), nn.Tanh()]
)
else:
self.last = nn.Sequential(
*[nn.ReflectionPad2d(1), nn.Conv2d(prev_channels, out_channels, kernel_size=3)]
)
if sync_bn:
self = DataParallelWithCallback(self)
def forward(self, x):
x = self.first(x)
blocks = []
for i, down_block in enumerate(self.down_path):
blocks.append(x)
x = self.down_sample[i](x)
x = down_block(x)
for i, up in enumerate(self.up_path):
x = up(x, blocks[-i - 1])
return self.last(x)
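# Instantiation sketch (argument values are illustrative):
#   net = UNet(in_channels=3, out_channels=1, depth=4, conv_num=2, wf=6, sync_bn=False)
#   y = net(torch.randn(1, 3, 256, 256))   # -> shape (1, 1, 256, 256) with padding=True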
class UNetConvBlock(nn.Module):
def __init__(self, conv_num, in_size, out_size, padding, batch_norm):
super(UNetConvBlock, self).__init__()
block = []
for _ in range(conv_num):
block.append(nn.ReflectionPad2d(padding=int(padding)))
block.append(nn.Conv2d(in_size, out_size, kernel_size=3, padding=0))
if batch_norm:
block.append(nn.BatchNorm2d(out_size))
block.append(nn.LeakyReLU(0.2, True))
in_size = out_size
self.block = nn.Sequential(*block)
def forward(self, x):
out = self.block(x)
return out
class UNetUpBlock(nn.Module):
def __init__(self, conv_num, in_size, out_size, up_mode, padding, batch_norm):
super(UNetUpBlock, self).__init__()
if up_mode == "upconv":
self.up = nn.ConvTranspose2d(in_size, out_size, kernel_size=2, stride=2)
elif up_mode == "upsample":
self.up = nn.Sequential(
nn.Upsample(mode="bilinear", scale_factor=2, align_corners=False),
nn.ReflectionPad2d(1),
nn.Conv2d(in_size, out_size, kernel_size=3, padding=0),
)
self.conv_block = UNetConvBlock(conv_num, in_size, out_size, padding, batch_norm)
def center_crop(self, layer, target_size):
_, _, layer_height, layer_width = layer.size()
diff_y = (layer_height - target_size[0]) // 2
diff_x = (layer_width - target_size[1]) // 2
return layer[:, :, diff_y : (diff_y + target_size[0]), diff_x : (diff_x + target_size[1])]
def forward(self, x, bridge):
up = self.up(x)
crop1 = self.center_crop(bridge, up.shape[2:])
out = torch.cat([up, crop1], 1)
out = self.conv_block(out)
return out
class UnetGenerator(nn.Module):
"""Create a Unet-based generator"""
def __init__(self, input_nc, output_nc, num_downs, ngf=64, norm_type="BN", use_dropout=False):
"""Construct a Unet generator
Parameters:
input_nc (int) -- the number of channels in input images
output_nc (int) -- the number of channels in output images
            num_downs (int) -- the number of downsamplings in UNet. For example, if |num_downs| == 7,
                               an image of size 128x128 will become of size 1x1 at the bottleneck
ngf (int) -- the number of filters in the last conv layer
norm_layer -- normalization layer
We construct the U-Net from the innermost layer to the outermost layer.
It is a recursive process.
"""
super().__init__()
if norm_type == "BN":
norm_layer = nn.BatchNorm2d
elif norm_type == "IN":
norm_layer = nn.InstanceNorm2d
else:
raise NameError("Unknown norm layer")
# construct unet structure
unet_block = UnetSkipConnectionBlock(
ngf * 8, ngf * 8, input_nc=None, submodule=None, norm_layer=norm_layer, innermost=True
) # add the innermost layer
for i in range(num_downs - 5): # add intermediate layers with ngf * 8 filters
unet_block = UnetSkipConnectionBlock(
ngf * 8,
ngf * 8,
input_nc=None,
submodule=unet_block,
norm_layer=norm_layer,
use_dropout=use_dropout,
)
# gradually reduce the number of filters from ngf * 8 to ngf
unet_block = UnetSkipConnectionBlock(
ngf * 4, ngf * 8, input_nc=None, submodule=unet_block, norm_layer=norm_layer
)
unet_block = UnetSkipConnectionBlock(
ngf * 2, ngf * 4, input_nc=None, submodule=unet_block, norm_layer=norm_layer
)
unet_block = UnetSkipConnectionBlock(
ngf, ngf * 2, input_nc=None, submodule=unet_block, norm_layer=norm_layer
)
self.model = UnetSkipConnectionBlock(
output_nc, ngf, input_nc=input_nc, submodule=unet_block, outermost=True, norm_layer=norm_layer
) # add the outermost layer
def forward(self, input):
return self.model(input)
class UnetSkipConnectionBlock(nn.Module):
"""Defines the Unet submodule with skip connection.
-------------------identity----------------------
|-- downsampling -- |submodule| -- upsampling --|
"""
def __init__(
self,
outer_nc,
inner_nc,
input_nc=None,
submodule=None,
outermost=False,
innermost=False,
norm_layer=nn.BatchNorm2d,
use_dropout=False,
):
"""Construct a Unet submodule with skip connections.
Parameters:
outer_nc (int) -- the number of filters in the outer conv layer
inner_nc (int) -- the number of filters in the inner conv layer
input_nc (int) -- the number of channels in input images/features
submodule (UnetSkipConnectionBlock) -- previously defined submodules
outermost (bool) -- if this module is the outermost module
innermost (bool) -- if this module is the innermost module
norm_layer -- normalization layer
            use_dropout (bool) -- whether to use dropout layers.
"""
super().__init__()
self.outermost = outermost
use_bias = norm_layer == nn.InstanceNorm2d
if input_nc is None:
input_nc = outer_nc
downconv = nn.Conv2d(input_nc, inner_nc, kernel_size=4, stride=2, padding=1, bias=use_bias)
downrelu = nn.LeakyReLU(0.2, True)
downnorm = norm_layer(inner_nc)
uprelu = nn.LeakyReLU(0.2, True)
upnorm = norm_layer(outer_nc)
if outermost:
upconv = nn.ConvTranspose2d(inner_nc * 2, outer_nc, kernel_size=4, stride=2, padding=1)
down = [downconv]
up = [uprelu, upconv, nn.Tanh()]
model = down + [submodule] + up
elif innermost:
upconv = nn.ConvTranspose2d(inner_nc, outer_nc, kernel_size=4, stride=2, padding=1, bias=use_bias)
down = [downrelu, downconv]
up = [uprelu, upconv, upnorm]
model = down + up
else:
upconv = nn.ConvTranspose2d(
inner_nc * 2, outer_nc, kernel_size=4, stride=2, padding=1, bias=use_bias
)
down = [downrelu, downconv, downnorm]
up = [uprelu, upconv, upnorm]
if use_dropout:
model = down + [submodule] + up + [nn.Dropout(0.5)]
else:
model = down + [submodule] + up
self.model = nn.Sequential(*model)
def forward(self, x):
if self.outermost:
return self.model(x)
else: # add skip connections
return torch.cat([x, self.model(x)], 1)
# ============================================
# Network testing
# ============================================
if __name__ == "__main__":
    from torchsummary import summary
    from torchviz import make_dot  # required for the graph rendering below

    device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
    # The original snippet referenced a `UNet_two_decoders` variant that is not
    # defined in this file; the single-decoder UNet is exercised here instead.
    model = UNet(
        in_channels=3,
        out_channels=3,
        depth=4,
        conv_num=1,
        wf=6,
        padding=True,
        batch_norm=True,
        up_mode="upsample",
        with_tanh=False,
    )
    model.to(device)
    model_pix2pix = UnetGenerator(3, 3, 5, ngf=64, norm_type="BN", use_dropout=False)
    model_pix2pix.to(device)
    print("customized unet:")
    summary(model, (3, 256, 256))
    print("cyclegan unet:")
    summary(model_pix2pix, (3, 256, 256))
    x = torch.zeros(1, 3, 256, 256).requires_grad_(True).to(device)
    g = make_dot(model(x))
    g.render("models/Digraph.gv", view=False)
avg_line_length 35.324324 | max_line_length 110 | alphanum_fraction 0.579104
hexsha 9fbe08aafd7e4c907fc2a1395521228fbbcfcaa5 | size 3,273 | ext py | lang Python
max_stars: repo originlake/web_dev_project @ 1fe75279734fe687fb93173351b3dae6e7de93f7, path sales_info/mysite/settings.py, licenses ["MIT"], count null, events null
max_issues: same repo/head/path, licenses ["MIT"], count null, events null
max_forks: same repo/head/path, licenses ["MIT"], count null, events null
content:
"""
Django settings for mysite project.
Generated by 'django-admin startproject' using Django 2.1.3.
For more information on this file, see
https://docs.djangoproject.com/en/2.1/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.1/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.1/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = ')&&ojaa@&@2%_rn*r3r_-&uxsqab$a2ue#48-30v*hivurd3j@'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = ['*']
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'account',
'items',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'mysite.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': ['templates'],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'mysite.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.1/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/2.1/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/2.1/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'America/Chicago'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.1/howto/static-files/
STATIC_URL = '/static/'
STATICFILES_DIRS = [
os.path.join(BASE_DIR, 'static'),
]
MEDIA_URL = '/media/'
MEDIA_ROOT = os.path.join(BASE_DIR, 'media')
avg_line_length 24.984733 | max_line_length 91 | alphanum_fraction 0.692026
hexsha ccfed67bf66b53cb27891a0e0611552b98750342 | size 7,260 | ext py | lang Python
max_stars: repo XingxinHE/compas @ d2901dbbacdaf4694e5adae78ba8f093f10532bf, path src/compas_plotters/plotter2.py, licenses ["MIT"], count null, events null
max_issues: same repo/head/path, licenses ["MIT"], count null, events null
max_forks: same repo/head/path, licenses ["MIT"], count null, events null
content:
from __future__ import print_function
from __future__ import absolute_import
from __future__ import division
import matplotlib.pyplot as plt
from compas_plotters import Artist
__all__ = ['Plotter2']
class Plotter2(object):
    """A 2D plotter for visualising compas objects with matplotlib."""
def __init__(self, view=None, figsize=(8, 5), **kwargs):
"""Initialises a plotter object"""
self._show_axes = kwargs.get('show_axes', False)
self._bgcolor = None
self._viewbox = None
self._axes = None
self._artists = []
self.viewbox = view
self.figsize = figsize
self.dpi = kwargs.get('dpi', 100)
self.bgcolor = kwargs.get('bgcolor', '#ffffff')
@property
def viewbox(self):
return self._viewbox
@viewbox.setter
def viewbox(self, view):
if not view:
view = ([-10, 10], [-3, 10])
if len(view) != 2:
return
xlim, ylim = view
if len(xlim) != 2:
return
if len(ylim) != 2:
return
self._viewbox = xlim, ylim
@property
def axes(self):
"""Returns the axes subplot matplotlib object.
Returns
-------
Axes
The matplotlib axes object.
Notes
-----
For more info, see the documentation of the Axes class ([1]_) and the
axis and tick API ([2]_).
References
----------
.. [1] https://matplotlib.org/api/axes_api.html
.. [2] https://matplotlib.org/api/axis_api.html
"""
if not self._axes:
figure = plt.figure(facecolor=self.bgcolor,
figsize=self.figsize,
dpi=self.dpi)
axes = figure.add_subplot(111, aspect='equal')
if self.viewbox:
xmin, xmax = self.viewbox[0]
ymin, ymax = self.viewbox[1]
axes.set_xlim(xmin, xmax)
axes.set_ylim(ymin, ymax)
axes.set_xscale('linear')
axes.set_yscale('linear')
axes.grid(False)
if self._show_axes:
axes.set_frame_on(True)
axes.set_xticks([])
axes.set_yticks([])
axes.spines['top'].set_color('none')
axes.spines['right'].set_color('none')
axes.spines['left'].set_position('zero')
axes.spines['bottom'].set_position('zero')
axes.spines['left'].set_linestyle(':')
axes.spines['bottom'].set_linestyle(':')
else:
axes.set_frame_on(False)
axes.set_xticks([])
axes.set_yticks([])
axes.autoscale()
plt.tight_layout()
self._axes = axes
return self._axes
@property
def figure(self):
"""Returns the matplotlib figure instance.
Returns
-------
Figure
The matplotlib figure instance.
Notes
-----
For more info, see the figure API ([1]_).
References
----------
.. [1] https://matplotlib.org/2.0.2/api/figure_api.html
"""
return self.axes.get_figure()
@property
def canvas(self):
"""Returns the canvas of the figure instance.
"""
return self.figure.canvas
@property
def bgcolor(self):
"""Returns the background color.
Returns
-------
str
The color as a string (hex colors).
"""
return self._bgcolor
@bgcolor.setter
def bgcolor(self, value):
"""Sets the background color.
Parameters
----------
value : str, tuple
            The color specification for the figure background.
Colors should be specified in the form of a string (hex colors) or
as a tuple of normalized RGB components.
"""
self._bgcolor = value
self.figure.set_facecolor(value)
@property
def title(self):
"""Returns the title of the plot.
Returns
-------
str
The title of the plot.
"""
return self.figure.canvas.get_window_title()
@title.setter
def title(self, value):
"""Sets the title of the plot.
Parameters
----------
value : str
The title of the plot.
"""
self.figure.canvas.set_window_title(value)
@property
def artists(self):
return self._artists
@artists.setter
def artists(self, artists):
self._artists = artists
# =========================================================================
# Methods
# =========================================================================
def zoom_extents(self):
self.axes.autoscale_view()
def add(self, item, artist=None, **kwargs):
if not artist:
artist = Artist.build(item, **kwargs)
artist.plotter = self
artist.draw()
self._artists.append(artist)
return artist
def add_as(self, item, artist_type, **kwargs):
artist = Artist.build_as(item, artist_type, **kwargs)
artist.plotter = self
artist.draw()
self._artists.append(artist)
return artist
def find(self, item):
raise NotImplementedError
def register_listener(self, listener):
"""Register a listener for pick events.
Parameters
----------
listener : callable
The handler for pick events.
Returns
-------
None
Notes
-----
For more information, see the docs of ``mpl_connect`` ([1]_), and on event
handling and picking ([2]_).
References
----------
.. [1] https://matplotlib.org/api/backend_bases_api.html#matplotlib.backend_bases.FigureCanvasBase.mpl_connect
.. [2] https://matplotlib.org/users/event_handling.html
Examples
--------
.. code-block:: python
#
"""
self.figure.canvas.mpl_connect('pick_event', listener)
def draw(self, pause=None):
self.figure.canvas.draw()
self.figure.canvas.flush_events()
if pause:
plt.pause(pause)
def redraw(self, pause=None):
"""Updates and pauses the plot.
Parameters
----------
pause : float
            Amount of time to pause the plot in seconds.
"""
for artist in self._artists:
artist.redraw()
self.figure.canvas.draw()
self.figure.canvas.flush_events()
if pause:
plt.pause(pause)
def show(self):
"""Displays the plot.
"""
self.draw()
plt.show()
def save(self, filepath, **kwargs):
"""Saves the plot to a file.
Parameters
----------
filepath : str
Full path of the file.
Notes
-----
For an overview of all configuration options, see [1]_.
References
----------
.. [1] https://matplotlib.org/2.0.2/api/pyplot_api.html#matplotlib.pyplot.savefig
"""
plt.savefig(filepath, **kwargs)
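# Usage sketch (illustrative; assumes an item type with a registered Artist):
#   plotter = Plotter2(view=([-10, 10], [-3, 10]))
#   plotter.add(item)
#   plotter.show()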
avg_line_length 25.56338 | max_line_length 118 | alphanum_fraction 0.513912
hexsha 6cf27866fa01c25719f4abf21590bda880800285 | size 13,952 | ext py | lang Python
max_stars: repo NetApp/santricity-webapi-pythonsdk @ 1d3df4a00561192f4cdcdd1890f4d27547ed2de2, path netapp/santricity/models/symbol/nv_meo_f_interface_statistics.py, licenses ["BSD-3-Clause-Clear"], count 5, events 2016-08-23T17:52:22.000Z → 2019-05-16T08:45:30.000Z
max_issues: same repo/head/path, licenses ["BSD-3-Clause-Clear"], count 2, events 2016-11-10T05:30:21.000Z → 2019-04-05T15:03:37.000Z
max_forks: same repo/head/path, licenses ["BSD-3-Clause-Clear"], count 7, events 2016-08-25T16:11:44.000Z → 2021-02-22T05:31:25.000Z
content:
# coding: utf-8
"""
NVMeoFInterfaceStatistics.py
The Clear BSD License
Copyright (c) – 2016, NetApp, Inc. All rights reserved.
Redistribution and use in source and binary forms, with or without modification, are permitted (subject to the limitations in the disclaimer below) provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
* Neither the name of NetApp, Inc. nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.
NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
from pprint import pformat
from six import iteritems
class NVMeoFInterfaceStatistics(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
def __init__(self):
"""
NVMeoFInterfaceStatistics - a model defined in Swagger
:param dict swaggerTypes: The key is attribute name
and the value is attribute type.
:param dict attributeMap: The key is attribute name
and the value is json key in definition.
"""
self.swagger_types = {
'interface_ref': 'str', # (required parameter)
'total_controller_count': 'int', # (required parameter)
'keep_alive_timeouts': 'int', # (required parameter)
'max_io_queues_connect_failures': 'int', # (required parameter)
'max_controller_connect_failures': 'int', # (required parameter)
'nvme_controller_resets': 'int', # (required parameter)
'nvme_controller_shutdowns': 'int', # (required parameter)
'admin_queue_statistics': 'NVMeoFQueueStatistics', # (required parameter)
'io_queue_statistics': 'NVMeoFQueueStatistics'
}
self.attribute_map = {
'interface_ref': 'interfaceRef', # (required parameter)
'total_controller_count': 'totalControllerCount', # (required parameter)
'keep_alive_timeouts': 'keepAliveTimeouts', # (required parameter)
'max_io_queues_connect_failures': 'maxIoQueuesConnectFailures', # (required parameter)
'max_controller_connect_failures': 'maxControllerConnectFailures', # (required parameter)
'nvme_controller_resets': 'nvmeControllerResets', # (required parameter)
'nvme_controller_shutdowns': 'nvmeControllerShutdowns', # (required parameter)
'admin_queue_statistics': 'adminQueueStatistics', # (required parameter)
'io_queue_statistics': 'ioQueueStatistics'
}
self._interface_ref = None
self._total_controller_count = None
self._keep_alive_timeouts = None
self._max_io_queues_connect_failures = None
self._max_controller_connect_failures = None
self._nvme_controller_resets = None
self._nvme_controller_shutdowns = None
self._admin_queue_statistics = None
self._io_queue_statistics = None
@property
def interface_ref(self):
"""
Gets the interface_ref of this NVMeoFInterfaceStatistics.
The reference to the interface object.
:return: The interface_ref of this NVMeoFInterfaceStatistics.
:rtype: str
:required/optional: required
"""
return self._interface_ref
@interface_ref.setter
def interface_ref(self, interface_ref):
"""
Sets the interface_ref of this NVMeoFInterfaceStatistics.
The reference to the interface object.
:param interface_ref: The interface_ref of this NVMeoFInterfaceStatistics.
:type: str
"""
self._interface_ref = interface_ref
@property
def total_controller_count(self):
"""
Gets the total_controller_count of this NVMeoFInterfaceStatistics.
The number of NVMe controllers (i.e., I_T_Nexuses in SCSI terminology) over this interface.
:return: The total_controller_count of this NVMeoFInterfaceStatistics.
:rtype: int
:required/optional: required
"""
return self._total_controller_count
@total_controller_count.setter
def total_controller_count(self, total_controller_count):
"""
Sets the total_controller_count of this NVMeoFInterfaceStatistics.
The number of NVMe controllers (i.e., I_T_Nexuses in SCSI terminology) over this interface.
:param total_controller_count: The total_controller_count of this NVMeoFInterfaceStatistics.
:type: int
"""
self._total_controller_count = total_controller_count
@property
def keep_alive_timeouts(self):
"""
Gets the keep_alive_timeouts of this NVMeoFInterfaceStatistics.
The number of Keep Alive Timeouts that have occurred on this NVMe over Fabrics interface.
:return: The keep_alive_timeouts of this NVMeoFInterfaceStatistics.
:rtype: int
:required/optional: required
"""
return self._keep_alive_timeouts
@keep_alive_timeouts.setter
def keep_alive_timeouts(self, keep_alive_timeouts):
"""
Sets the keep_alive_timeouts of this NVMeoFInterfaceStatistics.
The number of Keep Alive Timeouts that have occurred on this NVMe over Fabrics interface.
:param keep_alive_timeouts: The keep_alive_timeouts of this NVMeoFInterfaceStatistics.
:type: int
"""
self._keep_alive_timeouts = keep_alive_timeouts
@property
def max_io_queues_connect_failures(self):
"""
Gets the max_io_queues_connect_failures of this NVMeoFInterfaceStatistics.
The maximum number of I/O Queue Connect Failures that have occurred on this NVMe over Fabrics interface.
:return: The max_io_queues_connect_failures of this NVMeoFInterfaceStatistics.
:rtype: int
:required/optional: required
"""
return self._max_io_queues_connect_failures
@max_io_queues_connect_failures.setter
def max_io_queues_connect_failures(self, max_io_queues_connect_failures):
"""
Sets the max_io_queues_connect_failures of this NVMeoFInterfaceStatistics.
The maximum number of I/O Queue Connect Failures that have occurred on this NVMe over Fabrics interface.
:param max_io_queues_connect_failures: The max_io_queues_connect_failures of this NVMeoFInterfaceStatistics.
:type: int
"""
self._max_io_queues_connect_failures = max_io_queues_connect_failures
@property
def max_controller_connect_failures(self):
"""
Gets the max_controller_connect_failures of this NVMeoFInterfaceStatistics.
The maximum number of NVMe Controller Connect Failures that have occurred on this NVMe over Fabrics interface.
:return: The max_controller_connect_failures of this NVMeoFInterfaceStatistics.
:rtype: int
:required/optional: required
"""
return self._max_controller_connect_failures
@max_controller_connect_failures.setter
def max_controller_connect_failures(self, max_controller_connect_failures):
"""
Sets the max_controller_connect_failures of this NVMeoFInterfaceStatistics.
The maximum number of NVMe Controller Connect Failures that have occurred on this NVMe over Fabrics interface.
:param max_controller_connect_failures: The max_controller_connect_failures of this NVMeoFInterfaceStatistics.
:type: int
"""
self._max_controller_connect_failures = max_controller_connect_failures
@property
def nvme_controller_resets(self):
"""
Gets the nvme_controller_resets of this NVMeoFInterfaceStatistics.
The number of NVM Controller Resets that have occurred on this NVMe over Fabrics interface.
:return: The nvme_controller_resets of this NVMeoFInterfaceStatistics.
:rtype: int
:required/optional: required
"""
return self._nvme_controller_resets
@nvme_controller_resets.setter
def nvme_controller_resets(self, nvme_controller_resets):
"""
Sets the nvme_controller_resets of this NVMeoFInterfaceStatistics.
The number of NVM Controller Resets that have occurred on this NVMe over Fabrics interface.
:param nvme_controller_resets: The nvme_controller_resets of this NVMeoFInterfaceStatistics.
:type: int
"""
self._nvme_controller_resets = nvme_controller_resets
@property
def nvme_controller_shutdowns(self):
"""
Gets the nvme_controller_shutdowns of this NVMeoFInterfaceStatistics.
The number of NVM Controller Shutdowns that have occurred on this NVMe over Fabrics interface.
:return: The nvme_controller_shutdowns of this NVMeoFInterfaceStatistics.
:rtype: int
:required/optional: required
"""
return self._nvme_controller_shutdowns
@nvme_controller_shutdowns.setter
def nvme_controller_shutdowns(self, nvme_controller_shutdowns):
"""
Sets the nvme_controller_shutdowns of this NVMeoFInterfaceStatistics.
The number of NVM Controller Shutdowns that have occurred on this NVMe over Fabrics interface.
:param nvme_controller_shutdowns: The nvme_controller_shutdowns of this NVMeoFInterfaceStatistics.
:type: int
"""
self._nvme_controller_shutdowns = nvme_controller_shutdowns
@property
def admin_queue_statistics(self):
"""
Gets the admin_queue_statistics of this NVMeoFInterfaceStatistics.
This structure describes the NVMe over Fabrics queue statistics for the Admin Queue.
:return: The admin_queue_statistics of this NVMeoFInterfaceStatistics.
:rtype: NVMeoFQueueStatistics
:required/optional: required
"""
return self._admin_queue_statistics
@admin_queue_statistics.setter
def admin_queue_statistics(self, admin_queue_statistics):
"""
Sets the admin_queue_statistics of this NVMeoFInterfaceStatistics.
This structure describes the NVMe over Fabrics queue statistics for the Admin Queue.
:param admin_queue_statistics: The admin_queue_statistics of this NVMeoFInterfaceStatistics.
:type: NVMeoFQueueStatistics
"""
self._admin_queue_statistics = admin_queue_statistics
@property
def io_queue_statistics(self):
"""
Gets the io_queue_statistics of this NVMeoFInterfaceStatistics.
This structure describes the NVMe over Fabrics queue statistics for the I/O Queues.
:return: The io_queue_statistics of this NVMeoFInterfaceStatistics.
:rtype: NVMeoFQueueStatistics
:required/optional: required
"""
return self._io_queue_statistics
@io_queue_statistics.setter
def io_queue_statistics(self, io_queue_statistics):
"""
Sets the io_queue_statistics of this NVMeoFInterfaceStatistics.
This structure describes the NVMe over Fabrics queue statistics for the I/O Queues.
:param io_queue_statistics: The io_queue_statistics of this NVMeoFInterfaceStatistics.
:type: NVMeoFQueueStatistics
"""
self._io_queue_statistics = io_queue_statistics
def to_dict(self):
"""
Returns the model properties as a dict
"""
result = {}
for attr, _ in iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""
Returns the string representation of the model
"""
return pformat(self.to_dict())
def __repr__(self):
"""
For `print` and `pprint`
"""
return self.to_str()
def __eq__(self, other):
"""
Returns true if both objects are equal
"""
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""
Returns true if both objects are not equal
"""
return not self == other
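# Usage sketch (hypothetical values, not part of the generated model):
#
#     stats = NVMeoFInterfaceStatistics()
#     stats.interface_ref = '0123456789abcdef'
#     stats.keep_alive_timeouts = 0
#     print(stats.to_str())  # pformat of the attribute dict built by to_dict()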
| 41.772455
| 844
| 0.690869
|
67ea0827462f662b8dd73f2ca6b30de4efcd50ab
| 3,149
|
py
|
Python
|
svg/charts/css.py
|
ewerybody/svg.charts
|
eb77a381f0721b3d59ae9461765ac9e9cffef586
|
[
"MIT"
] | 26
|
2016-05-04T09:57:13.000Z
|
2021-08-23T17:59:20.000Z
|
svg/charts/css.py
|
ewerybody/svg.charts
|
eb77a381f0721b3d59ae9461765ac9e9cffef586
|
[
"MIT"
] | 21
|
2016-03-22T00:39:51.000Z
|
2021-08-14T00:43:21.000Z
|
svg/charts/css.py
|
ewerybody/svg.charts
|
eb77a381f0721b3d59ae9461765ac9e9cffef586
|
[
"MIT"
] | 15
|
2016-04-06T09:57:38.000Z
|
2021-08-17T03:56:40.000Z
|
import cssutils
SVG = 'SVG 1.1' # http://www.w3.org/TR/SVG11/styling.html
macros = {
'paint': 'none|currentColor|{color}',
'unitidentifier': 'em|ex|px|pt|pc|cm|mm|in|%',
'length': '{positivenum}({unitidentifier})?',
'dasharray': r'{positivenum}(\s*,\s*{positivenum})*',
# a number greater-than or equal to one
'number-ge-one': r'[1-9][0-9]*(\.[0-9]+)?',
}
properties = {
# Clipping, Masking, and Compositing
'clip-path': '{uri}|none|inherit',
'clip-rule': 'nonzero|evenodd|inherit',
'mask': '{uri}|none|inherit',
'opacity': '{num}|inherit',
# Filter Effects
'enable-background': r'accumulate|new(\s+{num}){0,4}|inherit',
'filter': '{uri}|none|inherit',
'flood-color': 'currentColor|{color}|inherit',
'flood-opacity': '{num}|inherit',
'lighting-color': 'currentColor|{color}|inherit',
# Gradient Properties
'stop-color': 'currentColor|{color}|inherit',
'stop-opacity': '{num}|inherit',
# Interactivity Properties
'pointer-events': 'visiblePainted|visibleFill|visibleStroke|visible'
'|painted|fill|stroke|all|none|inherit',
# Color and Pointing Properties
'color-interpolation': 'auto|sRGB|linearRGB|inherit',
'color-interpolation-filters': 'auto|sRGB|linearRGB|inherit',
'color-rendering': 'auto|optimizeSpeed|optimizeQuality|inherit',
'shape-rendering': 'auto|optimizeSpeed|crispEdges|geometricPrecision|inherit',
'text-rendering': (
'auto|optimizeSpeed|optimizeLegibility|geometricPrecision|inherit'
),
'fill': '{paint}',
'fill-opacity': '{num}|inherit',
'fill-rule': 'nonzero|evenodd|inherit',
'image-rendering': 'auto|optimizeSpeed|optimizeQuality|inherit',
'marker': 'none|inherit|{uri}',
'marker-end': 'none|inherit|{uri}',
'marker-mid': 'none|inherit|{uri}',
'marker-start': 'none|inherit|{uri}',
'stroke': '{paint}',
'stroke-dasharray': 'none|{dasharray}|inherit',
'stroke-dashoffset': '{length}|inherit',
'stroke-linecap': 'butt|round|square|inherit',
'stroke-linejoin': 'miter|round|bevel|inherit',
'stroke-miterlimit': '{number-ge-one}|inherit',
'stroke-opacity': '{num}|inherit',
'stroke-width': '{length}|inherit',
# Text Properties
'alignment-baseline': 'auto|baseline|before-edge|text-before-edge|middle'
'|central|after-edge|text-after-edge|ideographic'
'|alphabetic|hanging|mathematical|inherit',
'baseline-shift': 'baseline|sub|super|{percentage}|{length}|inherit',
'dominant-baseline': 'auto|use-script|no-change|reset-size|ideographic'
'|alphabetic|hanging|mathematical|central|middle'
'|text-after-edge|text-before-edge|inherit',
'glyph-orientation-horizontal': '{angle}|inherit',
'glyph-orientation-vertical': 'auto|{angle}|inherit',
'kerning': 'auto|{length}|inherit',
'text-anchor': 'start|middle|end|inherit',
'writing-mode': 'lr-tb|rl-tb|tb-rl|lr|rl|tb|inherit',
}
cssutils.profile.addProfile(SVG, properties, macros)
cssutils.profile.defaultProfiles = [SVG, cssutils.profile.CSS_LEVEL_2]
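# With the SVG profile registered above, cssutils validates SVG styling
# properties during parsing (a small sketch; the stylesheet text is illustrative):
#
#     sheet = cssutils.parseString('rect { fill: currentColor; stroke-width: 2px; }')
#     print(sheet.cssRules[0].style.getPropertyValue('fill'))  # 'currentColor'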
| 41.986667
| 82
| 0.671959
|
d718a560a41f81149b9787e80b6695562144b89f
| 64
|
py
|
Python
|
discum/gateway/user/__init__.py
|
firewood-b/Discord-S.C.U.M
|
1beb8c25ab245a1389431a5206eafb9b4a95df0f
|
[
"MIT"
] | null | null | null |
discum/gateway/user/__init__.py
|
firewood-b/Discord-S.C.U.M
|
1beb8c25ab245a1389431a5206eafb9b4a95df0f
|
[
"MIT"
] | null | null | null |
discum/gateway/user/__init__.py
|
firewood-b/Discord-S.C.U.M
|
1beb8c25ab245a1389431a5206eafb9b4a95df0f
|
[
"MIT"
] | null | null | null |
from .combo import *
from .parse import *
from .request import *
| 21.333333
| 22
| 0.734375
|
f2c6fc721f0dd8316234db1205edcf98924f0a55
| 612
|
py
|
Python
|
spinsys/__init__.py
|
macthecadillac/Interacting-Fermions
|
6122d2a7e67533b28e581929995ce8e2a2ad41fc
|
[
"BSD-3-Clause"
] | 1
|
2020-07-29T06:06:12.000Z
|
2020-07-29T06:06:12.000Z
|
spinsys/__init__.py
|
macthecadillac/Interacting-Fermions
|
6122d2a7e67533b28e581929995ce8e2a2ad41fc
|
[
"BSD-3-Clause"
] | null | null | null |
spinsys/__init__.py
|
macthecadillac/Interacting-Fermions
|
6122d2a7e67533b28e581929995ce8e2a2ad41fc
|
[
"BSD-3-Clause"
] | null | null | null |
from spinsys import constructors
from spinsys import dmrg
from spinsys import exceptions
from spinsys import half
from spinsys import quantities
from spinsys import state_generators
from spinsys import time_dependent
from spinsys import utils
import shutil
import numpy
__all__ = [
"constructors",
"dmrg",
"exceptions",
"half",
"quantities",
"state_generators",
"time_dependent",
"utils"
]
# set default print options for better display of data on screen
term_width = tuple(shutil.get_terminal_size())[0]
numpy.set_printoptions(precision=5, suppress=True, linewidth=term_width)
| 23.538462
| 72
| 0.76634
|
218f8c4feadc5fce1ff089f0a30326c829827aa5
| 954
|
py
|
Python
|
test/unit/bench_cli/context.py
|
systay/arewefastyet
|
f21f3c0c63b5e3729d946bd2283004641b427aad
|
[
"Apache-2.0"
] | null | null | null |
test/unit/bench_cli/context.py
|
systay/arewefastyet
|
f21f3c0c63b5e3729d946bd2283004641b427aad
|
[
"Apache-2.0"
] | 1
|
2021-03-04T11:06:58.000Z
|
2021-03-04T11:06:58.000Z
|
test/unit/bench_cli/context.py
|
systay/arewefastyet
|
f21f3c0c63b5e3729d946bd2283004641b427aad
|
[
"Apache-2.0"
] | 1
|
2021-09-03T16:06:08.000Z
|
2021-09-03T16:06:08.000Z
|
# Copyright 2021 The Vitess Authors.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import sys
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '../../../')))
import bench_cli.cli as cli
import bench_cli.configuration as configuration
import bench_cli.run_benchmark as run_benchmark
import bench_cli.task as task
import bench_cli.task_factory as taskfac
import bench_cli.task_oltp as oltp
import bench_cli.task_tpcc as tpcc
| 41.478261
| 89
| 0.786164
|
7f7389444434f2b4fe57df1e0c221c9c565baf73
| 1,498
|
py
|
Python
|
sort_key_test.py
|
PythonCHB/sort_key_tests
|
00e0fdb872a58b9f0797176afd3332fa8c5be159
|
[
"Unlicense"
] | 1
|
2019-07-22T06:19:39.000Z
|
2019-07-22T06:19:39.000Z
|
sort_key_test.py
|
PythonCHB/sort_key_tests
|
00e0fdb872a58b9f0797176afd3332fa8c5be159
|
[
"Unlicense"
] | null | null | null |
sort_key_test.py
|
PythonCHB/sort_key_tests
|
00e0fdb872a58b9f0797176afd3332fa8c5be159
|
[
"Unlicense"
] | null | null | null |
#!/usr/bin/env python3
import random
import time
random.seed(hash('Testing Keys'))
lt_calls = 0
key_calls = 0
outer_key_calls = 0
def outer_key(item):
global outer_key_calls
outer_key_calls += 1
return item.key()
class MyObject:
def __init__(self, value1, value2):
self.value1 = value1
self.value2 = value2
def __lt__(self, other):
global lt_calls
lt_calls += 1
if self.value1 != other.value1:
return self.value1 < other.value1
return self.value2 < other.value2
def key(self):
global key_calls
key_calls += 1
return self.value1, self.value2
# def __lt__(self, other):
# global lt_calls
# lt_calls += 1
# return self.value1 < other.value1
# def key(self):
# global key_calls
# key_calls += 1
# return self.value1
lt_list = [MyObject(value1, value1 - 50) for value1 in reversed(range(1000))]
random.shuffle(lt_list)
key_list = lt_list[:]
outer_key_list = lt_list[:]
print("Using a length: {} list".format(len(lt_list)))
s = time.time()
key_list.sort(key=MyObject.key)
ek = time.time() - s
print('key %.6fs %6d calls' % (ek, key_calls))
s = time.time()
outer_key_list.sort(key=outer_key)
eok = time.time() - s
print('outer_key %.6fs %6d calls' % (eok, outer_key_calls))
s = time.time()
lt_list.sort()
elt = time.time() - s
print('lt %.6fs %6d calls' % (elt, lt_calls))
print("time ratio:", elt / eok)
| 18.962025
| 77
| 0.613485
|
56db30aee1fe93d79335fe03cdad6bfc7e984233
| 1,584
|
py
|
Python
|
epotential.py
|
the-fridge/Python_Projects
|
73758eb57acdae26b58a14e6e4996919a7bdde43
|
[
"MIT"
] | 1
|
2021-04-18T22:25:56.000Z
|
2021-04-18T22:25:56.000Z
|
epotential.py
|
iamfeysal/Python_Projects
|
73758eb57acdae26b58a14e6e4996919a7bdde43
|
[
"MIT"
] | null | null | null |
epotential.py
|
iamfeysal/Python_Projects
|
73758eb57acdae26b58a14e6e4996919a7bdde43
|
[
"MIT"
] | 1
|
2019-03-21T08:49:21.000Z
|
2019-03-21T08:49:21.000Z
|
#We import our libs:
import numpy as np
import matplotlib.pyplot as plt
from numba import jit
@jit
def solver(N):
# Make the initial guess for solution matrix
V = np.zeros((N,N))
# Solver:
iterations = 0
eps = 1e-10 # Convergence threshold
error = 1e4 # Large dummy error
while iterations < 1e4 and error > eps:
V_temp = np.copy(V)
error = 0
# we make this accumulate in the loop
for j in range(1,N-1):
for i in range(1,N-1):
V[i,j] = 0.25*(V[i+1,j] + V[i-1,j] + V[i,j-1] + V[i,j+1] + rho[i,j]*ds**2)
error += abs(V[i,j]-V_temp[i,j])
iterations += 1
print("iterations =", iterations)
return V
# Set dimensions of the problem:
L = 1.0
N = 21
ds = L/N
# Define arrays used for plotting:
x = np.linspace(0,L,N)
y = np.copy(x)
X, Y = np.meshgrid(x,y)
# Make the charge density matrix:
rho0 = 1.0
rho = np.zeros((N,N))
rho[int(round(N/2.0)),int(round(N/2.0))] = rho0
# for j in range(round(N/2.0)-int(N/20.0),round(N/2.0)+int(N/20.0)):
# rho[round(N/2.0)-int(N/30.0),j] = rho0
# rho[round(N/2.0)+int(N/30.0),j] = -rho0
# Solver:
V = solver(N)
# Plotting:
eps = 3
zoomX = X[round(N/2.0)-eps:round(N/2.0)+eps]
zoomY = Y[round(N/2.0)-eps:round(N/2.0)+eps]
zoomV = V[round(N/2.0)-eps:round(N/2.0)+eps]
plt.figure(figsize=(5,3))
CS = plt.contour(zoomX, zoomY, zoomV, 30) # Make a contour plot
plt.clabel(CS, inline=1, fontsize=10)
plt.title("PDE solution of a point charge")
CB = plt.colorbar(CS, extend="both")
plt.show()
# Print matrix:
print(zoomV)
| 24.75
| 90
| 0.595328
|
e5b9bcde26cfdc17e604b1629b37cd170b7f33a6
| 411
|
py
|
Python
|
python37/Scripts/pinyin-script.py
|
sheyingqi/lushi_python
|
1a093a34433f2cd90ca525baec09327a72c57099
|
[
"MIT"
] | 1
|
2021-12-23T02:30:19.000Z
|
2021-12-23T02:30:19.000Z
|
python37/Scripts/pinyin-script.py
|
sheyingqi/lushi_python
|
1a093a34433f2cd90ca525baec09327a72c57099
|
[
"MIT"
] | null | null | null |
python37/Scripts/pinyin-script.py
|
sheyingqi/lushi_python
|
1a093a34433f2cd90ca525baec09327a72c57099
|
[
"MIT"
] | null | null | null |
#!C:\Users\Administrator\Desktop\python37\python.exe
# EASY-INSTALL-ENTRY-SCRIPT: 'pinyin==0.4.0','console_scripts','pinyin'
__requires__ = 'pinyin==0.4.0'
import re
import sys
from pkg_resources import load_entry_point
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(
load_entry_point('pinyin==0.4.0', 'console_scripts', 'pinyin')()
)
| 31.615385
| 72
| 0.673966
|
522a505bda785e941a43161f1e4b466e509c1a81
| 38
|
py
|
Python
|
tabular/src/autogluon/tabular/trainer/__init__.py
|
zhiqiangdon/autogluon
|
71ee7ef0f05d8f0aad112d8c1719174aa33194d9
|
[
"Apache-2.0"
] | 4,462
|
2019-12-09T17:41:07.000Z
|
2022-03-31T22:00:41.000Z
|
tabular/src/autogluon/tabular/trainer/__init__.py
|
zhiqiangdon/autogluon
|
71ee7ef0f05d8f0aad112d8c1719174aa33194d9
|
[
"Apache-2.0"
] | 1,408
|
2019-12-09T17:48:59.000Z
|
2022-03-31T20:24:12.000Z
|
tabular/src/autogluon/tabular/trainer/__init__.py
|
zhiqiangdon/autogluon
|
71ee7ef0f05d8f0aad112d8c1719174aa33194d9
|
[
"Apache-2.0"
] | 623
|
2019-12-10T02:04:18.000Z
|
2022-03-20T17:11:01.000Z
|
from .auto_trainer import AutoTrainer
| 19
| 37
| 0.868421
|
6feb97609a006b219ba2e12264c00c16b77972b1
| 458
|
py
|
Python
|
ccvalidate/api/permissions.py
|
orion3000/albert
|
84373ab1e413513758d131adf992534e5ffd4621
|
[
"BSD-3-Clause"
] | null | null | null |
ccvalidate/api/permissions.py
|
orion3000/albert
|
84373ab1e413513758d131adf992534e5ffd4621
|
[
"BSD-3-Clause"
] | 5
|
2021-04-08T20:24:24.000Z
|
2022-02-10T11:17:41.000Z
|
ccvalidate/api/permissions.py
|
orion3000/albert
|
84373ab1e413513758d131adf992534e5ffd4621
|
[
"BSD-3-Clause"
] | null | null | null |
from rest_framework.permissions import BasePermission
from .models import Creditcard
class IsOwner(BasePermission):
"""Custom permission class to allow only creditcard owners to edit them."""
def has_object_permission(self, request, view, obj):
"""Return True if permission is granted to the creditcard owner."""
return obj.owner == request.user
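# Typical wiring in a DRF view (a sketch; the view class and import are
# illustrative, not part of this module):
#
#     from rest_framework import generics
#     class CreditcardDetail(generics.RetrieveUpdateDestroyAPIView):
#         queryset = Creditcard.objects.all()
#         permission_classes = (IsOwner,)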
| 35.230769
| 79
| 0.713974
|
e61c7b240ef1e5b3cb5597a0cb8c7b8c0e6ba4b7
| 1,903
|
py
|
Python
|
autotest/gcore/aaigrid_read.py
|
dtusk/gdal1
|
30dcdc1eccbca2331674f6421f1c5013807da609
|
[
"MIT"
] | 3
|
2017-01-12T10:18:56.000Z
|
2020-03-21T16:42:55.000Z
|
autotest/gcore/aaigrid_read.py
|
ShinNoNoir/gdal-1.11.5-vs2015
|
5d544e176a4c11f9bcd12a0fe66f97fd157824e6
|
[
"MIT"
] | null | null | null |
autotest/gcore/aaigrid_read.py
|
ShinNoNoir/gdal-1.11.5-vs2015
|
5d544e176a4c11f9bcd12a0fe66f97fd157824e6
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
###############################################################################
# $Id$
#
# Project: GDAL/OGR Test Suite
# Purpose: Test basic read support for Arc/Info ASCII grid (AAIGrid) file.
# Author: Andrey Kiselev, dron@remotesensing.org
#
###############################################################################
# Copyright (c) 2003, Andrey Kiselev <dron@remotesensing.org>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Library General Public
# License as published by the Free Software Foundation; either
# version 2 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Library General Public License for more details.
#
# You should have received a copy of the GNU Library General Public
# License along with this library; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place - Suite 330,
# Boston, MA 02111-1307, USA.
###############################################################################
import os
import sys
sys.path.append( '../pymod' )
import gdaltest
from osgeo import gdal
###############################################################################
# When imported build a list of units based on the files available.
gdaltest_list = []
init_list = [ \
('byte.tif.grd', 1, 4672, None)]
for item in init_list:
ut = gdaltest.GDALTest( 'AAIGrid', item[0], item[1], item[2] )
if ut is None:
print( 'AAIGrid tests skipped' )
sys.exit()
gdaltest_list.append( (ut.testOpen, item[0]) )
if __name__ == '__main__':
gdaltest.setup_run( 'aaigrid_read' )
gdaltest.run_tests( gdaltest_list )
gdaltest.summarize()
| 32.254237
| 79
| 0.594325
|
bc3513cb203b5eb746d7ca1948d927c26402a7f1
| 390
|
py
|
Python
|
palsbet/migrations/0003_auto_20180323_0018.py
|
denis254/palsbetc
|
d70d0fadaa661ff36c046a4f0a87a88d890c0dc4
|
[
"BSD-3-Clause"
] | null | null | null |
palsbet/migrations/0003_auto_20180323_0018.py
|
denis254/palsbetc
|
d70d0fadaa661ff36c046a4f0a87a88d890c0dc4
|
[
"BSD-3-Clause"
] | 11
|
2020-03-24T16:11:23.000Z
|
2021-12-13T19:47:29.000Z
|
palsbet/migrations/0003_auto_20180323_0018.py
|
denis254/overtimebet
|
063af2fc263580d96e396e953ef8658a75ac38a5
|
[
"BSD-3-Clause"
] | null | null | null |
# Generated by Django 2.0.2 on 2018-03-22 21:18
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('palsbet', '0002_viptipsgames'),
]
operations = [
migrations.AlterField(
model_name='viptipsgames',
name='cathegory',
field=models.CharField(max_length=100),
),
]
| 20.526316
| 51
| 0.602564
|
1017b343a5b7dcbbbb64251b24cb90f54b1de90d
| 2,925
|
py
|
Python
|
qbflask/conventions.py
|
kevindkeogh/qbootstrapper-flask
|
490906837d6522e3669193e5097bd33e1f953451
|
[
"MIT"
] | 1
|
2017-04-27T08:59:01.000Z
|
2017-04-27T08:59:01.000Z
|
qbflask/conventions.py
|
kevindkeogh/qbootstrapper-flask
|
490906837d6522e3669193e5097bd33e1f953451
|
[
"MIT"
] | null | null | null |
qbflask/conventions.py
|
kevindkeogh/qbootstrapper-flask
|
490906837d6522e3669193e5097bd33e1f953451
|
[
"MIT"
] | null | null | null |
#!/usr/bin/python3
'''Functions and constants for all conventions, including adding and returning
lists of conventions
'''
import qbflask.models as models
import json
import re
INSTRUMENT_TYPES = [('OISCashRate', 'OIS Cash Rate'),
('OISSwap', 'OIS Swap'),
('LIBORCashRate', 'LIBOR Cash Rate'),
('LIBORFuture', 'LIBOR Future'),
('LIBORFRA', 'LIBOR FRA'),
('LIBORSwap', 'LIBOR Swap Rate')]
CURVE_TYPES = [('OIS', 'OIS'), ('LIBOR', 'LIBOR')]
FREQ_TYPES = [('months', 'Months'), ('weeks', 'Weeks'), ('days', 'Days')]
ADJ_TYPES = [('unadjusted', 'Unadjusted'), ('following', 'Following'),
('modified following', 'Modified Following'),
('preceding', 'Preceding')]
BASIS_TYPES = [('act360', 'Actual/360'), ('act365', 'Actual/365'),
('30360', '30/360'), ('30E360', '30/360E')]
def add_convention(raw_data):
'''Takes a flask request JSON object, calls the parser, and adds the
information to the database
'''
try:
data = parse_convs_form(raw_data)
name = data['conv_name']
ccy = data['currency']
inst = data['conv_instrument_type']
conv = json.dumps(data)
db = models.get_db()
cur = db.cursor()
query = ('INSERT INTO CONVENTIONS(name, currency, instrument, '
'convention) VALUES(?, ?, ?, ?)')
cur.execute(query, (name, ccy, inst, conv))
db.commit()
return (True, '{name} convention successfully added'.format(**locals()))
except Exception:
return (False, 'An error occurred: convention not added')
def parse_convs_form(raw_data):
'''Takes Flask request JSON object and parses to dict for db addition'''
convs = {}
for row in raw_data:
convs[row['name']] = row['value']
return convs
def get_conventions_list():
'''Returns nested dict of all conventions
{ Currency : { Instrument_type : [Name] } }
'''
db = models.get_db()
cur = db.cursor()
query = 'SELECT currency, instrument, name FROM CONVENTIONS'
cur.execute(query)
convs = {}
for row in cur:
if row['currency'] not in convs:
convs[row['currency']] = {}
if row['instrument'] not in convs[row['currency']]:
convs[row['currency']][row['instrument']] = []
convs[row['currency']][row['instrument']].append(row['name'])
return convs
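# Example of the nested dict returned above (currency, instrument type, and
# convention names are illustrative only):
#
#     {'USD': {'OISSwap': ['USD_FEDFUNDS'], 'LIBORSwap': ['USD_3M_LIBOR']}}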
def convs_validate(conv):
'''Validate a convention dict (not yet implemented).'''
pass
def get_convention(name, currency):
'''Gets a single convention from the database. Returns a python dict of
conventions and strings
'''
db = models.get_db()
cur = db.cursor()
query = 'SELECT convention FROM conventions WHERE (name=? AND currency=?)'
cur.execute(query, (name, currency))
conv = cur.fetchone()[0]
conv = json.loads(conv)
return conv
| 30.46875
| 80
| 0.585641
|
5c434083f7212828c6a5503ed23b5c1ea78119fa
| 1,765
|
py
|
Python
|
tulingApi.py
|
Jim-Luo/MyQQRobot2
|
35b97593ff6b317e68eb1e68a9ef9821d5f862a7
|
[
"MIT"
] | 3
|
2019-10-01T13:46:16.000Z
|
2021-03-14T11:37:29.000Z
|
tulingApi.py
|
Jim-Luo/MyQQRobot2
|
35b97593ff6b317e68eb1e68a9ef9821d5f862a7
|
[
"MIT"
] | null | null | null |
tulingApi.py
|
Jim-Luo/MyQQRobot2
|
35b97593ff6b317e68eb1e68a9ef9821d5f862a7
|
[
"MIT"
] | 1
|
2019-07-23T06:55:00.000Z
|
2019-07-23T06:55:00.000Z
|
# -*- coding:utf-8 -*-
import json
import logging
import requests
from urllib import urlopen
from urllib import urlencode
class TulingAPI(object):
def __init__(self):
# API endpoint URL
self.turing_url = 'http://openapi.tuling123.com/openapi/api/v2?'
def get_turing_text(self,text):
turing_url_data = {
"perception": {
"inputText": {
"text": text
}
},
}
# print("The things to Request is:",self.turing_url + urlencode(turing_url_data))
# print("The result of Request is:",self.request)
try:
self.request = requests.post(self.turing_url, data=json.dumps(turing_url_data))
# print("Type of the data from urlopen:",type(w_data))
# print("The data from urlopen is:",w_data)
except Exception,e:
logging.error(e.message)
raise KeyError("Server wouldn't respond (invalid key or quota has been maxed out)")
# In other cases, assume the service's request quota has been reached
response_text = self.request.text
# print("Type of the response_text :",type(response_text))
# print("response_text :",response_text)
json_result = json.loads(response_text)
# print("Type of the json_result :",type(json_result))
return json.loads(response_text)['results'][0]['values']['text'].encode('utf-8')
if __name__ == '__main__':
print("Now u can type in something & input q to quit")
turing = TulingAPI()
while True:
msg = raw_input('\nMaster:')
if msg == 'q':
exit("u r quit the chat !") # 设定输入q,退出聊天。
else:
turing_data = turing.get_turing_text(msg)
print turing_data
| 29.915254
| 96
| 0.584136
|
78dc6cc9edea9c4210a0b5d28078e367fec43a70
| 1,819
|
py
|
Python
|
datumaro/datumaro/plugins/voc_format/importer.py
|
lravindr/cvat
|
b025acea43fbb55c7ea7eac7b12007f0eb6d3f45
|
[
"MIT"
] | 2
|
2020-03-16T03:41:27.000Z
|
2020-03-16T03:53:01.000Z
|
datumaro/datumaro/plugins/voc_format/importer.py
|
lravindr/cvat
|
b025acea43fbb55c7ea7eac7b12007f0eb6d3f45
|
[
"MIT"
] | 24
|
2020-11-13T18:43:15.000Z
|
2022-03-12T00:21:52.000Z
|
datumaro/datumaro/plugins/voc_format/importer.py
|
lravindr/cvat
|
b025acea43fbb55c7ea7eac7b12007f0eb6d3f45
|
[
"MIT"
] | 5
|
2020-07-01T18:02:48.000Z
|
2021-01-22T02:21:48.000Z
|
# Copyright (C) 2019 Intel Corporation
#
# SPDX-License-Identifier: MIT
from glob import glob
import os.path as osp
from datumaro.components.extractor import Importer
from .format import VocTask, VocPath
class VocImporter(Importer):
_TASKS = [
(VocTask.classification, 'voc_classification', 'Main'),
(VocTask.detection, 'voc_detection', 'Main'),
(VocTask.segmentation, 'voc_segmentation', 'Segmentation'),
(VocTask.person_layout, 'voc_layout', 'Layout'),
(VocTask.action_classification, 'voc_action', 'Action'),
]
@classmethod
def detect(cls, path):
return len(cls.find_subsets(path)) != 0
def __call__(self, path, **extra_params):
from datumaro.components.project import Project # cyclic import
project = Project()
subset_paths = self.find_subsets(path)
if len(subset_paths) == 0:
raise Exception("Failed to find 'voc' dataset at '%s'" % path)
for task, extractor_type, subset_path in subset_paths:
project.add_source('%s-%s' %
(task.name, osp.splitext(osp.basename(subset_path))[0]),
{
'url': subset_path,
'format': extractor_type,
'options': dict(extra_params),
})
return project
@staticmethod
def find_subsets(path):
subset_paths = []
for task, extractor_type, task_dir in __class__._TASKS:
task_dir = osp.join(path, VocPath.SUBSETS_DIR, task_dir)
if not osp.isdir(task_dir):
continue
task_subsets = [p for p in glob(osp.join(task_dir, '*.txt'))
if '_' not in osp.basename(p)]
subset_paths += [(task, extractor_type, p) for p in task_subsets]
return subset_paths
| 31.912281
| 77
| 0.611875
|
25379fb59d0f92a5acae582e1383759f8ed83135
| 62
|
py
|
Python
|
malib/evaluators/__init__.py
|
renos/Emergent-Multiagent-Strategies
|
afaf6acfdd6d505668f06ac23dfb33e872ab2872
|
[
"MIT"
] | 23
|
2020-07-05T11:13:00.000Z
|
2022-01-28T00:24:41.000Z
|
malib/evaluators/__init__.py
|
renos/Emergent-Multiagent-Strategies
|
afaf6acfdd6d505668f06ac23dfb33e872ab2872
|
[
"MIT"
] | 2
|
2020-09-07T19:09:40.000Z
|
2021-06-02T02:21:51.000Z
|
malib/evaluators/__init__.py
|
renos/Emergent-Multiagent-Strategies
|
afaf6acfdd6d505668f06ac23dfb33e872ab2872
|
[
"MIT"
] | 8
|
2020-07-06T07:24:37.000Z
|
2021-09-27T20:28:25.000Z
|
from malib.evaluators.multiagent_evaluator import MAEvaluator
| 31
| 61
| 0.903226
|
89502a87a1247b7bfdf5f805a5eed458d40b2e51
| 2,741
|
py
|
Python
|
Matching/Scheduler.py
|
JaredsAlgorithms/MatchingSchedules
|
08c6a60a28119cf9d6a49effb306158d0fa7dc5b
|
[
"MIT"
] | null | null | null |
Matching/Scheduler.py
|
JaredsAlgorithms/MatchingSchedules
|
08c6a60a28119cf9d6a49effb306158d0fa7dc5b
|
[
"MIT"
] | null | null | null |
Matching/Scheduler.py
|
JaredsAlgorithms/MatchingSchedules
|
08c6a60a28119cf9d6a49effb306158d0fa7dc5b
|
[
"MIT"
] | null | null | null |
from Matching.TimeSlot import TimeSlot
from Matching.Stack import Stack
class Scheduler:
def combineSchedules(self, person1, person2) -> list:
"""
Remove duplicate time intervals
"""
a, b = len(person1.schedule), len(person2.schedule)
# True: person1 has the longer (or equal-length) schedule
# False: person2 has the longer schedule
_info = (a >= b, min(a, b))
container = []
for schedule1, schedule2 in zip(person1.schedule, person2.schedule):
if(schedule1 not in container):
container.append(schedule1)
if(schedule2 not in container):
container.append(schedule2)
# make sure to add the remaining items of the longer schedule into the container
if(a != b):
person1_is_longer, start = _info
person = person1 if(person1_is_longer) else person2
for value in person.schedule[start:]:
if(value not in container):
container.append(value)
container.sort(key=lambda x: x.begin) # O(n * log(n)) time complexity
return container
def mergeSchedules(self, person1, person2):
"""
Input: pre-sorted container
Return a range of times that possibly work for each party
"""
# NOTE: this project pulls from this solution from LeetCode:
# https://leetcode.com/problems/merge-intervals/solution/
# smaller time intervals will be combined into one contiguous interval
# I wanted to be creative and use a stack instead of a pure list
# assumes result is not an empty list
result = self.combineSchedules(person1, person2)
if not(result):
return []
_Stack = Stack()
_Stack.push(result[0])
for slot in result[1:]:
top = _Stack.peek()
if(top.end < slot.begin):
_Stack.push(slot)
else:
top.end = max(top.end, slot.end)
return _Stack.container
def dispenseTimes(self, merged):
# strip edge cases
edge_one, edge_two = merged[0].end, merged[-1].begin
new_container = []
# iterate over the range not including fringe cases
for element in merged[1:-1]:
begin, end = element.begin, element.end
new_container.append(begin)
new_container.append(end)
new_container.insert(0, edge_one)
new_container.append(edge_two)
# create a list where every other indexes are pairs:
# container = [1, 2, 3, 4] would be [[1, 2], [3, 4]]
return [new_container[n:n+2]
for n in range(0, len(new_container), 2)]
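# Usage sketch (assumes two objects exposing a `schedule` list of TimeSlot
# instances with `begin`/`end` attributes; the names below are hypothetical):
#
#     scheduler = Scheduler()
#     merged = scheduler.mergeSchedules(alice, bob)
#     candidate_pairs = scheduler.dispenseTimes(merged)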
| 32.247059
| 82
| 0.581175
|
241b71dadf22b5b3892b7c4f2889c50a1a83ecb4
| 411
|
py
|
Python
|
src/dsalgo/stack_test.py
|
kagemeka/python-algorithms
|
dface89b8c618845cf524429aa8e97c4b2b10ceb
|
[
"MIT"
] | 1
|
2022-02-10T02:13:07.000Z
|
2022-02-10T02:13:07.000Z
|
src/dsalgo/stack_test.py
|
kagemeka/python-algorithms
|
dface89b8c618845cf524429aa8e97c4b2b10ceb
|
[
"MIT"
] | 6
|
2022-01-05T09:15:54.000Z
|
2022-01-09T05:48:43.000Z
|
src/dsalgo/stack_test.py
|
kagemeka/python-algorithms
|
dface89b8c618845cf524429aa8e97c4b2b10ceb
|
[
"MIT"
] | null | null | null |
import unittest
import dsalgo.stack
class Test(unittest.TestCase):
def test(self) -> None:
st = dsalgo.stack.Stack[int]()
st.push(3)
st.push(2)
self.assertEqual(len(st), 2)
self.assertEqual(st.top(), 2)
self.assertEqual(len(st), 2)
self.assertEqual(st.pop(), 2)
self.assertEqual(len(st), 1)
if __name__ == "__main__":
unittest.main()
| 20.55
| 38
| 0.586375
|
abcfbc020b718594f4d703d1edc81820b706f90e
| 706
|
py
|
Python
|
tests/test_is_pattern.py
|
rtmigo/framefile_py
|
b787ef7701bd3e1e99822fc2de6304384a8a06c0
|
[
"MIT"
] | null | null | null |
tests/test_is_pattern.py
|
rtmigo/framefile_py
|
b787ef7701bd3e1e99822fc2de6304384a8a06c0
|
[
"MIT"
] | null | null | null |
tests/test_is_pattern.py
|
rtmigo/framefile_py
|
b787ef7701bd3e1e99822fc2de6304384a8a06c0
|
[
"MIT"
] | null | null | null |
# SPDX-FileCopyrightText: (c) 2021 Artёm IG <github.com/rtmigo>
# SPDX-License-Identifier: MIT
import unittest
from framefile import is_pattern, Format
class TestIsPattern(unittest.TestCase):
def test_pct(self):
self.assertTrue(is_pattern("/path/to/%05d.png", fmt=Format.percent))
self.assertFalse(is_pattern("/path/to/#####.png", fmt=Format.percent))
self.assertFalse(is_pattern("/path/to/image.png", fmt=Format.percent))
def test_hash(self):
self.assertTrue(is_pattern("/path/to/#####.png", fmt=Format.hash))
self.assertFalse(is_pattern("/path/to/%05d.png", fmt=Format.hash))
self.assertFalse(is_pattern("/path/to/image.png", fmt=Format.hash))
| 39.222222
| 78
| 0.695467
|
868cc36730d19cde3ac7cbcf9836ae5412fa3c3a
| 1,002
|
py
|
Python
|
mimic/text_translator.py
|
ubclaunchpad/mimic
|
a5cee4e96d726d8d91f344ad86428501b63b1320
|
[
"MIT"
] | 4
|
2019-02-08T06:25:29.000Z
|
2020-02-12T04:29:40.000Z
|
mimic/text_translator.py
|
ubclaunchpad/mimic
|
a5cee4e96d726d8d91f344ad86428501b63b1320
|
[
"MIT"
] | 62
|
2019-02-02T22:35:38.000Z
|
2022-02-26T10:17:19.000Z
|
mimic/text_translator.py
|
ubclaunchpad/mimic
|
a5cee4e96d726d8d91f344ad86428501b63b1320
|
[
"MIT"
] | 1
|
2019-07-11T22:33:49.000Z
|
2019-07-11T22:33:49.000Z
|
"""Core text translator module."""
from mimic.model.translation_model import TranslationModel
class TextTranslator:
"""
Core text translator class.
User-facing class that offers loading of a bilingual dataset for training
and prediction functionality.
"""
def __init__(self):
"""Initialize a TextTranslator."""
self.model = TranslationModel()
def load_bilingual_text_file(self, bilingual_file_path):
"""
Load training dataset for consumption by the model.
The dataset is a pkl file of lines with phrases in the source language
in the 1st column and in the target language in the 2nd column, separated
by a tab.
Example:
Hi. Hallo!
Hi. Grüß Gott!
Run! Lauf!
Wow! Potzdonner!
Wow! Donnerwetter!
"""
raise NotImplementedError
def translate_text(self):
"""Translate text to the target language of the training dataset."""
raise NotImplementedError
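# Intended usage once the methods above are implemented (file path hypothetical):
#
#     translator = TextTranslator()
#     translator.load_bilingual_text_file('deu-eng.pkl')
#     translator.translate_text()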
| 27.081081
| 76
| 0.652695
|
38b762dbb185f806dcff54992459538d529f8a99
| 108
|
py
|
Python
|
2019/D10/Q2/MonitoringStation.py
|
buchasia/advent-of-code
|
f568c6330c8934325913705b39ef8c25a1023057
|
[
"MIT"
] | null | null | null |
2019/D10/Q2/MonitoringStation.py
|
buchasia/advent-of-code
|
f568c6330c8934325913705b39ef8c25a1023057
|
[
"MIT"
] | null | null | null |
2019/D10/Q2/MonitoringStation.py
|
buchasia/advent-of-code
|
f568c6330c8934325913705b39ef8c25a1023057
|
[
"MIT"
] | null | null | null |
from Map import AsteroidMap
asteroidMap = AsteroidMap('InputDay10.txt')
asteroidMap.getDistanceSlopeMap()
| 18
| 43
| 0.824074
|
a79f164ba0f3bb7bb2ff551ce0c9ea7f76ef3c84
| 1,786
|
py
|
Python
|
homeassistant/components/nsw_fuel_station/__init__.py
|
NikoM87/core
|
7403ba1e81579b4ab83da24e570d4afe864e6312
|
[
"Apache-2.0"
] | 2
|
2020-03-29T05:32:57.000Z
|
2021-06-13T06:55:05.000Z
|
homeassistant/components/nsw_fuel_station/__init__.py
|
NikoM87/core
|
7403ba1e81579b4ab83da24e570d4afe864e6312
|
[
"Apache-2.0"
] | 79
|
2020-07-23T07:13:37.000Z
|
2022-03-22T06:02:37.000Z
|
homeassistant/components/nsw_fuel_station/__init__.py
|
kmdm/home-assistant
|
4007430d7262ef035bb80affea13657fdc993b1d
|
[
"Apache-2.0"
] | 1
|
2020-11-18T21:04:18.000Z
|
2020-11-18T21:04:18.000Z
|
"""The nsw_fuel_station component."""
from __future__ import annotations
from dataclasses import dataclass
import datetime
import logging
from nsw_fuel import FuelCheckClient, FuelCheckError, Station
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator
from .const import DATA_NSW_FUEL_STATION
_LOGGER = logging.getLogger(__name__)
DOMAIN = "nsw_fuel_station"
SCAN_INTERVAL = datetime.timedelta(hours=1)
async def async_setup(hass, config):
"""Set up the NSW Fuel Station platform."""
client = FuelCheckClient()
async def async_update_data():
return await hass.async_add_executor_job(fetch_station_price_data, client)
coordinator = DataUpdateCoordinator(
hass,
_LOGGER,
name="sensor",
update_interval=SCAN_INTERVAL,
update_method=async_update_data,
)
hass.data[DATA_NSW_FUEL_STATION] = coordinator
await coordinator.async_refresh()
return True
@dataclass
class StationPriceData:
"""Data structure for O(1) price and name lookups."""
stations: dict[int, Station]
prices: dict[tuple[int, str], float]
def fetch_station_price_data(client: FuelCheckClient) -> StationPriceData | None:
"""Fetch fuel price and station data."""
try:
raw_price_data = client.get_fuel_prices()
# Restructure prices and station details to be indexed by station code
# for O(1) lookup
return StationPriceData(
stations={s.code: s for s in raw_price_data.stations},
prices={
(p.station_code, p.fuel_type): p.price for p in raw_price_data.prices
},
)
except FuelCheckError as exc:
_LOGGER.error("Failed to fetch NSW Fuel station price data. %s", exc)
return None
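# Example O(1) lookups on the structure returned above (station code and fuel
# type are illustrative):
#
#     data = fetch_station_price_data(FuelCheckClient())
#     if data is not None:
#         station = data.stations[12345]
#         price = data.prices[(12345, 'E10')]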
| 27.476923
| 85
| 0.703247
|
0a4ca05c3cc8ac05bc2affa4412756434ad1671c
| 12,823
|
py
|
Python
|
SmartBinApp.py
|
OpenSUTD/SmartBin
|
134eee02795dd9bd5936846c261283070c7b062e
|
[
"MIT"
] | 37
|
2018-04-21T22:35:01.000Z
|
2020-02-22T15:21:30.000Z
|
SmartBinApp.py
|
OpenSUTD/SmartBin
|
134eee02795dd9bd5936846c261283070c7b062e
|
[
"MIT"
] | 3
|
2018-09-10T17:11:02.000Z
|
2019-02-07T01:16:50.000Z
|
SmartBinApp.py
|
OpenSUTD/SmartBin
|
134eee02795dd9bd5936846c261283070c7b062e
|
[
"MIT"
] | 12
|
2018-07-09T02:53:20.000Z
|
2020-12-18T15:37:22.000Z
|
# ==============================================
# Configuration
# Some often-tweaked parameters during testing
# ==============================================
import os
os.environ['KIVY_HOME'] = "/home/pi/.kivy"
# model configuration file, taken from the training environment
config_path = "data/config.json"
# path to the best weights, taken from the training environment
weights_path = "data/best_weights_11.h5"
# Kivy resizes the camera image to this size before displaying
frame_size = 1180, 1180
# ====================
# Initialise LED Strip
# ====================
print("[i] Initialising LED Strip")
from neopixel import *
from threading import Thread
import time
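# NOTE: the swapped-looking channel values below suggest a GRB-wired strip
# (common for WS2812 pixels); this is an assumption — swap the first two
# Color() arguments if colors render incorrectly on your hardware.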
red = Color(0, 255, 0)
green = Color(255, 0, 0)
blue = Color(0, 0, 255)
yellow = Color(255, 255, 0)
# Create NeoPixel object with appropriate configuration.
strip = Adafruit_NeoPixel(25, 18, 800000, 10, False, 100, 0)
# Intialize the library (must be called once before other functions).
strip.begin()
class lightshow():
"""
A thread whose sole purpose is to show you the loading progress.
The model takes around 110 seconds to load, so that's what the progress bar shows you.
It's a bit naive, but it's also here for fun.
"""
def __init__(self):
# Initiate properties
global strip
self.stopped = False
self.start_time = None
self.progress = 0
self.pixels = strip.numPixels()
def start(self):
# start the thread to read frames from the video stream
self.start_time = time.time()
Thread(target=self.update, args=()).start()
return self
def update(self):
# keep looping infinitely until the thread is stopped
global strip, yellow, green
while True:
if self.stopped:
return
elif self.progress == 100:
self.stop()
else:
time.sleep(0.6)
self.progress += 0.5
for i in range(int((self.progress+4.4)/100*self.pixels)):
strip.setPixelColor(i, red)
for i in range(int((self.progress+2.6)/100*self.pixels)):
strip.setPixelColor(i, yellow)
for i in range(int(self.progress/100*self.pixels)):
strip.setPixelColor(i, green)
strip.show()
def stop(self):
self.stopped = True
# ====================================
# Computer Vision Pipeline
# Components (as threads):
# 1. Camera stream (PiVideoStream)
# 2. Inference (prediction) stream
# ====================================
# start the progress bar animation
progress_bar = lightshow().start()
print("[i] Initialising Computer Vision pipeline")
import cv2
import json
import numpy as np
from box_utils import draw_boxes
from object_detection_model import ObjectDetection
with open(config_path) as config_buffer:
config = json.load(config_buffer)
from camera import PiVideoStream
print("[i] Loading feature extractor:", config['model']['backend'])
print("[+] Trained labels:", config['model']['labels'])
print("[i] Building model... This will take a while... (< 2 mins)")
load_start = time.time()
model = ObjectDetection(backend=config['model']['backend'],
input_size=config['model']['input_size'],
labels=config['model']['labels'],
max_box_per_image=config['model']['max_box_per_image'],
anchors=config['model']['anchors'])
print("[i] Model took", (time.time()-load_start), "seconds to load")
print("[c] Starting video capture")
cap = PiVideoStream().start()
print("[i] Loading weights from", weights_path)
model.load_weights(weights_path)
class predictions():
"""
Streaming inferences independently of camera and UI updates
Makes use of the following global variables:
1. current frame from camera stream
2. currently loaded object detection model
"""
def __init__(self):
self.boxes = ["can", "bottle", "ken",
"grace", "frank", "tim", "shelly"]
self.stopped = False
def start(self):
# start the thread to read frames from the video stream
Thread(target=self.update, args=()).start()
return self
def update(self):
global model, frame
# keep looping infinitely until the thread is stopped
while True:
if self.stopped:
return
else:
self.boxes = model.predict(frame)
def read(self):
return self.boxes
def stop(self):
self.stopped = True
# =========
# IOT Setup
# 1. Import firebase iot functions
# 2. Authenticate and instantiate firebase
# 3. Reset firebase on first run
# =========
from iot import *
#firebase = firebase_setup()
# firebase_reset(firebase)
# ======================================================
# Perform one inference to test if everything is working
# ======================================================
print("[i] Running self-test")
try:
frame = cap.read() # read one frame from the stream
boxes = model.predict(frame) # get bounding boxes
# if previous line succeded, our model is functional; start the predictions stream
pred = predictions().start()
print("[+] Self-test: OK")
except Exception as error:
print("[!] Fatal error", end=": ")
print(error)
exit()
# ==============================
# Kivy Configuration
# Only needed on the first run
# ==============================
from kivy.config import Config
Config.set('graphics', 'fullscreen', 'fake')
Config.set('graphics', 'fbo', 'hardware')
Config.set('graphics', 'show_cursor', 1)
Config.set('graphics', 'borderless', 0)
Config.set('kivy', 'exit_on_escape', 1)
Config.write()
# ========================
# GUI Setup
# Necessary Kivy imports
# =========================
from kivy.app import App
from kivy.graphics import *
from kivy.graphics.texture import Texture
from kivy.lang import Builder
from kivy.clock import Clock
from kivy.uix.screenmanager import ScreenManager, Screen
from kivy.core.window import Window
Builder.load_file('app_layout.kv') # Kivy layout file
# Declare individual screens
class MainView(Screen):
"""
This is the main screen, shown when the app starts.
It displays the camera feed and 3 buttons
"""
def __init__(self, **kwargs):
global cap, frame, frame_size
# capture and render the first frame
self.frame_size = frame_size
frame = cap.read()
image = cv2.flip(frame, 0)
image = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)
image = cv2.resize(image, frame_size)
buf = image.tostring()
self.image_texture = Texture.create(size=(image.shape[1], image.shape[0]), colorfmt='rgb')
self.image_texture.blit_buffer(buf, colorfmt='rgb', bufferfmt='ubyte')
# coordinates of Trashy
self.t_x = 0
self.t_y = 0
self.current_user = 'No user yet'
self.tickcount = 0
self.labels = ["can", "bottle", "ken",
"grace", "frank", "tim", "shelly"]
self.users = ["ken", "grace", "frank", "tim", "shelly"]
super(MainView, self).__init__(**kwargs)
Clock.schedule_interval(self.tick, 0.06)
def tick(self, dt):
global pred, cap, frame, strip, red, green, blue
#global firebase
can_detected, bottle_detected = False, False
#self.tickcount += 1
# Process frame from OpenCV
frame = cap.read()
image = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB)
boxes = pred.read()
image = draw_boxes(image, boxes, config['model']['labels'])
image = cv2.resize(cv2.flip(image, 0), self.frame_size)
buf = image.tostring()
# Update displayed image in user interface camera view
self.image_texture = Texture.create(
size=(self.frame_size), colorfmt='rgb')
self.image_texture.blit_buffer(buf, colorfmt='rgb', bufferfmt='ubyte')
self.ids.cameraView.texture = self.image_texture
if len(boxes) > 0:
# Trashy avatar follows the bounding box of the detected entity
# Augmented Reality :)
self.t_x = int((boxes[0].xmin-0.5) * 1000) - 80
self.t_y = -1 * (int((boxes[0].ymin-0.5) * 1000) + 80)
self.ids.trashyView.opacity = 1.0
self.ids.trashyView.pos = (self.t_x, self.t_y)
display_label = ""
for box in boxes:
# Obtain current entity prediction label
curr_label = box.get_label()
if self.labels[curr_label] == "can":
can_detected = True
if self.labels[curr_label] == "bottle":
bottle_detected = True
# if self.labels[curr_label] in self.users:
# Update current user property if a valid entity label is detected
# self.current_user = self.labels[curr_label]
if can_detected == True:
# Set led lights at the 'cans' box to green to signal user
for i in range(8):
strip.setPixelColor(i, red)
for i in range(15, 25):
strip.setPixelColor(i, green)
display_label = display_label + \
"\nThrow your can in the recycling bin\nPlease wash the can first!"
# Increment firebase user count for cans by 1 every time a can is detected with a valid user
# Also only updates every 10 ticks to reduce lag
# if self.current_user in self.users and self.tickcount % 10 == 0:
# firebase_update(firebase, self.current_user, 'cans', 1)
if bottle_detected == True:
# Set led lights at the 'bottles' box to blue to signal user
for i in range(8):
strip.setPixelColor(i, red)
for i in range(8, 15):
strip.setPixelColor(i, blue)
display_label = display_label + \
"\nThrow your bottle into the recycling bin\nPlease empty it first!"
# Increment firebase user count for bottles by 1 every time a bottle is detected with a valid user
# Also only updates every 10 ticks to reduce lag
# if self.current_user in self.users and self.tickcount % 10 == 0:
# firebase_update(firebase, self.current_user, 'bottles', 1)
self.ids.labelObjDet.text = display_label
else:
# Trashy avatar disappears and message popup
self.ids.trashyView.opacity = 0.0
self.ids.labelObjDet.text = "No recyclable trash detected"
strip.show()
# reset the LED strip to original state (but don't show it!)
for i in range(strip.numPixels()):
strip.setPixelColor(i, red)
for i in range(8):
strip.setPixelColor(i, green)
def quit(self):
# Stop predictions and video capture
global strip
pred.stop()
cap.stop()
# Turn off led strip
for i in range(strip.numPixels()):
strip.setPixelColor(i, Color(0, 0, 0))
strip.show()
# Exit kivy
Window.close()
App.get_running_app().stop()
exit()
class InfoView(Screen):
"""Secondary screen that displays information about recycling in Singapore"""
def __init__(self, **kwargs):
super(InfoView, self).__init__(**kwargs)
class AboutView(Screen):
"""Secondary screen that displays information about this project"""
def __init__(self, **kwargs):
super(AboutView, self).__init__(**kwargs)
# ==========================================
# Tie everything together and launch the app
# ==========================================
# everything works! set LED strip to initial state
for i in range(strip.numPixels()):
strip.setPixelColor(i, red)
for i in range(8):
strip.setPixelColor(i, green)
strip.show()
print("[u] Loading UI")
Window.clearcolor = (1, 1, 1, 1) # set white background
# setup Kivy screen manager
sm = ScreenManager()
sm.add_widget(MainView(name='mainView'))
sm.add_widget(InfoView(name='infoView'))
sm.add_widget(AboutView(name='aboutView'))
class SmartBinApp(App):
# Main Kivy app
def build(self):
return sm
# Run SmartBinApp and exit if running fails
try:
SmartBinApp().run()
except KeyboardInterrupt:
pred.stop()
cap.stop()
print('exiting due to KeyboardInterrupt')
for i in range(strip.numPixels()):
strip.setPixelColor(i, Color(0, 0, 0))
strip.show()
App.get_running_app().stop()
exit()
| 32.218593
| 114
| 0.591047
|
dea2d7bc65bb27f34dde357e8857090910e648fe
| 472
|
py
|
Python
|
data/scripts/templates/object/mobile/shared_dressed_rebel_second_lieutenant_rodian_male_01.py
|
obi-two/GameServer
|
7d37024e2291a97d49522610cd8f1dbe5666afc2
|
[
"MIT"
] | 20
|
2015-02-23T15:11:56.000Z
|
2022-03-18T20:56:48.000Z
|
data/scripts/templates/object/mobile/shared_dressed_rebel_second_lieutenant_rodian_male_01.py
|
apathyboy/swganh
|
665128efe9154611dec4cb5efc61d246dd095984
|
[
"MIT"
] | null | null | null |
data/scripts/templates/object/mobile/shared_dressed_rebel_second_lieutenant_rodian_male_01.py
|
apathyboy/swganh
|
665128efe9154611dec4cb5efc61d246dd095984
|
[
"MIT"
] | 20
|
2015-04-04T16:35:59.000Z
|
2022-03-24T14:54:37.000Z
|
#### NOTICE: THIS FILE IS AUTOGENERATED
#### MODIFICATIONS MAY BE LOST IF DONE IMPROPERLY
#### PLEASE SEE THE ONLINE DOCUMENTATION FOR EXAMPLES
from swgpy.object import *
def create(kernel):
result = Creature()
result.template = "object/mobile/shared_dressed_rebel_second_lieutenant_rodian_male_01.iff"
result.attribute_template_id = 9
result.stfName("npc_name","rodian_base_male")
#### BEGIN MODIFICATIONS ####
#### END MODIFICATIONS ####
return result
| 27.764706
| 92
| 0.745763
|
be01d659f3c954c560b89a834a7f444f9c8f0f0e
| 26,538
|
py
|
Python
|
mmocr/datasets/pipelines/transforms.py
|
quincylin1/mmocr-1
|
0e70f99f4d8fa0180bdab16f8697e65e71590c72
|
[
"Apache-2.0"
] | null | null | null |
mmocr/datasets/pipelines/transforms.py
|
quincylin1/mmocr-1
|
0e70f99f4d8fa0180bdab16f8697e65e71590c72
|
[
"Apache-2.0"
] | null | null | null |
mmocr/datasets/pipelines/transforms.py
|
quincylin1/mmocr-1
|
0e70f99f4d8fa0180bdab16f8697e65e71590c72
|
[
"Apache-2.0"
] | null | null | null |
import math
import cv2
import numpy as np
import torchvision.transforms as transforms
from PIL import Image
import mmocr.core.evaluation.utils as eval_utils
from mmdet.core import BitmapMasks, PolygonMasks
from mmdet.datasets.builder import PIPELINES
from mmdet.datasets.pipelines.transforms import Resize
from mmocr.utils import check_argument
@PIPELINES.register_module()
class RandomCropInstances:
"""Randomly crop images and make sure to contain text instances.
Args:
target_size (tuple or int): (height, width)
instance_key (str): Key of the instance masks in the results dict.
mask_type (str): Either 'inx0' or 'union_all'.
positive_sample_ratio (float): The probability of sampling regions
that go through positive regions.
"""
def __init__(
self,
target_size,
instance_key,
mask_type='inx0', # 'inx0' or 'union_all'
positive_sample_ratio=5.0 / 8.0):
assert mask_type in ['inx0', 'union_all']
self.mask_type = mask_type
self.instance_key = instance_key
self.positive_sample_ratio = positive_sample_ratio
self.target_size = target_size if (target_size is None or isinstance(
target_size, tuple)) else (target_size, target_size)
def sample_offset(self, img_gt, img_size):
h, w = img_size
t_h, t_w = self.target_size
        # clip the target size so it never exceeds the original image size
        t_h = t_h if t_h < h else h
        t_w = t_w if t_w < w else w
if (img_gt is not None
and np.random.random_sample() < self.positive_sample_ratio
and np.max(img_gt) > 0):
            # make sure to crop the positive region
            # the minimum top-left corner for a crop that still touches the positive region
            tl = np.min(np.where(img_gt > 0), axis=1) - (t_h, t_w)
            tl[tl < 0] = 0
            # the maximum top-left corner for a crop that still touches the positive region
            br = np.max(np.where(img_gt > 0), axis=1) - (t_h, t_w)
            br[br < 0] = 0
            # clamp br so the crop cannot extend beyond the image borders
            br[0] = min(br[0], h - t_h)
            br[1] = min(br[1], w - t_w)
            # sample the top-left corner of the crop within [tl, br)
            h = np.random.randint(tl[0], br[0]) if tl[0] < br[0] else 0
            w = np.random.randint(tl[1], br[1]) if tl[1] < br[1] else 0
else:
# make sure not to crop outside of img
h = np.random.randint(0, h - t_h) if h - t_h > 0 else 0
w = np.random.randint(0, w - t_w) if w - t_w > 0 else 0
return (h, w)
@staticmethod
def crop_img(img, offset, target_size):
h, w = img.shape[:2]
br = np.min(
np.stack((np.array(offset) + np.array(target_size), np.array(
(h, w)))),
axis=0)
return img[offset[0]:br[0], offset[1]:br[1]], np.array(
[offset[1], offset[0], br[1], br[0]])
def crop_bboxes(self, bboxes, canvas_bbox):
kept_bboxes = []
kept_inx = []
canvas_poly = eval_utils.box2polygon(canvas_bbox)
tl = canvas_bbox[0:2]
for inx, bbox in enumerate(bboxes):
poly = eval_utils.box2polygon(bbox)
area, inters = eval_utils.poly_intersection(poly, canvas_poly)
if area == 0:
continue
xmin, xmax, ymin, ymax = inters.boundingBox()
kept_bboxes += [
np.array(
[xmin - tl[0], ymin - tl[1], xmax - tl[0], ymax - tl[1]],
dtype=np.float32)
]
kept_inx += [inx]
if len(kept_inx) == 0:
return np.array([]).astype(np.float32).reshape(0, 4), kept_inx
return np.stack(kept_bboxes), kept_inx
@staticmethod
def generate_mask(gt_mask, type):
if type == 'inx0':
return gt_mask.masks[0]
if type == 'union_all':
mask = gt_mask.masks[0].copy()
for inx in range(1, len(gt_mask.masks)):
mask = np.logical_or(mask, gt_mask.masks[inx])
return mask
raise NotImplementedError
def __call__(self, results):
gt_mask = results[self.instance_key]
mask = None
if len(gt_mask.masks) > 0:
mask = self.generate_mask(gt_mask, self.mask_type)
results['crop_offset'] = self.sample_offset(mask,
results['img'].shape[:2])
# crop img. bbox = [x1,y1,x2,y2]
img, bbox = self.crop_img(results['img'], results['crop_offset'],
self.target_size)
results['img'] = img
img_shape = img.shape
results['img_shape'] = img_shape
# crop masks
for key in results.get('mask_fields', []):
results[key] = results[key].crop(bbox)
# for mask rcnn
for key in results.get('bbox_fields', []):
results[key], kept_inx = self.crop_bboxes(results[key], bbox)
if key == 'gt_bboxes':
# ignore gt_labels accordingly
if 'gt_labels' in results:
ori_labels = results['gt_labels']
ori_inst_num = len(ori_labels)
results['gt_labels'] = [
ori_labels[inx] for inx in range(ori_inst_num)
if inx in kept_inx
]
# ignore g_masks accordingly
if 'gt_masks' in results:
ori_mask = results['gt_masks'].masks
kept_mask = [
ori_mask[inx] for inx in range(ori_inst_num)
if inx in kept_inx
]
target_h, target_w = bbox[3] - bbox[1], bbox[2] - bbox[0]
if len(kept_inx) > 0:
kept_mask = np.stack(kept_mask)
else:
kept_mask = np.empty((0, target_h, target_w),
dtype=np.float32)
results['gt_masks'] = BitmapMasks(kept_mask, target_h,
target_w)
return results
def __repr__(self):
repr_str = self.__class__.__name__
return repr_str
@PIPELINES.register_module()
class RandomRotateTextDet:
"""Randomly rotate images."""
def __init__(self, rotate_ratio=1.0, max_angle=10):
self.rotate_ratio = rotate_ratio
self.max_angle = max_angle
@staticmethod
def sample_angle(max_angle):
angle = np.random.random_sample() * 2 * max_angle - max_angle
return angle
@staticmethod
def rotate_img(img, angle):
h, w = img.shape[:2]
rotation_matrix = cv2.getRotationMatrix2D((w / 2, h / 2), angle, 1)
img_target = cv2.warpAffine(
img, rotation_matrix, (w, h), flags=cv2.INTER_NEAREST)
assert img_target.shape == img.shape
return img_target
def __call__(self, results):
if np.random.random_sample() < self.rotate_ratio:
# rotate imgs
results['rotated_angle'] = self.sample_angle(self.max_angle)
img = self.rotate_img(results['img'], results['rotated_angle'])
results['img'] = img
img_shape = img.shape
results['img_shape'] = img_shape
# rotate masks
for key in results.get('mask_fields', []):
masks = results[key].masks
mask_list = []
for m in masks:
rotated_m = self.rotate_img(m, results['rotated_angle'])
mask_list.append(rotated_m)
results[key] = BitmapMasks(mask_list, *(img_shape[:2]))
return results
def __repr__(self):
repr_str = self.__class__.__name__
return repr_str
@PIPELINES.register_module()
class ColorJitter:
"""An interface for torch color jitter so that it can be invoked in
mmdetection pipeline."""
def __init__(self, **kwargs):
self.transform = transforms.ColorJitter(**kwargs)
def __call__(self, results):
# img is bgr
img = results['img'][..., ::-1]
img = Image.fromarray(img)
img = self.transform(img)
img = np.asarray(img)
img = img[..., ::-1]
results['img'] = img
return results
def __repr__(self):
repr_str = self.__class__.__name__
return repr_str
@PIPELINES.register_module()
class ScaleAspectJitter(Resize):
"""Resize image and segmentation mask encoded by coordinates.
Allowed resize types are `around_min_img_scale`, `long_short_bound`, and
`indep_sample_in_range`.
"""
def __init__(self,
img_scale=None,
multiscale_mode='range',
ratio_range=None,
keep_ratio=False,
resize_type='around_min_img_scale',
aspect_ratio_range=None,
long_size_bound=None,
short_size_bound=None,
scale_range=None):
super().__init__(
img_scale=img_scale,
multiscale_mode=multiscale_mode,
ratio_range=ratio_range,
keep_ratio=keep_ratio)
assert not keep_ratio
assert resize_type in [
'around_min_img_scale', 'long_short_bound', 'indep_sample_in_range'
]
self.resize_type = resize_type
if resize_type == 'indep_sample_in_range':
assert ratio_range is None
assert aspect_ratio_range is None
assert short_size_bound is None
assert long_size_bound is None
assert scale_range is not None
else:
assert scale_range is None
assert isinstance(ratio_range, tuple)
assert isinstance(aspect_ratio_range, tuple)
assert check_argument.equal_len(ratio_range, aspect_ratio_range)
if resize_type in ['long_short_bound']:
assert short_size_bound is not None
assert long_size_bound is not None
self.aspect_ratio_range = aspect_ratio_range
self.long_size_bound = long_size_bound
self.short_size_bound = short_size_bound
self.scale_range = scale_range
    @staticmethod
    def sample_from_range(value_range):
        # renamed parameter to avoid shadowing the built-in `range`
        assert len(value_range) == 2
        min_value, max_value = min(value_range), max(value_range)
        value = np.random.random_sample() * (max_value - min_value) + min_value
        return value
def _random_scale(self, results):
if self.resize_type == 'indep_sample_in_range':
w = self.sample_from_range(self.scale_range)
h = self.sample_from_range(self.scale_range)
results['scale'] = (int(w), int(h)) # (w,h)
results['scale_idx'] = None
return
h, w = results['img'].shape[0:2]
if self.resize_type == 'long_short_bound':
scale1 = 1
if max(h, w) > self.long_size_bound:
scale1 = self.long_size_bound / max(h, w)
scale2 = self.sample_from_range(self.ratio_range)
scale = scale1 * scale2
if min(h, w) * scale <= self.short_size_bound:
scale = (self.short_size_bound + 10) * 1.0 / min(h, w)
elif self.resize_type == 'around_min_img_scale':
short_size = min(self.img_scale[0])
ratio = self.sample_from_range(self.ratio_range)
scale = (ratio * short_size) / min(h, w)
else:
raise NotImplementedError
aspect = self.sample_from_range(self.aspect_ratio_range)
h_scale = scale * math.sqrt(aspect)
w_scale = scale / math.sqrt(aspect)
results['scale'] = (int(w * w_scale), int(h * h_scale)) # (w,h)
results['scale_idx'] = None
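# Worked example of the scale computation above (illustrative numbers, not
# from the original source): with resize_type='long_short_bound',
# long_size_bound=1280, short_size_bound=640 and a 900x2000 (h, w) input,
# scale1 = 1280 / 2000 = 0.64; if the sampled ratio scale2 is 1.0 then
# scale = 0.64, and since 900 * 0.64 = 576 <= 640 the scale is bumped to
# (640 + 10) / 900 ~= 0.722 so the short side clears the lower bound. The
# aspect jitter then splits scale into h_scale = scale * sqrt(aspect) and
# w_scale = scale / sqrt(aspect), preserving h_scale * w_scale == scale**2.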
@PIPELINES.register_module()
class AffineJitter:
"""An interface for torchvision random affine so that it can be invoked in
mmdet pipeline."""
def __init__(self,
degrees=4,
translate=(0.02, 0.04),
scale=(0.9, 1.1),
shear=None,
resample=False,
fillcolor=0):
self.transform = transforms.RandomAffine(
degrees=degrees,
translate=translate,
scale=scale,
shear=shear,
resample=resample,
fillcolor=fillcolor)
def __call__(self, results):
# img is bgr
img = results['img'][..., ::-1]
img = Image.fromarray(img)
img = self.transform(img)
img = np.asarray(img)
img = img[..., ::-1]
results['img'] = img
return results
def __repr__(self):
repr_str = self.__class__.__name__
return repr_str
@PIPELINES.register_module()
class RandomCropPolyInstances:
"""Randomly crop images and make sure to contain at least one intact
instance."""
def __init__(self,
instance_key='gt_masks',
crop_ratio=5.0 / 8.0,
min_side_ratio=0.4):
super().__init__()
self.instance_key = instance_key
self.crop_ratio = crop_ratio
self.min_side_ratio = min_side_ratio
def sample_valid_start_end(self, valid_array, min_len, max_start, min_end):
assert isinstance(min_len, int)
assert len(valid_array) > min_len
start_array = valid_array.copy()
max_start = min(len(start_array) - min_len, max_start)
start_array[max_start:] = 0
start_array[0] = 1
diff_array = np.hstack([0, start_array]) - np.hstack([start_array, 0])
region_starts = np.where(diff_array < 0)[0]
region_ends = np.where(diff_array > 0)[0]
region_ind = np.random.randint(0, len(region_starts))
start = np.random.randint(region_starts[region_ind],
region_ends[region_ind])
end_array = valid_array.copy()
min_end = max(start + min_len, min_end)
end_array[:min_end] = 0
end_array[-1] = 1
diff_array = np.hstack([0, end_array]) - np.hstack([end_array, 0])
region_starts = np.where(diff_array < 0)[0]
region_ends = np.where(diff_array > 0)[0]
region_ind = np.random.randint(0, len(region_starts))
end = np.random.randint(region_starts[region_ind],
region_ends[region_ind])
return start, end
def sample_crop_box(self, img_size, results):
"""Generate crop box and make sure not to crop the polygon instances.
Args:
img_size (tuple(int)): The image size (h, w).
results (dict): The results dict.
"""
assert isinstance(img_size, tuple)
h, w = img_size[:2]
key_masks = results[self.instance_key].masks
x_valid_array = np.ones(w, dtype=np.int32)
y_valid_array = np.ones(h, dtype=np.int32)
selected_mask = key_masks[np.random.randint(0, len(key_masks))]
selected_mask = selected_mask[0].reshape((-1, 2)).astype(np.int32)
max_x_start = max(np.min(selected_mask[:, 0]) - 2, 0)
min_x_end = min(np.max(selected_mask[:, 0]) + 3, w - 1)
max_y_start = max(np.min(selected_mask[:, 1]) - 2, 0)
min_y_end = min(np.max(selected_mask[:, 1]) + 3, h - 1)
for key in results.get('mask_fields', []):
if len(results[key].masks) == 0:
continue
masks = results[key].masks
for mask in masks:
assert len(mask) == 1
mask = mask[0].reshape((-1, 2)).astype(np.int32)
clip_x = np.clip(mask[:, 0], 0, w - 1)
clip_y = np.clip(mask[:, 1], 0, h - 1)
min_x, max_x = np.min(clip_x), np.max(clip_x)
min_y, max_y = np.min(clip_y), np.max(clip_y)
x_valid_array[min_x - 2:max_x + 3] = 0
y_valid_array[min_y - 2:max_y + 3] = 0
min_w = int(w * self.min_side_ratio)
min_h = int(h * self.min_side_ratio)
x1, x2 = self.sample_valid_start_end(x_valid_array, min_w, max_x_start,
min_x_end)
y1, y2 = self.sample_valid_start_end(y_valid_array, min_h, max_y_start,
min_y_end)
return np.array([x1, y1, x2, y2])
def crop_img(self, img, bbox):
assert img.ndim == 3
h, w, _ = img.shape
assert 0 <= bbox[1] < bbox[3] <= h
assert 0 <= bbox[0] < bbox[2] <= w
return img[bbox[1]:bbox[3], bbox[0]:bbox[2]]
def __call__(self, results):
if len(results[self.instance_key].masks) < 1:
return results
if np.random.random_sample() < self.crop_ratio:
crop_box = self.sample_crop_box(results['img'].shape, results)
results['crop_region'] = crop_box
img = self.crop_img(results['img'], crop_box)
results['img'] = img
results['img_shape'] = img.shape
# crop and filter masks
x1, y1, x2, y2 = crop_box
w = max(x2 - x1, 1)
h = max(y2 - y1, 1)
labels = results['gt_labels']
valid_labels = []
for key in results.get('mask_fields', []):
if len(results[key].masks) == 0:
continue
results[key] = results[key].crop(crop_box)
# filter out polygons beyond crop box.
masks = results[key].masks
valid_masks_list = []
for ind, mask in enumerate(masks):
assert len(mask) == 1
polygon = mask[0].reshape((-1, 2))
if (polygon[:, 0] >
-4).all() and (polygon[:, 0] < w + 4).all() and (
polygon[:, 1] > -4).all() and (polygon[:, 1] <
h + 4).all():
mask[0][::2] = np.clip(mask[0][::2], 0, w)
mask[0][1::2] = np.clip(mask[0][1::2], 0, h)
if key == self.instance_key:
valid_labels.append(labels[ind])
valid_masks_list.append(mask)
results[key] = PolygonMasks(valid_masks_list, h, w)
results['gt_labels'] = np.array(valid_labels)
return results
def __repr__(self):
repr_str = self.__class__.__name__
return repr_str
@PIPELINES.register_module()
class RandomRotatePolyInstances:
def __init__(self,
rotate_ratio=0.5,
max_angle=10,
pad_with_fixed_color=False,
pad_value=(0, 0, 0)):
"""Randomly rotate images and polygon masks.
Args:
rotate_ratio (float): The ratio of samples to operate rotation.
max_angle (int): The maximum rotation angle.
            pad_with_fixed_color (bool): Whether to pad the rotated image
                with a fixed color. If set to False, the rotated image is
                padded with a resized patch cropped from the image itself.
pad_value (tuple(int)): The color value for padding rotated image.
"""
self.rotate_ratio = rotate_ratio
self.max_angle = max_angle
self.pad_with_fixed_color = pad_with_fixed_color
self.pad_value = pad_value
def rotate(self, center, points, theta, center_shift=(0, 0)):
# rotate points.
(center_x, center_y) = center
center_y = -center_y
x, y = points[::2], points[1::2]
y = -y
theta = theta / 180 * math.pi
cos = math.cos(theta)
sin = math.sin(theta)
x = (x - center_x)
y = (y - center_y)
_x = center_x + x * cos - y * sin + center_shift[0]
_y = -(center_y + x * sin + y * cos) + center_shift[1]
points[::2], points[1::2] = _x, _y
return points
def cal_canvas_size(self, ori_size, degree):
assert isinstance(ori_size, tuple)
angle = degree * math.pi / 180.0
h, w = ori_size[:2]
cos = math.cos(angle)
sin = math.sin(angle)
canvas_h = int(w * math.fabs(sin) + h * math.fabs(cos))
canvas_w = int(w * math.fabs(cos) + h * math.fabs(sin))
canvas_size = (canvas_h, canvas_w)
return canvas_size
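    # Worked example for cal_canvas_size (illustrative numbers): rotating a
    # (h, w) = (480, 640) image by 30 degrees gives cos ~= 0.866, sin = 0.5,
    # so canvas_h = int(640 * 0.5 + 480 * 0.866) = 735 and
    # canvas_w = int(640 * 0.866 + 480 * 0.5) = 794 -- the smallest
    # axis-aligned box that contains the rotated image.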
def sample_angle(self, max_angle):
angle = np.random.random_sample() * 2 * max_angle - max_angle
return angle
def rotate_img(self, img, angle, canvas_size):
h, w = img.shape[:2]
rotation_matrix = cv2.getRotationMatrix2D((w / 2, h / 2), angle, 1)
rotation_matrix[0, 2] += int((canvas_size[1] - w) / 2)
rotation_matrix[1, 2] += int((canvas_size[0] - h) / 2)
if self.pad_with_fixed_color:
target_img = cv2.warpAffine(
img,
rotation_matrix, (canvas_size[1], canvas_size[0]),
flags=cv2.INTER_NEAREST,
borderValue=self.pad_value)
else:
mask = np.zeros_like(img)
(h_ind, w_ind) = (np.random.randint(0, h * 7 // 8),
np.random.randint(0, w * 7 // 8))
img_cut = img[h_ind:(h_ind + h // 9), w_ind:(w_ind + w // 9)]
img_cut = cv2.resize(img_cut, (canvas_size[1], canvas_size[0]))
mask = cv2.warpAffine(
mask,
rotation_matrix, (canvas_size[1], canvas_size[0]),
borderValue=[1, 1, 1])
target_img = cv2.warpAffine(
img,
rotation_matrix, (canvas_size[1], canvas_size[0]),
borderValue=[0, 0, 0])
target_img = target_img + img_cut * mask
return target_img
def __call__(self, results):
if np.random.random_sample() < self.rotate_ratio:
img = results['img']
h, w = img.shape[:2]
angle = self.sample_angle(self.max_angle)
canvas_size = self.cal_canvas_size((h, w), angle)
center_shift = (int(
(canvas_size[1] - w) / 2), int((canvas_size[0] - h) / 2))
# rotate image
results['rotated_poly_angle'] = angle
img = self.rotate_img(img, angle, canvas_size)
results['img'] = img
img_shape = img.shape
results['img_shape'] = img_shape
# rotate polygons
for key in results.get('mask_fields', []):
if len(results[key].masks) == 0:
continue
masks = results[key].masks
rotated_masks = []
for mask in masks:
rotated_mask = self.rotate((w / 2, h / 2), mask[0], angle,
center_shift)
rotated_masks.append([rotated_mask])
results[key] = PolygonMasks(rotated_masks, *(img_shape[:2]))
return results
def __repr__(self):
repr_str = self.__class__.__name__
return repr_str
@PIPELINES.register_module()
class SquareResizePad:
def __init__(self,
target_size,
pad_ratio=0.6,
pad_with_fixed_color=False,
pad_value=(0, 0, 0)):
"""Resize or pad images to be square shape.
Args:
target_size (int): The target size of square shaped image.
            pad_ratio (float): The probability of resizing with the aspect
                ratio kept and then padding to a square; otherwise the image
                is stretched to target_size directly.
            pad_with_fixed_color (bool): Whether to pad the resized image
                with a fixed color. If set to False, the image is padded
                with a resized patch cropped from itself.
pad_value (tuple(int)): The color value for padding rotated image.
"""
assert isinstance(target_size, int)
assert isinstance(pad_ratio, float)
assert isinstance(pad_with_fixed_color, bool)
assert isinstance(pad_value, tuple)
self.target_size = target_size
self.pad_ratio = pad_ratio
self.pad_with_fixed_color = pad_with_fixed_color
self.pad_value = pad_value
def resize_img(self, img, keep_ratio=True):
h, w, _ = img.shape
if keep_ratio:
t_h = self.target_size if h >= w else int(h * self.target_size / w)
t_w = self.target_size if h <= w else int(w * self.target_size / h)
else:
t_h = t_w = self.target_size
img = cv2.resize(img, (t_w, t_h))
return img, (t_h, t_w)
def square_pad(self, img):
h, w = img.shape[:2]
if h == w:
return img, (0, 0)
pad_size = max(h, w)
if self.pad_with_fixed_color:
expand_img = np.ones((pad_size, pad_size, 3), dtype=np.uint8)
expand_img[:] = self.pad_value
else:
(h_ind, w_ind) = (np.random.randint(0, h * 7 // 8),
np.random.randint(0, w * 7 // 8))
img_cut = img[h_ind:(h_ind + h // 9), w_ind:(w_ind + w // 9)]
expand_img = cv2.resize(img_cut, (pad_size, pad_size))
if h > w:
y0, x0 = 0, (h - w) // 2
else:
y0, x0 = (w - h) // 2, 0
expand_img[y0:y0 + h, x0:x0 + w] = img
offset = (x0, y0)
return expand_img, offset
def square_pad_mask(self, points, offset):
x0, y0 = offset
pad_points = points.copy()
pad_points[::2] = pad_points[::2] + x0
pad_points[1::2] = pad_points[1::2] + y0
return pad_points
def __call__(self, results):
img = results['img']
if np.random.random_sample() < self.pad_ratio:
img, out_size = self.resize_img(img, keep_ratio=True)
img, offset = self.square_pad(img)
else:
img, out_size = self.resize_img(img, keep_ratio=False)
offset = (0, 0)
results['img'] = img
results['img_shape'] = img.shape
for key in results.get('mask_fields', []):
if len(results[key].masks) == 0:
continue
results[key] = results[key].resize(out_size)
masks = results[key].masks
processed_masks = []
for mask in masks:
square_pad_mask = self.square_pad_mask(mask[0], offset)
processed_masks.append([square_pad_mask])
results[key] = PolygonMasks(processed_masks, *(img.shape[:2]))
return results
def __repr__(self):
repr_str = self.__class__.__name__
return repr_str
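# Minimal usage sketch for SquareResizePad (an illustrative demo under the
# assumption that the mmdet/mmocr dependencies imported above are installed;
# the dummy `results` dict is hypothetical and only carries the keys this
# transform reads).
if __name__ == '__main__':
    dummy_results = {
        'img': np.zeros((480, 640, 3), dtype=np.uint8),
        'mask_fields': [],
    }
    square_pad = SquareResizePad(target_size=800, pad_ratio=1.0,
                                 pad_with_fixed_color=True,
                                 pad_value=(0, 0, 0))
    out = square_pad(dummy_results)
    # After the keep-ratio resize and square padding, both sides equal target_size.
    print(out['img_shape'])  # expected: (800, 800, 3)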
| 36.155313
| 79
| 0.546801
|
b7d5a60fbdb76687fc39cd1c5c8e08ebfcca6f94
| 403
|
py
|
Python
|
thirtyonedays/thirtyonedays/wsgi.py
|
kmikitin/31daysofhalloween
|
989e304ddb6ffa414d4e396221fe0f8cc5d3b175
|
[
"MIT"
] | null | null | null |
thirtyonedays/thirtyonedays/wsgi.py
|
kmikitin/31daysofhalloween
|
989e304ddb6ffa414d4e396221fe0f8cc5d3b175
|
[
"MIT"
] | 6
|
2019-12-04T23:12:28.000Z
|
2022-02-10T09:03:49.000Z
|
thirtyonedays/thirtyonedays/wsgi.py
|
kmikitin/31daysofhalloween
|
989e304ddb6ffa414d4e396221fe0f8cc5d3b175
|
[
"MIT"
] | null | null | null |
"""
WSGI config for thirtyonedays project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'thirtyonedays.settings')
application = get_wsgi_application()
| 23.705882
| 78
| 0.791563
|
a5226e42afbe3e86ee8a5202f12d275869692771
| 16,082
|
py
|
Python
|
common/mask_prune/prune_v1.py
|
jiahuei/tf-sparse-captioning
|
9d7b8ecdd44fb1541500ca4f920d6c94fd15bad1
|
[
"BSD-3-Clause"
] | null | null | null |
common/mask_prune/prune_v1.py
|
jiahuei/tf-sparse-captioning
|
9d7b8ecdd44fb1541500ca4f920d6c94fd15bad1
|
[
"BSD-3-Clause"
] | null | null | null |
common/mask_prune/prune_v1.py
|
jiahuei/tf-sparse-captioning
|
9d7b8ecdd44fb1541500ca4f920d6c94fd15bad1
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
Created on 12 Jul 2019 22:45:39
@author: jiahuei
"""
import tensorflow as tf
import numpy as np
import os
import logging
from tensorflow.contrib.model_pruning.python import pruning_utils
# from tensorflow.contrib.model_pruning.python import pruning
from common import ops_v1 as ops
from common.mask_prune import masked_layer
from common.mask_prune import sampler
logger = logging.getLogger(__name__)
# _NBINS = 256
_NBINS = 512
LOSS_TYPE = ['L1', 'L2', 'hinge_L1']
pjoin = os.path.join
_shape = ops.shape
def calculate_weight_sparsities(weights, weight_op_names=None):
return calculate_sparsities(tensor_list=weights,
count_nnz_fn=lambda x: tf.count_nonzero(x, axis=None, dtype=tf.float32),
tensor_op_names=weight_op_names)
def calculate_mask_sparsities(sampled_masks, mask_op_names):
return calculate_sparsities(tensor_list=sampled_masks,
count_nnz_fn=tf.reduce_sum,
tensor_op_names=mask_op_names)
def calculate_sparsities(tensor_list, count_nnz_fn, tensor_op_names=None):
if tensor_op_names is not None:
assert isinstance(tensor_op_names, list)
tensor_sizes = [tf.to_float(tf.reduce_prod(_shape(t))) for t in tensor_list]
tensor_nnz = []
tensor_sps = []
for i, m in enumerate(tensor_list):
m_nnz = count_nnz_fn(m)
m_sps = tf.subtract(1.0, tf.divide(m_nnz, tensor_sizes[i]))
tensor_nnz.append(m_nnz)
if tensor_op_names is None:
m_name = ''
else:
# m_name = '/'.join(tensor_op_names[i].split('/')[-3:])
m_name = tensor_op_names[i]
tensor_sps.append((m_name, m_sps))
# tf.summary.scalar(m_name, m_sps)
total_nnz = tf.add_n(tensor_nnz)
total_size = tf.add_n(tensor_sizes)
total_sparsity = tf.subtract(1.0, tf.divide(total_nnz, total_size))
return total_sparsity, total_nnz, tensor_sps
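# Worked example (illustrative numbers): for two masks of sizes 100 and 300
# with 40 and 60 nonzero entries respectively, total_nnz = 100,
# total_size = 400 and total_sparsity = 1 - 100 / 400 = 0.75; the per-tensor
# entries in tensor_sps would report sparsities of 0.6 and 0.8.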
def mask_sparsity_summaries(masks_list, mask_op_names):
"""
Add summary ops for mask sparsity levels. The masks provided must have binary values (either 0. or 1.).
:param masks_list:
:param mask_op_names:
:return:
"""
with tf.name_scope('sparsity'):
total_sparsity, total_nnz, mask_sps = calculate_mask_sparsities(masks_list, mask_op_names)
for sps in mask_sps:
tf.summary.scalar(*sps)
tf.summary.scalar('total_nnz', total_nnz)
tf.summary.scalar('total_sparsity', total_sparsity)
return total_sparsity
def write_sparsities_to_file(log_dir, val):
assert 'global_step' in val
assert 'total_sparsity' in val
assert 'total_nnz' in val
assert 'mask_sps' in val
out = [
'{}'.format(val['global_step']),
'{:9.7f}'.format(val['total_sparsity']),
'{:d}'.format(int(val['total_nnz']))
]
out += ['{:9.7f}'.format(sps[1]) for sps in val['mask_sps']]
out = '\r\n' + ','.join(out)
fpath = pjoin(log_dir, 'sparsity_values.csv')
if not os.path.isfile(fpath):
headers = 'Global step,Total sparsity,Total NNZ,'
headers += ','.join([str(sps[0]) for sps in val['mask_sps']])
out = headers + out
with open(fpath, 'a') as f:
f.write(out)
def get_masks(sampling_method='binarise_round', exclude_scopes=None):
masks = tf.contrib.model_pruning.get_masks()
mask_sampled_ref = tf.get_collection('masks_sampled')
is_mag_prune = len(mask_sampled_ref) == 0
if exclude_scopes is not None:
assert isinstance(exclude_scopes, (list, tuple))
masks = tf.contrib.framework.filter_variables(
var_list=masks,
include_patterns=None,
exclude_patterns=exclude_scopes,
reg_search=True)
mask_sampled_ref = tf.contrib.framework.filter_variables(
var_list=mask_sampled_ref,
include_patterns=None,
exclude_patterns=exclude_scopes,
reg_search=True)
if is_mag_prune:
logger.debug('get_mask(): Should be magnitude pruning')
return masks, masks
else:
assert sampling_method in ['binarise_round', 'rand', 'sigmoid']
if sampling_method == 'rand':
sampled_masks = mask_sampled_ref[:]
else:
sampled_masks = []
for m, m_sampled in zip(masks, mask_sampled_ref):
                if sampling_method == 'binarise_round':
                    m = sampler.binarise_sigmoid(m)
                elif sampling_method == 'sigmoid':
                    m = tf.nn.sigmoid(m)
                else:
                    raise NotImplementedError
m_sampled_s = _shape(m_sampled)
if _shape(m) != m_sampled_s:
# Mask mode is Structured
m = tf.tile(m, multiples=[m_sampled_s[0], m_sampled_s[1] // _shape(m)[-1]])
sampled_masks.append(m)
return sampled_masks, masks
def get_weights(exclude_scopes=None):
weights = tf.contrib.model_pruning.get_weights()
if exclude_scopes is not None:
assert isinstance(exclude_scopes, (list, tuple))
weights = tf.contrib.framework.filter_variables(
var_list=weights,
include_patterns=None,
exclude_patterns=exclude_scopes,
reg_search=True)
return weights
def get_mask_assign_ops(mask_type, sparsity_target, exclude_scopes, loss=None):
if mask_type in masked_layer.MAG_PRUNE_MASKS + [masked_layer.LOTTERY]:
masks, _ = get_masks(exclude_scopes=exclude_scopes)
weights = get_weights(exclude_scopes=exclude_scopes)
else:
raise ValueError('Invalid mask type. Must be one of {}'.format(
# masked_layer.MAG_PRUNE_MASKS + masked_layer.MASK_PRUNE))
masked_layer.MAG_PRUNE_MASKS))
assert len(weights) == len(masks)
assert len(masks) > 0
with tf.name_scope('mask_assign_ops'):
if mask_type == masked_layer.SNIP:
# Maybe accumulate saliency
with tf.variable_scope('accum_saliency'):
zero_init = tf.initializers.zeros(loss.dtype)
var_kwargs = dict(dtype=loss.dtype, initializer=zero_init, trainable=False)
saliency = [tf.get_variable('saliency_m{}'.format(i), shape=_shape(m), **var_kwargs)
for i, m in enumerate(masks)]
# saliency_batch = [tf.abs(s) for s in tf.gradients(ys=loss, xs=masks)]
saliency_batch = [s for s in tf.gradients(ys=loss, xs=masks)]
# Ops for accumulating saliency
accum_ops = [sal.assign_add(sal_b) for (sal, sal_b) in zip(saliency, saliency_batch)]
# saliency = [tf.abs(s) for s in tf.gradients(ys=loss, xs=masks)]
mask_ori_shape = [_shape(m) for m in masks]
mask_num_elems = [np.prod(m) for m in mask_ori_shape]
saliency_vec = tf.concat([tf.reshape(s, [-1]) for s in saliency], axis=0)
saliency_vec = tf.abs(saliency_vec)
saliency_vec = tf.divide(saliency_vec, tf.reduce_sum(saliency_vec))
num_params = _shape(saliency_vec)[0]
kappa = int(round(num_params * (1. - sparsity_target)))
_, ind = tf.nn.top_k(saliency_vec, k=kappa, sorted=True)
mask_sparse_vec = tf.sparse_to_dense(ind, tf.shape(saliency_vec),
tf.ones_like(ind, dtype=tf.float32),
validate_indices=False)
mask_sparse_split = tf.split(mask_sparse_vec, mask_num_elems)
mask_sparse = [tf.reshape(m, ms) for m, ms in zip(mask_sparse_split, mask_ori_shape)]
assign_ops = [tf.assign(m, new_mask) for m, new_mask in zip(masks, mask_sparse)]
return assign_ops, accum_ops
elif mask_type == masked_layer.MAG_DIST:
# Magnitude pruning, class-distribution
# Calculate standard dev of each class
# Transform weights as positive factor of standard dev, ie w' = | (w - mean) / std_dev |
# Reshape and concat all factorised weights, and calculate threshold
# The rest of the operations are same as class-blind
abs_weights = []
for w in weights:
mean, var = tf.nn.moments(w, axes=list(range(len(_shape(w)))))
std_dev = tf.sqrt(var)
w = tf.abs(tf.divide(tf.subtract(w, mean), std_dev))
abs_weights.append(w)
criterion = [tf.concat([tf.reshape(w, [-1]) for w in abs_weights], axis=0)]
else:
abs_weights = [tf.abs(w) for w in weights]
# if mask_type in masked_layer.MAG_BLIND + masked_layer.MASK_BLIND:
if mask_type == masked_layer.MAG_UNIFORM:
# Magnitude pruning, class-uniform
criterion = abs_weights
elif mask_type in (masked_layer.MAG_BLIND, masked_layer.LOTTERY):
# Magnitude pruning, class-blind
# We reshape all the weights into a vector, and concat them
criterion = [tf.concat([tf.reshape(w, [-1]) for w in abs_weights], axis=0)]
# len == 1 for class-blind, and len == len(weights) for others
thresholds = [_get_threshold(c, sparsity_target, nbins=_NBINS) for c in criterion]
if len(thresholds) != len(masks):
assert len(thresholds) == 1, 'Threshold list should be either of length 1 or equal length as masks list.'
assign_ops = []
# new_masks = []
for index, mask in enumerate(masks):
abs_w = abs_weights[index]
threshold = thresholds[min(index, len(thresholds) - 1)]
new_mask = tf.cast(tf.greater(abs_w, threshold), tf.float32)
assign_ops.append(tf.assign(mask, new_mask))
# new_masks.append(new_mask)
# Assign ops need to be executed for the summaries to capture correct values
# mask_sparsity_summaries(masks, [m.op.name for m in masks])
return assign_ops
def conditional_mask_update_op(exclude_scopes,
pruning_scheme,
global_step,
initial_sparsity,
final_sparsity,
pruning_start_step,
pruning_end_step,
prune_frequency):
"""
Conditional mask update ops for gradual pruning.
https://arxiv.org/abs/1710.01878
https://github.com/tensorflow/tensorflow/blob/r1.10/tensorflow/contrib/model_pruning
:param exclude_scopes:
:param pruning_scheme:
:param global_step:
:param initial_sparsity:
:param final_sparsity:
:param pruning_start_step:
:param pruning_end_step:
:param prune_frequency:
:return:
"""
assert pruning_scheme in masked_layer.MAG_PRUNE_MASKS
    if (pruning_end_step - pruning_start_step) % prune_frequency != 0:
        raise ValueError('Pruning end step must equal the start step plus a '
                         'multiple of the pruning frequency.')
def maybe_update_masks():
with tf.name_scope('mask_update'):
is_step_within_pruning_range = tf.logical_and(
tf.greater_equal(global_step, pruning_start_step),
# If end_pruning_step is negative, keep pruning forever!
tf.logical_or(
tf.less_equal(global_step, pruning_end_step), tf.less(pruning_end_step, 0)))
is_pruning_step = tf.equal(
tf.floormod(tf.subtract(global_step, pruning_start_step), prune_frequency), 0)
is_pruning_step = tf.logical_and(is_step_within_pruning_range, is_pruning_step)
return is_pruning_step
def mask_update_op():
current_sparsity = _get_current_sparsity(global_step=global_step,
initial_sparsity=initial_sparsity,
final_sparsity=final_sparsity,
pruning_start_step=pruning_start_step,
pruning_end_step=pruning_end_step)
# tf.summary.scalar('sparsity_target', current_sparsity)
mask_assign_ops = get_mask_assign_ops(
mask_type=pruning_scheme, sparsity_target=current_sparsity, exclude_scopes=exclude_scopes)
with tf.control_dependencies(mask_assign_ops):
# logger.info('Updating masks.')
return tf.no_op('mask_update')
# return tf.identity(global_step)
def no_update_op():
return tf.no_op()
# return tf.identity(global_step)
return tf.cond(maybe_update_masks(), mask_update_op, no_update_op)
def _get_current_sparsity(global_step,
initial_sparsity,
final_sparsity,
pruning_start_step,
pruning_end_step):
"""
Get current sparsity level for gradual pruning.
https://arxiv.org/abs/1710.01878
https://github.com/tensorflow/tensorflow/blob/r1.10/tensorflow/contrib/model_pruning
:param global_step:
:param initial_sparsity:
:param final_sparsity:
:param pruning_start_step:
:param pruning_end_step:
:return:
"""
si = initial_sparsity
sf = final_sparsity
t = global_step
t0 = pruning_start_step
tn = pruning_end_step
p = tf.div(tf.cast(t - t0, tf.float32), tn - t0)
p = tf.minimum(1.0, tf.maximum(0.0, p))
st = tf.add(sf, tf.multiply(si - sf, tf.pow(1 - p, 3)))
return st
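# Worked example of the cubic schedule above (illustrative numbers): with
# si = 0.0, sf = 0.9, t0 = 0, tn = 1000, at t = 500 we get p = 0.5 and
# s_t = 0.9 + (0.0 - 0.9) * (1 - 0.5)**3 = 0.7875 -- most of the pruning
# happens early and the rate tapers off towards tn, as in the gradual
# pruning schedule of https://arxiv.org/abs/1710.01878.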
def sparsity_loss(sparsity_target,
loss_type='L1',
exclude_scopes=None):
"""
Loss for controlling sparsity of Supermasks.
:param sparsity_target: Desired sparsity rate.
:param loss_type: The distance metric.
:param exclude_scopes: Mask scopes to exclude.
:return: Scalar loss value.
"""
assert loss_type in LOSS_TYPE, 'Valid loss functions: {}'.format(LOSS_TYPE)
if loss_type == 'L1':
loss_fn = _l1_loss
elif loss_type == 'L2':
loss_fn = _l2_loss
elif loss_type == 'hinge_L1':
loss_fn = _hinge_l1_loss
else:
raise ValueError()
logger.debug('Using mask sparsity loss: `{}`'.format(loss_type))
sampled_masks, masks = get_masks(exclude_scopes=exclude_scopes)
if len(masks) == 0:
return 0.
total_sparsity = mask_sparsity_summaries(sampled_masks, [m.op.name for m in masks])
with tf.name_scope('sparsity'):
# Log average mask value
mask_vec = tf.concat([tf.reshape(m, [-1]) for m in masks], axis=0)
mask_av = tf.reduce_mean(mask_vec)
tf.summary.scalar('mask_average_val', mask_av)
with tf.name_scope('loss'):
loss = loss_fn(total_sparsity, sparsity_target)
total_size_np = int(sum([np.prod(_shape(m)) for m in sampled_masks]))
logger.debug('mask_loss: Total mask size: {:,d}'.format(total_size_np))
return loss
def _l1_loss(curr, target):
with tf.name_scope('l1'):
return tf.abs(tf.subtract(target, curr))
# return tf.abs(tf.subtract(curr, target))
def _l2_loss(curr, target):
with tf.name_scope('l2'):
return tf.squared_difference(curr, target)
def _hinge_l1_loss(curr, target):
with tf.name_scope('hinge_l1'):
return tf.nn.relu(tf.subtract(target, curr))
def _get_threshold(abs_weights, sparsity_target, nbins, use_tpu=False):
with tf.name_scope('get_threshold'):
max_value = tf.reduce_max(abs_weights)
cdf_fn = pruning_utils.compute_cdf_from_histogram
if use_tpu:
cdf_fn = pruning_utils.compute_cdf
norm_cdf = cdf_fn(abs_weights, [0.0, max_value], nbins=nbins)
prune_nbins = tf.reduce_sum(tf.cast(tf.less(norm_cdf, sparsity_target), tf.float32))
threshold = tf.multiply(tf.div(prune_nbins, float(nbins)), max_value)
return threshold
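# Minimal NumPy sketch of the threshold computation above (an approximation
# for illustration only, not the in-graph TF version): the threshold is the
# histogram bin edge below which roughly `sparsity_target` of the absolute
# weights fall, i.e. an approximate percentile.
if __name__ == '__main__':
    rng = np.random.RandomState(0)
    abs_w = np.abs(rng.randn(10000))
    hist, _ = np.histogram(abs_w, bins=_NBINS, range=(0.0, abs_w.max()))
    cdf = np.cumsum(hist) / hist.sum()
    prune_nbins = np.sum(cdf < 0.8)  # sparsity target of 80%
    threshold = prune_nbins / float(_NBINS) * abs_w.max()
    # Should be close to the exact 80th percentile of |w|.
    print(threshold, np.percentile(abs_w, 80))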
| 41.663212
| 117
| 0.623057
|
eadc393e19f2820fe270f419062b84e06b133edf
| 278
|
py
|
Python
|
testresults/scons120_vs_make/results_windows/all/scons_cleanbuild_plot.py
|
SCons/scons-performance
|
2df4558a1132b62a36f20c1c0b37da8fafa00114
|
[
"MIT"
] | null | null | null |
testresults/scons120_vs_make/results_windows/all/scons_cleanbuild_plot.py
|
SCons/scons-performance
|
2df4558a1132b62a36f20c1c0b37da8fafa00114
|
[
"MIT"
] | 1
|
2020-09-24T16:09:23.000Z
|
2020-09-27T17:30:13.000Z
|
testresults/scons120_vs_make/results_windows/all/scons_cleanbuild_plot.py
|
SCons/scons-performance
|
2df4558a1132b62a36f20c1c0b37da8fafa00114
|
[
"MIT"
] | 2
|
2020-09-27T21:18:11.000Z
|
2022-03-23T17:32:03.000Z
|
import matplotlib.pyplot as plt
files = [2500, 4500, 8500, 16500]
buildtime = [425.255, 730.854, 1151.060, 2161.544]
plt.plot(files, buildtime, marker='o', color='g', label='SCons clean build')
plt.xlabel('C Files')
plt.ylabel('Time [s]')
plt.title('SCons Build')
plt.legend(loc='upper left')
plt.show()
| 25.272727
| 50
| 0.694245
|
1a516f3d944ecf89703351bc6e54a4c52c2c7daf
| 2,409
|
py
|
Python
|
stacker/lookups/handlers/kms.py
|
theister/stacker
|
f563a6f5a23550c7a668a1500bcea2b4e94f5bbf
|
[
"BSD-2-Clause"
] | 372
|
2018-05-16T19:35:54.000Z
|
2022-02-28T09:11:53.000Z
|
stacker/lookups/handlers/kms.py
|
theister/stacker
|
f563a6f5a23550c7a668a1500bcea2b4e94f5bbf
|
[
"BSD-2-Clause"
] | 452
|
2015-03-12T16:46:29.000Z
|
2018-05-14T21:15:01.000Z
|
stacker/lookups/handlers/kms.py
|
theister/stacker
|
f563a6f5a23550c7a668a1500bcea2b4e94f5bbf
|
[
"BSD-2-Clause"
] | 111
|
2015-03-29T19:22:02.000Z
|
2018-05-04T02:17:27.000Z
|
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
import codecs
import sys
from stacker.session_cache import get_session
from . import LookupHandler
from ...util import read_value_from_path
TYPE_NAME = "kms"
class KmsLookup(LookupHandler):
@classmethod
def handle(cls, value, **kwargs):
"""Decrypt the specified value with a master key in KMS.
kmssimple field types should be in the following format:
[<region>@]<base64 encrypted value>
Note: The region is optional, and defaults to the environment's
`AWS_DEFAULT_REGION` if not specified.
For example:
# We use the aws cli to get the encrypted value for the string
# "PASSWORD" using the master key called "myStackerKey" in
# us-east-1
$ aws --region us-east-1 kms encrypt --key-id alias/myStackerKey \
--plaintext "PASSWORD" --output text --query CiphertextBlob
CiD6bC8t2Y<...encrypted blob...>
# In stacker we would reference the encrypted value like:
conf_key: ${kms us-east-1@CiD6bC8t2Y<...encrypted blob...>}
You can optionally store the encrypted value in a file, ie:
kms_value.txt
us-east-1@CiD6bC8t2Y<...encrypted blob...>
and reference it within stacker (NOTE: the path should be relative
to the stacker config file):
conf_key: ${kms file://kms_value.txt}
# Both of the above would resolve to
conf_key: PASSWORD
"""
value = read_value_from_path(value)
region = None
if "@" in value:
region, value = value.split("@", 1)
kms = get_session(region).client('kms')
        # encode the str value as a utf-8 bytestring for use with codecs.decode.
value = value.encode('utf-8')
# get raw but still encrypted value from base64 version.
decoded = codecs.decode(value, 'base64')
# check python version in your system
python3_or_later = sys.version_info[0] >= 3
# decrypt and return the plain text raw value.
if python3_or_later:
return kms.decrypt(CiphertextBlob=decoded)["Plaintext"]\
.decode('utf-8')
else:
return kms.decrypt(CiphertextBlob=decoded)["Plaintext"]
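# Illustrative sketch of the base64 decoding step above (no AWS call is made;
# the encoded blob here is a made-up placeholder, not a real KMS ciphertext):
if __name__ == "__main__":
    encoded = "cGxhY2Vob2xkZXI=".encode("utf-8")  # base64 of b"placeholder"
    print(codecs.decode(encoded, "base64"))  # b'placeholder'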
| 31.697368
| 79
| 0.626816
|
2da7e57672dbb51d40da8d6dddf7cd232b830a4b
| 683
|
py
|
Python
|
Not_necessary_for_dhcp_spoofing/snifftest.py
|
shamiul94/DHCP-Spoofing-Attack-Network-Security
|
09312d439b56701d82e22fe4ae9c99cc9678e232
|
[
"MIT"
] | 2
|
2021-05-03T08:54:12.000Z
|
2022-03-22T08:19:38.000Z
|
Not_necessary_for_dhcp_spoofing/snifftest.py
|
shamiul94/DHCP-Spoofing-Attack-Network-Security
|
09312d439b56701d82e22fe4ae9c99cc9678e232
|
[
"MIT"
] | null | null | null |
Not_necessary_for_dhcp_spoofing/snifftest.py
|
shamiul94/DHCP-Spoofing-Attack-Network-Security
|
09312d439b56701d82e22fe4ae9c99cc9678e232
|
[
"MIT"
] | null | null | null |
#! /usr/bin/env python3
from collections import Counter
from scapy.all import sniff
## Create a Packet Counter
packet_counts = Counter()
## Define our Custom Action function
def custom_action(packet):
# Create tuple of Src/Dst in sorted order
key = tuple(sorted([packet[0][1].src, packet[0][1].dst]))
packet_counts.update([key])
return f"Packet #{sum(packet_counts.values())}: {packet[0][1].src} ==> {packet[0][1].dst}"
## Setup sniff, filtering for IP traffic
sniff(iface="wlo1", prn=custom_action, count=1000)
## Print out packet count per A <--> Z address pair
print("\n".join(f"{f'{key[0]} <--> {key[1]}'}: {count}" for key, count in packet_counts.items()))
| 32.52381
| 97
| 0.682284
|
79ff29b5acad87155a04ed5cbd6bade028873dfb
| 412
|
py
|
Python
|
tests/assets/sample.py
|
Kludex/typer-cli
|
a8000afa67ec8b05238e3ee02910eca5d3c1ef16
|
[
"MIT"
] | 187
|
2020-03-08T23:27:42.000Z
|
2022-03-23T20:53:16.000Z
|
tests/assets/sample.py
|
Kludex/typer-cli
|
a8000afa67ec8b05238e3ee02910eca5d3c1ef16
|
[
"MIT"
] | 53
|
2020-03-08T19:00:38.000Z
|
2022-03-25T13:04:11.000Z
|
tests/assets/sample.py
|
Kludex/typer-cli
|
a8000afa67ec8b05238e3ee02910eca5d3c1ef16
|
[
"MIT"
] | 17
|
2020-06-29T03:10:50.000Z
|
2022-03-11T18:25:50.000Z
|
import typer
app = typer.Typer()
@app.command()
def hello(name: str = "World", formal: bool = False):
"""
Say hi
"""
if formal:
typer.echo(f"Good morning Ms. {name}")
else:
typer.echo(f"Hello {name}!")
@app.command()
def bye(friend: bool = False):
"""
Say bye
"""
if friend:
typer.echo("Goodbye my friend")
else:
typer.echo("Goodbye")
| 15.846154
| 53
| 0.536408
|
3c21f29c4640b6a457b4ac57fab083bf5907f3cc
| 4,213
|
py
|
Python
|
maptrainer/data/IPDataLoader.py
|
mothguib/maptrainer
|
335334fed073f8d14a4c5137eaa0424efcbcac63
|
[
"MIT"
] | null | null | null |
maptrainer/data/IPDataLoader.py
|
mothguib/maptrainer
|
335334fed073f8d14a4c5137eaa0424efcbcac63
|
[
"MIT"
] | null | null | null |
maptrainer/data/IPDataLoader.py
|
mothguib/maptrainer
|
335334fed073f8d14a4c5137eaa0424efcbcac63
|
[
"MIT"
] | null | null | null |
import numpy as np
import torch
from maptrainer import DATA, DURATION
from maptrainer.data.MAPDataLoader import MAPDataLoader
class IPDataLoader(MAPDataLoader):
"""
On-vertex idleness path data loader: loads data with individual
idlenesses as input and real idlenesses as output.
"""
def __init__(self, nagts: int,
tpl: str,
nb_folds: int = 1,
pre: bool = False,
datasrc: str = None,
strt: str = None,
strt_variant: str = None,
rtn: float = 0.8,
domain_data_dirpath: str = DATA,
duration: int = DURATION,
soc_name: str = None,
inf_exec_id: int = 0,
sup_exec_id: int = 99):
"""
:param datasrc: data type
:type datasrc: str
:param nagts:
:type nagts:
:param _map:
:type _map:
:param nb_folds:
:type nb_folds:
:param pre:
:type pre:
:param strt:
:type strt:
:param rtn: rate of train data over the whole dataset if the number of
folds is 1.
:type rtn:
:param domain_data_dirpath: path of data
:param duration: duration of executions to load.
:type duration:
:param soc_name:
:type soc_name:
:param inf_exec_id:
:type inf_exec_id:
:param sup_exec_id:
:type sup_exec_id:
"""
MAPDataLoader.__init__(self, nagts=nagts, tpl=tpl,
nb_folds=nb_folds, pre=pre,
strt=strt, rtn=rtn, datasrc=datasrc,
domain_data_dirpath=domain_data_dirpath,
duration=duration, soc_name=soc_name,
inf_exec_id=inf_exec_id,
sup_exec_id=sup_exec_id,
strt_variant=strt_variant)
def specific_load_data(self):
"""
Shape: Nb_seq x (seq_length -1) x dim_vector
:return:
"""
domain_data = self.load_viidls()
target_data = self.load_vidls()
self.domain_data = torch.from_numpy(np.array(domain_data)). \
float().contiguous()
self.target_data = torch.from_numpy(np.array(target_data)). \
float().contiguous()
@staticmethod
def label_data(_input: torch.FloatTensor,
targets: torch.FloatTensor,
evaluation: bool = False) \
-> (torch.FloatTensor, torch.FloatTensor):
"""
Returns inputs and labels for the output of the model wrapped into the
data `Variable` structure.
        :param _input: batch of input vectors
        :type _input:
        :param targets: the output vectors not labelled
        :type targets:
:param evaluation:
:type evaluation:
:return:
:rtype:
"""
return _input, targets
@staticmethod
def mean(t: torch.FloatTensor, dim: int) -> torch.FloatTensor:
"""
:param t: the tensor whose the mean will be computed for each
element on the dimension `dim` over the other dimensions
:type t:
:param dim: dimension to keep
:type dim:
:return:
:rtype:
"""
        # `offset` tracks the index of the next dimension to reduce and must
        # persist across loop iterations; resetting it inside the loop would
        # average away the kept dimension once `dim` has been passed.
        mean = t
        offset = 0
        for d in range(len(t.size())):
            if d != dim:
                mean = torch.mean(mean, offset)
            else:
                offset += 1
        return mean
def specific_load_pre_data(self):
pass
@staticmethod
def reshape_output(output: torch.Tensor):
"""
Reshapes output data for its use in the `criterion` function
:param output:
:type output:
:return:
"""
return output.view(-1)
@staticmethod
def reshape_labels(labels: torch.Tensor):
"""
Reshapes label data for its use in the `criterion` function
:param labels:
:type labels:
:return:
"""
return labels.view(-1)
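# Minimal sketch of the static `mean` reduction above (illustrative; assumes
# the maptrainer imports at the top of this module resolve when run):
if __name__ == "__main__":
    t = torch.arange(24, dtype=torch.float32).reshape(2, 3, 4)
    # Averages over every dimension except dim 1, yielding a shape-(3,) tensor.
    print(IPDataLoader.mean(t, dim=1))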
| 26.834395
| 78
| 0.525754
|
dcbd92ea7f427b7ff762fb5c0f4f08fcc478b4c0
| 6,862
|
py
|
Python
|
server/mysch.py
|
creasyimm/flask-vue-crud
|
49858176df71604436cfae2dfc08be2faae42ffe
|
[
"MIT"
] | null | null | null |
server/mysch.py
|
creasyimm/flask-vue-crud
|
49858176df71604436cfae2dfc08be2faae42ffe
|
[
"MIT"
] | null | null | null |
server/mysch.py
|
creasyimm/flask-vue-crud
|
49858176df71604436cfae2dfc08be2faae42ffe
|
[
"MIT"
] | null | null | null |
# -*- coding:utf-8 -*-
from multiprocessing import Process
import os
import time
import json
import urllib.request
from utils import *
from tcp_latency import measure_latency
from datetime import datetime
# import docker
def mypost(body):
try:
# body = {"who":"2.247","cpu":6,"memory":3,"disk":5}
myurl = "http://127.0.0.1:5000/update"
req = urllib.request.Request(myurl)
req.add_header('Content-Type', 'application/json; charset=utf-8')
jsondata = json.dumps(body)
jsondataasbytes = jsondata.encode('utf-8') # needs to be bytes
req.add_header('Content-Length', len(jsondataasbytes))
response = urllib.request.urlopen(req, jsondataasbytes)
return response.status
except:
return 404
def test_ping(who, stype, host):
delay = 1000
my_post_body={'who':who, 'type':stype, 'latency':delay}
while True:
# ping server
try:
delay = do_one_ping(host)
except:
delay = 1000
# write latency to post data
my_post_body['latency'] = delay
# post to rest API
# print( "delay: %d"%delay)
ret = mypost(my_post_body)
if (ret) != 200:
print('rest service error [%d]'%ret, file=sys.stderr)
if stype == 'ping' and delay == 1000:
continue
time.sleep(2)
def test_tcp(who, stype, host, port):
delay = 1000
my_post_body={'who':who, 'type':stype, 'latency':delay}
while True:
        # measure TCP connect latency to the service port
try:
delay_ar = measure_latency(host=host, port=port, wait=0)
delay = round(delay_ar[0],3)
except:
delay = 1000
# write latency to post data
my_post_body['latency'] = delay
# post to rest API
ret = mypost(my_post_body)
if (ret) != 200:
print('rest service error [%d]'%ret, file=sys.stderr)
if stype == 'ping' and delay == 1000:
continue
time.sleep(0.9)
def test_service(who, stype, host, keykey):
delay = 1000
my_post_body={'who':who, 'type':stype, 'latency':delay}
while True:
        # request the service URL and measure the response time
try:
a=datetime.now()
resp = urllib.request.urlopen(host).read()
b=datetime.now()
            # use total_seconds() so responses slower than one second are not truncated
            delay = round((b - a).total_seconds() * 1000, 3)
json_resp = json.loads(resp)
# print(type(json_resp))
# print(json_resp,file=sys.stderr)
test_key = eval(keykey%json_resp)
# print (test_key)
if not test_key:
delay = 1000
except:
delay = 1000
# write latency to post data
my_post_body['latency'] = delay
# post to rest API
ret = mypost(my_post_body)
if (ret) != 200:
print('rest service error [%d]'%ret, file=sys.stderr)
if stype == 'ping' and delay == 1000:
continue
time.sleep(0.9)
# a = measure_latency(host='172.16.0.221', port=8081, wait=0)
def backup_server_p():
'''
172.16.0.220, BareMetal
ping
'''
print('running')
stype = 'ping'
who = '0.220'
host = '172.16.0.220'
test_ping(who, stype, host)
def nexus_server_p():
'''
172.16.0.221, BareMetal
ping
'''
stype = 'ping'
who = '0.221'
host = '172.16.0.221'
test_ping(who, stype, host)
def nexus_services_port_p():
'''
172.16.0.221, nexus, Service
tcp_ping
'''
stype = 'tcp_ping'
who = 'nexus'
host = '172.16.0.221'
test_tcp(who, stype, host, 8081)
def nas_backup_p():
'''
172.16.0.57, VirtualMachine
ping
'''
stype = 'ping'
who = '0.57'
host = '172.16.0.57'
test_ping(who, stype, host)
def dev_env_p():
'''
172.16.75.223, BareMetal
ping
'''
stype = 'ping'
who = '75.223'
host = '172.16.75.223'
test_ping(who, stype, host)
def test_env_p():
'''
172.16.75.249, BareMetal
ping
'''
stype = 'ping'
who = '75.249'
host = '172.16.75.249'
test_ping(who, stype, host)
def it0_env_p():
'''
192.168.2.247, BareMetal
ping
'''
stype = 'ping'
who = '2.247'
host = '192.168.2.247'
test_ping(who, stype, host)
def yapi_p():
'''
172.16.0.58, VirtualMachine
ping
'''
stype = 'ping'
who = '0.58'
host = '172.16.0.58'
test_ping(who, stype, host)
def jump_p():
'''
172.16.0.239, VirtualMachine
ping
'''
stype = 'ping'
who = '0.239'
host = '172.16.0.239'
test_ping(who, stype, host)
def it1_env_p():
'''
192.168.2.248, BareMetal
ping
'''
stype = 'ping'
who = '2.248'
host = '192.168.2.248'
test_ping(who, stype, host)
def nas_p():
'''
172.16.0.55, VirtualMachine
ping
'''
stype = 'ping'
who = '0.55'
host = '172.16.0.55'
test_ping(who, stype, host)
def git_p():
'''
172.16.0.222, VirtualMachine
ping
'''
stype = 'ping'
who = '0.222'
host = '172.16.0.222'
test_ping(who, stype, host)
def pfsense_gw_p():
'''
192.168.2.204, VirtualMachine
ping
'''
stype = 'ping'
who = '0.11'
host = '192.168.2.204'
test_ping(who, stype, host)
def pfsense_p():
'''
172.16.0.5, VirtualMachine
ping
'''
stype = 'ping'
who = '2.10'
host = '172.16.0.5'
test_ping(who, stype, host)
def dockerhome_p():
'''
172.16.0.235, VirtualMachine
ping
'''
stype = 'ping'
who = '0.235'
host = '172.16.0.235'
test_ping(who, stype, host)
def docker_services_port_p():
'''
172.16.0.235, docker, Service
tcp_ping
'''
stype = 'tcp_ping'
who = 'dockerhome'
host = '172.16.0.235'
test_tcp(who, stype, host, 1219)
def mysql_port_p():
'''
172.16.0.235, docker, Service
tcp_ping
'''
stype = 'tcp_ping'
who = 'mysql'
host = 'http://172.16.0.235:1219/containers/9a194e59486e/json'
test_service(who, stype, host, '%s["State"]["Status"]!="exited"')
def pm_services_p():
'''
172.16.0.235, pm, Container
curl
'''
stype = 'url_ping'
who = 'pm'
host = 'http://pm.csdev.com/projects.json'
test_service(who, stype, host, '"projects" in %s')
def pm2_services_p():
'''
172.16.0.235, pm2, Container
curl
'''
stype = 'url_ping'
who = 'pm2'
host = 'http://pm2.csdev.com/projects.json'
test_service(who, stype, host, '"projects" in %s')
def wiki_services_p():
'''
172.16.0.235, wiki, Container
curl
'''
stype = 'url_ping'
who = 'wiki'
host = 'http://wiki.csdev.com/status'
test_service(who, stype, host, '%s["state"] == "RUNNING"')
def run_proc():
    """Code to be executed by the child process."""
    print('Child process running, pid=%d...' % os.getpid())  # os.getpid returns the current process ID
    print('Child process about to exit...')
def run_proc1():
test('192.168.1.1')
all_proc = [
'backup_server_p',
'nexus_server_p',
'nexus_services_port_p',
'nas_backup_p',
'dev_env_p',
'test_env_p',
'it0_env_p',
'yapi_p',
'jump_p',
'it1_env_p',
'nas_p',
'git_p',
'pfsense_gw_p',
'pfsense_p',
'dockerhome_p',
'docker_services_port_p',
'mysql_port_p',
'pm_services_p',
'pm2_services_p',
'wiki_services_p',
]
def active_auto_update_ts_job():
response = urllib.request.urlopen('http://127.0.0.1:5000/updatets').read()
ret = json.loads(response)
print(ret)
def run_all():
active_auto_update_ts_job()
for p in all_proc:
pp = eval('Process(target=%s)'%p)
pp.daemon = True
pp.start()
while True:
time.sleep(60)
if __name__ == '__main__':
    # print('Parent process pid: %d' % os.getpid())  # os.getpid returns the current process ID
# p = Process(target=run_proc)
# p2 = Process(target=run_proc1)
# p3 = Process(target=run_proc1)
# p2.start()
# p.start()
run_all()
# wiki_services_p()
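    # Hypothetical example of adding a new monitor target: define a wrapper
    # like the ones above and append its function name to `all_proc` so that
    # run_all() spawns a daemon process for it, e.g.:
    # def my_new_host_p():
    #     test_ping('0.99', 'ping', '172.16.0.99')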
| 18.955801
| 75
| 0.644564
|
accc199dfc0495359087a54dc6fb49913c761bf0
| 3,257
|
py
|
Python
|
samples/fig5.py
|
ctschnur/kr-poylmer-growth-simulation
|
7dfbd71cd7cc96eb34afe5632cbb18e95ca87e74
|
[
"MIT"
] | null | null | null |
samples/fig5.py
|
ctschnur/kr-poylmer-growth-simulation
|
7dfbd71cd7cc96eb34afe5632cbb18e95ca87e74
|
[
"MIT"
] | null | null | null |
samples/fig5.py
|
ctschnur/kr-poylmer-growth-simulation
|
7dfbd71cd7cc96eb34afe5632cbb18e95ca87e74
|
[
"MIT"
] | null | null | null |
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import numpy as np
import matplotlib.pyplot as plt
import matplotlib as mpl
# -- import most important classes
from kr.kr import Kr, Kr_constants
from kr.plot_utils import Plot_utility
def mpl_settings():
# -- plotting settings
pgf_with_custom_preamble = {
"figure.figsize": (6.4*0.8, 4.8*0.7),
"savefig.format": "png", # change this to pgf or png
"pgf.rcfonts": False, # don't use pre-specified fonts (in rcParmams)
"text.usetex": True, # use latex backend not just built-in MathText
"text.latex.unicode": True, # to be able to pass unicode characters to mpl
"text.latex.preamble": [ # required for actual rendering to png
r"\usepackage{amsmath, siunitx}",
],
"pgf.preamble": [ # when exporting pgf code, mpl checks it for compilablity
r"\usepackage{amsmath, siunitx}",
]}
mpl.rcParams.update(pgf_with_custom_preamble)
def fig5(my_kr):
config_title = "FIG05-1000"
print(" --- ", config_title)
# import a configuration of initial parameters
importdict = Kr_constants.import_dict_from_file("conf.json", config_title)
# show first, which simulation parameters are different
# compared with DEFAULT configuration
print("custom simulation parameters: \n",
Kr_constants.compare_dicts(
importdict,
Kr_constants.import_dict_from_file("conf.json", "DEFAULT")))
evolution_data_unit_dict, clds = my_kr.run(
importdict)
# --- dispersity
bundle_name = "dispersity"
config_and_bundle_str = config_title + bundle_name
x_name, y_name = evolution_data_unit_dict[bundle_name].get_xy_names()
fig, ax = plt.subplots()
ax.set_xlabel("$X$")
    ax.set_ylabel(r"$\mathrm{PDI}$")
# # reference data
# ref_x, ref_y = np.loadtxt("paper_ref_data/butteFig5b-KR1000pointssolid.csv",
# skiprows=1, delimiter=',', unpack=True)
# ax.plot(ref_x, ref_y, ".", markersize=5, alpha=0.8, color="k",
# label=r"reference simulated curve")
# bundle data
x, y = evolution_data_unit_dict[bundle_name].get_xy_vectors()
ax.plot(x, y, linestyle='-', label="simulated")
ax.grid()
ax.legend(loc='upper left')
plt.tight_layout()
plt.savefig(my_kr.get_run_hash_str() + config_and_bundle_str)
plt.savefig(my_kr.get_run_hash_str() + "_" +
config_and_bundle_str + ".pgf", format="pgf")
fig_clds = plt.figure()
Plot_utility.plot_clds(
clds,
refdatas=[
# {
# "data": np.loadtxt(
# "paper_ref_data/butteFig5a-KR1000pointssolid.csv",
# skiprows=1, delimiter=','),
# "label": "$X=0.6$"
# }
],
labels={"config_and_bundle_str": "FIG05-1000",
# "ref": r"reference simulated curve",
"own": "simulated"},
kr_obj=my_kr, savefig=True, savefig_pgf=True, mpl_figure=fig_clds)
def main():
mpl_settings() # matplotlib settings
my_kr = Kr()
fig5(my_kr)
if __name__ == "__main__":
main()
| 32.89899
| 84
| 0.621738
|
d34bf60711970c9a7213fd134eca72664d9f8123
| 909
|
py
|
Python
|
courses/migrations/0003_auto_20210519_1041.py
|
OjureFred/CloudSchool
|
ca45e031ac68dddd01e0abf74aa915043bb896c5
|
[
"MIT"
] | null | null | null |
courses/migrations/0003_auto_20210519_1041.py
|
OjureFred/CloudSchool
|
ca45e031ac68dddd01e0abf74aa915043bb896c5
|
[
"MIT"
] | null | null | null |
courses/migrations/0003_auto_20210519_1041.py
|
OjureFred/CloudSchool
|
ca45e031ac68dddd01e0abf74aa915043bb896c5
|
[
"MIT"
] | null | null | null |
# Generated by Django 3.0.14 on 2021-05-19 07:41
import courses.fields
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('courses', '0002_content_file_image_text_video'),
]
operations = [
migrations.AlterModelOptions(
name='content',
options={'ordering': ['order']},
),
migrations.AlterModelOptions(
name='module',
options={'ordering': ['order']},
),
migrations.AddField(
model_name='content',
name='order',
field=courses.fields.OrderField(blank=True, default=0),
preserve_default=False,
),
migrations.AddField(
model_name='module',
name='order',
field=courses.fields.OrderField(blank=True, default=0),
preserve_default=False,
),
]
| 25.971429
| 67
| 0.563256
|
ebfd1409fa73ee49a4bcb56f8265efc3e08d92d7
| 4,178
|
py
|
Python
|
aioketraapi/models/inline_response2003.py
|
s4v4g3/aio-ketra-api
|
1c8fefa2a66d4a66addeefdc33c71b2f0faa1137
|
[
"MIT"
] | null | null | null |
aioketraapi/models/inline_response2003.py
|
s4v4g3/aio-ketra-api
|
1c8fefa2a66d4a66addeefdc33c71b2f0faa1137
|
[
"MIT"
] | null | null | null |
aioketraapi/models/inline_response2003.py
|
s4v4g3/aio-ketra-api
|
1c8fefa2a66d4a66addeefdc33c71b2f0faa1137
|
[
"MIT"
] | null | null | null |
# coding: utf-8
"""
Ketra Lighting API
Control your Ketra lights # noqa: E501
The version of the OpenAPI document: 1.4.0
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from aioketraapi.configuration import Configuration
class InlineResponse2003(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
'success': 'bool',
'error': 'str'
}
attribute_map = {
'success': 'Success',
'error': 'Error'
}
def __init__(self, success=None, error=None, local_vars_configuration=None): # noqa: E501
"""InlineResponse2003 - a model defined in OpenAPI""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration()
self.local_vars_configuration = local_vars_configuration
self._success = None
self._error = None
self.discriminator = None
if success is not None:
self.success = success
if error is not None:
self.error = error
@property
def success(self):
"""Gets the success of this InlineResponse2003. # noqa: E501
true if the transaction was successful, false if an error occurred # noqa: E501
:return: The success of this InlineResponse2003. # noqa: E501
:rtype: bool
"""
return self._success
@success.setter
def success(self, success):
"""Sets the success of this InlineResponse2003.
true if the transaction was successful, false if an error occurred # noqa: E501
:param success: The success of this InlineResponse2003. # noqa: E501
:type success: bool
"""
self._success = success
@property
def error(self):
"""Gets the error of this InlineResponse2003. # noqa: E501
error message # noqa: E501
:return: The error of this InlineResponse2003. # noqa: E501
:rtype: str
"""
return self._error
@error.setter
def error(self, error):
"""Sets the error of this InlineResponse2003.
error message # noqa: E501
:param error: The error of this InlineResponse2003. # noqa: E501
:type error: str
"""
self._error = error
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, InlineResponse2003):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, InlineResponse2003):
return True
return self.to_dict() != other.to_dict()
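# --- Illustrative usage sketch (not part of the generated module) ---
# A minimal exercise of the model above; the values are made up:
if __name__ == "__main__":
    ok = InlineResponse2003(success=True)
    failed = InlineResponse2003(success=False, error="lamp unreachable")
    print(ok.to_dict())     # {'success': True, 'error': None}
    print(failed.to_str())  # pretty-printed via pprint
    print(ok == failed)     # False: equality compares to_dict() output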
| 27.668874
| 94
| 0.577549
|
ed0d36133da372d55b0352efff8ceb7fa3e388aa
| 9,663
|
py
|
Python
|
webnlg_eval_scripts/benchmark_reader.py
|
zhaochaocs/DualEnc
|
4175a7ed3f2c3232152ecce5ffd6ee4c727e64b9
|
[
"MIT"
] | 19
|
2020-07-09T03:46:08.000Z
|
2022-01-05T08:34:43.000Z
|
webnlg_eval_scripts/benchmark_reader.py
|
zhaochaocs/DualEnc
|
4175a7ed3f2c3232152ecce5ffd6ee4c727e64b9
|
[
"MIT"
] | 12
|
2020-07-11T07:44:40.000Z
|
2022-03-12T00:44:15.000Z
|
webnlg_eval_scripts/benchmark_reader.py
|
zhaochaocs/DualEnc
|
4175a7ed3f2c3232152ecce5ffd6ee4c727e64b9
|
[
"MIT"
] | 4
|
2020-07-15T16:11:38.000Z
|
2021-10-16T16:58:02.000Z
|
import random
import re
import string
import xml.etree.ElementTree as Et
from collections import defaultdict
import _pickle as pickle
punc_regex = re.compile('[%s]' % re.escape(string.punctuation))
def remove_punc(s): # From Vinko's solution, with fix.
return punc_regex.sub('', s)
def normalize(p, split=False):
p = punc_regex.sub('', p.lower().replace('_', ' ').replace('"', ''))
if split:
return ' '.join(p.split())
else:
return ''.join(p.split())
def normalize2(p, punc=False, split=False, lower=True):
p = p.replace('_', ' ').replace('"', '')
if lower:
p = p.lower()
if not punc:
p = punc_regex.sub("", p)
else:
        p = ' '.join(re.split(r'(\W)', p))  # raw string: '\W' is an invalid escape otherwise
if split:
return ' '.join(p.split())
else:
return ''.join(p.split())
class Triple:
def __init__(self, s, p, o):
self.s = s
self.o = o
self.p = p
def __eq__(self, other):
if not isinstance(other, Triple):
# don't attempt to compare against unrelated types
return NotImplemented
return remove_punc(self.s) == remove_punc(other.s) and \
remove_punc(self.p) == remove_punc(other.p) and \
remove_punc(self.o) == remove_punc(other.o)
class Tripleset:
def __init__(self):
self.triples = []
@property
def size(self):
return len(self.triples)
def fill_tripleset(self, t):
for xml_triple in t:
s, p, o = xml_triple.text.split(' | ')
triple = Triple(s, p, o)
self.triples.append(triple)
def fill_tripleset2(self, t):
for xml_triple in t:
s, p, o = xml_triple.split(' | ')
triple = Triple(s, p, o)
self.triples.append(triple)
def shuffle_triples(self):
pass
# random.shuffle(self.triples)
def get_order(self, tripleset2):
order = []
for triple in tripleset2.triples:
idx = self.triples.index(triple)
order.append(idx)
assert len(set(order)) == len(order)
return list(map(str, order))
class Lexicalisation:
    def __init__(self, lex, comment, lid, orderedtripleset=None, refs=None, template=None, tripleset_split=None):
self.lex = lex
self.comment = comment
self.id = lid
self.orderedtripleset = orderedtripleset
self.refs = refs
self.template = template
        self.tripleset_split = tripleset_split if tripleset_split is not None else []  # avoid a shared mutable default
self.good = True
class Entry:
def __init__(self, category, size, eid):
self.originaltripleset = []
self.modifiedtripleset = Tripleset()
self.lexs = []
self.category = category
self.size = size
self.id = eid
self.agent_entity_map = {}
self.agent_entity_map_relex = {}
self.entity_agent_map = {}
def fill_originaltriple(self, xml_t):
otripleset = Tripleset()
self.originaltripleset.append(otripleset) # multiple originaltriplesets for one entry
otripleset.fill_tripleset(xml_t)
def fill_modifiedtriple(self, xml_t):
self.modifiedtripleset.fill_tripleset(xml_t)
self.modifiedtripleset.shuffle_triples()
def create_lex(self, xml_lex, size):
comment = xml_lex.attrib['comment']
lid = xml_lex.attrib['lid']
        tripleset, lex_text, refs, lex_template, sents_len = None, "", [], '', []  # sents_len must exist even without a sortedtripleset child
for child in xml_lex:
if child.tag == "sortedtripleset":
sents = []
for sentence in child:
if len(sents) and not len(sents[-1]):
pass
else:
sents.append([])
for striple in sentence:
sents[-1].append(striple)
sents_len = [len(subsents) for subsents in sents if len(subsents)]
sents = [sent for subsents in sents for sent in subsents]
tripleset = Tripleset()
tripleset.fill_tripleset(sents)
elif child.tag == "text":
lex_text = child.text
elif child.tag == "template":
lex_template = child.text
elif child.tag == 'references':
for ref in child:
ref_info = {'entity': ref.attrib['entity'], 'tag': ref.attrib['tag'], 'text': ref.text}
refs.append(ref_info)
lex = Lexicalisation(lex_text, comment, lid, tripleset, refs, lex_template, sents_len)
if tripleset is not None and lex_text is not None: # and tripleset.size == size:
self.lexs.append(lex)
def count_lexs(self):
return len(self.lexs)
class Benchmark:
def __init__(self):
self.entries = []
def fill_benchmark(self, fileslist):
cnt = 0
for file in fileslist:
tree = Et.parse(file[0] + '/' + file[1])
root = tree.getroot()
for xml_entry in root.iter('entry'):
# ignore triples with no lexicalisations
lexfound = False
for child in xml_entry:
if child.tag == "lex":
lexfound = True
break
if lexfound is False:
continue
entry_id = xml_entry.attrib['eid']
category = xml_entry.attrib['category']
size = xml_entry.attrib['size']
entry = Entry(category, size, entry_id)
for child in xml_entry:
if child.tag == 'originaltripleset':
entry.fill_originaltriple(child)
elif child.tag == 'modifiedtripleset':
entry.fill_modifiedtriple(child)
elif child.tag == 'lex':
entry.create_lex(child, int(size))
elif child.tag == 'entitymap':
for entity_map in child:
agent, entity = entity_map.text.split(' | ')
agent = agent.strip()
entity = entity.replace('_', ' ').replace('"', '').strip()
                            entity = ' '.join(re.split(r'(\W)', entity))
assert agent not in entry.agent_entity_map
entry.agent_entity_map[agent] = normalize(entity)
entry.agent_entity_map_relex[agent.lower()] = normalize2(entity, punc=True, lower=True, split=True)
entry.entity_agent_map = {e:a for a, e in entry.agent_entity_map.items()}
for lex in entry.lexs: # check the size
# assert int(size) == len(lex.orderedtripleset.triples)
cnt += 1
self.entries.append(entry)
print(" ** Reading {} lex entries **".format(cnt))
def total_lexcount(self):
count = [entry.count_lexs() for entry in self.entries]
return sum(count)
def unique_p(self):
properties = [triple.p for entry in self.entries for triple in entry.modifiedtripleset.triples]
return len(set(properties))
def entry_count(self, size=None, cat=None):
"""
calculate the number of entries in benchmark
:param size: size (should be string)
:param cat: category
:return: entry count
"""
if not size and cat:
entries = [entry for entry in self.entries if entry.category == cat]
elif not cat and size:
entries = [entry for entry in self.entries if entry.size == size]
elif not size and not cat:
return len(self.entries)
else:
entries = [entry for entry in self.entries if entry.category == cat and entry.size == size]
return len(entries)
    def lexcount_size_category(self, size='', cat=''):
        count = [entry.count_lexs() for entry in self.entries if entry.category == cat and entry.size == size]
        return sum(count)  # total lexicalisations, not the number of entries
def property_map(self):
mprop_oprop = defaultdict(set)
for entry in self.entries:
for tripleset in entry.originaltripleset:
for i, triple in enumerate(tripleset.triples):
mprop_oprop[entry.modifiedtripleset.triples[i].p].add(triple.p)
return mprop_oprop
# def order_tripleset(self, ordered_dataset):
# with open(ordered_dataset, 'rb') as fr:
# lexEntry_orderedTripleset = pickle.load(fr)
#
# for entry in self.entries:
# for lex in entry.lexs:
# entry_id = "{}_{}_{}_{}".format(entry.id, entry.size, entry.category, lex.id)
# try:
# ordered_tripleset = Tripleset()
# orderedtripleset_str = lexEntry_orderedTripleset[entry_id]["ordered_source_out"]
# for triple in orderedtripleset_str.split(" < TSP > "):
# s, p, o = triple.split(" | ")
# ordered_tripleset.triples.append(Triple(s, p, o))
# lex.orderedtripleset = ordered_tripleset
# except:
# # print("Fail to match the ordered tripleset of {} ...".format(entry_id))
# lex.orderedtripleset = entry.modifiedtripleset
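# --- Illustrative usage sketch (not part of the original script) ---
# How the reader above is typically driven; the corpus directory and file
# name are placeholders, not paths shipped with this repository:
if __name__ == "__main__":
    b = Benchmark()
    b.fill_benchmark([("data/webnlg", "train.xml")])  # hypothetical corpus location
    print("{} entries, {} lexicalisations".format(b.entry_count(), b.total_lexcount()))
    print("{} unique properties".format(b.unique_p()))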
| 37.453488
| 128
| 0.534099
|
4bb30338c51ab12e51d691b2c08b06a133e8558f
| 4,291
|
py
|
Python
|
tests/conftest.py
|
sdrobert/pydrobert-pytorch
|
7abad0dbb2e80b4267aebcee492aa9fd7d83ea3f
|
[
"Apache-2.0"
] | 14
|
2019-01-04T21:19:55.000Z
|
2021-01-06T16:01:03.000Z
|
tests/conftest.py
|
sdrobert/pydrobert-pytorch
|
7abad0dbb2e80b4267aebcee492aa9fd7d83ea3f
|
[
"Apache-2.0"
] | 6
|
2021-04-17T23:34:57.000Z
|
2022-02-11T00:49:41.000Z
|
tests/conftest.py
|
sdrobert/pydrobert-pytorch
|
7abad0dbb2e80b4267aebcee492aa9fd7d83ea3f
|
[
"Apache-2.0"
] | 1
|
2020-05-19T08:03:43.000Z
|
2020-05-19T08:03:43.000Z
|
# Copyright 2021 Sean Robertson
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pytest
import os
import math
from tempfile import mkdtemp
from shutil import rmtree
import torch
@pytest.fixture
def temp_dir():
dir_name = mkdtemp()
yield dir_name
rmtree(dir_name)
@pytest.fixture(
params=[
pytest.param("cpu", marks=pytest.mark.cpu),
pytest.param("cuda", marks=pytest.mark.gpu),
],
scope="session",
)
def device(request):
if request.param == "cuda":
return torch.device(torch.cuda.current_device())
else:
return torch.device(request.param)
CUDA_AVAIL = torch.cuda.is_available()
def pytest_runtest_setup(item):
if any(mark.name == "gpu" for mark in item.iter_markers()):
if not CUDA_AVAIL:
pytest.skip("cuda is not available")
# implicitly seeds all tests for the sake of reproducibility
torch.manual_seed(abs(hash(item.name)))
@pytest.fixture(scope="session")
def populate_torch_dir():
def _populate_torch_dir(
dr,
num_utts,
min_width=1,
max_width=10,
num_filts=5,
max_class=10,
include_ali=True,
include_ref=True,
file_prefix="",
file_suffix=".pt",
seed=1,
include_frame_shift=True,
feat_dtype=torch.float,
):
torch.manual_seed(seed)
feat_dir = os.path.join(dr, "feat")
ali_dir = os.path.join(dr, "ali")
ref_dir = os.path.join(dr, "ref")
if not os.path.isdir(feat_dir):
os.makedirs(feat_dir)
if include_ali and not os.path.isdir(ali_dir):
os.makedirs(ali_dir)
if include_ref and not os.path.isdir(ref_dir):
os.makedirs(ref_dir)
feats, feat_sizes, utt_ids = [], [], []
alis = [] if include_ali else None
refs, ref_sizes = ([], []) if include_ref else (None, None)
utt_id_fmt_str = "{{:0{}d}}".format(int(math.log10(num_utts)) + 1)
for utt_idx in range(num_utts):
utt_id = utt_id_fmt_str.format(utt_idx)
feat_size = torch.randint(min_width, max_width + 1, (1,)).long()
feat_size = feat_size.item()
feat = (torch.rand(feat_size, num_filts) * 1000).to(dtype=feat_dtype)
torch.save(feat, os.path.join(feat_dir, file_prefix + utt_id + file_suffix))
feats.append(feat)
feat_sizes.append(feat_size)
utt_ids.append(utt_id)
if include_ali:
ali = torch.randint(max_class + 1, (feat_size,)).long()
torch.save(
ali, os.path.join(ali_dir, file_prefix + utt_id + file_suffix)
)
alis.append(ali)
if include_ref:
ref_size = torch.randint(1, feat_size + 1, (1,)).long().item()
max_ref_length = torch.randint(1, feat_size + 1, (1,)).long()
max_ref_length = max_ref_length.item()
ref = torch.randint(100, (ref_size,)).long()
if include_frame_shift:
ref_starts = torch.randint(
feat_size - max_ref_length + 1, (ref_size,)
).long()
ref_lengths = torch.randint(
1, max_ref_length + 1, (ref_size,)
).long()
ref = torch.stack(
[ref, ref_starts, ref_starts + ref_lengths], dim=-1
)
torch.save(
ref, os.path.join(ref_dir, file_prefix + utt_id + file_suffix)
)
ref_sizes.append(ref_size)
refs.append(ref)
return feats, alis, refs, feat_sizes, ref_sizes, utt_ids
return _populate_torch_dir
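# --- Illustrative sketch (not part of the original conftest) ---
# How a test module might consume the fixtures above; kept commented out
# so that the conftest itself defines no tests:
#
#   def test_populate(temp_dir, populate_torch_dir):
#       feats, alis, refs, feat_sizes, ref_sizes, utt_ids = populate_torch_dir(
#           temp_dir, num_utts=3)
#       assert len(feats) == len(utt_ids) == 3
#       assert all(f.size(0) == n for f, n in zip(feats, feat_sizes))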
| 34.055556
| 88
| 0.588907
|
bad50547319a01f69819e0c7ea4346456f6321c0
| 445
|
py
|
Python
|
opensource/opencv/readimage.py
|
marciojv/hacks-cognitives-plataforms
|
5b43f52d6afde4ad2768ad5b85e376578e2c9b2f
|
[
"Apache-2.0"
] | 1
|
2021-05-14T18:43:51.000Z
|
2021-05-14T18:43:51.000Z
|
opensource/opencv/readimage.py
|
marciojv/hacks-cognitives-plataforms
|
5b43f52d6afde4ad2768ad5b85e376578e2c9b2f
|
[
"Apache-2.0"
] | null | null | null |
opensource/opencv/readimage.py
|
marciojv/hacks-cognitives-plataforms
|
5b43f52d6afde4ad2768ad5b85e376578e2c9b2f
|
[
"Apache-2.0"
] | 9
|
2019-02-04T22:08:08.000Z
|
2021-07-17T12:12:12.000Z
|
import cv2
# read the image from disk and display it in color
# color (flag 1) is the default when the flag is omitted
imagem = cv2.imread("datasets/fotos/reuniao-professores.jpeg",1)
cv2.imshow("Mostra Imagem Colorida",imagem)
cv2.waitKey(0)
cv2.destroyAllWindows()
# read the image from disk and display it in grayscale
imagem = cv2.imread("datasets/fotos/reuniao-professores.jpeg",0)
cv2.imshow("Mostra Imagem em Cinza",imagem)
cv2.waitKey(0)
cv2.destroyAllWindows()
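# --- Illustrative sketch (not part of the original script) ---
# The named OpenCV constants are clearer than the magic numbers above
# (cv2.IMREAD_COLOR == 1, cv2.IMREAD_GRAYSCALE == 0); an already-loaded
# color image can also be converted with cvtColor:
imagem = cv2.imread("datasets/fotos/reuniao-professores.jpeg", cv2.IMREAD_COLOR)
cinza = cv2.cvtColor(imagem, cv2.COLOR_BGR2GRAY)
cv2.imshow("Grayscale via cvtColor", cinza)
cv2.waitKey(0)
cv2.destroyAllWindows()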
| 22.25
| 64
| 0.775281
|
04d1cc371ccaf3ef4cf21a3fb08417b18d0b3064
| 1,324
|
py
|
Python
|
lux/utils/message.py
|
Moh-Yakoub/lux
|
127806f653602afeea92d6cb45917401c0ee366e
|
[
"Apache-2.0"
] | 3,731
|
2020-01-16T01:06:30.000Z
|
2022-03-31T21:01:04.000Z
|
lux/utils/message.py
|
Moh-Yakoub/lux
|
127806f653602afeea92d6cb45917401c0ee366e
|
[
"Apache-2.0"
] | 393
|
2020-01-29T04:52:27.000Z
|
2022-03-31T20:02:19.000Z
|
lux/utils/message.py
|
Moh-Yakoub/lux
|
127806f653602afeea92d6cb45917401c0ee366e
|
[
"Apache-2.0"
] | 304
|
2020-01-29T03:00:50.000Z
|
2022-03-25T22:31:20.000Z
|
# Copyright 2019-2020 The Lux Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
class Message:
def __init__(self):
self.messages = []
def add_unique(self, item, priority=-1):
msg = {"text": item, "priority": priority}
if msg not in self.messages:
self.messages.append(msg)
def add(self, item, priority=-1):
self.messages.append({"text": item, "priority": priority})
def to_html(self):
if len(self.messages) == 0:
return ""
else:
sorted_msgs = sorted(self.messages, key=lambda i: i["priority"], reverse=True)
html = "<ul>"
for msg in sorted_msgs:
msgTxt = msg["text"]
html += f"<li>{msgTxt}</li>"
html += "</ul>"
return html
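# --- Illustrative usage sketch (not part of the original module) ---
# Higher-priority messages render first; add_unique drops exact duplicates:
if __name__ == "__main__":
    m = Message()
    m.add_unique("Dataframe is large", priority=1)
    m.add_unique("Dataframe is large", priority=1)  # ignored: already present
    m.add("Column X is skewed")
    print(m.to_html())  # <ul><li>Dataframe is large</li><li>Column X is skewed</li></ul>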
| 33.948718
| 90
| 0.619335
|
a2e4abbc9027ba8a09caf01c5dcf9b9ce5a1cb5e
| 792
|
py
|
Python
|
Notes/Python/src/requests/requests_demo.py
|
liuhll/BlogAndarticle
|
23b3b69178b0616837cd6f0b588bda943366b448
|
[
"MIT"
] | 2
|
2016-10-21T16:29:30.000Z
|
2016-10-26T12:49:02.000Z
|
Notes/Python/src/requests/requests_demo.py
|
liuhll/BlogAndarticle
|
23b3b69178b0616837cd6f0b588bda943366b448
|
[
"MIT"
] | 8
|
2016-10-16T15:38:46.000Z
|
2021-07-14T02:25:10.000Z
|
Notes/Python/src/requests/requests_demo.py
|
liuhll/BlogAndArticle
|
23b3b69178b0616837cd6f0b588bda943366b448
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
import requests
URL_IP = 'http://192.168.0.160:8001/ip'
URL_GET = 'http://192.168.0.160:8001/get'
def use_simple_requests():
    response = requests.get(URL_IP)
    print('>>>>Response Headers:')
    print(response.headers)
    print('>>>>Response Body:')
    print(response.text)
def use_params_requests():
    params = {'param1': 'hello', 'param2': 'world'}
    print('>>>>Request Params:')
    print(params)
    resp = requests.get(URL_GET, params=params)
    print('>>>>Response Headers:')
    print(resp.headers)
    print('>>>>Status Code:')
    print(resp.status_code)
    print('>>>>Response Body:')
    print(resp.json())
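# --- Illustrative sketch (not part of the original demo) ---
# A more defensive variant with a timeout and a status check, against the
# same hypothetical test server used above:
def use_safe_requests():
    try:
        resp = requests.get(URL_IP, timeout=5)
        resp.raise_for_status()
        print(resp.json())
    except requests.RequestException as exc:
        print('Request failed:', exc)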
if __name__ == '__main__':
    print('Use simple requests:')
    use_simple_requests()
    print("--------------------------")
    print('Use params requests:')
    use_params_requests()
| 20.307692
| 46
| 0.651515
|
f90cd981f08717cabaa84d79be6d2213ae00c14f
| 5,642
|
py
|
Python
|
src/sagemaker_sklearn_container/serving.py
|
ipanepen/sagemaker-scikit-learn-container
|
3214b0d36955fed0b6338b997b26bcc883f7b883
|
[
"Apache-2.0"
] | 105
|
2018-11-28T21:48:12.000Z
|
2022-03-27T18:51:29.000Z
|
src/sagemaker_sklearn_container/serving.py
|
ipanepen/sagemaker-scikit-learn-container
|
3214b0d36955fed0b6338b997b26bcc883f7b883
|
[
"Apache-2.0"
] | 55
|
2019-01-01T18:52:12.000Z
|
2022-03-29T09:06:38.000Z
|
src/sagemaker_sklearn_container/serving.py
|
ipanepen/sagemaker-scikit-learn-container
|
3214b0d36955fed0b6338b997b26bcc883f7b883
|
[
"Apache-2.0"
] | 94
|
2019-01-21T06:46:07.000Z
|
2022-03-31T18:25:13.000Z
|
# Copyright 2019-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the 'License'). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the 'license' file accompanying this file. This file is
# distributed on an 'AS IS' BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
from __future__ import absolute_import
import os
import importlib
import logging
import numpy as np
from sagemaker_containers.beta.framework import (
content_types, encoders, env, modules, transformer, worker, server)
from sagemaker_sklearn_container.serving_mms import start_model_server
logging.basicConfig(format='%(asctime)s %(levelname)s - %(name)s - %(message)s', level=logging.INFO)
logging.getLogger('boto3').setLevel(logging.INFO)
logging.getLogger('s3transfer').setLevel(logging.INFO)
logging.getLogger('botocore').setLevel(logging.WARN)
logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)
def is_multi_model():
return os.environ.get('SAGEMAKER_MULTI_MODEL')
def default_model_fn(model_dir):
"""Loads a model. For Scikit-learn, a default function to load a model is not provided.
Users should provide customized model_fn() in script.
Args:
model_dir: a directory where model is saved.
Returns: A Scikit-learn model.
"""
return transformer.default_model_fn(model_dir)
def default_input_fn(input_data, content_type):
"""Takes request data and de-serializes the data into an object for prediction.
When an InvokeEndpoint operation is made against an Endpoint running SageMaker model server,
the model server receives two pieces of information:
- The request Content-Type, for example "application/json"
- The request data, which is at most 5 MB (5 * 1024 * 1024 bytes) in size.
The input_fn is responsible to take the request data and pre-process it before prediction.
Args:
input_data (obj): the request data.
content_type (str): the request Content-Type.
Returns:
(obj): data ready for prediction.
"""
np_array = encoders.decode(input_data, content_type)
return np_array.astype(np.float32) if content_type in content_types.UTF8_TYPES else np_array
def default_predict_fn(input_data, model):
"""A default predict_fn for Scikit-learn. Calls a model on data deserialized in input_fn.
Args:
input_data: input data (Numpy array) for prediction deserialized by input_fn
model: Scikit-learn model loaded in memory by model_fn
Returns: a prediction
"""
output = model.predict(input_data)
return output
def default_output_fn(prediction, accept):
"""Function responsible to serialize the prediction for the response.
Args:
prediction (obj): prediction returned by predict_fn .
accept (str): accept content-type expected by the client.
Returns:
(worker.Response): a Flask response object with the following args:
* Args:
response: the serialized data to return
accept: the content-type that the data was transformed to.
"""
return worker.Response(encoders.encode(prediction, accept), accept, mimetype=accept)
def _user_module_transformer(user_module):
model_fn = getattr(user_module, 'model_fn', default_model_fn)
input_fn = getattr(user_module, 'input_fn', default_input_fn)
predict_fn = getattr(user_module, 'predict_fn', default_predict_fn)
output_fn = getattr(user_module, 'output_fn', default_output_fn)
return transformer.Transformer(model_fn=model_fn, input_fn=input_fn, predict_fn=predict_fn,
output_fn=output_fn)
def _user_module_execution_parameters_fn(user_module):
return getattr(user_module, 'execution_parameters_fn', None)
def import_module(module_name, module_dir):
try: # if module_name already exists, use the existing one
user_module = importlib.import_module(module_name)
except ImportError: # if the module has not been loaded, 'modules' downloads and installs it.
user_module = modules.import_module(module_dir, module_name)
except Exception: # this shouldn't happen
logger.info("Encountered an unexpected error.")
raise
user_module_transformer = _user_module_transformer(user_module)
user_module_transformer.initialize()
return user_module_transformer, _user_module_execution_parameters_fn(user_module)
app = None
def main(environ, start_response):
global app
if app is None:
serving_env = env.ServingEnv()
user_module_transformer, execution_parameters_fn = import_module(serving_env.module_name,
serving_env.module_dir)
app = worker.Worker(transform_fn=user_module_transformer.transform,
module_name=serving_env.module_name,
execution_parameters_fn=execution_parameters_fn)
return app(environ, start_response)
def serving_entrypoint():
"""Start Inference Server.
NOTE: If the inference server is multi-model, MxNet Model Server will be used as the base server. Otherwise,
GUnicorn is used as the base server.
"""
if is_multi_model():
start_model_server()
else:
server.start(env.ServingEnv().framework_module)
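# --- Illustrative sketch (not part of this module) ---
# A minimal user inference script that this server could import; the hook
# names follow the convention consumed by _user_module_transformer above,
# and "model.joblib" is an assumed artifact name. Kept commented out so the
# server module itself defines no hooks:
#
#   import os
#   import joblib
#
#   def model_fn(model_dir):
#       return joblib.load(os.path.join(model_dir, "model.joblib"))
#
#   def predict_fn(input_data, model):
#       return model.predict(input_data)
#
# Hooks left undefined (input_fn, output_fn) fall back to the defaults above.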
| 38.380952
| 112
| 0.718894
|
3cfe36f4d9e977833f97d387e6f25dad6d4c9f18
| 1,672
|
py
|
Python
|
test/test_curvature.py
|
weepingwillowben/reward-surfaces
|
f27211faf3784df3305972b7cad65002fd57d7bf
|
[
"MIT"
] | null | null | null |
test/test_curvature.py
|
weepingwillowben/reward-surfaces
|
f27211faf3784df3305972b7cad65002fd57d7bf
|
[
"MIT"
] | null | null | null |
test/test_curvature.py
|
weepingwillowben/reward-surfaces
|
f27211faf3784df3305972b7cad65002fd57d7bf
|
[
"MIT"
] | 2
|
2021-10-03T14:51:38.000Z
|
2021-11-10T02:54:26.000Z
|
import gym
import numpy as np
from stable_baselines3.a2c import A2C
from stable_baselines3.ppo import PPO
from stable_baselines3.ddpg import DDPG
from stable_baselines3.td3 import TD3
from stable_baselines3.sac import SAC
from stable_baselines3.her import HER
import tempfile
from stable_baselines3.common.bit_flipping_env import BitFlippingEnv
from reward_surfaces.agents import SB3OnPolicyTrainer, SB3OffPolicyTrainer, SB3HerPolicyTrainer
from reward_surfaces.agents import ExtA2C, ExtPPO, ExtSAC
from reward_surfaces.algorithms import calculate_est_hesh_eigenvalues
from reward_surfaces.agents import RainbowTrainer
def test_curvature(env_fn, trainer):
# test trainer learning
saved_files = trainer.train(100,"test_results",save_freq=1000)
results = trainer.calculate_eigenvalues(100,1.e-5)
print(results['maxeig'], results['mineig'], results['ratio'])
def discrete_env_fn():
return gym.make("CartPole-v1")
def continious_env_fn():
return gym.make("Pendulum-v0")
if __name__ == "__main__":
print("testing SB3 SAC curvature")
test_curvature(continious_env_fn, SB3OffPolicyTrainer(continious_env_fn,ExtSAC("MlpPolicy",continious_env_fn(),device="cuda")))
print("testing SB3 A2C curvature")
test_curvature(discrete_env_fn, SB3OnPolicyTrainer(discrete_env_fn,ExtA2C("MlpPolicy",discrete_env_fn(),device="cpu")))
print("testing SB3 PPO curvature")
test_curvature(discrete_env_fn, SB3OnPolicyTrainer(discrete_env_fn,ExtPPO("MlpPolicy",discrete_env_fn(),device="cpu")))
print("testing Rainbow curvature")
test_curvature(discrete_env_fn,RainbowTrainer("space_invaders",learning_starts=1000))
| 42.871795
| 131
| 0.805622
|
199284c680bc27e8966bc04f6718fd87a2603e35
| 30,055
|
py
|
Python
|
tests/helpers/test_entity_platform.py
|
unverbraucht/core
|
312af53935a1bffd58b3b35e82e31292a6ec22ad
|
[
"Apache-2.0"
] | 2
|
2019-11-20T20:56:59.000Z
|
2021-01-03T08:52:18.000Z
|
tests/helpers/test_entity_platform.py
|
shownor/core
|
b50281a9173e7fb4a37b3f813ca92876088eaac3
|
[
"Apache-2.0"
] | null | null | null |
tests/helpers/test_entity_platform.py
|
shownor/core
|
b50281a9173e7fb4a37b3f813ca92876088eaac3
|
[
"Apache-2.0"
] | 1
|
2021-04-18T19:36:34.000Z
|
2021-04-18T19:36:34.000Z
|
"""Tests for the EntityPlatform helper."""
import asyncio
from datetime import timedelta
import logging
from unittest.mock import MagicMock, Mock, patch
import asynctest
import pytest
from homeassistant.const import UNIT_PERCENTAGE
from homeassistant.core import callback
from homeassistant.exceptions import PlatformNotReady
from homeassistant.helpers import entity_platform, entity_registry
from homeassistant.helpers.entity import async_generate_entity_id
from homeassistant.helpers.entity_component import (
DEFAULT_SCAN_INTERVAL,
EntityComponent,
)
import homeassistant.util.dt as dt_util
from tests.common import (
MockConfigEntry,
MockEntity,
MockEntityPlatform,
MockPlatform,
async_fire_time_changed,
mock_entity_platform,
mock_registry,
)
_LOGGER = logging.getLogger(__name__)
DOMAIN = "test_domain"
PLATFORM = "test_platform"
async def test_polling_only_updates_entities_it_should_poll(hass):
"""Test the polling of only updated entities."""
component = EntityComponent(_LOGGER, DOMAIN, hass, timedelta(seconds=20))
no_poll_ent = MockEntity(should_poll=False)
no_poll_ent.async_update = Mock()
poll_ent = MockEntity(should_poll=True)
poll_ent.async_update = Mock()
await component.async_add_entities([no_poll_ent, poll_ent])
no_poll_ent.async_update.reset_mock()
poll_ent.async_update.reset_mock()
async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=20))
await hass.async_block_till_done()
assert not no_poll_ent.async_update.called
assert poll_ent.async_update.called
async def test_polling_updates_entities_with_exception(hass):
"""Test the updated entities that not break with an exception."""
component = EntityComponent(_LOGGER, DOMAIN, hass, timedelta(seconds=20))
update_ok = []
update_err = []
def update_mock():
"""Mock normal update."""
update_ok.append(None)
def update_mock_err():
"""Mock error update."""
update_err.append(None)
raise AssertionError("Fake error update")
ent1 = MockEntity(should_poll=True)
ent1.update = update_mock_err
ent2 = MockEntity(should_poll=True)
ent2.update = update_mock
ent3 = MockEntity(should_poll=True)
ent3.update = update_mock
ent4 = MockEntity(should_poll=True)
ent4.update = update_mock
await component.async_add_entities([ent1, ent2, ent3, ent4])
update_ok.clear()
update_err.clear()
async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=20))
await hass.async_block_till_done()
assert len(update_ok) == 3
assert len(update_err) == 1
async def test_update_state_adds_entities(hass):
"""Test if updating poll entities cause an entity to be added works."""
component = EntityComponent(_LOGGER, DOMAIN, hass)
ent1 = MockEntity()
ent2 = MockEntity(should_poll=True)
await component.async_add_entities([ent2])
assert len(hass.states.async_entity_ids()) == 1
ent2.update = lambda *_: component.add_entities([ent1])
async_fire_time_changed(hass, dt_util.utcnow() + DEFAULT_SCAN_INTERVAL)
await hass.async_block_till_done()
assert len(hass.states.async_entity_ids()) == 2
async def test_update_state_adds_entities_with_update_before_add_true(hass):
"""Test if call update before add to state machine."""
component = EntityComponent(_LOGGER, DOMAIN, hass)
ent = MockEntity()
ent.update = Mock(spec_set=True)
await component.async_add_entities([ent], True)
await hass.async_block_till_done()
assert len(hass.states.async_entity_ids()) == 1
assert ent.update.called
async def test_update_state_adds_entities_with_update_before_add_false(hass):
"""Test if not call update before add to state machine."""
component = EntityComponent(_LOGGER, DOMAIN, hass)
ent = MockEntity()
ent.update = Mock(spec_set=True)
await component.async_add_entities([ent], False)
await hass.async_block_till_done()
assert len(hass.states.async_entity_ids()) == 1
assert not ent.update.called
@asynctest.patch("homeassistant.helpers.entity_platform.async_track_time_interval")
async def test_set_scan_interval_via_platform(mock_track, hass):
"""Test the setting of the scan interval via platform."""
def platform_setup(hass, config, add_entities, discovery_info=None):
"""Test the platform setup."""
add_entities([MockEntity(should_poll=True)])
platform = MockPlatform(platform_setup)
platform.SCAN_INTERVAL = timedelta(seconds=30)
mock_entity_platform(hass, "test_domain.platform", platform)
component = EntityComponent(_LOGGER, DOMAIN, hass)
component.setup({DOMAIN: {"platform": "platform"}})
await hass.async_block_till_done()
assert mock_track.called
assert timedelta(seconds=30) == mock_track.call_args[0][2]
async def test_adding_entities_with_generator_and_thread_callback(hass):
"""Test generator in add_entities that calls thread method.
We should make sure we resolve the generator to a list before passing
it into an async context.
"""
component = EntityComponent(_LOGGER, DOMAIN, hass)
def create_entity(number):
"""Create entity helper."""
entity = MockEntity()
entity.entity_id = async_generate_entity_id(DOMAIN + ".{}", "Number", hass=hass)
return entity
await component.async_add_entities(create_entity(i) for i in range(2))
async def test_platform_warn_slow_setup(hass):
"""Warn we log when platform setup takes a long time."""
platform = MockPlatform()
mock_entity_platform(hass, "test_domain.platform", platform)
component = EntityComponent(_LOGGER, DOMAIN, hass)
with patch.object(hass.loop, "call_later", MagicMock()) as mock_call:
await component.async_setup({DOMAIN: {"platform": "platform"}})
assert mock_call.called
# mock_calls[0] is the warning message for component setup
# mock_calls[3] is the warning message for platform setup
timeout, logger_method = mock_call.mock_calls[3][1][:2]
assert timeout == entity_platform.SLOW_SETUP_WARNING
assert logger_method == _LOGGER.warning
assert mock_call().cancel.called
async def test_platform_error_slow_setup(hass, caplog):
"""Don't block startup more than SLOW_SETUP_MAX_WAIT."""
with patch.object(entity_platform, "SLOW_SETUP_MAX_WAIT", 0):
called = []
async def setup_platform(*args):
called.append(1)
await asyncio.sleep(1)
platform = MockPlatform(async_setup_platform=setup_platform)
component = EntityComponent(_LOGGER, DOMAIN, hass)
mock_entity_platform(hass, "test_domain.test_platform", platform)
await component.async_setup({DOMAIN: {"platform": "test_platform"}})
assert len(called) == 1
assert "test_domain.test_platform" not in hass.config.components
assert "test_platform is taking longer than 0 seconds" in caplog.text
async def test_updated_state_used_for_entity_id(hass):
"""Test that first update results used for entity ID generation."""
component = EntityComponent(_LOGGER, DOMAIN, hass)
class MockEntityNameFetcher(MockEntity):
"""Mock entity that fetches a friendly name."""
async def async_update(self):
"""Mock update that assigns a name."""
self._values["name"] = "Living Room"
await component.async_add_entities([MockEntityNameFetcher()], True)
entity_ids = hass.states.async_entity_ids()
assert len(entity_ids) == 1
assert entity_ids[0] == "test_domain.living_room"
async def test_parallel_updates_async_platform(hass):
"""Test async platform does not have parallel_updates limit by default."""
platform = MockPlatform()
mock_entity_platform(hass, "test_domain.platform", platform)
component = EntityComponent(_LOGGER, DOMAIN, hass)
component._platforms = {}
await component.async_setup({DOMAIN: {"platform": "platform"}})
handle = list(component._platforms.values())[-1]
assert handle.parallel_updates is None
class AsyncEntity(MockEntity):
"""Mock entity that has async_update."""
async def async_update(self):
pass
entity = AsyncEntity()
await handle.async_add_entities([entity])
assert entity.parallel_updates is None
async def test_parallel_updates_async_platform_with_constant(hass):
"""Test async platform can set parallel_updates limit."""
platform = MockPlatform()
platform.PARALLEL_UPDATES = 2
mock_entity_platform(hass, "test_domain.platform", platform)
component = EntityComponent(_LOGGER, DOMAIN, hass)
component._platforms = {}
await component.async_setup({DOMAIN: {"platform": "platform"}})
handle = list(component._platforms.values())[-1]
class AsyncEntity(MockEntity):
"""Mock entity that has async_update."""
async def async_update(self):
pass
entity = AsyncEntity()
await handle.async_add_entities([entity])
assert entity.parallel_updates is not None
assert entity.parallel_updates._value == 2
async def test_parallel_updates_sync_platform(hass):
"""Test sync platform parallel_updates default set to 1."""
platform = MockPlatform()
mock_entity_platform(hass, "test_domain.platform", platform)
component = EntityComponent(_LOGGER, DOMAIN, hass)
component._platforms = {}
await component.async_setup({DOMAIN: {"platform": "platform"}})
handle = list(component._platforms.values())[-1]
class SyncEntity(MockEntity):
"""Mock entity that has update."""
        def update(self):  # sync update, matching the sync-platform scenario
pass
entity = SyncEntity()
await handle.async_add_entities([entity])
assert entity.parallel_updates is not None
assert entity.parallel_updates._value == 1
async def test_parallel_updates_sync_platform_with_constant(hass):
"""Test sync platform can set parallel_updates limit."""
platform = MockPlatform()
platform.PARALLEL_UPDATES = 2
mock_entity_platform(hass, "test_domain.platform", platform)
component = EntityComponent(_LOGGER, DOMAIN, hass)
component._platforms = {}
await component.async_setup({DOMAIN: {"platform": "platform"}})
handle = list(component._platforms.values())[-1]
class SyncEntity(MockEntity):
"""Mock entity that has update."""
        def update(self):  # sync update, matching the sync-platform scenario
pass
entity = SyncEntity()
await handle.async_add_entities([entity])
assert entity.parallel_updates is not None
assert entity.parallel_updates._value == 2
async def test_raise_error_on_update(hass):
"""Test the add entity if they raise an error on update."""
updates = []
component = EntityComponent(_LOGGER, DOMAIN, hass)
entity1 = MockEntity(name="test_1")
entity2 = MockEntity(name="test_2")
def _raise():
"""Raise an exception."""
raise AssertionError
entity1.update = _raise
entity2.update = lambda: updates.append(1)
await component.async_add_entities([entity1, entity2], True)
assert len(updates) == 1
assert 1 in updates
async def test_async_remove_with_platform(hass):
"""Remove an entity from a platform."""
component = EntityComponent(_LOGGER, DOMAIN, hass)
entity1 = MockEntity(name="test_1")
await component.async_add_entities([entity1])
assert len(hass.states.async_entity_ids()) == 1
await entity1.async_remove()
assert len(hass.states.async_entity_ids()) == 0
async def test_not_adding_duplicate_entities_with_unique_id(hass):
"""Test for not adding duplicate entities."""
component = EntityComponent(_LOGGER, DOMAIN, hass)
await component.async_add_entities(
[MockEntity(name="test1", unique_id="not_very_unique")]
)
assert len(hass.states.async_entity_ids()) == 1
await component.async_add_entities(
[MockEntity(name="test2", unique_id="not_very_unique")]
)
assert len(hass.states.async_entity_ids()) == 1
async def test_using_prescribed_entity_id(hass):
"""Test for using predefined entity ID."""
component = EntityComponent(_LOGGER, DOMAIN, hass)
await component.async_add_entities(
[MockEntity(name="bla", entity_id="hello.world")]
)
assert "hello.world" in hass.states.async_entity_ids()
async def test_using_prescribed_entity_id_with_unique_id(hass):
"""Test for amending predefined entity ID because currently exists."""
component = EntityComponent(_LOGGER, DOMAIN, hass)
await component.async_add_entities([MockEntity(entity_id="test_domain.world")])
await component.async_add_entities(
[MockEntity(entity_id="test_domain.world", unique_id="bla")]
)
assert "test_domain.world_2" in hass.states.async_entity_ids()
async def test_using_prescribed_entity_id_which_is_registered(hass):
"""Test not allowing predefined entity ID that already registered."""
component = EntityComponent(_LOGGER, DOMAIN, hass)
registry = mock_registry(hass)
# Register test_domain.world
registry.async_get_or_create(DOMAIN, "test", "1234", suggested_object_id="world")
# This entity_id will be rewritten
await component.async_add_entities([MockEntity(entity_id="test_domain.world")])
assert "test_domain.world_2" in hass.states.async_entity_ids()
async def test_name_which_conflict_with_registered(hass):
"""Test not generating conflicting entity ID based on name."""
component = EntityComponent(_LOGGER, DOMAIN, hass)
registry = mock_registry(hass)
# Register test_domain.world
registry.async_get_or_create(DOMAIN, "test", "1234", suggested_object_id="world")
await component.async_add_entities([MockEntity(name="world")])
assert "test_domain.world_2" in hass.states.async_entity_ids()
async def test_entity_with_name_and_entity_id_getting_registered(hass):
"""Ensure that entity ID is used for registration."""
component = EntityComponent(_LOGGER, DOMAIN, hass)
await component.async_add_entities(
[MockEntity(unique_id="1234", name="bla", entity_id="test_domain.world")]
)
assert "test_domain.world" in hass.states.async_entity_ids()
async def test_overriding_name_from_registry(hass):
"""Test that we can override a name via the Entity Registry."""
component = EntityComponent(_LOGGER, DOMAIN, hass)
mock_registry(
hass,
{
"test_domain.world": entity_registry.RegistryEntry(
entity_id="test_domain.world",
unique_id="1234",
# Using component.async_add_entities is equal to platform "domain"
platform="test_domain",
name="Overridden",
)
},
)
await component.async_add_entities(
[MockEntity(unique_id="1234", name="Device Name")]
)
state = hass.states.get("test_domain.world")
assert state is not None
assert state.name == "Overridden"
async def test_registry_respect_entity_namespace(hass):
"""Test that the registry respects entity namespace."""
mock_registry(hass)
platform = MockEntityPlatform(hass, entity_namespace="ns")
entity = MockEntity(unique_id="1234", name="Device Name")
await platform.async_add_entities([entity])
assert entity.entity_id == "test_domain.ns_device_name"
async def test_registry_respect_entity_disabled(hass):
"""Test that the registry respects entity disabled."""
mock_registry(
hass,
{
"test_domain.world": entity_registry.RegistryEntry(
entity_id="test_domain.world",
unique_id="1234",
# Using component.async_add_entities is equal to platform "domain"
platform="test_platform",
disabled_by=entity_registry.DISABLED_USER,
)
},
)
platform = MockEntityPlatform(hass)
entity = MockEntity(unique_id="1234")
await platform.async_add_entities([entity])
assert entity.entity_id == "test_domain.world"
assert hass.states.async_entity_ids() == []
async def test_entity_registry_updates_name(hass):
"""Test that updates on the entity registry update platform entities."""
registry = mock_registry(
hass,
{
"test_domain.world": entity_registry.RegistryEntry(
entity_id="test_domain.world",
unique_id="1234",
# Using component.async_add_entities is equal to platform "domain"
platform="test_platform",
name="before update",
)
},
)
platform = MockEntityPlatform(hass)
entity = MockEntity(unique_id="1234")
await platform.async_add_entities([entity])
state = hass.states.get("test_domain.world")
assert state is not None
assert state.name == "before update"
registry.async_update_entity("test_domain.world", name="after update")
await hass.async_block_till_done()
await hass.async_block_till_done()
state = hass.states.get("test_domain.world")
assert state.name == "after update"
async def test_setup_entry(hass):
"""Test we can setup an entry."""
registry = mock_registry(hass)
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Mock setup entry method."""
async_add_entities([MockEntity(name="test1", unique_id="unique")])
return True
platform = MockPlatform(async_setup_entry=async_setup_entry)
config_entry = MockConfigEntry(entry_id="super-mock-id")
entity_platform = MockEntityPlatform(
hass, platform_name=config_entry.domain, platform=platform
)
assert await entity_platform.async_setup_entry(config_entry)
await hass.async_block_till_done()
full_name = f"{entity_platform.domain}.{config_entry.domain}"
assert full_name in hass.config.components
assert len(hass.states.async_entity_ids()) == 1
assert len(registry.entities) == 1
assert registry.entities["test_domain.test1"].config_entry_id == "super-mock-id"
async def test_setup_entry_platform_not_ready(hass, caplog):
"""Test when an entry is not ready yet."""
async_setup_entry = Mock(side_effect=PlatformNotReady)
platform = MockPlatform(async_setup_entry=async_setup_entry)
config_entry = MockConfigEntry()
ent_platform = MockEntityPlatform(
hass, platform_name=config_entry.domain, platform=platform
)
with patch.object(entity_platform, "async_call_later") as mock_call_later:
assert not await ent_platform.async_setup_entry(config_entry)
full_name = f"{ent_platform.domain}.{config_entry.domain}"
assert full_name not in hass.config.components
assert len(async_setup_entry.mock_calls) == 1
assert "Platform test not ready yet" in caplog.text
assert len(mock_call_later.mock_calls) == 1
async def test_reset_cancels_retry_setup(hass):
"""Test that resetting a platform will cancel scheduled a setup retry."""
async_setup_entry = Mock(side_effect=PlatformNotReady)
platform = MockPlatform(async_setup_entry=async_setup_entry)
config_entry = MockConfigEntry()
ent_platform = MockEntityPlatform(
hass, platform_name=config_entry.domain, platform=platform
)
with patch.object(entity_platform, "async_call_later") as mock_call_later:
assert not await ent_platform.async_setup_entry(config_entry)
assert len(mock_call_later.mock_calls) == 1
assert len(mock_call_later.return_value.mock_calls) == 0
assert ent_platform._async_cancel_retry_setup is not None
await ent_platform.async_reset()
assert len(mock_call_later.return_value.mock_calls) == 1
assert ent_platform._async_cancel_retry_setup is None
async def test_not_fails_with_adding_empty_entities(hass):
    """Test that adding an empty entities list does not fail."""
component = EntityComponent(_LOGGER, DOMAIN, hass)
await component.async_add_entities([])
assert len(hass.states.async_entity_ids()) == 0
async def test_entity_registry_updates_entity_id(hass):
"""Test that updates on the entity registry update platform entities."""
registry = mock_registry(
hass,
{
"test_domain.world": entity_registry.RegistryEntry(
entity_id="test_domain.world",
unique_id="1234",
# Using component.async_add_entities is equal to platform "domain"
platform="test_platform",
name="Some name",
)
},
)
platform = MockEntityPlatform(hass)
entity = MockEntity(unique_id="1234")
await platform.async_add_entities([entity])
state = hass.states.get("test_domain.world")
assert state is not None
assert state.name == "Some name"
registry.async_update_entity(
"test_domain.world", new_entity_id="test_domain.planet"
)
await hass.async_block_till_done()
await hass.async_block_till_done()
assert hass.states.get("test_domain.world") is None
assert hass.states.get("test_domain.planet") is not None
async def test_entity_registry_updates_invalid_entity_id(hass):
"""Test that we can't update to an invalid entity id."""
registry = mock_registry(
hass,
{
"test_domain.world": entity_registry.RegistryEntry(
entity_id="test_domain.world",
unique_id="1234",
# Using component.async_add_entities is equal to platform "domain"
platform="test_platform",
name="Some name",
),
"test_domain.existing": entity_registry.RegistryEntry(
entity_id="test_domain.existing",
unique_id="5678",
platform="test_platform",
),
},
)
platform = MockEntityPlatform(hass)
entity = MockEntity(unique_id="1234")
await platform.async_add_entities([entity])
state = hass.states.get("test_domain.world")
assert state is not None
assert state.name == "Some name"
with pytest.raises(ValueError):
registry.async_update_entity(
"test_domain.world", new_entity_id="test_domain.existing"
)
with pytest.raises(ValueError):
registry.async_update_entity(
"test_domain.world", new_entity_id="invalid_entity_id"
)
with pytest.raises(ValueError):
registry.async_update_entity(
"test_domain.world", new_entity_id="diff_domain.world"
)
await hass.async_block_till_done()
await hass.async_block_till_done()
assert hass.states.get("test_domain.world") is not None
assert hass.states.get("invalid_entity_id") is None
assert hass.states.get("diff_domain.world") is None
async def test_device_info_called(hass):
"""Test device info is forwarded correctly."""
registry = await hass.helpers.device_registry.async_get_registry()
via = registry.async_get_or_create(
config_entry_id="123",
connections=set(),
identifiers={("hue", "via-id")},
manufacturer="manufacturer",
model="via",
)
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Mock setup entry method."""
async_add_entities(
[
# Invalid device info
MockEntity(unique_id="abcd", device_info={}),
# Valid device info
MockEntity(
unique_id="qwer",
device_info={
"identifiers": {("hue", "1234")},
"connections": {("mac", "abcd")},
"manufacturer": "test-manuf",
"model": "test-model",
"name": "test-name",
"sw_version": "test-sw",
"via_device": ("hue", "via-id"),
},
),
]
)
return True
platform = MockPlatform(async_setup_entry=async_setup_entry)
config_entry = MockConfigEntry(entry_id="super-mock-id")
entity_platform = MockEntityPlatform(
hass, platform_name=config_entry.domain, platform=platform
)
assert await entity_platform.async_setup_entry(config_entry)
await hass.async_block_till_done()
assert len(hass.states.async_entity_ids()) == 2
device = registry.async_get_device({("hue", "1234")}, set())
assert device is not None
assert device.identifiers == {("hue", "1234")}
assert device.connections == {("mac", "abcd")}
assert device.manufacturer == "test-manuf"
assert device.model == "test-model"
assert device.name == "test-name"
assert device.sw_version == "test-sw"
assert device.via_device_id == via.id
async def test_device_info_not_overrides(hass):
"""Test device info is forwarded correctly."""
registry = await hass.helpers.device_registry.async_get_registry()
device = registry.async_get_or_create(
config_entry_id="bla",
connections={("mac", "abcd")},
manufacturer="test-manufacturer",
model="test-model",
)
assert device.manufacturer == "test-manufacturer"
assert device.model == "test-model"
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Mock setup entry method."""
async_add_entities(
[
MockEntity(
unique_id="qwer", device_info={"connections": {("mac", "abcd")}}
)
]
)
return True
platform = MockPlatform(async_setup_entry=async_setup_entry)
config_entry = MockConfigEntry(entry_id="super-mock-id")
entity_platform = MockEntityPlatform(
hass, platform_name=config_entry.domain, platform=platform
)
assert await entity_platform.async_setup_entry(config_entry)
await hass.async_block_till_done()
device2 = registry.async_get_device(set(), {("mac", "abcd")})
assert device2 is not None
assert device.id == device2.id
assert device2.manufacturer == "test-manufacturer"
assert device2.model == "test-model"
async def test_entity_disabled_by_integration(hass):
"""Test entity disabled by integration."""
component = EntityComponent(_LOGGER, DOMAIN, hass, timedelta(seconds=20))
entity_default = MockEntity(unique_id="default")
entity_disabled = MockEntity(
unique_id="disabled", entity_registry_enabled_default=False
)
await component.async_add_entities([entity_default, entity_disabled])
registry = await hass.helpers.entity_registry.async_get_registry()
entry_default = registry.async_get_or_create(DOMAIN, DOMAIN, "default")
assert entry_default.disabled_by is None
entry_disabled = registry.async_get_or_create(DOMAIN, DOMAIN, "disabled")
assert entry_disabled.disabled_by == "integration"
async def test_entity_info_added_to_entity_registry(hass):
"""Test entity info is written to entity registry."""
component = EntityComponent(_LOGGER, DOMAIN, hass, timedelta(seconds=20))
entity_default = MockEntity(
unique_id="default",
capability_attributes={"max": 100},
supported_features=5,
device_class="mock-device-class",
unit_of_measurement=UNIT_PERCENTAGE,
)
await component.async_add_entities([entity_default])
registry = await hass.helpers.entity_registry.async_get_registry()
entry_default = registry.async_get_or_create(DOMAIN, DOMAIN, "default")
print(entry_default)
assert entry_default.capabilities == {"max": 100}
assert entry_default.supported_features == 5
assert entry_default.device_class == "mock-device-class"
assert entry_default.unit_of_measurement == UNIT_PERCENTAGE
async def test_override_restored_entities(hass):
"""Test that we allow overriding restored entities."""
registry = mock_registry(hass)
registry.async_get_or_create(
"test_domain", "test_domain", "1234", suggested_object_id="world"
)
hass.states.async_set("test_domain.world", "unavailable", {"restored": True})
component = EntityComponent(_LOGGER, DOMAIN, hass)
await component.async_add_entities(
[MockEntity(unique_id="1234", state="on", entity_id="test_domain.world")], True
)
state = hass.states.get("test_domain.world")
assert state.state == "on"
async def test_platform_with_no_setup(hass, caplog):
"""Test setting up a platform that does not support setup."""
entity_platform = MockEntityPlatform(
hass, domain="mock-integration", platform_name="mock-platform", platform=None
)
await entity_platform.async_setup(None)
assert (
"The mock-platform platform for the mock-integration integration does not support platform setup."
in caplog.text
)
async def test_platforms_sharing_services(hass):
"""Test platforms share services."""
entity_platform1 = MockEntityPlatform(
hass, domain="mock_integration", platform_name="mock_platform", platform=None
)
entity1 = MockEntity(entity_id="mock_integration.entity_1")
await entity_platform1.async_add_entities([entity1])
entity_platform2 = MockEntityPlatform(
hass, domain="mock_integration", platform_name="mock_platform", platform=None
)
entity2 = MockEntity(entity_id="mock_integration.entity_2")
await entity_platform2.async_add_entities([entity2])
entities = []
@callback
def handle_service(entity, data):
entities.append(entity)
entity_platform1.async_register_entity_service("hello", {}, handle_service)
entity_platform2.async_register_entity_service(
"hello", {}, Mock(side_effect=AssertionError("Should not be called"))
)
await hass.services.async_call(
"mock_platform", "hello", {"entity_id": "all"}, blocking=True
)
assert len(entities) == 2
assert entity1 in entities
assert entity2 in entities
| 33.960452
| 106
| 0.69659
|
37a5546718fa569571bc660c0d97b97ad1ce66e8
| 5,120
|
py
|
Python
|
python/jittor/test/test_grad.py
|
gitqifan/jittor
|
0a5bd61bf46179c1316b66d5e26498960bbd3b88
|
[
"Apache-2.0"
] | 5
|
2020-08-09T02:27:58.000Z
|
2021-01-13T16:04:32.000Z
|
python/jittor/test/test_grad.py
|
gitqifan/jittor
|
0a5bd61bf46179c1316b66d5e26498960bbd3b88
|
[
"Apache-2.0"
] | null | null | null |
python/jittor/test/test_grad.py
|
gitqifan/jittor
|
0a5bd61bf46179c1316b66d5e26498960bbd3b88
|
[
"Apache-2.0"
] | 1
|
2020-06-23T16:25:42.000Z
|
2020-06-23T16:25:42.000Z
|
# ***************************************************************
# Copyright (c) 2020 Jittor. Authors: Dun Liang <randonlang@gmail.com>. All Rights Reserved.
# This file is subject to the terms and conditions defined in
# file 'LICENSE.txt', which is part of this source code package.
# ***************************************************************
import unittest
import jittor as jt
import numpy as np
from .test_core import expect_error
def equal_size(x, y):
return x.dtype == y.dtype and x.shape == y.shape
def ngrad(func, vars, eps):
out = func(vars)
dout = []
for i in range(len(vars)):
pvar = vars[i].astype("float64")
if type(pvar)==np.ndarray and pvar.size>1:
grad = []
var_f = pvar.flatten()
for j in range(len(var_f)):
var = pvar.flatten()
var[j] += eps
vars[i] = var.reshape(pvar.shape)
out2 = func(vars)
grad.append((out2-out)/eps)
dout.append(np.array(grad).reshape(pvar.shape))
else:
vars[i] = vars[i] + eps
out2 = func(vars)
dout.append((out2-out)/eps)
vars[i] = pvar
return out, dout
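# --- Illustrative sketch (not part of the original file) ---
# ngrad above uses forward differences; central differences reduce the
# truncation error from O(eps) to O(eps**2) for roughly the same cost.
# A scalar-only variant under the same calling convention:
def ngrad_central(func, vars, eps):
    dout = []
    for i in range(len(vars)):
        orig = vars[i]
        vars[i] = orig + eps
        out_p = func(vars)
        vars[i] = orig - eps
        out_m = func(vars)
        vars[i] = orig
        dout.append((out_p - out_m) / (2 * eps))
    return dout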
class TestGrad(unittest.TestCase):
def test_grad(self):
x = jt.array([1.0, 2.0])
y = jt.array([3.0, 4.0])
z = x*y
dx, dy, dz = jt.grad(z, [x,y,z])
assert equal_size(dx, x) and equal_size(dy, y), f"{x} {y} {dx} {dy}"
assert (dy.data == x.data).all(), f"{dy.data} {x.data}"
assert (dx.data == y.data).all(), f"{dx.data} {y.data}"
assert (dz.data == 1).all()
def test_check_float(self):
x = jt.array(1)
y = x*x
expect_error(lambda: jt.grad(y, [x]))
def test_grad2(self):
def test(n):
x = jt.array(2.0)
y = x
for _ in range(n-1): y = y*x
dx, = jt.grad(y, [x])
assert dx.data == n*2**(n-1), f"{dx.data} {x.data}, {y.data}"
test(5)
test(6)
test(7)
test(8)
def test_var_index(self):
x = jt.array(2.0)
y = x-x
dx, = jt.grad(y, [x])
assert dx.data == 0, dx.data
x = jt.array(2.0)
y = x/x
dx, = jt.grad(x, [y])
assert dx.data == 0
def test_random_graph(self):
def test(num_vars, num_ops, seed):
np.random.seed(seed)
vars = []
for _ in range(num_vars):
vars.append(np.random.rand(1))
def random_func(vars):
np.random.seed(seed+1)
vars = list(vars)
for i in range(num_ops):
v1 = len(vars)-1-np.random.randint(num_vars)
v2 = len(vars)-1-np.random.randint(num_vars)
rop = "+-*/"[np.random.randint(4)]
                    if (rop == '/' or rop == '-') and v1 == v2:  # '==', not 'is': numpy ints are distinct objects
rop = '+'
vout = eval(f"vars[v1]{rop}vars[v2]")
vars.append(vout)
                if isinstance(vars[0], jt.Var):  # check an input var rather than the leaked loop index
for i in range(len(vars)):
vars[i].name("v"+str(i))
return vout
np_out, np_dout = ngrad(random_func, vars, 1e-7)
jt_vars = [ jt.array(v) for v in vars ]
jt_out = random_func(jt_vars)
assert (np.abs(jt_out.data-np_out) < 1e-5).all(), (jt_out.data, np_out)
jt_dout = jt.grad(jt_out, jt_vars)
jt_dout = [ v.data for v in jt_dout ]
for jt_d, np_d in zip(jt_dout, np_dout):
assert abs(jt_d - np_d) < 1e-3, f"{jt_d} {np_d}"
test(1,1,0)
# test(3,3,1)
test(3,6,0)
test(10,100,2)
test(30,100,4)
test(50,100,6)
def test_top_sort(self):
x = jt.array(2.0)
x.name('x')
y1 = x*x # 2
y1.name('y1')
y2 = x*x # 2
y2.name('y2')
y3 = y1*y2 # 4
y3.name('y3')
y4 = y3*y1 # 6
y4.name('y4')
y5 = y4*y1 # 8
y5.name('y5')
y6 = y5*y1 # 10
y6.name('y6')
vars = [x,y1,y2,y3,y4,y5,y6]
grads = [ g.data for g in jt.grad(y6, vars) ]
dx = grads[0]
assert dx == 10*2**9, f"{grads}"
def test_int_grad(self):
x = jt.array(2.0)
z = x*x*x*x*x
dx, = jt.grad(z, [x])
self.assertEqual(dx.data, 5*2**4)
y1 = jt.int(x)
y2 = jt.float(x)
z = x*x*y1*y1*y2
expect_error(lambda: jt.grad(z, [y1]))
dx, = jt.grad(z, [x])
self.assertEqual(dx.data, 48)
def test_nth_grad(self):
x = jt.array(2.0)
y = x*x*x*x
dx = jt.grad(y, x)
ddx = jt.grad(dx, x)
dddx = jt.grad(ddx, x)
self.assertEqual(y.data, 2**4)
self.assertEqual(dx.data, 4*2**3)
self.assertEqual(ddx.data, 4*3*2**2)
self.assertEqual(dddx.data, 4*3*2*2**1)
if __name__ == "__main__":
unittest.main()
| 32.405063
| 92
| 0.455664
|
fae01fafed1141d0aa4a4e4501cf7b4d6d813e1d
| 18,328
|
py
|
Python
|
networkx/algorithms/flow/tests/test_mincost.py
|
CrazyPython/networkx
|
cc5a81a1d437e490efaf23e4fb82ab49e05ca392
|
[
"BSD-3-Clause"
] | null | null | null |
networkx/algorithms/flow/tests/test_mincost.py
|
CrazyPython/networkx
|
cc5a81a1d437e490efaf23e4fb82ab49e05ca392
|
[
"BSD-3-Clause"
] | null | null | null |
networkx/algorithms/flow/tests/test_mincost.py
|
CrazyPython/networkx
|
cc5a81a1d437e490efaf23e4fb82ab49e05ca392
|
[
"BSD-3-Clause"
] | 2
|
2016-09-04T10:59:12.000Z
|
2020-02-17T07:43:04.000Z
|
# -*- coding: utf-8 -*-
import networkx as nx
from nose.tools import assert_equal, assert_raises
import os
class TestMinCostFlow:
def test_simple_digraph(self):
G = nx.DiGraph()
G.add_node('a', demand = -5)
G.add_node('d', demand = 5)
G.add_edge('a', 'b', weight = 3, capacity = 4)
G.add_edge('a', 'c', weight = 6, capacity = 10)
G.add_edge('b', 'd', weight = 1, capacity = 9)
G.add_edge('c', 'd', weight = 2, capacity = 5)
flowCost, H = nx.network_simplex(G)
soln = {'a': {'b': 4, 'c': 1},
'b': {'d': 4},
'c': {'d': 1},
'd': {}}
assert_equal(flowCost, 24)
assert_equal(nx.min_cost_flow_cost(G), 24)
assert_equal(H, soln)
assert_equal(nx.min_cost_flow(G), soln)
assert_equal(nx.cost_of_flow(G, H), 24)
flowCost, H = nx.capacity_scaling(G)
assert_equal(flowCost, 24)
assert_equal(nx.cost_of_flow(G, H), 24)
assert_equal(H, soln)
def test_negcycle_infcap(self):
G = nx.DiGraph()
G.add_node('s', demand = -5)
G.add_node('t', demand = 5)
G.add_edge('s', 'a', weight = 1, capacity = 3)
G.add_edge('a', 'b', weight = 3)
G.add_edge('c', 'a', weight = -6)
G.add_edge('b', 'd', weight = 1)
G.add_edge('d', 'c', weight = -2)
G.add_edge('d', 't', weight = 1, capacity = 3)
assert_raises(nx.NetworkXUnfeasible, nx.network_simplex, G)
assert_raises(nx.NetworkXUnbounded, nx.capacity_scaling, G)
def test_sum_demands_not_zero(self):
G = nx.DiGraph()
G.add_node('s', demand = -5)
G.add_node('t', demand = 4)
G.add_edge('s', 'a', weight = 1, capacity = 3)
G.add_edge('a', 'b', weight = 3)
G.add_edge('a', 'c', weight = -6)
G.add_edge('b', 'd', weight = 1)
G.add_edge('c', 'd', weight = -2)
G.add_edge('d', 't', weight = 1, capacity = 3)
assert_raises(nx.NetworkXUnfeasible, nx.network_simplex, G)
assert_raises(nx.NetworkXUnfeasible, nx.capacity_scaling, G)
def test_no_flow_satisfying_demands(self):
G = nx.DiGraph()
G.add_node('s', demand = -5)
G.add_node('t', demand = 5)
G.add_edge('s', 'a', weight = 1, capacity = 3)
G.add_edge('a', 'b', weight = 3)
G.add_edge('a', 'c', weight = -6)
G.add_edge('b', 'd', weight = 1)
G.add_edge('c', 'd', weight = -2)
G.add_edge('d', 't', weight = 1, capacity = 3)
assert_raises(nx.NetworkXUnfeasible, nx.network_simplex, G)
assert_raises(nx.NetworkXUnfeasible, nx.capacity_scaling, G)
def test_transshipment(self):
G = nx.DiGraph()
G.add_node('a', demand = 1)
G.add_node('b', demand = -2)
G.add_node('c', demand = -2)
G.add_node('d', demand = 3)
G.add_node('e', demand = -4)
G.add_node('f', demand = -4)
G.add_node('g', demand = 3)
G.add_node('h', demand = 2)
G.add_node('r', demand = 3)
G.add_edge('a', 'c', weight = 3)
G.add_edge('r', 'a', weight = 2)
G.add_edge('b', 'a', weight = 9)
G.add_edge('r', 'c', weight = 0)
G.add_edge('b', 'r', weight = -6)
G.add_edge('c', 'd', weight = 5)
G.add_edge('e', 'r', weight = 4)
G.add_edge('e', 'f', weight = 3)
G.add_edge('h', 'b', weight = 4)
G.add_edge('f', 'd', weight = 7)
G.add_edge('f', 'h', weight = 12)
G.add_edge('g', 'd', weight = 12)
G.add_edge('f', 'g', weight = -1)
G.add_edge('h', 'g', weight = -10)
flowCost, H = nx.network_simplex(G)
soln = {'a': {'c': 0},
'b': {'a': 0, 'r': 2},
'c': {'d': 3},
'd': {},
'e': {'r': 3, 'f': 1},
'f': {'d': 0, 'g': 3, 'h': 2},
'g': {'d': 0},
'h': {'b': 0, 'g': 0},
'r': {'a': 1, 'c': 1}}
assert_equal(flowCost, 41)
assert_equal(nx.min_cost_flow_cost(G), 41)
assert_equal(H, soln)
assert_equal(nx.min_cost_flow(G), soln)
assert_equal(nx.cost_of_flow(G, H), 41)
flowCost, H = nx.capacity_scaling(G)
assert_equal(flowCost, 41)
assert_equal(nx.cost_of_flow(G, H), 41)
assert_equal(H, soln)
def test_max_flow_min_cost(self):
G = nx.DiGraph()
G.add_edge('s', 'a', bandwidth = 6)
G.add_edge('s', 'c', bandwidth = 10, cost = 10)
G.add_edge('a', 'b', cost = 6)
G.add_edge('b', 'd', bandwidth = 8, cost = 7)
G.add_edge('c', 'd', cost = 10)
G.add_edge('d', 't', bandwidth = 5, cost = 5)
soln = {'s': {'a': 5, 'c': 0},
'a': {'b': 5},
'b': {'d': 5},
'c': {'d': 0},
'd': {'t': 5},
't': {}}
flow = nx.max_flow_min_cost(G, 's', 't', capacity = 'bandwidth',
weight = 'cost')
assert_equal(flow, soln)
assert_equal(nx.cost_of_flow(G, flow, weight = 'cost'), 90)
G.add_edge('t', 's', cost = -100)
flowCost, flow = nx.capacity_scaling(G, capacity = 'bandwidth',
weight = 'cost')
G.remove_edge('t', 's')
assert_equal(flowCost, -410)
assert_equal(flow['t']['s'], 5)
del flow['t']['s']
assert_equal(flow, soln)
assert_equal(nx.cost_of_flow(G, flow, weight = 'cost'), 90)
def test_digraph1(self):
# From Bradley, S. P., Hax, A. C. and Magnanti, T. L. Applied
# Mathematical Programming. Addison-Wesley, 1977.
G = nx.DiGraph()
G.add_node(1, demand = -20)
G.add_node(4, demand = 5)
G.add_node(5, demand = 15)
G.add_edges_from([(1, 2, {'capacity': 15, 'weight': 4}),
(1, 3, {'capacity': 8, 'weight': 4}),
(2, 3, {'weight': 2}),
(2, 4, {'capacity': 4, 'weight': 2}),
(2, 5, {'capacity': 10, 'weight': 6}),
(3, 4, {'capacity': 15, 'weight': 1}),
(3, 5, {'capacity': 5, 'weight': 3}),
(4, 5, {'weight': 2}),
(5, 3, {'capacity': 4, 'weight': 1})])
flowCost, H = nx.network_simplex(G)
soln = {1: {2: 12, 3: 8},
2: {3: 8, 4: 4, 5: 0},
3: {4: 11, 5: 5},
4: {5: 10},
5: {3: 0}}
assert_equal(flowCost, 150)
assert_equal(nx.min_cost_flow_cost(G), 150)
assert_equal(H, soln)
assert_equal(nx.min_cost_flow(G), soln)
assert_equal(nx.cost_of_flow(G, H), 150)
flowCost, H = nx.capacity_scaling(G)
assert_equal(flowCost, 150)
assert_equal(H, soln)
assert_equal(nx.cost_of_flow(G, H), 150)
def test_digraph2(self):
# Example from ticket #430 from mfrasca. Original source:
# http://www.cs.princeton.edu/courses/archive/spr03/cs226/lectures/mincost.4up.pdf, slide 11.
G = nx.DiGraph()
G.add_edge('s', 1, capacity=12)
G.add_edge('s', 2, capacity=6)
G.add_edge('s', 3, capacity=14)
G.add_edge(1, 2, capacity=11, weight=4)
G.add_edge(2, 3, capacity=9, weight=6)
G.add_edge(1, 4, capacity=5, weight=5)
G.add_edge(1, 5, capacity=2, weight=12)
G.add_edge(2, 5, capacity=4, weight=4)
G.add_edge(2, 6, capacity=2, weight=6)
G.add_edge(3, 6, capacity=31, weight=3)
G.add_edge(4, 5, capacity=18, weight=4)
G.add_edge(5, 6, capacity=9, weight=5)
G.add_edge(4, 't', capacity=3)
G.add_edge(5, 't', capacity=7)
G.add_edge(6, 't', capacity=22)
flow = nx.max_flow_min_cost(G, 's', 't')
soln = {1: {2: 6, 4: 5, 5: 1},
2: {3: 6, 5: 4, 6: 2},
3: {6: 20},
4: {5: 2, 't': 3},
5: {6: 0, 't': 7},
6: {'t': 22},
's': {1: 12, 2: 6, 3: 14},
't': {}}
assert_equal(flow, soln)
G.add_edge('t', 's', weight=-100)
flowCost, flow = nx.capacity_scaling(G)
G.remove_edge('t', 's')
assert_equal(flow['t']['s'], 32)
assert_equal(flowCost, -3007)
del flow['t']['s']
assert_equal(flow, soln)
assert_equal(nx.cost_of_flow(G, flow), 193)
def test_digraph3(self):
"""Combinatorial Optimization: Algorithms and Complexity,
Papadimitriou Steiglitz at page 140 has an example, 7.1, but that
admits multiple solutions, so I alter it a bit. From ticket #430
by mfrasca."""
G = nx.DiGraph()
G.add_edge('s', 'a')
G['s']['a'].update({0: 2, 1: 4})
G.add_edge('s', 'b')
G['s']['b'].update({0: 2, 1: 1})
G.add_edge('a', 'b')
G['a']['b'].update({0: 5, 1: 2})
G.add_edge('a', 't')
G['a']['t'].update({0: 1, 1: 5})
G.add_edge('b', 'a')
G['b']['a'].update({0: 1, 1: 3})
G.add_edge('b', 't')
G['b']['t'].update({0: 3, 1: 2})
"PS.ex.7.1: testing main function"
sol = nx.max_flow_min_cost(G, 's', 't', capacity=0, weight=1)
flow = sum(v for v in sol['s'].values())
assert_equal(4, flow)
assert_equal(23, nx.cost_of_flow(G, sol, weight=1))
assert_equal(sol['s'], {'a': 2, 'b': 2})
assert_equal(sol['a'], {'b': 1, 't': 1})
assert_equal(sol['b'], {'a': 0, 't': 3})
assert_equal(sol['t'], {})
G.add_edge('t', 's')
G['t']['s'].update({1: -100})
flowCost, sol = nx.capacity_scaling(G, capacity=0, weight=1)
G.remove_edge('t', 's')
flow = sum(v for v in sol['s'].values())
assert_equal(4, flow)
assert_equal(sol['t']['s'], 4)
assert_equal(flowCost, -377)
del sol['t']['s']
assert_equal(sol['s'], {'a': 2, 'b': 2})
assert_equal(sol['a'], {'b': 1, 't': 1})
assert_equal(sol['b'], {'a': 0, 't': 3})
assert_equal(sol['t'], {})
assert_equal(nx.cost_of_flow(G, sol, weight=1), 23)
def test_zero_capacity_edges(self):
"""Address issue raised in ticket #617 by arv."""
G = nx.DiGraph()
G.add_edges_from([(1, 2, {'capacity': 1, 'weight': 1}),
(1, 5, {'capacity': 1, 'weight': 1}),
(2, 3, {'capacity': 0, 'weight': 1}),
(2, 5, {'capacity': 1, 'weight': 1}),
(5, 3, {'capacity': 2, 'weight': 1}),
(5, 4, {'capacity': 0, 'weight': 1}),
(3, 4, {'capacity': 2, 'weight': 1})])
G.node[1]['demand'] = -1
G.node[2]['demand'] = -1
G.node[4]['demand'] = 2
flowCost, H = nx.network_simplex(G)
soln = {1: {2: 0, 5: 1},
2: {3: 0, 5: 1},
3: {4: 2},
4: {},
5: {3: 2, 4: 0}}
assert_equal(flowCost, 6)
assert_equal(nx.min_cost_flow_cost(G), 6)
assert_equal(H, soln)
assert_equal(nx.min_cost_flow(G), soln)
assert_equal(nx.cost_of_flow(G, H), 6)
flowCost, H = nx.capacity_scaling(G)
assert_equal(flowCost, 6)
assert_equal(H, soln)
assert_equal(nx.cost_of_flow(G, H), 6)
def test_digon(self):
"""Check if digons are handled properly. Taken from ticket
#618 by arv."""
nodes = [(1, {}),
(2, {'demand': -4}),
(3, {'demand': 4}),
]
edges = [(1, 2, {'capacity': 3, 'weight': 600000}),
(2, 1, {'capacity': 2, 'weight': 0}),
(2, 3, {'capacity': 5, 'weight': 714285}),
(3, 2, {'capacity': 2, 'weight': 0}),
]
G = nx.DiGraph(edges)
G.add_nodes_from(nodes)
flowCost, H = nx.network_simplex(G)
soln = {1: {2: 0},
2: {1: 0, 3: 4},
3: {2: 0}}
assert_equal(flowCost, 2857140)
assert_equal(nx.min_cost_flow_cost(G), 2857140)
assert_equal(H, soln)
assert_equal(nx.min_cost_flow(G), soln)
assert_equal(nx.cost_of_flow(G, H), 2857140)
flowCost, H = nx.capacity_scaling(G)
assert_equal(flowCost, 2857140)
assert_equal(H, soln)
assert_equal(nx.cost_of_flow(G, H), 2857140)
def test_infinite_capacity_neg_digon(self):
"""An infinite capacity negative cost digon results in an unbounded
instance."""
nodes = [(1, {}),
(2, {'demand': -4}),
(3, {'demand': 4}),
]
edges = [(1, 2, {'weight': -600}),
(2, 1, {'weight': 0}),
(2, 3, {'capacity': 5, 'weight': 714285}),
(3, 2, {'capacity': 2, 'weight': 0}),
]
G = nx.DiGraph(edges)
G.add_nodes_from(nodes)
assert_raises(nx.NetworkXUnbounded, nx.network_simplex, G)
assert_raises(nx.NetworkXUnbounded, nx.capacity_scaling, G)
def test_finite_capacity_neg_digon(self):
"""The digon should receive the maximum amount of flow it can handle.
Taken from ticket #749 by @chuongdo."""
G = nx.DiGraph()
G.add_edge('a', 'b', capacity=1, weight=-1)
G.add_edge('b', 'a', capacity=1, weight=-1)
min_cost = -2
assert_equal(nx.min_cost_flow_cost(G), min_cost)
flowCost, H = nx.capacity_scaling(G)
assert_equal(flowCost, -2)
assert_equal(H, {'a': {'b': 1}, 'b': {'a': 1}})
assert_equal(nx.cost_of_flow(G, H), -2)
def test_multidigraph(self):
"""Multidigraphs are acceptable."""
G = nx.MultiDiGraph()
G.add_weighted_edges_from([(1, 2, 1), (2, 3, 2)], weight='capacity')
flowCost, H = nx.network_simplex(G)
assert_equal(flowCost, 0)
assert_equal(H, {1: {2: {0: 0}}, 2: {3: {0: 0}}, 3: {}})
flowCost, H = nx.capacity_scaling(G)
assert_equal(flowCost, 0)
assert_equal(H, {1: {2: {0: 0}}, 2: {3: {0: 0}}, 3: {}})
def test_negative_selfloops(self):
"""Negative selfloops should cause an exception if uncapacitated and
always be saturated otherwise.
"""
G = nx.DiGraph()
G.add_edge(1, 1, weight=-1)
assert_raises(nx.NetworkXUnbounded, nx.network_simplex, G)
assert_raises(nx.NetworkXUnbounded, nx.capacity_scaling, G)
G[1][1]['capacity'] = 2
flowCost, H = nx.network_simplex(G)
assert_equal(flowCost, -2)
assert_equal(H, {1: {1: 2}})
flowCost, H = nx.capacity_scaling(G)
assert_equal(flowCost, -2)
assert_equal(H, {1: {1: 2}})
G = nx.MultiDiGraph()
G.add_edge(1, 1, 'x', weight=-1)
G.add_edge(1, 1, 'y', weight=1)
assert_raises(nx.NetworkXUnbounded, nx.network_simplex, G)
assert_raises(nx.NetworkXUnbounded, nx.capacity_scaling, G)
G[1][1]['x']['capacity'] = 2
flowCost, H = nx.network_simplex(G)
assert_equal(flowCost, -2)
assert_equal(H, {1: {1: {'x': 2, 'y': 0}}})
flowCost, H = nx.capacity_scaling(G)
assert_equal(flowCost, -2)
assert_equal(H, {1: {1: {'x': 2, 'y': 0}}})
def test_bone_shaped(self):
# From #1283
G = nx.DiGraph()
G.add_node(0, demand=-4)
G.add_node(1, demand=2)
G.add_node(2, demand=2)
G.add_node(3, demand=4)
G.add_node(4, demand=-2)
G.add_node(5, demand=-2)
G.add_edge(0, 1, capacity=4)
G.add_edge(0, 2, capacity=4)
G.add_edge(4, 3, capacity=4)
G.add_edge(5, 3, capacity=4)
G.add_edge(0, 3, capacity=0)
flowCost, H = nx.network_simplex(G)
assert_equal(flowCost, 0)
assert_equal(
H, {0: {1: 2, 2: 2, 3: 0}, 1: {}, 2: {}, 3: {}, 4: {3: 2}, 5: {3: 2}})
flowCost, H = nx.capacity_scaling(G)
assert_equal(flowCost, 0)
assert_equal(
H, {0: {1: 2, 2: 2, 3: 0}, 1: {}, 2: {}, 3: {}, 4: {3: 2}, 5: {3: 2}})
def test_exceptions(self):
G = nx.Graph()
assert_raises(nx.NetworkXNotImplemented, nx.network_simplex, G)
assert_raises(nx.NetworkXNotImplemented, nx.capacity_scaling, G)
G = nx.MultiGraph()
assert_raises(nx.NetworkXNotImplemented, nx.network_simplex, G)
assert_raises(nx.NetworkXNotImplemented, nx.capacity_scaling, G)
G = nx.DiGraph()
assert_raises(nx.NetworkXError, nx.network_simplex, G)
assert_raises(nx.NetworkXError, nx.capacity_scaling, G)
G.add_node(0, demand=float('inf'))
assert_raises(nx.NetworkXError, nx.network_simplex, G)
assert_raises(nx.NetworkXUnfeasible, nx.capacity_scaling, G)
G.node[0]['demand'] = 0
G.add_node(1, demand=0)
G.add_edge(0, 1, weight=-float('inf'))
assert_raises(nx.NetworkXError, nx.network_simplex, G)
assert_raises(nx.NetworkXUnfeasible, nx.capacity_scaling, G)
G[0][1]['weight'] = 0
G.add_edge(0, 0, weight=float('inf'))
assert_raises(nx.NetworkXError, nx.network_simplex, G)
#assert_raises(nx.NetworkXError, nx.capacity_scaling, G)
G[0][0]['weight'] = 0
G[0][1]['capacity'] = -1
assert_raises(nx.NetworkXUnfeasible, nx.network_simplex, G)
#assert_raises(nx.NetworkXUnfeasible, nx.capacity_scaling, G)
G[0][1]['capacity'] = 0
G[0][0]['capacity'] = -1
assert_raises(nx.NetworkXUnfeasible, nx.network_simplex, G)
#assert_raises(nx.NetworkXUnfeasible, nx.capacity_scaling, G)
def test_large(self):
fname = os.path.join(os.path.dirname(__file__), 'netgen-2.gpickle.bz2')
G = nx.read_gpickle(fname)
flowCost, flowDict = nx.network_simplex(G)
assert_equal(6749969302, flowCost)
assert_equal(6749969302, nx.cost_of_flow(G, flowDict))
flowCost, flowDict = nx.capacity_scaling(G)
assert_equal(6749969302, flowCost)
assert_equal(6749969302, nx.cost_of_flow(G, flowDict))
| 40.017467
| 101
| 0.509712
|
fdfd371002b3718846c9a441c3bb9e39a9b821d4
| 1,235
|
py
|
Python
|
zeus/api/resources/test_details.py
|
edgerepo/zeus
|
aea557bde167e95b505a42877422a652baee14c9
|
[
"Apache-2.0"
] | null | null | null |
zeus/api/resources/test_details.py
|
edgerepo/zeus
|
aea557bde167e95b505a42877422a652baee14c9
|
[
"Apache-2.0"
] | null | null | null |
zeus/api/resources/test_details.py
|
edgerepo/zeus
|
aea557bde167e95b505a42877422a652baee14c9
|
[
"Apache-2.0"
] | null | null | null |
from flask import Response
from sqlalchemy.orm import joinedload, undefer
from zeus.config import db
from zeus.models import TestCase
from .base import Resource
from ..schemas import TestCaseSchema
testcase_schema = TestCaseSchema(strict=True)
class TestDetailsResource(Resource):
def dispatch_request(self, test_id: str, *args, **kwargs) -> Response:
test = TestCase.query.options(undefer("message"), joinedload("job")).get(
test_id
)
if not test:
return self.not_found()
return Resource.dispatch_request(self, test, *args, **kwargs)
def get(self, test: TestCase):
"""
Return a test.
"""
return self.respond_with_schema(testcase_schema, test)
def put(self, test: TestCase):
"""
Update a test.
"""
result = self.schema_from_request(testcase_schema, partial=True)
if result.errors:
return self.respond(result.errors, 403)
for key, value in result.data.items():
if getattr(test, key) != value:
setattr(test, key, value)
db.session.add(test)
db.session.commit()
return self.respond_with_schema(testcase_schema, test)
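# Illustrative request flow (names are assumptions, not from the source):
# a client might send PUT /tests/<test_id> with a JSON body such as
# {"result": "passed"}; only fields that differ from the stored TestCase
# are applied, and the updated row is serialized back via TestCaseSchema.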
| 28.72093
| 81
| 0.637247
|
5c8cf65054782518772c5caf42acaf56ef09305f
| 1,499
|
py
|
Python
|
migrations/005_make_end_time_nullable.py
|
mritsurgeon/frigate
|
bfecee9650f7aa962c3222ed4de466f8c4acdefe
|
[
"MIT"
] | 1
|
2022-02-23T00:01:24.000Z
|
2022-02-23T00:01:24.000Z
|
migrations/005_make_end_time_nullable.py
|
mritsurgeon/frigate
|
bfecee9650f7aa962c3222ed4de466f8c4acdefe
|
[
"MIT"
] | null | null | null |
migrations/005_make_end_time_nullable.py
|
mritsurgeon/frigate
|
bfecee9650f7aa962c3222ed4de466f8c4acdefe
|
[
"MIT"
] | null | null | null |
"""Peewee migrations -- 004_add_bbox_region_area.py.
Some examples (model - class or model name)::
> Model = migrator.orm['model_name'] # Return model in current state by name
> migrator.sql(sql) # Run custom SQL
> migrator.python(func, *args, **kwargs) # Run python code
> migrator.create_model(Model) # Create a model (could be used as decorator)
> migrator.remove_model(model, cascade=True) # Remove a model
> migrator.add_fields(model, **fields) # Add fields to a model
> migrator.change_fields(model, **fields) # Change fields
> migrator.remove_fields(model, *field_names, cascade=True)
> migrator.rename_field(model, old_field_name, new_field_name)
> migrator.rename_table(model, new_table_name)
> migrator.add_index(model, *col_names, unique=False)
> migrator.drop_index(model, *col_names)
> migrator.add_not_null(model, *field_names)
> migrator.drop_not_null(model, *field_names)
> migrator.add_default(model, field_name, default)
"""
import datetime as dt
import peewee as pw
from playhouse.sqlite_ext import *
from decimal import ROUND_HALF_EVEN
from frigate.models import Event
try:
import playhouse.postgres_ext as pw_pext
except ImportError:
pass
SQL = pw.SQL
def migrate(migrator, database, fake=False, **kwargs):
migrator.drop_not_null(Event, "end_time")
def rollback(migrator, database, fake=False, **kwargs):
pass
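# Note (a hedged sketch, not part of the migration): a symmetric rollback
# would restore the constraint with migrator.add_not_null(Event, "end_time"),
# but rows holding a NULL end_time would first need to be backfilled.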
| 34.068182
| 97
| 0.691127
|
d88fe1108666843db9b345c03945083d38223436
| 11,516
|
py
|
Python
|
website/addons/zotero/tests/test_models.py
|
lbanner/osf.io
|
1898ef0ff8bd91713e94c60e7463b5f81ac62caa
|
[
"Apache-2.0"
] | null | null | null |
website/addons/zotero/tests/test_models.py
|
lbanner/osf.io
|
1898ef0ff8bd91713e94c60e7463b5f81ac62caa
|
[
"Apache-2.0"
] | null | null | null |
website/addons/zotero/tests/test_models.py
|
lbanner/osf.io
|
1898ef0ff8bd91713e94c60e7463b5f81ac62caa
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
import mock
from nose.tools import * # noqa
from framework.exceptions import PermissionsError
from tests.base import OsfTestCase
from tests.factories import UserFactory, ProjectFactory
from website.addons.zotero.tests.factories import (
ZoteroAccountFactory, ZoteroUserSettingsFactory,
ExternalAccountFactory,
ZoteroNodeSettingsFactory
)
from website.addons.zotero.provider import ZoteroCitationsProvider
import datetime
from website.addons.zotero import model
class ZoteroProviderTestCase(OsfTestCase):
def setUp(self):
super(ZoteroProviderTestCase, self).setUp()
self.provider = model.Zotero()
def test_handle_callback(self):
mock_response = {
'userID': 'Fake User ID',
'username': 'Fake User Name',
}
res = self.provider.handle_callback(mock_response)
assert_equal(res.get('display_name'), 'Fake User Name')
assert_equal(res.get('provider_id'), 'Fake User ID')
def test_citation_lists(self):
mock_client = mock.Mock()
mock_folders = [
{
'data': {
'name': 'Fake Folder',
'key': 'Fake Key',
}
}
]
mock_client.collections.return_value = mock_folders
self.provider._client = mock_client
mock_account = mock.Mock()
self.provider.account = mock_account
res = self.provider.citation_lists(ZoteroCitationsProvider()._extract_folder)
assert_equal(
res[1]['name'],
'Fake Folder'
)
assert_equal(
res[1]['id'],
'Fake Key'
)
class ZoteroNodeSettingsTestCase(OsfTestCase):
def setUp(self):
super(ZoteroNodeSettingsTestCase, self).setUp()
self.node = ProjectFactory()
self.node_settings = model.ZoteroNodeSettings(owner=self.node)
self.node_settings.save()
self.user = self.node.creator
self.user_settings = self.user.get_or_add_addon('zotero')
def tearDown(self):
super(ZoteroNodeSettingsTestCase, self).tearDown()
self.user_settings.remove()
self.node_settings.remove()
self.node.remove()
self.user.remove()
@mock.patch('website.addons.zotero.model.Zotero')
def test_api_not_cached(self, mock_zotero):
# The first call to .api returns a new object
api = self.node_settings.api
mock_zotero.assert_called_once()
assert_equal(api, mock_zotero())
@mock.patch('website.addons.zotero.model.Zotero')
def test_api_cached(self, mock_zotero):
# Repeated calls to .api returns the same object
self.node_settings._api = 'testapi'
api = self.node_settings.api
assert_false(mock_zotero.called)
assert_equal(api, 'testapi')
def test_set_auth(self):
external_account = ExternalAccountFactory()
self.user.external_accounts.append(external_account)
self.user.save()
# this should be reset after the call
self.node_settings.zotero_list_id = 'anything'
self.node_settings.set_auth(
external_account=external_account,
user=self.user
)
# this instance is updated
assert_equal(
self.node_settings.external_account,
external_account
)
assert_equal(
self.node_settings.user_settings,
self.user_settings
)
assert_is_none(
self.node_settings.zotero_list_id
)
# user_settings was updated
# TODO: The call to grant_oauth_access in set_auth should be mocked
assert_true(
self.user_settings.verify_oauth_access(
node=self.node,
external_account=external_account,
)
)
def test_set_auth_wrong_user(self):
external_account = ExternalAccountFactory()
self.user.external_accounts.append(external_account)
self.user.save()
with assert_raises(PermissionsError):
self.node_settings.set_auth(
external_account=external_account,
user=UserFactory()
)
def test_clear_auth(self):
self.node_settings.external_account = ExternalAccountFactory()
self.node_settings.zotero_list_id = 'something'
self.node_settings.user_settings = self.user_settings
self.node_settings.save()
self.node_settings.clear_auth()
assert_is_none(self.node_settings.external_account)
assert_is_none(self.node_settings.zotero_list_id)
assert_is_none(self.node_settings.user_settings)
def test_set_target_folder(self):
external_account = ExternalAccountFactory()
self.user.external_accounts.append(external_account)
self.user.save()
self.node_settings.set_auth(
external_account=external_account,
user=self.user
)
assert_is_none(self.node_settings.zotero_list_id)
self.node_settings.set_target_folder('fake-folder-id')
# instance was updated
assert_equal(
self.node_settings.zotero_list_id,
'fake-folder-id',
)
# user_settings was updated
# TODO: the call to grant_oauth_access should be mocked
assert_true(
self.user_settings.verify_oauth_access(
node=self.node,
external_account=external_account,
metadata={'folder': 'fake-folder-id'}
)
)
def test_has_auth_false(self):
external_account = ExternalAccountFactory()
assert_false(self.node_settings.has_auth)
# both external_account and user_settings must be set to have auth
self.node_settings.external_account = external_account
assert_false(self.node_settings.has_auth)
self.node_settings.external_account = None
self.node_settings.user_settings = self.user_settings
assert_false(self.node_settings.has_auth)
# set_auth must be called to have auth
self.node_settings.external_account = external_account
self.node_settings.user_settings = self.user_settings
assert_false(self.node_settings.has_auth)
def test_has_auth_true(self):
external_account = ExternalAccountFactory()
self.user.external_accounts.append(external_account)
self.node_settings.set_auth(external_account, self.user)
# zotero_list_id should have no effect
self.node_settings.zotero_list_id = None
assert_true(self.node_settings.has_auth)
# zotero_list_id should have no effect
self.node_settings.zotero_list_id = 'totally fake ID'
assert_true(self.node_settings.has_auth)
def test_selected_folder_name_root(self):
self.node_settings.zotero_list_id = 'ROOT'
assert_equal(
self.node_settings.selected_folder_name,
"All Documents"
)
def test_selected_folder_name_empty(self):
self.node_settings.zotero_list_id = None
assert_equal(
self.node_settings.selected_folder_name,
''
)
@mock.patch('website.addons.zotero.model.Zotero._folder_metadata')
def test_selected_folder_name(self, mock_folder_metadata):
# Mock the return from api call to get the folder's name
mock_folder = {'data': {'name': 'Fake Folder'}}
# Add the mocked return object to the mocked api client
mock_folder_metadata.return_value = mock_folder
self.node_settings.zotero_list_id = 'fake-list-id'
assert_equal(
self.node_settings.selected_folder_name,
'Fake Folder'
)
class ZoteroUserSettingsTestCase(OsfTestCase):
def test_get_connected_accounts(self):
# Get all Zotero accounts for user
user_accounts = [ZoteroAccountFactory(), ZoteroAccountFactory()]
user = UserFactory(external_accounts=user_accounts)
user_addon = ZoteroUserSettingsFactory(owner=user)
assert_equal(user_addon._get_connected_accounts(), user_accounts)
def test_to_json(self):
# All values are passed to the user settings view
user_accounts = [ZoteroAccountFactory(), ZoteroAccountFactory()]
user = UserFactory(external_accounts=user_accounts)
user_addon = ZoteroUserSettingsFactory(owner=user)
res = user_addon.to_json(user)
for account in user_accounts:
assert_in(
{
'id': account._id,
'provider_id': account.provider_id,
'display_name': account.display_name
},
res['accounts'],
)
def _prep_oauth_case(self):
self.node = ProjectFactory()
self.user = self.node.creator
self.external_account = ExternalAccountFactory()
self.user.external_accounts.append(self.external_account)
self.user.save()
self.user_settings = self.user.get_or_add_addon('zotero')
def test_grant_oauth_access_no_metadata(self):
self._prep_oauth_case()
self.user_settings.grant_oauth_access(
node=self.node,
external_account=self.external_account,
)
self.user_settings.save()
assert_equal(
self.user_settings.oauth_grants,
{self.node._id: {self.external_account._id: {}}},
)
def test_grant_oauth_access_metadata(self):
self._prep_oauth_case()
self.user_settings.grant_oauth_access(
node=self.node,
external_account=self.external_account,
metadata={'folder': 'fake_folder_id'}
)
self.user_settings.save()
assert_equal(
self.user_settings.oauth_grants,
{
self.node._id: {
self.external_account._id: {'folder': 'fake_folder_id'}
},
}
)
def test_verify_oauth_access_no_metadata(self):
self._prep_oauth_case()
self.user_settings.grant_oauth_access(
node=self.node,
external_account=self.external_account,
)
self.user_settings.save()
assert_true(
self.user_settings.verify_oauth_access(
node=self.node,
external_account=self.external_account
)
)
assert_false(
self.user_settings.verify_oauth_access(
node=self.node,
external_account=ExternalAccountFactory()
)
)
def test_verify_oauth_access_metadata(self):
self._prep_oauth_case()
self.user_settings.grant_oauth_access(
node=self.node,
external_account=self.external_account,
metadata={'folder': 'fake_folder_id'}
)
self.user_settings.save()
assert_true(
self.user_settings.verify_oauth_access(
node=self.node,
external_account=self.external_account,
metadata={'folder': 'fake_folder_id'}
)
)
assert_false(
self.user_settings.verify_oauth_access(
node=self.node,
external_account=self.external_account,
metadata={'folder': 'another_folder_id'}
)
)
| 31.378747
| 85
| 0.633293
|
d723e47448ec2194d9fe5d5625cd517bb1d05547
| 77,110
|
py
|
Python
|
deps/python/2.7/Lib/lib-tk/Tix.py
|
eljefedelrodeodeljefe/node-cpython
|
a6e5414fa4c089c30135c3a7db3eaf1e1d600f68
|
[
"MIT"
] | 13
|
2015-11-12T14:43:03.000Z
|
2021-04-30T07:02:11.000Z
|
deps/python/2.7/Lib/lib-tk/Tix.py
|
eljefedelrodeodeljefe/node-cpython
|
a6e5414fa4c089c30135c3a7db3eaf1e1d600f68
|
[
"MIT"
] | 5
|
2015-05-23T13:07:01.000Z
|
2016-01-06T16:23:05.000Z
|
deps/python/2.7/Lib/lib-tk/Tix.py
|
eljefedelrodeodeljefe/node-cpython
|
a6e5414fa4c089c30135c3a7db3eaf1e1d600f68
|
[
"MIT"
] | 5
|
2015-10-30T21:25:24.000Z
|
2017-03-25T15:50:55.000Z
|
# -*-mode: python; fill-column: 75; tab-width: 8; coding: iso-latin-1-unix -*-
#
# $Id$
#
# Tix.py -- Tix widget wrappers.
#
# For Tix, see http://tix.sourceforge.net
#
# - Sudhir Shenoy (sshenoy@gol.com), Dec. 1995.
# based on an idea of Jean-Marc Lugrin (lugrin@ms.com)
#
# NOTE: In order to minimize changes to Tkinter.py, some of the code here
# (TixWidget.__init__) has been taken from Tkinter (Widget.__init__)
# and will break if there are major changes in Tkinter.
#
# The Tix widgets are represented by a class hierarchy in python with proper
# inheritance of base classes.
#
# As a result after creating a 'w = StdButtonBox', I can write
# w.ok['text'] = 'Who Cares'
# or w.ok['bg'] = w['bg']
# or even w.ok.invoke()
# etc.
#
# Compare the demo tixwidgets.py to the original Tcl program and you will
# appreciate the advantages.
#
from Tkinter import *
from Tkinter import _flatten, _cnfmerge, _default_root
# WARNING - TkVersion is a limited precision floating point number
if TkVersion < 3.999:
raise ImportError, "This version of Tix.py requires Tk 4.0 or higher"
import _tkinter # If this fails your Python may not be configured for Tk
# Some more constants (for consistency with Tkinter)
WINDOW = 'window'
TEXT = 'text'
STATUS = 'status'
IMMEDIATE = 'immediate'
IMAGE = 'image'
IMAGETEXT = 'imagetext'
BALLOON = 'balloon'
AUTO = 'auto'
ACROSSTOP = 'acrosstop'
# A few useful constants for the Grid widget
ASCII = 'ascii'
CELL = 'cell'
COLUMN = 'column'
DECREASING = 'decreasing'
INCREASING = 'increasing'
INTEGER = 'integer'
MAIN = 'main'
MAX = 'max'
REAL = 'real'
ROW = 'row'
S_REGION = 's-region'
X_REGION = 'x-region'
Y_REGION = 'y-region'
# Some constants used by Tkinter dooneevent()
TCL_DONT_WAIT = 1 << 1
TCL_WINDOW_EVENTS = 1 << 2
TCL_FILE_EVENTS = 1 << 3
TCL_TIMER_EVENTS = 1 << 4
TCL_IDLE_EVENTS = 1 << 5
TCL_ALL_EVENTS = 0
# BEWARE - this is implemented by copying some code from the Widget class
# in Tkinter (to override Widget initialization) and is therefore
# liable to break.
import Tkinter, os
# Could probably add this to Tkinter.Misc
class tixCommand:
"""The tix commands provide access to miscellaneous elements
of Tix's internal state and the Tix application context.
Most of the information manipulated by these commands pertains
to the application as a whole, or to a screen or
display, rather than to a particular window.
This is a mixin class, assumed to be mixed to Tkinter.Tk
that supports the self.tk.call method.
"""
def tix_addbitmapdir(self, directory):
"""Tix maintains a list of directories under which
the tix_getimage and tix_getbitmap commands will
search for image files. The standard bitmap directory
is $TIX_LIBRARY/bitmaps. The addbitmapdir command
adds directory into this list. By using this
command, the image files of an application can
also be located using the tix_getimage or tix_getbitmap
command.
"""
return self.tk.call('tix', 'addbitmapdir', directory)
def tix_cget(self, option):
"""Returns the current value of the configuration
option given by option. Option may be any of the
options described in the CONFIGURATION OPTIONS section.
"""
return self.tk.call('tix', 'cget', option)
def tix_configure(self, cnf=None, **kw):
"""Query or modify the configuration options of the Tix application
context. If no option is specified, returns a dictionary of all the
available options. If option is specified with no value, then the
command returns a list describing the one named option (this list
will be identical to the corresponding sublist of the value
returned if no option is specified). If one or more option-value
pairs are specified, then the command modifies the given option(s)
to have the given value(s); in this case the command returns an
empty string. Option may be any of the configuration options.
"""
# Copied from Tkinter.py
if kw:
cnf = _cnfmerge((cnf, kw))
elif cnf:
cnf = _cnfmerge(cnf)
if cnf is None:
return self._getconfigure('tix', 'configure')
if isinstance(cnf, StringType):
return self._getconfigure1('tix', 'configure', '-'+cnf)
return self.tk.call(('tix', 'configure') + self._options(cnf))
def tix_filedialog(self, dlgclass=None):
"""Returns the file selection dialog that may be shared among
different calls from this application. This command will create a
file selection dialog widget when it is called the first time. This
dialog will be returned by all subsequent calls to tix_filedialog.
An optional dlgclass parameter can be passed to specify what type
of file selection dialog widget is desired. Possible options are
tixFileSelectDialog or tixExFileSelectDialog.
"""
if dlgclass is not None:
return self.tk.call('tix', 'filedialog', dlgclass)
else:
return self.tk.call('tix', 'filedialog')
def tix_getbitmap(self, name):
"""Locates a bitmap file of the name name.xpm or name in one of the
bitmap directories (see the tix_addbitmapdir command above). By
using tix_getbitmap, you can avoid hard coding the pathnames of the
bitmap files in your application. When successful, it returns the
complete pathname of the bitmap file, prefixed with the character
'@'. The returned value can be used to configure the -bitmap
option of the TK and Tix widgets.
"""
return self.tk.call('tix', 'getbitmap', name)
def tix_getimage(self, name):
"""Locates an image file of the name name.xpm, name.xbm or name.ppm
in one of the bitmap directories (see the addbitmapdir command
above). If more than one file with the same name (but different
extensions) exist, then the image type is chosen according to the
depth of the X display: xbm images are chosen on monochrome
displays and color images are chosen on color displays. By using
tix_getimage, you can avoid hard coding the pathnames of the
image files in your application. When successful, this command
returns the name of the newly created image, which can be used to
configure the -image option of the Tk and Tix widgets.
"""
return self.tk.call('tix', 'getimage', name)
def tix_option_get(self, name):
"""Gets the options maintained by the Tix
scheme mechanism. Available options include:
active_bg active_fg bg
bold_font dark1_bg dark1_fg
dark2_bg dark2_fg disabled_fg
fg fixed_font font
inactive_bg inactive_fg input1_bg
input2_bg italic_font light1_bg
light1_fg light2_bg light2_fg
menu_font output1_bg output2_bg
select_bg select_fg selector
"""
# could use self.tk.globalgetvar('tixOption', name)
return self.tk.call('tix', 'option', 'get', name)
def tix_resetoptions(self, newScheme, newFontSet, newScmPrio=None):
"""Resets the scheme and fontset of the Tix application to
newScheme and newFontSet, respectively. This affects only those
widgets created after this call. Therefore, it is best to call the
resetoptions command before the creation of any widgets in a Tix
application.
The optional parameter newScmPrio can be given to reset the
priority level of the Tk options set by the Tix schemes.
Because of the way Tk handles the X option database, after Tix has
been imported and initialized, it is not possible to reset the color
schemes and font sets using the tix config command. Instead, the
tix_resetoptions command must be used.
"""
if newScmPrio is not None:
return self.tk.call('tix', 'resetoptions', newScheme, newFontSet, newScmPrio)
else:
return self.tk.call('tix', 'resetoptions', newScheme, newFontSet)
class Tk(Tkinter.Tk, tixCommand):
"""Toplevel widget of Tix which represents mostly the main window
of an application. It has an associated Tcl interpreter."""
def __init__(self, screenName=None, baseName=None, className='Tix'):
Tkinter.Tk.__init__(self, screenName, baseName, className)
tixlib = os.environ.get('TIX_LIBRARY')
self.tk.eval('global auto_path; lappend auto_path [file dir [info nameof]]')
if tixlib is not None:
self.tk.eval('global auto_path; lappend auto_path {%s}' % tixlib)
self.tk.eval('global tcl_pkgPath; lappend tcl_pkgPath {%s}' % tixlib)
# Load Tix - this should work dynamically or statically
# If it's static, tcl/tix8.1/pkgIndex.tcl should have
# 'load {} Tix'
# If it's dynamic under Unix, tcl/tix8.1/pkgIndex.tcl should have
# 'load libtix8.1.8.3.so Tix'
self.tk.eval('package require Tix')
def destroy(self):
# For safety, remove the delete_window binding before destroy
self.protocol("WM_DELETE_WINDOW", "")
Tkinter.Tk.destroy(self)
# The Tix 'tixForm' geometry manager
class Form:
"""The Tix Form geometry manager
Widgets can be arranged by specifying attachments to other widgets.
See Tix documentation for complete details"""
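    # A hedged example of Form attachments (widget names are illustrative):
    #
    #   btn = Tix.Button(root, text='OK')
    #   btn.form(top='%0', left='%0', right='%50')  # attach to parent edges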
def config(self, cnf={}, **kw):
self.tk.call('tixForm', self._w, *self._options(cnf, kw))
form = config
def __setitem__(self, key, value):
Form.form(self, {key: value})
def check(self):
return self.tk.call('tixForm', 'check', self._w)
def forget(self):
self.tk.call('tixForm', 'forget', self._w)
def grid(self, xsize=0, ysize=0):
if (not xsize) and (not ysize):
x = self.tk.call('tixForm', 'grid', self._w)
y = self.tk.splitlist(x)
z = ()
for x in y:
z = z + (self.tk.getint(x),)
return z
return self.tk.call('tixForm', 'grid', self._w, xsize, ysize)
def info(self, option=None):
if not option:
return self.tk.call('tixForm', 'info', self._w)
if option[0] != '-':
option = '-' + option
return self.tk.call('tixForm', 'info', self._w, option)
def slaves(self):
return map(self._nametowidget,
self.tk.splitlist(
self.tk.call(
'tixForm', 'slaves', self._w)))
Tkinter.Widget.__bases__ = Tkinter.Widget.__bases__ + (Form,)
class TixWidget(Tkinter.Widget):
"""A TixWidget class is used to package all (or most) Tix widgets.
Widget initialization is extended in two ways:
1) It is possible to give a list of options which must be part of
the creation command (so called Tix 'static' options). These cannot be
given as a 'config' command later.
2) It is possible to give the name of an existing TK widget. These are
child widgets created automatically by a Tix mega-widget. The Tk call
to create these widgets is therefore bypassed in TixWidget.__init__
Both options are for use by subclasses only.
"""
def __init__ (self, master=None, widgetName=None,
static_options=None, cnf={}, kw={}):
# Merge keywords and dictionary arguments
if kw:
cnf = _cnfmerge((cnf, kw))
else:
cnf = _cnfmerge(cnf)
# Move static options into extra. static_options must be
# a list of keywords (or None).
extra=()
# 'options' is always a static option
if static_options:
static_options.append('options')
else:
static_options = ['options']
for k,v in cnf.items()[:]:
if k in static_options:
extra = extra + ('-' + k, v)
del cnf[k]
self.widgetName = widgetName
Widget._setup(self, master, cnf)
# If widgetName is None, this is a dummy creation call where the
# corresponding Tk widget has already been created by Tix
if widgetName:
self.tk.call(widgetName, self._w, *extra)
# Non-static options - to be done via a 'config' command
if cnf:
Widget.config(self, cnf)
# Dictionary to hold subwidget names for easier access. We can't
# use the children list because the public Tix names may not be the
# same as the pathname component
self.subwidget_list = {}
# We set up an attribute access function so that it is possible to
# do w.ok['text'] = 'Hello' rather than w.subwidget('ok')['text'] = 'Hello'
# when w is a StdButtonBox.
# We can even do w.ok.invoke() because w.ok is subclassed from the
# Button class if you go through the proper constructors
def __getattr__(self, name):
if name in self.subwidget_list:
return self.subwidget_list[name]
raise AttributeError, name
def set_silent(self, value):
"""Set a variable without calling its action routine"""
self.tk.call('tixSetSilent', self._w, value)
def subwidget(self, name):
"""Return the named subwidget (which must have been created by
the sub-class)."""
n = self._subwidget_name(name)
if not n:
raise TclError, "Subwidget " + name + " not child of " + self._name
# Remove header of name and leading dot
n = n[len(self._w)+1:]
return self._nametowidget(n)
def subwidgets_all(self):
"""Return all subwidgets."""
names = self._subwidget_names()
if not names:
return []
retlist = []
for name in names:
name = name[len(self._w)+1:]
try:
retlist.append(self._nametowidget(name))
except:
# some of the widgets are unknown e.g. border in LabelFrame
pass
return retlist
def _subwidget_name(self,name):
"""Get a subwidget name (returns a String, not a Widget !)"""
try:
return self.tk.call(self._w, 'subwidget', name)
except TclError:
return None
def _subwidget_names(self):
"""Return the name of all subwidgets."""
try:
x = self.tk.call(self._w, 'subwidgets', '-all')
return self.tk.splitlist(x)
except TclError:
return None
def config_all(self, option, value):
"""Set configuration options for all subwidgets (and self)."""
if option == '':
return
elif not isinstance(option, StringType):
option = repr(option)
if not isinstance(value, StringType):
value = repr(value)
names = self._subwidget_names()
for name in names:
self.tk.call(name, 'configure', '-' + option, value)
# These are missing from Tkinter
def image_create(self, imgtype, cnf={}, master=None, **kw):
if not master:
master = Tkinter._default_root
if not master:
raise RuntimeError, 'Too early to create image'
if kw and cnf: cnf = _cnfmerge((cnf, kw))
elif kw: cnf = kw
options = ()
for k, v in cnf.items():
if hasattr(v, '__call__'):
v = self._register(v)
options = options + ('-'+k, v)
return master.tk.call(('image', 'create', imgtype,) + options)
def image_delete(self, imgname):
try:
self.tk.call('image', 'delete', imgname)
except TclError:
# May happen if the root was destroyed
pass
# Subwidgets are child widgets created automatically by mega-widgets.
# In python, we have to create these subwidgets manually to mirror their
# existence in Tk/Tix.
class TixSubWidget(TixWidget):
"""Subwidget class.
This is used to mirror child widgets automatically created
by Tix/Tk as part of a mega-widget in Python (which is not informed
of this)"""
def __init__(self, master, name,
destroy_physically=1, check_intermediate=1):
if check_intermediate:
path = master._subwidget_name(name)
try:
path = path[len(master._w)+1:]
plist = path.split('.')
except:
plist = []
if not check_intermediate:
# immediate descendant
TixWidget.__init__(self, master, None, None, {'name' : name})
else:
# Ensure that the intermediate widgets exist
parent = master
for i in range(len(plist) - 1):
n = '.'.join(plist[:i+1])
try:
w = master._nametowidget(n)
parent = w
except KeyError:
# Create the intermediate widget
parent = TixSubWidget(parent, plist[i],
destroy_physically=0,
check_intermediate=0)
# The Tk widget name is in plist, not in name
if plist:
name = plist[-1]
TixWidget.__init__(self, parent, None, None, {'name' : name})
self.destroy_physically = destroy_physically
def destroy(self):
# For some widgets e.g., a NoteBook, when we call destructors,
# we must be careful not to destroy the frame widget since this
# also destroys the parent NoteBook thus leading to an exception
# in Tkinter when it finally calls Tcl to destroy the NoteBook
for c in self.children.values(): c.destroy()
if self._name in self.master.children:
del self.master.children[self._name]
if self._name in self.master.subwidget_list:
del self.master.subwidget_list[self._name]
if self.destroy_physically:
# This is bypassed only for a few widgets
self.tk.call('destroy', self._w)
# Useful class to create a display style - later shared by many items.
# Contributed by Steffen Kremser
class DisplayStyle:
"""DisplayStyle - handle configuration options shared by
(multiple) Display Items"""
def __init__(self, itemtype, cnf={}, **kw):
master = _default_root # global from Tkinter
if not master and 'refwindow' in cnf: master=cnf['refwindow']
elif not master and 'refwindow' in kw: master= kw['refwindow']
elif not master: raise RuntimeError, "Too early to create display style: no root window"
self.tk = master.tk
self.stylename = self.tk.call('tixDisplayStyle', itemtype,
*self._options(cnf,kw) )
def __str__(self):
return self.stylename
def _options(self, cnf, kw):
if kw and cnf:
cnf = _cnfmerge((cnf, kw))
elif kw:
cnf = kw
opts = ()
for k, v in cnf.items():
opts = opts + ('-'+k, v)
return opts
def delete(self):
self.tk.call(self.stylename, 'delete')
def __setitem__(self,key,value):
self.tk.call(self.stylename, 'configure', '-%s'%key, value)
def config(self, cnf={}, **kw):
return self._getconfigure(
self.stylename, 'configure', *self._options(cnf,kw))
def __getitem__(self,key):
return self.tk.call(self.stylename, 'cget', '-%s'%key)
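# A hedged example (not from the source): one style can be shared by many
# display items, e.g.
#
#   style = Tix.DisplayStyle(Tix.TEXT, refwindow=root, fg='blue')
#   hlist.add('entry1', itemtype=Tix.TEXT, text='Item', style=style)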
######################################################
### The Tix Widget classes - in alphabetical order ###
######################################################
class Balloon(TixWidget):
"""Balloon help widget.
Subwidget Class
--------- -----
label Label
message Message"""
# FIXME: It should inherit -superclass tixShell
def __init__(self, master=None, cnf={}, **kw):
# static seem to be -installcolormap -initwait -statusbar -cursor
static = ['options', 'installcolormap', 'initwait', 'statusbar',
'cursor']
TixWidget.__init__(self, master, 'tixBalloon', static, cnf, kw)
self.subwidget_list['label'] = _dummyLabel(self, 'label',
destroy_physically=0)
self.subwidget_list['message'] = _dummyLabel(self, 'message',
destroy_physically=0)
def bind_widget(self, widget, cnf={}, **kw):
"""Bind balloon widget to another.
One balloon widget may be bound to several widgets at the same time"""
self.tk.call(self._w, 'bind', widget._w, *self._options(cnf, kw))
def unbind_widget(self, widget):
self.tk.call(self._w, 'unbind', widget._w)
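# A hedged usage sketch: a single Balloon is typically shared by many
# widgets, e.g.
#
#   b = Tix.Balloon(root)
#   b.bind_widget(button, balloonmsg='Save the file',
#                 statusmsg='Write the current buffer to disk')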
class ButtonBox(TixWidget):
"""ButtonBox - A container for pushbuttons.
Subwidgets are the buttons added with the add method.
"""
def __init__(self, master=None, cnf={}, **kw):
TixWidget.__init__(self, master, 'tixButtonBox',
['orientation', 'options'], cnf, kw)
def add(self, name, cnf={}, **kw):
"""Add a button with given name to box."""
btn = self.tk.call(self._w, 'add', name, *self._options(cnf, kw))
self.subwidget_list[name] = _dummyButton(self, name)
return btn
def invoke(self, name):
if name in self.subwidget_list:
self.tk.call(self._w, 'invoke', name)
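# A hedged example: buttons are added by name and are then reachable as
# attributes of the box, e.g.
#
#   box = Tix.ButtonBox(root, orientation=Tix.HORIZONTAL)
#   box.add('ok', text='OK', command=do_ok)   # do_ok is illustrative
#   box.ok['underline'] = 0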
class ComboBox(TixWidget):
"""ComboBox - an Entry field with a dropdown menu. The user can select a
choice by either typing in the entry subwidget or selecting from the
listbox subwidget.
Subwidget Class
--------- -----
entry Entry
arrow Button
slistbox ScrolledListBox
tick Button
cross Button : present if created with the fancy option"""
# FIXME: It should inherit -superclass tixLabelWidget
def __init__ (self, master=None, cnf={}, **kw):
TixWidget.__init__(self, master, 'tixComboBox',
['editable', 'dropdown', 'fancy', 'options'],
cnf, kw)
self.subwidget_list['label'] = _dummyLabel(self, 'label')
self.subwidget_list['entry'] = _dummyEntry(self, 'entry')
self.subwidget_list['arrow'] = _dummyButton(self, 'arrow')
self.subwidget_list['slistbox'] = _dummyScrolledListBox(self,
'slistbox')
try:
self.subwidget_list['tick'] = _dummyButton(self, 'tick')
self.subwidget_list['cross'] = _dummyButton(self, 'cross')
except TypeError:
# unavailable when -fancy not specified
pass
# align
def add_history(self, str):
self.tk.call(self._w, 'addhistory', str)
def append_history(self, str):
self.tk.call(self._w, 'appendhistory', str)
def insert(self, index, str):
self.tk.call(self._w, 'insert', index, str)
def pick(self, index):
self.tk.call(self._w, 'pick', index)
class Control(TixWidget):
"""Control - An entry field with value change arrows. The user can
adjust the value by pressing the two arrow buttons or by entering
the value directly into the entry. The new value will be checked
against the user-defined upper and lower limits.
Subwidget Class
--------- -----
incr Button
decr Button
entry Entry
label Label"""
# FIXME: It should inherit -superclass tixLabelWidget
def __init__ (self, master=None, cnf={}, **kw):
TixWidget.__init__(self, master, 'tixControl', ['options'], cnf, kw)
self.subwidget_list['incr'] = _dummyButton(self, 'incr')
self.subwidget_list['decr'] = _dummyButton(self, 'decr')
self.subwidget_list['label'] = _dummyLabel(self, 'label')
self.subwidget_list['entry'] = _dummyEntry(self, 'entry')
def decrement(self):
self.tk.call(self._w, 'decr')
def increment(self):
self.tk.call(self._w, 'incr')
def invoke(self):
self.tk.call(self._w, 'invoke')
def update(self):
self.tk.call(self._w, 'update')
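# A hedged example: the limits and initial value are plain options, e.g.
#
#   c = Tix.Control(root, label='Count:', integer=True, min=0, max=10, value=5)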
class DirList(TixWidget):
"""DirList - displays a list view of a directory, its previous
directories and its sub-directories. The user can choose one of
the directories displayed in the list or change to another directory.
Subwidget Class
--------- -----
hlist HList
hsb Scrollbar
vsb Scrollbar"""
# FIXME: It should inherit -superclass tixScrolledHList
def __init__(self, master, cnf={}, **kw):
TixWidget.__init__(self, master, 'tixDirList', ['options'], cnf, kw)
self.subwidget_list['hlist'] = _dummyHList(self, 'hlist')
self.subwidget_list['vsb'] = _dummyScrollbar(self, 'vsb')
self.subwidget_list['hsb'] = _dummyScrollbar(self, 'hsb')
def chdir(self, dir):
self.tk.call(self._w, 'chdir', dir)
class DirTree(TixWidget):
"""DirTree - Directory Listing in a hierarchical view.
Displays a tree view of a directory, its previous directories and its
sub-directories. The user can choose one of the directories displayed
in the list or change to another directory.
Subwidget Class
--------- -----
hlist HList
hsb Scrollbar
vsb Scrollbar"""
# FIXME: It should inherit -superclass tixScrolledHList
def __init__(self, master, cnf={}, **kw):
TixWidget.__init__(self, master, 'tixDirTree', ['options'], cnf, kw)
self.subwidget_list['hlist'] = _dummyHList(self, 'hlist')
self.subwidget_list['vsb'] = _dummyScrollbar(self, 'vsb')
self.subwidget_list['hsb'] = _dummyScrollbar(self, 'hsb')
def chdir(self, dir):
self.tk.call(self._w, 'chdir', dir)
class DirSelectBox(TixWidget):
"""DirSelectBox - Motif style file select box.
It is generally used for
the user to choose a file. FileSelectBox stores the files mostly
recently selected into a ComboBox widget so that they can be quickly
selected again.
Subwidget Class
--------- -----
selection ComboBox
filter ComboBox
dirlist ScrolledListBox
filelist ScrolledListBox"""
def __init__(self, master, cnf={}, **kw):
TixWidget.__init__(self, master, 'tixDirSelectBox', ['options'], cnf, kw)
self.subwidget_list['dirlist'] = _dummyDirList(self, 'dirlist')
self.subwidget_list['dircbx'] = _dummyFileComboBox(self, 'dircbx')
class ExFileSelectBox(TixWidget):
"""ExFileSelectBox - MS Windows style file select box.
It provides a convenient method for the user to select files.
Subwidget Class
--------- -----
cancel Button
ok Button
hidden Checkbutton
types ComboBox
dir ComboBox
file ComboBox
dirlist ScrolledListBox
filelist ScrolledListBox"""
def __init__(self, master, cnf={}, **kw):
TixWidget.__init__(self, master, 'tixExFileSelectBox', ['options'], cnf, kw)
self.subwidget_list['cancel'] = _dummyButton(self, 'cancel')
self.subwidget_list['ok'] = _dummyButton(self, 'ok')
self.subwidget_list['hidden'] = _dummyCheckbutton(self, 'hidden')
self.subwidget_list['types'] = _dummyComboBox(self, 'types')
self.subwidget_list['dir'] = _dummyComboBox(self, 'dir')
self.subwidget_list['dirlist'] = _dummyDirList(self, 'dirlist')
self.subwidget_list['file'] = _dummyComboBox(self, 'file')
self.subwidget_list['filelist'] = _dummyScrolledListBox(self, 'filelist')
def filter(self):
self.tk.call(self._w, 'filter')
def invoke(self):
self.tk.call(self._w, 'invoke')
# Should inherit from a Dialog class
class DirSelectDialog(TixWidget):
"""The DirSelectDialog widget presents the directories in the file
system in a dialog window. The user can use this dialog window to
navigate through the file system to select the desired directory.
Subwidgets Class
---------- -----
dirbox DirSelectBox"""
# FIXME: It should inherit -superclass tixDialogShell
def __init__(self, master, cnf={}, **kw):
TixWidget.__init__(self, master, 'tixDirSelectDialog',
['options'], cnf, kw)
self.subwidget_list['dirbox'] = _dummyDirSelectBox(self, 'dirbox')
# cancel and ok buttons are missing
def popup(self):
self.tk.call(self._w, 'popup')
def popdown(self):
self.tk.call(self._w, 'popdown')
# Should inherit from a Dialog class
class ExFileSelectDialog(TixWidget):
"""ExFileSelectDialog - MS Windows style file select dialog.
It provides a convenient method for the user to select files.
Subwidgets Class
---------- -----
fsbox ExFileSelectBox"""
# FIXME: It should inherit -superclass tixDialogShell
def __init__(self, master, cnf={}, **kw):
TixWidget.__init__(self, master, 'tixExFileSelectDialog',
['options'], cnf, kw)
self.subwidget_list['fsbox'] = _dummyExFileSelectBox(self, 'fsbox')
def popup(self):
self.tk.call(self._w, 'popup')
def popdown(self):
self.tk.call(self._w, 'popdown')
class FileSelectBox(TixWidget):
"""ExFileSelectBox - Motif style file select box.
It is generally used for
the user to choose a file. FileSelectBox stores the files mostly
recently selected into a ComboBox widget so that they can be quickly
selected again.
Subwidget Class
--------- -----
selection ComboBox
filter ComboBox
dirlist ScrolledListBox
filelist ScrolledListBox"""
def __init__(self, master, cnf={}, **kw):
TixWidget.__init__(self, master, 'tixFileSelectBox', ['options'], cnf, kw)
self.subwidget_list['dirlist'] = _dummyScrolledListBox(self, 'dirlist')
self.subwidget_list['filelist'] = _dummyScrolledListBox(self, 'filelist')
self.subwidget_list['filter'] = _dummyComboBox(self, 'filter')
self.subwidget_list['selection'] = _dummyComboBox(self, 'selection')
def apply_filter(self): # name of subwidget is same as command
self.tk.call(self._w, 'filter')
def invoke(self):
self.tk.call(self._w, 'invoke')
# Should inherit from a Dialog class
class FileSelectDialog(TixWidget):
"""FileSelectDialog - Motif style file select dialog.
Subwidgets Class
---------- -----
btns StdButtonBox
fsbox FileSelectBox"""
# FIXME: It should inherit -superclass tixStdDialogShell
def __init__(self, master, cnf={}, **kw):
TixWidget.__init__(self, master, 'tixFileSelectDialog',
['options'], cnf, kw)
self.subwidget_list['btns'] = _dummyStdButtonBox(self, 'btns')
self.subwidget_list['fsbox'] = _dummyFileSelectBox(self, 'fsbox')
def popup(self):
self.tk.call(self._w, 'popup')
def popdown(self):
self.tk.call(self._w, 'popdown')
class FileEntry(TixWidget):
"""FileEntry - Entry field with button that invokes a FileSelectDialog.
The user can type in the filename manually. Alternatively, the user can
press the button widget that sits next to the entry, which will bring
up a file selection dialog.
Subwidgets Class
---------- -----
button Button
entry Entry"""
# FIXME: It should inherit -superclass tixLabelWidget
def __init__(self, master, cnf={}, **kw):
TixWidget.__init__(self, master, 'tixFileEntry',
['dialogtype', 'options'], cnf, kw)
self.subwidget_list['button'] = _dummyButton(self, 'button')
self.subwidget_list['entry'] = _dummyEntry(self, 'entry')
def invoke(self):
self.tk.call(self._w, 'invoke')
def file_dialog(self):
# FIXME: return python object
pass
class HList(TixWidget, XView, YView):
"""HList - Hierarchy display widget can be used to display any data
that have a hierarchical structure, for example, file system directory
trees. The list entries are indented and connected by branch lines
according to their places in the hierarchy.
Subwidgets - None"""
def __init__ (self,master=None,cnf={}, **kw):
TixWidget.__init__(self, master, 'tixHList',
['columns', 'options'], cnf, kw)
def add(self, entry, cnf={}, **kw):
return self.tk.call(self._w, 'add', entry, *self._options(cnf, kw))
def add_child(self, parent=None, cnf={}, **kw):
if not parent:
parent = ''
return self.tk.call(
self._w, 'addchild', parent, *self._options(cnf, kw))
def anchor_set(self, entry):
self.tk.call(self._w, 'anchor', 'set', entry)
def anchor_clear(self):
self.tk.call(self._w, 'anchor', 'clear')
def column_width(self, col=0, width=None, chars=None):
if not chars:
return self.tk.call(self._w, 'column', 'width', col, width)
else:
return self.tk.call(self._w, 'column', 'width', col,
'-char', chars)
def delete_all(self):
self.tk.call(self._w, 'delete', 'all')
def delete_entry(self, entry):
self.tk.call(self._w, 'delete', 'entry', entry)
def delete_offsprings(self, entry):
self.tk.call(self._w, 'delete', 'offsprings', entry)
def delete_siblings(self, entry):
self.tk.call(self._w, 'delete', 'siblings', entry)
def dragsite_set(self, index):
self.tk.call(self._w, 'dragsite', 'set', index)
def dragsite_clear(self):
self.tk.call(self._w, 'dragsite', 'clear')
def dropsite_set(self, index):
self.tk.call(self._w, 'dropsite', 'set', index)
def dropsite_clear(self):
self.tk.call(self._w, 'dropsite', 'clear')
def header_create(self, col, cnf={}, **kw):
self.tk.call(self._w, 'header', 'create', col, *self._options(cnf, kw))
def header_configure(self, col, cnf={}, **kw):
if cnf is None:
return self._getconfigure(self._w, 'header', 'configure', col)
self.tk.call(self._w, 'header', 'configure', col,
*self._options(cnf, kw))
def header_cget(self, col, opt):
return self.tk.call(self._w, 'header', 'cget', col, opt)
def header_exists(self, col):
return self.tk.call(self._w, 'header', 'exists', col)
def header_delete(self, col):
self.tk.call(self._w, 'header', 'delete', col)
def header_size(self, col):
return self.tk.call(self._w, 'header', 'size', col)
def hide_entry(self, entry):
self.tk.call(self._w, 'hide', 'entry', entry)
def indicator_create(self, entry, cnf={}, **kw):
self.tk.call(
self._w, 'indicator', 'create', entry, *self._options(cnf, kw))
def indicator_configure(self, entry, cnf={}, **kw):
if cnf is None:
return self._getconfigure(
self._w, 'indicator', 'configure', entry)
self.tk.call(
self._w, 'indicator', 'configure', entry, *self._options(cnf, kw))
def indicator_cget(self, entry, opt):
return self.tk.call(self._w, 'indicator', 'cget', entry, opt)
def indicator_exists(self, entry):
return self.tk.call (self._w, 'indicator', 'exists', entry)
def indicator_delete(self, entry):
self.tk.call(self._w, 'indicator', 'delete', entry)
def indicator_size(self, entry):
return self.tk.call(self._w, 'indicator', 'size', entry)
def info_anchor(self):
return self.tk.call(self._w, 'info', 'anchor')
def info_bbox(self, entry):
return self._getints(
self.tk.call(self._w, 'info', 'bbox', entry)) or None
def info_children(self, entry=None):
c = self.tk.call(self._w, 'info', 'children', entry)
return self.tk.splitlist(c)
def info_data(self, entry):
return self.tk.call(self._w, 'info', 'data', entry)
def info_dragsite(self):
return self.tk.call(self._w, 'info', 'dragsite')
def info_dropsite(self):
return self.tk.call(self._w, 'info', 'dropsite')
def info_exists(self, entry):
return self.tk.call(self._w, 'info', 'exists', entry)
def info_hidden(self, entry):
return self.tk.call(self._w, 'info', 'hidden', entry)
def info_next(self, entry):
return self.tk.call(self._w, 'info', 'next', entry)
def info_parent(self, entry):
return self.tk.call(self._w, 'info', 'parent', entry)
def info_prev(self, entry):
return self.tk.call(self._w, 'info', 'prev', entry)
def info_selection(self):
c = self.tk.call(self._w, 'info', 'selection')
return self.tk.splitlist(c)
def item_cget(self, entry, col, opt):
return self.tk.call(self._w, 'item', 'cget', entry, col, opt)
def item_configure(self, entry, col, cnf={}, **kw):
if cnf is None:
return self._getconfigure(self._w, 'item', 'configure', entry, col)
self.tk.call(self._w, 'item', 'configure', entry, col,
*self._options(cnf, kw))
def item_create(self, entry, col, cnf={}, **kw):
self.tk.call(
self._w, 'item', 'create', entry, col, *self._options(cnf, kw))
def item_exists(self, entry, col):
return self.tk.call(self._w, 'item', 'exists', entry, col)
def item_delete(self, entry, col):
self.tk.call(self._w, 'item', 'delete', entry, col)
def entrycget(self, entry, opt):
return self.tk.call(self._w, 'entrycget', entry, opt)
def entryconfigure(self, entry, cnf={}, **kw):
if cnf is None:
return self._getconfigure(self._w, 'entryconfigure', entry)
self.tk.call(self._w, 'entryconfigure', entry,
*self._options(cnf, kw))
def nearest(self, y):
return self.tk.call(self._w, 'nearest', y)
def see(self, entry):
self.tk.call(self._w, 'see', entry)
def selection_clear(self, cnf={}, **kw):
self.tk.call(self._w, 'selection', 'clear', *self._options(cnf, kw))
def selection_includes(self, entry):
return self.tk.call(self._w, 'selection', 'includes', entry)
def selection_set(self, first, last=None):
self.tk.call(self._w, 'selection', 'set', first, last)
def show_entry(self, entry):
return self.tk.call(self._w, 'show', 'entry', entry)
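# A minimal HList usage sketch, assuming the Tix extension is available;
# entry names and labels below are illustrative. Entries are addressed by
# dotted path names, and children indent under their parent entry.
def _hlist_example():
    root = Tk()
    h = HList(root, columns=1)
    h.pack(expand=1, fill=BOTH)
    h.add('fruit', text='fruit')
    h.add('fruit.apple', text='apple')   # child entry of 'fruit'
    h.selection_set('fruit.apple')
    root.mainloop()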
class InputOnly(TixWidget):
"""InputOnly - Invisible widget. Unix only.
Subwidgets - None"""
def __init__ (self,master=None,cnf={}, **kw):
TixWidget.__init__(self, master, 'tixInputOnly', None, cnf, kw)
class LabelEntry(TixWidget):
"""LabelEntry - Entry field with label. Packages an entry widget
    and a label into one mega widget. It can be used to simplify
    the creation of ``entry-form'' type of interfaces.
Subwidgets Class
---------- -----
label Label
entry Entry"""
def __init__ (self,master=None,cnf={}, **kw):
TixWidget.__init__(self, master, 'tixLabelEntry',
['labelside','options'], cnf, kw)
self.subwidget_list['label'] = _dummyLabel(self, 'label')
self.subwidget_list['entry'] = _dummyEntry(self, 'entry')
class LabelFrame(TixWidget):
"""LabelFrame - Labelled Frame container. Packages a frame widget
and a label into one mega widget. To create widgets inside a
LabelFrame widget, one creates the new widgets relative to the
frame subwidget and manage them inside the frame subwidget.
Subwidgets Class
---------- -----
label Label
frame Frame"""
def __init__ (self,master=None,cnf={}, **kw):
TixWidget.__init__(self, master, 'tixLabelFrame',
['labelside','options'], cnf, kw)
self.subwidget_list['label'] = _dummyLabel(self, 'label')
self.subwidget_list['frame'] = _dummyFrame(self, 'frame')
class ListNoteBook(TixWidget):
"""A ListNoteBook widget is very similar to the TixNoteBook widget:
it can be used to display many windows in a limited space using a
notebook metaphor. The notebook is divided into a stack of pages
(windows). At one time only one of these pages can be shown.
The user can navigate through these pages by
choosing the name of the desired page in the hlist subwidget."""
def __init__(self, master, cnf={}, **kw):
TixWidget.__init__(self, master, 'tixListNoteBook', ['options'], cnf, kw)
# Is this necessary? It's not an exposed subwidget in Tix.
self.subwidget_list['pane'] = _dummyPanedWindow(self, 'pane',
destroy_physically=0)
self.subwidget_list['hlist'] = _dummyHList(self, 'hlist')
self.subwidget_list['shlist'] = _dummyScrolledHList(self, 'shlist')
def add(self, name, cnf={}, **kw):
self.tk.call(self._w, 'add', name, *self._options(cnf, kw))
self.subwidget_list[name] = TixSubWidget(self, name)
return self.subwidget_list[name]
def page(self, name):
return self.subwidget(name)
def pages(self):
# Can't call subwidgets_all directly because we don't want .nbframe
names = self.tk.split(self.tk.call(self._w, 'pages'))
ret = []
for x in names:
ret.append(self.subwidget(x))
return ret
def raise_page(self, name): # raise is a python keyword
self.tk.call(self._w, 'raise', name)
class Meter(TixWidget):
"""The Meter widget can be used to show the progress of a background
job which may take a long time to execute.
"""
def __init__(self, master=None, cnf={}, **kw):
TixWidget.__init__(self, master, 'tixMeter',
['options'], cnf, kw)
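# A minimal Meter usage sketch, assuming the Tix extension is available;
# the values below are illustrative. -value is a fraction between 0.0 and
# 1.0 and -text is the label drawn on the bar.
def _meter_example():
    root = Tk()
    m = Meter(root, value=0.4, text='40%')
    m.pack()
    root.mainloop()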
class NoteBook(TixWidget):
"""NoteBook - Multi-page container widget (tabbed notebook metaphor).
Subwidgets Class
---------- -----
nbframe NoteBookFrame
<pages> page widgets added dynamically with the add method"""
def __init__ (self,master=None,cnf={}, **kw):
TixWidget.__init__(self,master,'tixNoteBook', ['options'], cnf, kw)
self.subwidget_list['nbframe'] = TixSubWidget(self, 'nbframe',
destroy_physically=0)
def add(self, name, cnf={}, **kw):
self.tk.call(self._w, 'add', name, *self._options(cnf, kw))
self.subwidget_list[name] = TixSubWidget(self, name)
return self.subwidget_list[name]
def delete(self, name):
self.tk.call(self._w, 'delete', name)
self.subwidget_list[name].destroy()
del self.subwidget_list[name]
def page(self, name):
return self.subwidget(name)
def pages(self):
# Can't call subwidgets_all directly because we don't want .nbframe
names = self.tk.split(self.tk.call(self._w, 'pages'))
ret = []
for x in names:
ret.append(self.subwidget(x))
return ret
def raise_page(self, name): # raise is a python keyword
self.tk.call(self._w, 'raise', name)
def raised(self):
return self.tk.call(self._w, 'raised')
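# A minimal NoteBook usage sketch, assuming the Tix extension is
# available; page names and labels are illustrative. add() returns the
# page frame to fill with widgets, and raise_page() shows a page by name.
def _notebook_example():
    root = Tk()
    nb = NoteBook(root)
    nb.pack(expand=1, fill=BOTH)
    page = nb.add('hello', label='Hello')
    Button(page, text='Hi there').pack()
    nb.raise_page('hello')
    root.mainloop()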
class NoteBookFrame(TixWidget):
# FIXME: This is dangerous to expose to be called on its own.
pass
class OptionMenu(TixWidget):
"""OptionMenu - creates a menu button of options.
Subwidget Class
--------- -----
menubutton Menubutton
menu Menu"""
def __init__(self, master, cnf={}, **kw):
TixWidget.__init__(self, master, 'tixOptionMenu',
['labelside', 'options'], cnf, kw)
self.subwidget_list['menubutton'] = _dummyMenubutton(self, 'menubutton')
self.subwidget_list['menu'] = _dummyMenu(self, 'menu')
def add_command(self, name, cnf={}, **kw):
self.tk.call(self._w, 'add', 'command', name, *self._options(cnf, kw))
def add_separator(self, name, cnf={}, **kw):
self.tk.call(self._w, 'add', 'separator', name, *self._options(cnf, kw))
def delete(self, name):
self.tk.call(self._w, 'delete', name)
def disable(self, name):
self.tk.call(self._w, 'disable', name)
def enable(self, name):
self.tk.call(self._w, 'enable', name)
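# A minimal OptionMenu usage sketch, assuming the Tix extension is
# available; entry names and labels are illustrative. The name of the
# selected entry is stored in the Tcl variable passed as -variable.
def _optionmenu_example():
    root = Tk()
    var = StringVar(root)
    om = OptionMenu(root, label='Choice:', variable=var)
    om.add_command('alpha', label='Alpha')
    om.add_command('beta', label='Beta')
    om.pack()
    root.mainloop()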
class PanedWindow(TixWidget):
"""PanedWindow - Multi-pane container widget
allows the user to interactively manipulate the sizes of several
panes. The panes can be arranged either vertically or horizontally.The
user changes the sizes of the panes by dragging the resize handle
between two panes.
Subwidgets Class
---------- -----
<panes> g/p widgets added dynamically with the add method."""
def __init__(self, master, cnf={}, **kw):
TixWidget.__init__(self, master, 'tixPanedWindow', ['orientation', 'options'], cnf, kw)
# add delete forget panecget paneconfigure panes setsize
def add(self, name, cnf={}, **kw):
self.tk.call(self._w, 'add', name, *self._options(cnf, kw))
self.subwidget_list[name] = TixSubWidget(self, name,
check_intermediate=0)
return self.subwidget_list[name]
def delete(self, name):
self.tk.call(self._w, 'delete', name)
self.subwidget_list[name].destroy()
del self.subwidget_list[name]
def forget(self, name):
self.tk.call(self._w, 'forget', name)
def panecget(self, entry, opt):
return self.tk.call(self._w, 'panecget', entry, opt)
def paneconfigure(self, entry, cnf={}, **kw):
if cnf is None:
return self._getconfigure(self._w, 'paneconfigure', entry)
self.tk.call(self._w, 'paneconfigure', entry, *self._options(cnf, kw))
def panes(self):
names = self.tk.splitlist(self.tk.call(self._w, 'panes'))
return [self.subwidget(x) for x in names]
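# A minimal PanedWindow usage sketch, assuming the Tix extension is
# available; pane names and sizes are illustrative. add() returns a frame
# to pack widgets into, and -min limits how far a pane can shrink.
def _panedwindow_example():
    root = Tk()
    pw = PanedWindow(root, orientation='vertical')
    pw.pack(expand=1, fill=BOTH)
    top = pw.add('top', min=40)
    bottom = pw.add('bottom', min=40)
    Label(top, text='top pane').pack()
    Label(bottom, text='bottom pane').pack()
    root.mainloop()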
class PopupMenu(TixWidget):
"""PopupMenu widget can be used as a replacement of the tk_popup command.
    The advantage of the Tix PopupMenu widget is that it requires less
    application code to manipulate.
Subwidgets Class
---------- -----
menubutton Menubutton
menu Menu"""
# FIXME: It should inherit -superclass tixShell
def __init__(self, master, cnf={}, **kw):
TixWidget.__init__(self, master, 'tixPopupMenu', ['options'], cnf, kw)
self.subwidget_list['menubutton'] = _dummyMenubutton(self, 'menubutton')
self.subwidget_list['menu'] = _dummyMenu(self, 'menu')
def bind_widget(self, widget):
self.tk.call(self._w, 'bind', widget._w)
def unbind_widget(self, widget):
self.tk.call(self._w, 'unbind', widget._w)
def post_widget(self, widget, x, y):
self.tk.call(self._w, 'post', widget._w, x, y)
class ResizeHandle(TixWidget):
"""Internal widget to draw resize handles on Scrolled widgets."""
def __init__(self, master, cnf={}, **kw):
# There seems to be a Tix bug rejecting the configure method
# Let's try making the flags -static
flags = ['options', 'command', 'cursorfg', 'cursorbg',
'handlesize', 'hintcolor', 'hintwidth',
'x', 'y']
# In fact, x y height width are configurable
TixWidget.__init__(self, master, 'tixResizeHandle',
flags, cnf, kw)
def attach_widget(self, widget):
self.tk.call(self._w, 'attachwidget', widget._w)
def detach_widget(self, widget):
self.tk.call(self._w, 'detachwidget', widget._w)
def hide(self, widget):
self.tk.call(self._w, 'hide', widget._w)
def show(self, widget):
self.tk.call(self._w, 'show', widget._w)
class ScrolledHList(TixWidget):
"""ScrolledHList - HList with automatic scrollbars."""
# FIXME: It should inherit -superclass tixScrolledWidget
def __init__(self, master, cnf={}, **kw):
TixWidget.__init__(self, master, 'tixScrolledHList', ['options'],
cnf, kw)
self.subwidget_list['hlist'] = _dummyHList(self, 'hlist')
self.subwidget_list['vsb'] = _dummyScrollbar(self, 'vsb')
self.subwidget_list['hsb'] = _dummyScrollbar(self, 'hsb')
class ScrolledListBox(TixWidget):
"""ScrolledListBox - Listbox with automatic scrollbars."""
# FIXME: It should inherit -superclass tixScrolledWidget
def __init__(self, master, cnf={}, **kw):
TixWidget.__init__(self, master, 'tixScrolledListBox', ['options'], cnf, kw)
self.subwidget_list['listbox'] = _dummyListbox(self, 'listbox')
self.subwidget_list['vsb'] = _dummyScrollbar(self, 'vsb')
self.subwidget_list['hsb'] = _dummyScrollbar(self, 'hsb')
class ScrolledText(TixWidget):
"""ScrolledText - Text with automatic scrollbars."""
# FIXME: It should inherit -superclass tixScrolledWidget
def __init__(self, master, cnf={}, **kw):
TixWidget.__init__(self, master, 'tixScrolledText', ['options'], cnf, kw)
self.subwidget_list['text'] = _dummyText(self, 'text')
self.subwidget_list['vsb'] = _dummyScrollbar(self, 'vsb')
self.subwidget_list['hsb'] = _dummyScrollbar(self, 'hsb')
class ScrolledTList(TixWidget):
"""ScrolledTList - TList with automatic scrollbars."""
# FIXME: It should inherit -superclass tixScrolledWidget
def __init__(self, master, cnf={}, **kw):
TixWidget.__init__(self, master, 'tixScrolledTList', ['options'],
cnf, kw)
self.subwidget_list['tlist'] = _dummyTList(self, 'tlist')
self.subwidget_list['vsb'] = _dummyScrollbar(self, 'vsb')
self.subwidget_list['hsb'] = _dummyScrollbar(self, 'hsb')
class ScrolledWindow(TixWidget):
"""ScrolledWindow - Window with automatic scrollbars."""
# FIXME: It should inherit -superclass tixScrolledWidget
def __init__(self, master, cnf={}, **kw):
TixWidget.__init__(self, master, 'tixScrolledWindow', ['options'], cnf, kw)
self.subwidget_list['window'] = _dummyFrame(self, 'window')
self.subwidget_list['vsb'] = _dummyScrollbar(self, 'vsb')
self.subwidget_list['hsb'] = _dummyScrollbar(self, 'hsb')
class Select(TixWidget):
"""Select - Container of button subwidgets. It can be used to provide
radio-box or check-box style of selection options for the user.
Subwidgets are buttons added dynamically using the add method."""
# FIXME: It should inherit -superclass tixLabelWidget
def __init__(self, master, cnf={}, **kw):
TixWidget.__init__(self, master, 'tixSelect',
['allowzero', 'radio', 'orientation', 'labelside',
'options'],
cnf, kw)
self.subwidget_list['label'] = _dummyLabel(self, 'label')
def add(self, name, cnf={}, **kw):
self.tk.call(self._w, 'add', name, *self._options(cnf, kw))
self.subwidget_list[name] = _dummyButton(self, name)
return self.subwidget_list[name]
def invoke(self, name):
self.tk.call(self._w, 'invoke', name)
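# A minimal Select usage sketch, assuming the Tix extension is available;
# button names and labels are illustrative. With radio=0 and allowzero=1
# the widget behaves like a row of check boxes.
def _select_example():
    root = Tk()
    sel = Select(root, label='Toppings', allowzero=1, radio=0)
    sel.add('cheese', text='cheese')
    sel.add('onion', text='onion')
    sel.pack()
    root.mainloop()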
class Shell(TixWidget):
"""Toplevel window.
Subwidgets - None"""
def __init__ (self,master=None,cnf={}, **kw):
TixWidget.__init__(self, master, 'tixShell', ['options', 'title'], cnf, kw)
class DialogShell(TixWidget):
"""Toplevel window, with popup popdown and center methods.
It tells the window manager that it is a dialog window and should be
    treated specially. The exact treatment depends on the window manager.
Subwidgets - None"""
# FIXME: It should inherit from Shell
def __init__ (self,master=None,cnf={}, **kw):
TixWidget.__init__(self, master,
'tixDialogShell',
['options', 'title', 'mapped',
'minheight', 'minwidth',
'parent', 'transient'], cnf, kw)
def popdown(self):
self.tk.call(self._w, 'popdown')
def popup(self):
self.tk.call(self._w, 'popup')
def center(self):
self.tk.call(self._w, 'center')
class StdButtonBox(TixWidget):
"""StdButtonBox - Standard Button Box (OK, Apply, Cancel and Help) """
def __init__(self, master=None, cnf={}, **kw):
TixWidget.__init__(self, master, 'tixStdButtonBox',
['orientation', 'options'], cnf, kw)
self.subwidget_list['ok'] = _dummyButton(self, 'ok')
self.subwidget_list['apply'] = _dummyButton(self, 'apply')
self.subwidget_list['cancel'] = _dummyButton(self, 'cancel')
self.subwidget_list['help'] = _dummyButton(self, 'help')
def invoke(self, name):
if name in self.subwidget_list:
self.tk.call(self._w, 'invoke', name)
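# A minimal StdButtonBox usage sketch, assuming the Tix extension is
# available; the command wired below is illustrative. The four standard
# buttons are reached through their subwidget names.
def _stdbuttonbox_example():
    root = Tk()
    box = StdButtonBox(root)
    box.subwidget('ok').config(command=root.destroy)
    box.pack()
    root.mainloop()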
class TList(TixWidget, XView, YView):
"""TList - Hierarchy display widget which can be
used to display data in a tabular format. The list entries of a TList
widget are similar to the entries in the Tk listbox widget. The main
differences are (1) the TList widget can display the list entries in a
two dimensional format and (2) you can use graphical images as well as
multiple colors and fonts for the list entries.
Subwidgets - None"""
def __init__ (self,master=None,cnf={}, **kw):
TixWidget.__init__(self, master, 'tixTList', ['options'], cnf, kw)
def active_set(self, index):
self.tk.call(self._w, 'active', 'set', index)
def active_clear(self):
self.tk.call(self._w, 'active', 'clear')
def anchor_set(self, index):
self.tk.call(self._w, 'anchor', 'set', index)
def anchor_clear(self):
self.tk.call(self._w, 'anchor', 'clear')
def delete(self, from_, to=None):
self.tk.call(self._w, 'delete', from_, to)
def dragsite_set(self, index):
self.tk.call(self._w, 'dragsite', 'set', index)
def dragsite_clear(self):
self.tk.call(self._w, 'dragsite', 'clear')
def dropsite_set(self, index):
self.tk.call(self._w, 'dropsite', 'set', index)
def dropsite_clear(self):
self.tk.call(self._w, 'dropsite', 'clear')
def insert(self, index, cnf={}, **kw):
self.tk.call(self._w, 'insert', index, *self._options(cnf, kw))
def info_active(self):
return self.tk.call(self._w, 'info', 'active')
def info_anchor(self):
return self.tk.call(self._w, 'info', 'anchor')
def info_down(self, index):
return self.tk.call(self._w, 'info', 'down', index)
def info_left(self, index):
return self.tk.call(self._w, 'info', 'left', index)
def info_right(self, index):
return self.tk.call(self._w, 'info', 'right', index)
def info_selection(self):
c = self.tk.call(self._w, 'info', 'selection')
return self.tk.splitlist(c)
def info_size(self):
return self.tk.call(self._w, 'info', 'size')
def info_up(self, index):
return self.tk.call(self._w, 'info', 'up', index)
def nearest(self, x, y):
return self.tk.call(self._w, 'nearest', x, y)
def see(self, index):
self.tk.call(self._w, 'see', index)
def selection_clear(self, cnf={}, **kw):
self.tk.call(self._w, 'selection', 'clear', *self._options(cnf, kw))
def selection_includes(self, index):
return self.tk.call(self._w, 'selection', 'includes', index)
def selection_set(self, first, last=None):
self.tk.call(self._w, 'selection', 'set', first, last)
class Tree(TixWidget):
"""Tree - The tixTree widget can be used to display hierarchical
data in a tree form. The user can adjust
the view of the tree by opening or closing parts of the tree."""
# FIXME: It should inherit -superclass tixScrolledWidget
def __init__(self, master=None, cnf={}, **kw):
TixWidget.__init__(self, master, 'tixTree',
['options'], cnf, kw)
self.subwidget_list['hlist'] = _dummyHList(self, 'hlist')
self.subwidget_list['vsb'] = _dummyScrollbar(self, 'vsb')
self.subwidget_list['hsb'] = _dummyScrollbar(self, 'hsb')
def autosetmode(self):
'''This command calls the setmode method for all the entries in this
Tree widget: if an entry has no child entries, its mode is set to
none. Otherwise, if the entry has any hidden child entries, its mode is
set to open; otherwise its mode is set to close.'''
self.tk.call(self._w, 'autosetmode')
def close(self, entrypath):
'''Close the entry given by entryPath if its mode is close.'''
self.tk.call(self._w, 'close', entrypath)
def getmode(self, entrypath):
'''Returns the current mode of the entry given by entryPath.'''
return self.tk.call(self._w, 'getmode', entrypath)
def open(self, entrypath):
'''Open the entry given by entryPath if its mode is open.'''
self.tk.call(self._w, 'open', entrypath)
def setmode(self, entrypath, mode='none'):
'''This command is used to indicate whether the entry given by
        entryPath has child entries and whether the children are visible. mode
must be one of open, close or none. If mode is set to open, a (+)
indicator is drawn next to the entry. If mode is set to close, a (-)
indicator is drawn next to the entry. If mode is set to none, no
indicators will be drawn for this entry. The default mode is none. The
open mode indicates the entry has hidden children and this entry can be
opened by the user. The close mode indicates that all the children of the
entry are now visible and the entry can be closed by the user.'''
self.tk.call(self._w, 'setmode', entrypath, mode)
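# A minimal Tree usage sketch, assuming the Tix extension is available;
# entry names are illustrative. autosetmode() computes the open/close/none
# mode for every entry as described above.
def _tree_example():
    root = Tk()
    tree = Tree(root)
    tree.pack(expand=1, fill=BOTH)
    h = tree.subwidget('hlist')
    h.add('parent', text='parent')
    h.add('parent.child', text='child')
    tree.autosetmode()   # 'parent' gets mode close, 'parent.child' gets none
    root.mainloop()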
# Could try subclassing Tree for CheckList - would need another arg to init
class CheckList(TixWidget):
"""The CheckList widget
displays a list of items to be selected by the user. CheckList acts
similarly to the Tk checkbutton or radiobutton widgets, except it is
capable of handling many more items than checkbuttons or radiobuttons.
"""
# FIXME: It should inherit -superclass tixTree
def __init__(self, master=None, cnf={}, **kw):
TixWidget.__init__(self, master, 'tixCheckList',
['options', 'radio'], cnf, kw)
self.subwidget_list['hlist'] = _dummyHList(self, 'hlist')
self.subwidget_list['vsb'] = _dummyScrollbar(self, 'vsb')
self.subwidget_list['hsb'] = _dummyScrollbar(self, 'hsb')
def autosetmode(self):
'''This command calls the setmode method for all the entries in this
Tree widget: if an entry has no child entries, its mode is set to
none. Otherwise, if the entry has any hidden child entries, its mode is
set to open; otherwise its mode is set to close.'''
self.tk.call(self._w, 'autosetmode')
def close(self, entrypath):
'''Close the entry given by entryPath if its mode is close.'''
self.tk.call(self._w, 'close', entrypath)
def getmode(self, entrypath):
'''Returns the current mode of the entry given by entryPath.'''
return self.tk.call(self._w, 'getmode', entrypath)
def open(self, entrypath):
'''Open the entry given by entryPath if its mode is open.'''
self.tk.call(self._w, 'open', entrypath)
def getselection(self, mode='on'):
        '''Returns a list of items whose status matches mode. If mode is
        not specified, the list of items with status "on" is returned.
        Mode can be on, off or default.'''
c = self.tk.split(self.tk.call(self._w, 'getselection', mode))
return self.tk.splitlist(c)
def getstatus(self, entrypath):
'''Returns the current status of entryPath.'''
return self.tk.call(self._w, 'getstatus', entrypath)
def setstatus(self, entrypath, mode='on'):
        '''Sets the status of entryPath to mode. A bitmap is displayed
        next to the entry according to whether its status is on, off or
        default.'''
self.tk.call(self._w, 'setstatus', entrypath, mode)
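# A minimal CheckList usage sketch, assuming the Tix extension is
# available; entry names are illustrative. Entries live in the hlist
# subwidget and their check state is driven by setstatus()/getstatus().
def _checklist_example():
    root = Tk()
    cl = CheckList(root)
    cl.pack(expand=1, fill=BOTH)
    h = cl.subwidget('hlist')
    h.add('spam', text='spam')
    h.add('eggs', text='eggs')
    cl.setstatus('spam', 'on')
    cl.setstatus('eggs', 'off')
    cl.autosetmode()
    print(cl.getselection())   # entries whose status is 'on'
    root.mainloop()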
###########################################################################
### The subclassing below is used to instantiate the subwidgets in each ###
### mega widget. This allows us to access their methods directly. ###
###########################################################################
class _dummyButton(Button, TixSubWidget):
def __init__(self, master, name, destroy_physically=1):
TixSubWidget.__init__(self, master, name, destroy_physically)
class _dummyCheckbutton(Checkbutton, TixSubWidget):
def __init__(self, master, name, destroy_physically=1):
TixSubWidget.__init__(self, master, name, destroy_physically)
class _dummyEntry(Entry, TixSubWidget):
def __init__(self, master, name, destroy_physically=1):
TixSubWidget.__init__(self, master, name, destroy_physically)
class _dummyFrame(Frame, TixSubWidget):
def __init__(self, master, name, destroy_physically=1):
TixSubWidget.__init__(self, master, name, destroy_physically)
class _dummyLabel(Label, TixSubWidget):
def __init__(self, master, name, destroy_physically=1):
TixSubWidget.__init__(self, master, name, destroy_physically)
class _dummyListbox(Listbox, TixSubWidget):
def __init__(self, master, name, destroy_physically=1):
TixSubWidget.__init__(self, master, name, destroy_physically)
class _dummyMenu(Menu, TixSubWidget):
def __init__(self, master, name, destroy_physically=1):
TixSubWidget.__init__(self, master, name, destroy_physically)
class _dummyMenubutton(Menubutton, TixSubWidget):
def __init__(self, master, name, destroy_physically=1):
TixSubWidget.__init__(self, master, name, destroy_physically)
class _dummyScrollbar(Scrollbar, TixSubWidget):
def __init__(self, master, name, destroy_physically=1):
TixSubWidget.__init__(self, master, name, destroy_physically)
class _dummyText(Text, TixSubWidget):
def __init__(self, master, name, destroy_physically=1):
TixSubWidget.__init__(self, master, name, destroy_physically)
class _dummyScrolledListBox(ScrolledListBox, TixSubWidget):
def __init__(self, master, name, destroy_physically=1):
TixSubWidget.__init__(self, master, name, destroy_physically)
self.subwidget_list['listbox'] = _dummyListbox(self, 'listbox')
self.subwidget_list['vsb'] = _dummyScrollbar(self, 'vsb')
self.subwidget_list['hsb'] = _dummyScrollbar(self, 'hsb')
class _dummyHList(HList, TixSubWidget):
def __init__(self, master, name, destroy_physically=1):
TixSubWidget.__init__(self, master, name, destroy_physically)
class _dummyScrolledHList(ScrolledHList, TixSubWidget):
def __init__(self, master, name, destroy_physically=1):
TixSubWidget.__init__(self, master, name, destroy_physically)
self.subwidget_list['hlist'] = _dummyHList(self, 'hlist')
self.subwidget_list['vsb'] = _dummyScrollbar(self, 'vsb')
self.subwidget_list['hsb'] = _dummyScrollbar(self, 'hsb')
class _dummyTList(TList, TixSubWidget):
def __init__(self, master, name, destroy_physically=1):
TixSubWidget.__init__(self, master, name, destroy_physically)
class _dummyComboBox(ComboBox, TixSubWidget):
def __init__(self, master, name, destroy_physically=1):
TixSubWidget.__init__(self, master, name, ['fancy',destroy_physically])
self.subwidget_list['label'] = _dummyLabel(self, 'label')
self.subwidget_list['entry'] = _dummyEntry(self, 'entry')
self.subwidget_list['arrow'] = _dummyButton(self, 'arrow')
self.subwidget_list['slistbox'] = _dummyScrolledListBox(self,
'slistbox')
try:
self.subwidget_list['tick'] = _dummyButton(self, 'tick')
#cross Button : present if created with the fancy option
self.subwidget_list['cross'] = _dummyButton(self, 'cross')
except TypeError:
# unavailable when -fancy not specified
pass
class _dummyDirList(DirList, TixSubWidget):
def __init__(self, master, name, destroy_physically=1):
TixSubWidget.__init__(self, master, name, destroy_physically)
self.subwidget_list['hlist'] = _dummyHList(self, 'hlist')
self.subwidget_list['vsb'] = _dummyScrollbar(self, 'vsb')
self.subwidget_list['hsb'] = _dummyScrollbar(self, 'hsb')
class _dummyDirSelectBox(DirSelectBox, TixSubWidget):
def __init__(self, master, name, destroy_physically=1):
TixSubWidget.__init__(self, master, name, destroy_physically)
self.subwidget_list['dirlist'] = _dummyDirList(self, 'dirlist')
self.subwidget_list['dircbx'] = _dummyFileComboBox(self, 'dircbx')
class _dummyExFileSelectBox(ExFileSelectBox, TixSubWidget):
def __init__(self, master, name, destroy_physically=1):
TixSubWidget.__init__(self, master, name, destroy_physically)
self.subwidget_list['cancel'] = _dummyButton(self, 'cancel')
self.subwidget_list['ok'] = _dummyButton(self, 'ok')
self.subwidget_list['hidden'] = _dummyCheckbutton(self, 'hidden')
self.subwidget_list['types'] = _dummyComboBox(self, 'types')
self.subwidget_list['dir'] = _dummyComboBox(self, 'dir')
self.subwidget_list['dirlist'] = _dummyScrolledListBox(self, 'dirlist')
self.subwidget_list['file'] = _dummyComboBox(self, 'file')
self.subwidget_list['filelist'] = _dummyScrolledListBox(self, 'filelist')
class _dummyFileSelectBox(FileSelectBox, TixSubWidget):
def __init__(self, master, name, destroy_physically=1):
TixSubWidget.__init__(self, master, name, destroy_physically)
self.subwidget_list['dirlist'] = _dummyScrolledListBox(self, 'dirlist')
self.subwidget_list['filelist'] = _dummyScrolledListBox(self, 'filelist')
self.subwidget_list['filter'] = _dummyComboBox(self, 'filter')
self.subwidget_list['selection'] = _dummyComboBox(self, 'selection')
class _dummyFileComboBox(ComboBox, TixSubWidget):
def __init__(self, master, name, destroy_physically=1):
TixSubWidget.__init__(self, master, name, destroy_physically)
self.subwidget_list['dircbx'] = _dummyComboBox(self, 'dircbx')
class _dummyStdButtonBox(StdButtonBox, TixSubWidget):
def __init__(self, master, name, destroy_physically=1):
TixSubWidget.__init__(self, master, name, destroy_physically)
self.subwidget_list['ok'] = _dummyButton(self, 'ok')
self.subwidget_list['apply'] = _dummyButton(self, 'apply')
self.subwidget_list['cancel'] = _dummyButton(self, 'cancel')
self.subwidget_list['help'] = _dummyButton(self, 'help')
class _dummyNoteBookFrame(NoteBookFrame, TixSubWidget):
def __init__(self, master, name, destroy_physically=0):
TixSubWidget.__init__(self, master, name, destroy_physically)
class _dummyPanedWindow(PanedWindow, TixSubWidget):
def __init__(self, master, name, destroy_physically=1):
TixSubWidget.__init__(self, master, name, destroy_physically)
########################
### Utility Routines ###
########################
#mike Should tixDestroy be exposed as a wrapper? - but not for widgets.
def OptionName(widget):
'''Returns the qualified path name for the widget. Normally used to set
default options for subwidgets. See tixwidgets.py'''
return widget.tk.call('tixOptionName', widget._w)
# Called with a dictionary argument of the form
# {'*.c':'C source files', '*.txt':'Text Files', '*':'All files'}
# returns a string which can be used to configure the fsbox file types
# in an ExFileSelectBox. i.e.,
# '{{*} {* - All files}} {{*.c} {*.c - C source files}} {{*.txt} {*.txt - Text Files}}'
def FileTypeList(dict):
s = ''
for type in dict.keys():
s = s + '{{' + type + '} {' + type + ' - ' + dict[type] + '}} '
return s
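# A small illustration of FileTypeList; the mapping below is illustrative.
# The resulting string is suitable for the file_types option of an
# ExFileSelectBox (ordering follows dict iteration order).
def _filetypelist_example():
    # -> '{{*.c} {*.c - C source files}} {{*} {* - All files}} '
    return FileTypeList({'*.c': 'C source files', '*': 'All files'})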
# Still to be done:
# tixIconView
class CObjView(TixWidget):
"""This file implements the Canvas Object View widget. This is a base
class of IconView. It implements automatic placement/adjustment of the
scrollbars according to the canvas objects inside the canvas subwidget.
The scrollbars are adjusted so that the canvas is just large enough
to see all the objects.
"""
# FIXME: It should inherit -superclass tixScrolledWidget
pass
class Grid(TixWidget, XView, YView):
'''The Tix Grid command creates a new window and makes it into a
    tixGrid widget. Additional options may be specified on the command
line or in the option database to configure aspects such as its cursor
and relief.
A Grid widget displays its contents in a two dimensional grid of cells.
Each cell may contain one Tix display item, which may be in text,
graphics or other formats. See the DisplayStyle class for more information
about Tix display items. Individual cells, or groups of cells, can be
formatted with a wide range of attributes, such as its color, relief and
border.
Subwidgets - None'''
# valid specific resources as of Tk 8.4
# editdonecmd, editnotifycmd, floatingcols, floatingrows, formatcmd,
# highlightbackground, highlightcolor, leftmargin, itemtype, selectmode,
# selectunit, topmargin,
def __init__(self, master=None, cnf={}, **kw):
        static = []
        self.cnf = cnf
TixWidget.__init__(self, master, 'tixGrid', static, cnf, kw)
# valid options as of Tk 8.4
# anchor, bdtype, cget, configure, delete, dragsite, dropsite, entrycget,
# edit, entryconfigure, format, geometryinfo, info, index, move, nearest,
# selection, set, size, unset, xview, yview
def anchor_clear(self):
"""Removes the selection anchor."""
self.tk.call(self, 'anchor', 'clear')
def anchor_get(self):
"Get the (x,y) coordinate of the current anchor cell"
return self._getints(self.tk.call(self, 'anchor', 'get'))
def anchor_set(self, x, y):
"""Set the selection anchor to the cell at (x, y)."""
self.tk.call(self, 'anchor', 'set', x, y)
def delete_row(self, from_, to=None):
"""Delete rows between from_ and to inclusive.
If to is not provided, delete only row at from_"""
if to is None:
self.tk.call(self, 'delete', 'row', from_)
else:
self.tk.call(self, 'delete', 'row', from_, to)
def delete_column(self, from_, to=None):
"""Delete columns between from_ and to inclusive.
If to is not provided, delete only column at from_"""
if to is None:
self.tk.call(self, 'delete', 'column', from_)
else:
self.tk.call(self, 'delete', 'column', from_, to)
def edit_apply(self):
"""If any cell is being edited, de-highlight the cell and applies
the changes."""
self.tk.call(self, 'edit', 'apply')
def edit_set(self, x, y):
"""Highlights the cell at (x, y) for editing, if the -editnotify
command returns True for this cell."""
self.tk.call(self, 'edit', 'set', x, y)
def entrycget(self, x, y, option):
"Get the option value for cell at (x,y)"
if option and option[0] != '-':
option = '-' + option
return self.tk.call(self, 'entrycget', x, y, option)
def entryconfigure(self, x, y, cnf=None, **kw):
return self._configure(('entryconfigure', x, y), cnf, kw)
# def format
# def index
def info_exists(self, x, y):
"Return True if display item exists at (x,y)"
return self._getboolean(self.tk.call(self, 'info', 'exists', x, y))
def info_bbox(self, x, y):
# This seems to always return '', at least for 'text' displayitems
return self.tk.call(self, 'info', 'bbox', x, y)
def move_column(self, from_, to, offset):
"""Moves the range of columns from position FROM through TO by
the distance indicated by OFFSET. For example, move_column(2, 4, 1)
moves the columns 2,3,4 to columns 3,4,5."""
self.tk.call(self, 'move', 'column', from_, to, offset)
def move_row(self, from_, to, offset):
"""Moves the range of rows from position FROM through TO by
the distance indicated by OFFSET.
For example, move_row(2, 4, 1) moves the rows 2,3,4 to rows 3,4,5."""
self.tk.call(self, 'move', 'row', from_, to, offset)
def nearest(self, x, y):
"Return coordinate of cell nearest pixel coordinate (x,y)"
return self._getints(self.tk.call(self, 'nearest', x, y))
# def selection adjust
# def selection clear
# def selection includes
# def selection set
# def selection toggle
    def set(self, x, y, itemtype=None, **kw):
        args = self._options(self.cnf, kw)
        if itemtype is not None:
            args = ('-itemtype', itemtype) + args
        self.tk.call(self, 'set', x, y, *args)
def size_column(self, index, **kw):
"""Queries or sets the size of the column given by
INDEX. INDEX may be any non-negative
integer that gives the position of a given column.
INDEX can also be the string "default"; in this case, this command
queries or sets the default size of all columns.
When no option-value pair is given, this command returns a tuple
containing the current size setting of the given column. When
option-value pairs are given, the corresponding options of the
size setting of the given column are changed. Options may be one
        of the following:
pad0 pixels
Specifies the paddings to the left of a column.
pad1 pixels
Specifies the paddings to the right of a column.
size val
Specifies the width of a column. Val may be:
"auto" -- the width of the column is set to the
width of the widest cell in the column;
a valid Tk screen distance unit;
        or a real number followed by the word chars
(e.g. 3.4chars) that sets the width of the column to the
given number of characters."""
return self.tk.split(self.tk.call(self._w, 'size', 'column', index,
*self._options({}, kw)))
def size_row(self, index, **kw):
"""Queries or sets the size of the row given by
INDEX. INDEX may be any non-negative
        integer that gives the position of a given row.
        INDEX can also be the string "default"; in this case, this command
        queries or sets the default size of all rows.
        When no option-value pair is given, this command returns a list
        containing the current size setting of the given row. When option-value
        pairs are given, the corresponding options of the size setting of the
        given row are changed. Options may be one of the following:
pad0 pixels
Specifies the paddings to the top of a row.
pad1 pixels
Specifies the paddings to the bottom of a row.
size val
Specifies the height of a row. Val may be:
"auto" -- the height of the row is set to the
height of the highest cell in the row;
a valid Tk screen distance unit;
        or a real number followed by the word chars
(e.g. 3.4chars) that sets the height of the row to the
given number of characters."""
return self.tk.split(self.tk.call(
self, 'size', 'row', index, *self._options({}, kw)))
def unset(self, x, y):
"""Clears the cell at (x, y) by removing its display item."""
self.tk.call(self._w, 'unset', x, y)
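# A minimal Grid usage sketch, assuming the Tix extension is available;
# cell contents and sizes are illustrative. set() places a display item
# in a cell and size_column() applies the options documented above.
def _grid_example():
    root = Tk()
    g = Grid(root)
    g.pack(expand=1, fill=BOTH)
    g.set(0, 0, itemtype='text', text='cell (0,0)')
    g.set(1, 0, itemtype='text', text='cell (1,0)')
    g.size_column(0, size='auto', pad0=4)
    root.mainloop()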
class ScrolledGrid(Grid):
'''Scrolled Grid widgets'''
# FIXME: It should inherit -superclass tixScrolledWidget
def __init__(self, master=None, cnf={}, **kw):
        static = []
        self.cnf = cnf
TixWidget.__init__(self, master, 'tixScrolledGrid', static, cnf, kw)
| 39.624872
| 96
| 0.62333
|
2d4d7dd9b943d1b0639ceaab83e3882ad0815cd3
| 1,947
|
py
|
Python
|
examples/dhclient.py
|
li-ma/pyroute2
|
48b85e39d675c18c05eb209229db082316aa760a
|
[
"Apache-2.0"
] | null | null | null |
examples/dhclient.py
|
li-ma/pyroute2
|
48b85e39d675c18c05eb209229db082316aa760a
|
[
"Apache-2.0"
] | null | null | null |
examples/dhclient.py
|
li-ma/pyroute2
|
48b85e39d675c18c05eb209229db082316aa760a
|
[
"Apache-2.0"
] | null | null | null |
import sys
import select
from pprint import pprint
from pyroute2.dhcp import BOOTREQUEST
from pyroute2.dhcp import DHCPDISCOVER
from pyroute2.dhcp import DHCPOFFER
from pyroute2.dhcp import DHCPREQUEST
from pyroute2.dhcp import DHCPACK
from pyroute2.dhcp.dhcp4msg import dhcp4msg
from pyroute2.dhcp.dhcp4socket import DHCP4Socket
def req(s, poll, msg, expect):
    """Send a DHCP message and wait for a reply of the expected type,
    retransmitting on timeout and ignoring replies for foreign xids."""
    do_req = True
    xid = None
    while True:
        # (re)send the request and record its transaction id
        if do_req:
            xid = s.put(msg)['xid']
        # wait up to 2 seconds for a response
        events = poll.poll(2)
        for (fd, event) in events:
            response = s.get()
            if response['xid'] != xid:
                # a reply to somebody else's transaction -- keep listening
                do_req = False
                continue
            if response['options']['message_type'] != expect:
                raise Exception("DHCP protocol error")
            return response
        # timed out: retransmit the request
        do_req = True
def action(ifname):
s = DHCP4Socket(ifname)
poll = select.poll()
poll.register(s, select.POLLIN | select.POLLPRI)
    # DISCOVER: solicit offers; the parameter list requests options
    # 1 (subnet mask), 3 (router), 6 (DNS), 12 (host name),
    # 15 (domain name) and 28 (broadcast address)
discover = dhcp4msg({'op': BOOTREQUEST,
'chaddr': s.l2addr,
'options': {'message_type': DHCPDISCOVER,
'parameter_list': [1, 3, 6, 12, 15, 28]}})
reply = req(s, poll, discover, expect=DHCPOFFER)
    # REQUEST: confirm the offered address with the offering server
request = dhcp4msg({'op': BOOTREQUEST,
'chaddr': s.l2addr,
'options': {'message_type': DHCPREQUEST,
'requested_ip': reply['yiaddr'],
'server_id': reply['options']['server_id'],
'parameter_list': [1, 3, 6, 12, 15, 28]}})
reply = req(s, poll, request, expect=DHCPACK)
pprint(reply)
s.close()
return reply
if __name__ == '__main__':
if len(sys.argv) > 1:
ifname = sys.argv[1]
else:
ifname = 'eth0'
action(ifname)
| 29.953846
| 79
| 0.546482
|
bb3929a528dd9a8fe45a1509be0c380735c4e7e7
| 1,365
|
py
|
Python
|
youtubevidz/urls.py
|
MsNahid/Youtube-Hall
|
6a2c17801aac932020bc11ad66bb7a2f0af08c7f
|
[
"MIT"
] | null | null | null |
youtubevidz/urls.py
|
MsNahid/Youtube-Hall
|
6a2c17801aac932020bc11ad66bb7a2f0af08c7f
|
[
"MIT"
] | null | null | null |
youtubevidz/urls.py
|
MsNahid/Youtube-Hall
|
6a2c17801aac932020bc11ad66bb7a2f0af08c7f
|
[
"MIT"
] | null | null | null |
"""youtubevidz URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/3.1/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.contrib.auth import views as auth_views
from django.urls import path
from halls import views
from django.conf.urls.static import static
from django.conf import settings
urlpatterns = [
path('admin/', admin.site.urls),
path('', views.home, name='home'),
# AUTH
path('signup', views.SignUp.as_view(), name='signup'),
path('login', auth_views.LoginView.as_view(), name='login'),
path('logout', auth_views.LogoutView.as_view(), name='logout'),
    # Hall
path('halloffame/create', views.createHallsView.as_view(), name='create_hall'),
]
urlpatterns += static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
| 36.891892
| 84
| 0.698901
|
9b09c9131955fd799a3e07b80f9abc16795f5395
| 494
|
py
|
Python
|
pebble/PblCommand.py
|
Nikolabenakova90/libpebble
|
e935e9aa50fe7dde25bbee8cbe0e7606378edbb8
|
[
"MIT"
] | 167
|
2015-01-02T00:36:07.000Z
|
2021-07-08T00:20:10.000Z
|
pebble/PblCommand.py
|
Nikolabenakova90/libpebble
|
e935e9aa50fe7dde25bbee8cbe0e7606378edbb8
|
[
"MIT"
] | 7
|
2015-01-01T17:58:40.000Z
|
2022-02-04T01:57:38.000Z
|
pebble/PblCommand.py
|
Nikolabenakova90/libpebble
|
e935e9aa50fe7dde25bbee8cbe0e7606378edbb8
|
[
"MIT"
] | 34
|
2015-01-23T13:39:20.000Z
|
2022-01-26T10:23:31.000Z
|
import os
class PblCommand:
name = ''
help = ''
    def run(self, args):
        # subclasses override this with the command implementation
        pass
def configure_subparser(self, parser):
parser.add_argument('--sdk', help='Path to Pebble SDK (ie: ~/pebble-dev/PebbleSDK-2.X/)')
def sdk_path(self, args):
"""
Tries to guess the location of the Pebble SDK
"""
if args.sdk:
return args.sdk
else:
return os.path.normpath(os.path.join(os.path.dirname(__file__), '..', '..'))
| 22.454545
| 97
| 0.550607
|
f44140ad0121845e9c45499ceebd0c458e33344e
| 2,030
|
py
|
Python
|
demo/analysis.py
|
rintoj/ai
|
a9f4e9edca6ffcbcd79186f5a61a55dfdaeefedd
|
[
"MIT"
] | null | null | null |
demo/analysis.py
|
rintoj/ai
|
a9f4e9edca6ffcbcd79186f5a61a55dfdaeefedd
|
[
"MIT"
] | null | null | null |
demo/analysis.py
|
rintoj/ai
|
a9f4e9edca6ffcbcd79186f5a61a55dfdaeefedd
|
[
"MIT"
] | null | null | null |
# analysis.py
# -----------
# Licensing Information: Please do not distribute or publish solutions to this
# project. You are free to use and extend these projects for educational
# purposes. The Pacman AI projects were developed at UC Berkeley, primarily by
# John DeNero (denero@cs.berkeley.edu) and Dan Klein (klein@cs.berkeley.edu).
# For more info, see http://inst.eecs.berkeley.edu/~cs188/sp09/pacman.html
######################
# ANALYSIS QUESTIONS #
######################
# Set the given parameters to obtain the specified policies through
# value iteration.
def question2():
answerDiscount = 0.9
answerNoise = 0.0
return answerDiscount, answerNoise
def question3a():
answerDiscount = 0.3
answerNoise = 0.0
answerLivingReward = 0.0
return answerDiscount, answerNoise, answerLivingReward
# If not possible, return 'NOT POSSIBLE'
def question3b():
answerDiscount = 0.3
answerNoise = 0.1
answerLivingReward = -0.9
return answerDiscount, answerNoise, answerLivingReward
# If not possible, return 'NOT POSSIBLE'
def question3c():
answerDiscount = 0.9
answerNoise = 0.0
answerLivingReward = 0.0
return answerDiscount, answerNoise, answerLivingReward
# If not possible, return 'NOT POSSIBLE'
def question3d():
answerDiscount = 0.9
answerNoise = 0.1
answerLivingReward = 0.1
return answerDiscount, answerNoise, answerLivingReward
# If not possible, return 'NOT POSSIBLE'
def question3e():
answerDiscount = 0.0
answerNoise = 0.0
answerLivingReward = 0.1
return answerDiscount, answerNoise, answerLivingReward
# If not possible, return 'NOT POSSIBLE'
def question6():
answerEpsilon = 0.5
answerLearningRate = 0.5
return 'NOT POSSIBLE' #answerEpsilon, answerLearningRate
# If not possible, return 'NOT POSSIBLE'
if __name__ == '__main__':
    print('Answers to analysis questions:')
    import analysis
    for q in [q for q in dir(analysis) if q.startswith('question')]:
        response = getattr(analysis, q)()
        print('  Question %s:\t%s' % (q, str(response)))
| 29.852941
| 78
| 0.718719
|
ce34e913ae10504c1ea08c6c105f04117de0cd7e
| 21
|
py
|
Python
|
aliyun-python-sdk-petadata/aliyunsdkpetadata/__init__.py
|
yndu13/aliyun-openapi-python-sdk
|
12ace4fb39fe2fb0e3927a4b1b43ee4872da43f5
|
[
"Apache-2.0"
] | 1,001
|
2015-07-24T01:32:41.000Z
|
2022-03-25T01:28:18.000Z
|
aliyun-python-sdk-petadata/aliyunsdkpetadata/__init__.py
|
yndu13/aliyun-openapi-python-sdk
|
12ace4fb39fe2fb0e3927a4b1b43ee4872da43f5
|
[
"Apache-2.0"
] | 363
|
2015-10-20T03:15:00.000Z
|
2022-03-08T12:26:19.000Z
|
aliyun-python-sdk-petadata/aliyunsdkpetadata/__init__.py
|
yndu13/aliyun-openapi-python-sdk
|
12ace4fb39fe2fb0e3927a4b1b43ee4872da43f5
|
[
"Apache-2.0"
] | 682
|
2015-09-22T07:19:02.000Z
|
2022-03-22T09:51:46.000Z
|
__version__ = '1.2.1'
| 21
| 21
| 0.666667
|
355d3cae6499b918ce903c3b334c51c30cd75486
| 2,667
|
py
|
Python
|
any/unit_test.py
|
assassinen/coursera_mfti_python
|
eee7b3c55256f391c1be32924fa1ad3364b307f2
|
[
"Apache-2.0"
] | null | null | null |
any/unit_test.py
|
assassinen/coursera_mfti_python
|
eee7b3c55256f391c1be32924fa1ad3364b307f2
|
[
"Apache-2.0"
] | null | null | null |
any/unit_test.py
|
assassinen/coursera_mfti_python
|
eee7b3c55256f391c1be32924fa1ad3364b307f2
|
[
"Apache-2.0"
] | null | null | null |
# -*- encoding: utf-8 -*-
import unittest
class Point(object):
def __init__(self, x, y):
self.x = float(x)
self.y = float(y)
def __str__(self):
return '({0}, {1})'.format(self.x, self.y)
    def __eq__(self, other):
        return (self.x == other.x) and (self.y == other.y)
    def __ne__(self, other):
        return (self.x != other.x) or (self.y != other.y)
class TestPoint(unittest.TestCase):
def setUp(self):
self.A = Point(5, 6)
self.B = Point(6, 10)
self.C = Point(5.0, 6.0)
self.D = Point(-5, -6)
    def test_init(self):
        print(self.A.x, self.A.y)
        self.assertEqual((self.A.x, self.A.y), (float(5), float(6)), "Coordinates were not converted to floats!")
        self.assertEqual((self.B.x, self.B.y), (float(6), float(10)),
                         "Coordinates were not converted to floats!")
        self.assertEqual((self.C.x, self.C.y), (float(5), float(6)), "Coordinates were not converted to floats!")
        self.assertEqual((self.D.x, self.D.y), (float(-5), float(-6)),
                         "Coordinates were not converted to floats!")
    def test_str(self):
        self.assertTrue(str(self.A) == "(5.0, 6.0)", "Incorrect string representation!")
        self.assertTrue(str(self.B) == "(6.0, 10.0)", "Incorrect string representation!")
        self.assertTrue(str(self.C) == "(5.0, 6.0)", "Incorrect string representation!")
        self.assertTrue(str(self.D) == "(-5.0, -6.0)", "Incorrect string representation!")
    def test_eq(self):
        self.assertTrue(self.A == self.C,
                        "These two points are equal, but the test found them unequal!")
        self.assertFalse(self.A == self.B,
                         "These two points are unequal, but the test found them equal!")
        self.assertFalse(self.A == self.D,
                         "These two points are unequal, but the test found them equal!")
    def test_ne(self):
        self.assertFalse(self.A != self.C,
                         "These two points are equal, but the test found them unequal!")
        self.assertTrue(self.A != self.B,
                        "These two points are unequal, but the test found them equal!")
        self.assertTrue(self.A != self.D,
                        "These two points are unequal, but the test found them equal!")
if __name__ == '__main__':
unittest.main()
| 44.45
| 120
| 0.578178
|
1a299a0c8958629dd4047e98745410f5d66e4eb6
| 647
|
py
|
Python
|
var/spack/repos/builtin/packages/gmtsar/package.py
|
xiki-tempula/spack
|
9d66c05e93ab8a933fc59915040c0e0c86a4aac4
|
[
"ECL-2.0",
"Apache-2.0",
"MIT"
] | 9
|
2018-04-18T07:51:40.000Z
|
2021-09-10T03:56:57.000Z
|
var/spack/repos/builtin/packages/gmtsar/package.py
|
xiki-tempula/spack
|
9d66c05e93ab8a933fc59915040c0e0c86a4aac4
|
[
"ECL-2.0",
"Apache-2.0",
"MIT"
] | 907
|
2018-04-18T11:17:57.000Z
|
2022-03-31T13:20:25.000Z
|
var/spack/repos/builtin/packages/gmtsar/package.py
|
xiki-tempula/spack
|
9d66c05e93ab8a933fc59915040c0e0c86a4aac4
|
[
"ECL-2.0",
"Apache-2.0",
"MIT"
] | 29
|
2018-11-05T16:14:23.000Z
|
2022-02-03T16:07:09.000Z
|
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class Gmtsar(CMakePackage):
"""GMTSAR is an open source (GNU General Public License) InSAR processing
system designed for users familiar with Generic Mapping Tools (GMT).
"""
homepage = "https://topex.ucsd.edu/gmtsar/"
url = "https://elenacreinisch.com/gmtsar/GMTSAR-5.6.tar.gz"
version('5.6', sha256='0f7326f46aedf1e8e4dc80dd03f1ae8681f52a8253dc4a00a943aec14562994b')
depends_on('gmt')
| 30.809524
| 93
| 0.731066
|
7813e5421911d0e838934fd13a56c6b894b7db01
| 2,193
|
py
|
Python
|
ijal_interlinear/tests/test_DegenerateLine.py
|
davidjamesbeck/IJAL-interlinear
|
cb5dbb1d6aea98cce76668aa868a9189f31baf3f
|
[
"BSD-2-Clause"
] | null | null | null |
ijal_interlinear/tests/test_DegenerateLine.py
|
davidjamesbeck/IJAL-interlinear
|
cb5dbb1d6aea98cce76668aa868a9189f31baf3f
|
[
"BSD-2-Clause"
] | null | null | null |
ijal_interlinear/tests/test_DegenerateLine.py
|
davidjamesbeck/IJAL-interlinear
|
cb5dbb1d6aea98cce76668aa868a9189f31baf3f
|
[
"BSD-2-Clause"
] | null | null | null |
# test_DegenerateLine.py
#----------------------------------------------------------------------------------------------------
import re
import sys
sys.path.append("..")
from line import *
from degenerateLine import *
pd.set_option('display.width', 1000)
#----------------------------------------------------------------------------------------------------
def runTests():
test_constructor()
test_toHTML()
def test_constructor():
"""
MonkeyAndThunder starts off with a few introductory lines in Spanish, with English translation.
No words, no glosses, just a line with time slots, and one child element, the free translation
"""
print("--- test_constructor")
filename = "../testData/monkeyAndThunder/AYA1_MonkeyandThunder.eaf"
doc = etree.parse(filename)
x0 = DegenerateLine(doc, 0)
assert(x0.getTierCount() == 2)
#print(x0.getTable())
def test_toHTML(displayPage=False):
"""
create a barebones webpage, and htmlDoc, then render a DegenerateLine into it
"""
print("--- test_toHTML")
filename = "../testData/monkeyAndThunder/AYA1_MonkeyandThunder.eaf"
doc = etree.parse(filename)
x0 = DegenerateLine(doc, 0)
htmlDoc = Doc()
htmlDoc.asis('<!DOCTYPE html>')
with htmlDoc.tag('html', lang="en"):
with htmlDoc.tag('head'):
htmlDoc.asis('<link rel="stylesheet" href="ijal.css">')
with htmlDoc.tag('body'):
x0.toHtml(htmlDoc)
htmlText = htmlDoc.getvalue()
assert(htmlText.find("Por ejemplo") > 100)
assert(htmlText.find("For example") > 200)
assert(htmlText.count("line-content") == 1)
assert(htmlText.count("speech-tier") == 1)
assert(htmlText.count("freeTranslation-tier") == 1)
# three divs only: line-content, speech-tier, freeTranslation-tier
assert(htmlText.count("<div class") == 3)
if(displayPage):
f = open("degenerate.html", "w")
f.write(indent(htmlText))
f.close()
os.system("open %s" % "degenerate.html")
#----------------------------------------------------------------------------------------------------
if __name__ == '__main__':
runTests()
| 32.731343
| 101
| 0.559964
|
912f88065dd6e7dab013c4056340790049150bf7
| 3,125
|
py
|
Python
|
minibot/scripts/SwarmMaster.py
|
cornell-cup/cs-minibot
|
bf44124f103570a9796430f2978b12dd2c0405c2
|
[
"Apache-2.0"
] | 9
|
2017-10-01T17:30:10.000Z
|
2020-07-16T04:32:37.000Z
|
minibot/scripts/SwarmMaster.py
|
cornell-cup/cs-minibot
|
bf44124f103570a9796430f2978b12dd2c0405c2
|
[
"Apache-2.0"
] | 53
|
2017-10-03T02:11:51.000Z
|
2018-03-25T01:56:30.000Z
|
minibot/scripts/SwarmMaster.py
|
cornell-cup/cs-minibot
|
bf44124f103570a9796430f2978b12dd2c0405c2
|
[
"Apache-2.0"
] | 2
|
2017-10-03T15:43:46.000Z
|
2018-03-17T19:25:36.000Z
|
from hardware.communication.ZMQ import ZMQExchange
from hardware.communication.TCP import TCP
from threading import Thread
from peripherals.colorsensor import ColorSensor
import time
threads = []
count = {"F":0,"B":0,"L":0,"R":0}
def run(bot):
# Sets up TCP connection between master and minions. Starts publisher-side
# connection.
# always set the mediator first
z = ZMQExchange()
z.setMediator()
z.setBroadcaster()
TCP.tcp.send_to_basestation("SwarmIP", z.getIP("wlan0"))
mediateThread = Thread(target=z.mediate)
mediateThread.start()
threads.append(mediateThread)
#echobot(bot,z)
colorbot(bot,z)
def colorbot(bot, z):
    speed = 10
    msg = (0, 0)  # default command (stop) until a known color is read
    cs = bot.get_sensor_by_name("ColorSensor")
cs.calibrate()
pinkFirstTime = True
orangeFirstTime = True
try:
while(True):
c = cs.read_color()
if(c=="RED"):
# stop
msg = (0,0)
count["F"]=0
count["B"]=0
count["L"]=0
count["R"]=0
speed = 10
elif(c=="GREEN"):
# forwards
count["F"]+=1
count["B"]=0
count["L"]=0
count["R"]=0
speed = increment_speed("F",3,speed,15)
msg = (speed,speed)
elif(c=="BLUE"):
# backwards
count["F"]=0
count["B"]+=1
count["L"]=0
count["R"]=0
speed = increment_speed("B",3,speed,15)
msg = (-speed,-speed)
elif(c=="YELLOW"):
# turn left
count["F"]=0
count["B"]=0
count["L"]+=1
count["R"]=0
speed = increment_speed("L",3,speed,15)
msg = (-speed,speed)
elif(c=="VIOLET"):
# turn right
count["F"]=0
count["B"]=0
count["L"]=0
count["R"]+=1
speed = increment_speed("R",3,speed,15)
msg = (speed,-speed)
z.broadcast(msg)
time.sleep(0.2)
finally:
cleanup(z)
def increment_speed(direction, inc_time, speed, inc_amt):
""" Given a direction, increments the speed after inc_time amount of seconds by
inc_amt increase of power to the motors.
"""
if(count[direction]>(inc_time*5)):
count[direction] = 0
if(speed<50):
speed += inc_amt
print("Speed increased: " + str(speed))
return speed
def echobot(bot,z):
try:
while(True):
# msg is a tuple of left motor and right motor, respectively.
msg = bot.get_actuator_by_name("two_wheel_movement").get_value()
print("MSG: " + msg)
z.broadcast(msg)
time.sleep(0.1)
if not TCP.tcp.isConnected():
break
finally:
cleanup(z)
def cleanup(z):
for t in threads:
t.join(0.1)
z.stopZMQExchange()
| 27.654867
| 83
| 0.48992
|
90480542f7cd684726f7d1d3fdf0093cebf4f6f0
| 2,396
|
py
|
Python
|
setup_sub.py
|
shimwell/pyne
|
0c2714d7c0d1b5e20be6ae6527da2c660dd6b1b3
|
[
"MIT"
] | 182
|
2015-01-03T15:53:31.000Z
|
2022-03-22T16:23:18.000Z
|
setup_sub.py
|
shimwell/pyne
|
0c2714d7c0d1b5e20be6ae6527da2c660dd6b1b3
|
[
"MIT"
] | 781
|
2015-01-13T02:47:11.000Z
|
2022-03-22T17:29:29.000Z
|
setup_sub.py
|
shimwell/pyne
|
0c2714d7c0d1b5e20be6ae6527da2c660dd6b1b3
|
[
"MIT"
] | 153
|
2015-01-15T21:34:43.000Z
|
2021-12-21T22:19:38.000Z
|
#!/usr/bin/env python
"""Welcome to PyNE's setup.py sub script."""
from __future__ import print_function
import io
import os
import re
import sys
import imp
import shutil
import tarfile
import argparse
import platform
import warnings
import subprocess
from glob import glob
from distutils import core, dir_util, sysconfig
from contextlib import contextmanager
if sys.version_info[0] < 3:
from urllib import urlopen
else:
from urllib.request import urlopen
from distutils.core import setup
from pyne.pyne_version import PYNE_VERSION
IS_NT = os.name == 'nt'
def main():
scripts = [os.path.join('scripts', f) for f in os.listdir('scripts')]
scripts = [s for s in scripts if (os.name == 'nt' and s.endswith('.bat'))
or (os.name != 'nt' and
not s.endswith('.bat'))]
packages = ['pyne', 'pyne.dbgen', 'pyne.apigen', 'pyne.xs',
'pyne.transmute', 'pyne.gui', 'pyne.cli', 'pyne.fortranformat']
pack_dir = {
'pyne': 'pyne',
'pyne.xs': 'pyne/xs',
'pyne.gui': 'pyne/gui',
'pyne.cli': 'pyne/cli',
'pyne.dbgen': 'pyne/dbgen',
'pyne.apigen': 'pyne/apigen',
'pyne.transmute': 'pyne/transmute',
'pyne.fortranformat': 'pyne/fortranformat',
}
extpttn = ['*.dll', '*.so', '*.dylib', '*.pyd', '*.pyo']
pack_data = {
'lib': extpttn,
'pyne': ['*.pxd',
#'include/*.h', 'include/*.pxi', 'include/*/*.h',
#'include/*/*/*.h', 'include/*/*/*/*.h',
'*.json', '*.inp',
#'_includes/*.txt', '_includes/*.pxd', '_includes/*/*',
#'_includes/*/*/*'
] + extpttn,
'pyne.xs': ['*.pxd'] + extpttn,
'pyne.gui': ['*.pyw'],
'pyne.dbgen': ['*.html', '*.csv', 'abundances.txt', 'mass.mas16', '*.dat'],
}
setup_kwargs = {
"name": "pyne",
"version": PYNE_VERSION,
"description": 'The Nuclear Engineering Toolkit',
"author": 'PyNE Development Team',
"author_email": 'pyne-dev@googlegroups.com',
"url": 'http://pyne.github.com/',
"packages": packages,
"package_dir": pack_dir,
"package_data": pack_data,
"scripts": scripts,
}
rtn = setup(**setup_kwargs)
if __name__ == "__main__":
main()
| 30.329114
| 83
| 0.541319
|
50e53ac83012aefe97d8d210acbb50a56f6ff6c9
| 929
|
py
|
Python
|
mopidy_notify/__init__.py
|
phijor/mopidy-notify
|
22c6c00dc1f27ad71de1ea38d7973fdfc67331f1
|
[
"Apache-2.0"
] | null | null | null |
mopidy_notify/__init__.py
|
phijor/mopidy-notify
|
22c6c00dc1f27ad71de1ea38d7973fdfc67331f1
|
[
"Apache-2.0"
] | null | null | null |
mopidy_notify/__init__.py
|
phijor/mopidy-notify
|
22c6c00dc1f27ad71de1ea38d7973fdfc67331f1
|
[
"Apache-2.0"
] | null | null | null |
import logging
import pathlib
import pkg_resources
from mopidy import config, ext
__version__ = pkg_resources.get_distribution("Mopidy-Notify").version
# TODO: If you need to log, use loggers named after the current Python module
logger = logging.getLogger(__name__)
class Extension(ext.Extension):
dist_name = "Mopidy-Notify"
ext_name = "notify"
version = __version__
def get_default_config(self):
return config.read(pathlib.Path(__file__).parent / "ext.conf")
def get_config_schema(self):
schema = super().get_config_schema()
schema["max_icon_size"] = config.Integer(minimum=0)
schema["fallback_icon"] = config.Path()
schema["track_summary"] = config.String()
schema["track_message"] = config.String()
return schema
def setup(self, registry):
from .frontend import NotifyFrontend
registry.add("frontend", NotifyFrontend)
| 27.323529
| 77
| 0.700753
|
02fed1497694144a40d113915743bd61fa10ca91
| 528
|
py
|
Python
|
main.py
|
Tmw/edward
|
0a58022d0bbf1f80abecb880f7565acaa5cebfde
|
[
"MIT"
] | 20
|
2019-01-07T08:36:57.000Z
|
2021-06-15T09:21:37.000Z
|
main.py
|
Tmw/edward
|
0a58022d0bbf1f80abecb880f7565acaa5cebfde
|
[
"MIT"
] | 1
|
2019-01-17T12:34:29.000Z
|
2019-01-17T12:34:29.000Z
|
main.py
|
Tmw/edward
|
0a58022d0bbf1f80abecb880f7565acaa5cebfde
|
[
"MIT"
] | 2
|
2020-01-14T07:30:01.000Z
|
2020-03-03T17:13:16.000Z
|
from edward import Edward
import os
import signal
DEFAULT_MAX_THREADS = 2
def main():
token = os.getenv("SLACK_TOKEN")
    threads = int(os.getenv("THREADS", DEFAULT_MAX_THREADS))  # env values arrive as strings
if token is None:
raise RuntimeError("SLACK_TOKEN not set")
edward = Edward(slack_token=token, max_threads=threads)
stopper = lambda *args: edward.stop()
signal.signal(signal.SIGINT, stopper)
signal.signal(signal.SIGTERM, stopper)
edward.start()
# Kick off main program
if __name__ == "__main__":
main()
| 19.555556
| 59
| 0.69697
|
b01cf0c5b06b89f848414853673af7d269bdd755
| 1,572
|
py
|
Python
|
catalyst/utils/tests/test_swa.py
|
and-kul/catalyst
|
51428d7756e62b9b8ee5379f38e9fd576eeb36e5
|
[
"Apache-2.0"
] | 2
|
2019-04-19T21:34:31.000Z
|
2019-05-02T22:50:25.000Z
|
catalyst/utils/tests/test_swa.py
|
and-kul/catalyst
|
51428d7756e62b9b8ee5379f38e9fd576eeb36e5
|
[
"Apache-2.0"
] | 1
|
2021-01-07T16:13:45.000Z
|
2021-01-21T09:27:54.000Z
|
catalyst/utils/tests/test_swa.py
|
and-kul/catalyst
|
51428d7756e62b9b8ee5379f38e9fd576eeb36e5
|
[
"Apache-2.0"
] | 1
|
2021-01-07T02:50:38.000Z
|
2021-01-07T02:50:38.000Z
|
import os
from pathlib import Path
import shutil
import unittest
import torch
import torch.nn as nn
from catalyst.utils.checkpoint import load_checkpoint
from catalyst.utils.swa import get_averaged_weights_by_path_mask
class Net(nn.Module):
"""Dummy network class."""
def __init__(self, init_weight=4):
"""Initialization of network and filling it with given numbers."""
super(Net, self).__init__()
self.fc = nn.Linear(2, 1)
self.fc.weight.data.fill_(init_weight)
self.fc.bias.data.fill_(init_weight)
class TestSwa(unittest.TestCase):
"""Test SWA class."""
def setUp(self):
"""Test set up."""
net1 = Net(init_weight=2.0)
net2 = Net(init_weight=5.0)
os.mkdir("./checkpoints")
torch.save(net1.state_dict(), "./checkpoints/net1.pth")
torch.save(net2.state_dict(), "./checkpoints/net2.pth")
def tearDown(self):
"""Test tear down."""
shutil.rmtree("./checkpoints")
def test_averaging(self):
"""Test SWA method."""
weights = get_averaged_weights_by_path_mask(
logdir=Path("./"), path_mask="net*"
)
torch.save(weights, str("./checkpoints/swa_weights.pth"))
model = Net()
model.load_state_dict(load_checkpoint("./checkpoints/swa_weights.pth"))
self.assertEqual(float(model.fc.weight.data[0][0]), 3.5)
self.assertEqual(float(model.fc.weight.data[0][1]), 3.5)
self.assertEqual(float(model.fc.bias.data[0]), 3.5)
if __name__ == "__main__":
unittest.main()
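# For reference, the averaging exercised above reduces to an element-wise
# mean over state dicts. A minimal sketch, assuming all checkpoints share
# identical keys and shapes (this is not the library's implementation):
#
#   def average_state_dicts(state_dicts):
#       return {
#           key: torch.stack([sd[key] for sd in state_dicts]).mean(dim=0)
#           for key in state_dicts[0]
#       }
#
# With the two nets above, every averaged entry is (2.0 + 5.0) / 2 = 3.5,
# which is exactly what the assertions check.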
| 29.111111
| 79
| 0.641221
|
43e49d7bb7544f7f445d875a7b1e6f2c240920ce
| 1,429
|
py
|
Python
|
pgcli/key_bindings.py
|
czchen/debian-pgcli
|
67498d4e8f6d153de7f2f73380d2b749c550c247
|
[
"BSD-3-Clause"
] | null | null | null |
pgcli/key_bindings.py
|
czchen/debian-pgcli
|
67498d4e8f6d153de7f2f73380d2b749c550c247
|
[
"BSD-3-Clause"
] | null | null | null |
pgcli/key_bindings.py
|
czchen/debian-pgcli
|
67498d4e8f6d153de7f2f73380d2b749c550c247
|
[
"BSD-3-Clause"
] | null | null | null |
import logging
from prompt_toolkit.keys import Keys
from prompt_toolkit.key_binding.manager import KeyBindingManager
_logger = logging.getLogger(__name__)
def pgcli_bindings(vi_mode=False):
"""
Custom key bindings for pgcli.
"""
key_binding_manager = KeyBindingManager(enable_vi_mode=vi_mode)
@key_binding_manager.registry.add_binding(Keys.F2)
def _(event):
"""
Enable/Disable SmartCompletion Mode.
"""
_logger.debug('Detected F2 key.')
buf = event.cli.current_buffer
buf.completer.smart_completion = not buf.completer.smart_completion
@key_binding_manager.registry.add_binding(Keys.F3)
def _(event):
"""
Enable/Disable Multiline Mode.
"""
_logger.debug('Detected F3 key.')
buf = event.cli.current_buffer
buf.always_multiline = not buf.always_multiline
@key_binding_manager.registry.add_binding(Keys.F4)
def _(event):
"""
Toggle between Vi and Emacs mode.
"""
_logger.debug('Detected F4 key.')
key_binding_manager.enable_vi_mode = not key_binding_manager.enable_vi_mode
@key_binding_manager.registry.add_binding(Keys.ControlSpace)
def _(event):
"""
Force autocompletion at cursor.
"""
_logger.debug('Detected <C-Space> key.')
event.cli.current_buffer.complete_next()
return key_binding_manager
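# A minimal usage sketch, assuming the prompt_toolkit 1.x API this module
# targets; `key_bindings_registry` is the 1.x parameter name, and this demo
# is not part of pgcli itself (in pgcli the bindings act on its
# completer-aware buffer):
if __name__ == '__main__':
    from prompt_toolkit.shortcuts import prompt
    manager = pgcli_bindings(vi_mode=False)
    text = prompt('> ', key_bindings_registry=manager.registry)
    print('You typed:', text)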
| 29.770833
| 83
| 0.673898
|
e950bda2fd349c9e08d0660e9fb68760bcc37beb
| 13,626
|
py
|
Python
|
sdk/python/pulumi_azure_nextgen/network/v20180701/network_interface.py
|
pulumi/pulumi-azure-nextgen
|
452736b0a1cf584c2d4c04666e017af6e9b2c15c
|
[
"Apache-2.0"
] | 31
|
2020-09-21T09:41:01.000Z
|
2021-02-26T13:21:59.000Z
|
sdk/python/pulumi_azure_nextgen/network/v20180701/network_interface.py
|
pulumi/pulumi-azure-nextgen
|
452736b0a1cf584c2d4c04666e017af6e9b2c15c
|
[
"Apache-2.0"
] | 231
|
2020-09-21T09:38:45.000Z
|
2021-03-01T11:16:03.000Z
|
sdk/python/pulumi_azure_nextgen/network/v20180701/network_interface.py
|
pulumi/pulumi-azure-nextgen
|
452736b0a1cf584c2d4c04666e017af6e9b2c15c
|
[
"Apache-2.0"
] | 4
|
2020-09-29T14:14:59.000Z
|
2021-02-10T20:38:16.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from ... import _utilities, _tables
from . import outputs
from ._enums import *
from ._inputs import *
__all__ = ['NetworkInterface']
class NetworkInterface(pulumi.CustomResource):
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
dns_settings: Optional[pulumi.Input[pulumi.InputType['NetworkInterfaceDnsSettingsArgs']]] = None,
enable_accelerated_networking: Optional[pulumi.Input[bool]] = None,
enable_ip_forwarding: Optional[pulumi.Input[bool]] = None,
etag: Optional[pulumi.Input[str]] = None,
id: Optional[pulumi.Input[str]] = None,
ip_configurations: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['NetworkInterfaceIPConfigurationArgs']]]]] = None,
location: Optional[pulumi.Input[str]] = None,
mac_address: Optional[pulumi.Input[str]] = None,
network_interface_name: Optional[pulumi.Input[str]] = None,
network_security_group: Optional[pulumi.Input[pulumi.InputType['NetworkSecurityGroupArgs']]] = None,
primary: Optional[pulumi.Input[bool]] = None,
provisioning_state: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
resource_guid: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
virtual_machine: Optional[pulumi.Input[pulumi.InputType['SubResourceArgs']]] = None,
__props__=None,
__name__=None,
__opts__=None):
"""
A network interface in a resource group.
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[pulumi.InputType['NetworkInterfaceDnsSettingsArgs']] dns_settings: The DNS settings in network interface.
:param pulumi.Input[bool] enable_accelerated_networking: If the network interface is accelerated networking enabled.
:param pulumi.Input[bool] enable_ip_forwarding: Indicates whether IP forwarding is enabled on this network interface.
:param pulumi.Input[str] etag: A unique read-only string that changes whenever the resource is updated.
:param pulumi.Input[str] id: Resource ID.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['NetworkInterfaceIPConfigurationArgs']]]] ip_configurations: A list of IPConfigurations of the network interface.
:param pulumi.Input[str] location: Resource location.
:param pulumi.Input[str] mac_address: The MAC address of the network interface.
:param pulumi.Input[str] network_interface_name: The name of the network interface.
:param pulumi.Input[pulumi.InputType['NetworkSecurityGroupArgs']] network_security_group: The reference of the NetworkSecurityGroup resource.
:param pulumi.Input[bool] primary: Gets whether this is a primary network interface on a virtual machine.
:param pulumi.Input[str] provisioning_state: The provisioning state of the public IP resource. Possible values are: 'Updating', 'Deleting', and 'Failed'.
:param pulumi.Input[str] resource_group_name: The name of the resource group.
:param pulumi.Input[str] resource_guid: The resource GUID property of the network interface resource.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: Resource tags.
:param pulumi.Input[pulumi.InputType['SubResourceArgs']] virtual_machine: The reference of a virtual machine.
"""
if __name__ is not None:
warnings.warn("explicit use of __name__ is deprecated", DeprecationWarning)
resource_name = __name__
if __opts__ is not None:
warnings.warn("explicit use of __opts__ is deprecated, use 'opts' instead", DeprecationWarning)
opts = __opts__
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = dict()
__props__['dns_settings'] = dns_settings
__props__['enable_accelerated_networking'] = enable_accelerated_networking
__props__['enable_ip_forwarding'] = enable_ip_forwarding
__props__['etag'] = etag
__props__['id'] = id
__props__['ip_configurations'] = ip_configurations
__props__['location'] = location
__props__['mac_address'] = mac_address
__props__['network_interface_name'] = network_interface_name
__props__['network_security_group'] = network_security_group
__props__['primary'] = primary
__props__['provisioning_state'] = provisioning_state
if resource_group_name is None and not opts.urn:
raise TypeError("Missing required property 'resource_group_name'")
__props__['resource_group_name'] = resource_group_name
__props__['resource_guid'] = resource_guid
__props__['tags'] = tags
__props__['virtual_machine'] = virtual_machine
__props__['name'] = None
__props__['type'] = None
alias_opts = pulumi.ResourceOptions(aliases=[pulumi.Alias(type_="azure-nextgen:network:NetworkInterface"), pulumi.Alias(type_="azure-nextgen:network/latest:NetworkInterface"), pulumi.Alias(type_="azure-nextgen:network/v20150501preview:NetworkInterface"), pulumi.Alias(type_="azure-nextgen:network/v20150615:NetworkInterface"), pulumi.Alias(type_="azure-nextgen:network/v20160330:NetworkInterface"), pulumi.Alias(type_="azure-nextgen:network/v20160601:NetworkInterface"), pulumi.Alias(type_="azure-nextgen:network/v20160901:NetworkInterface"), pulumi.Alias(type_="azure-nextgen:network/v20161201:NetworkInterface"), pulumi.Alias(type_="azure-nextgen:network/v20170301:NetworkInterface"), pulumi.Alias(type_="azure-nextgen:network/v20170601:NetworkInterface"), pulumi.Alias(type_="azure-nextgen:network/v20170801:NetworkInterface"), pulumi.Alias(type_="azure-nextgen:network/v20170901:NetworkInterface"), pulumi.Alias(type_="azure-nextgen:network/v20171001:NetworkInterface"), pulumi.Alias(type_="azure-nextgen:network/v20171101:NetworkInterface"), pulumi.Alias(type_="azure-nextgen:network/v20180101:NetworkInterface"), pulumi.Alias(type_="azure-nextgen:network/v20180201:NetworkInterface"), pulumi.Alias(type_="azure-nextgen:network/v20180401:NetworkInterface"), pulumi.Alias(type_="azure-nextgen:network/v20180601:NetworkInterface"), pulumi.Alias(type_="azure-nextgen:network/v20180801:NetworkInterface"), pulumi.Alias(type_="azure-nextgen:network/v20181001:NetworkInterface"), pulumi.Alias(type_="azure-nextgen:network/v20181101:NetworkInterface"), pulumi.Alias(type_="azure-nextgen:network/v20181201:NetworkInterface"), pulumi.Alias(type_="azure-nextgen:network/v20190201:NetworkInterface"), pulumi.Alias(type_="azure-nextgen:network/v20190401:NetworkInterface"), pulumi.Alias(type_="azure-nextgen:network/v20190601:NetworkInterface"), pulumi.Alias(type_="azure-nextgen:network/v20190701:NetworkInterface"), pulumi.Alias(type_="azure-nextgen:network/v20190801:NetworkInterface"), pulumi.Alias(type_="azure-nextgen:network/v20190901:NetworkInterface"), pulumi.Alias(type_="azure-nextgen:network/v20191101:NetworkInterface"), pulumi.Alias(type_="azure-nextgen:network/v20191201:NetworkInterface"), pulumi.Alias(type_="azure-nextgen:network/v20200301:NetworkInterface"), pulumi.Alias(type_="azure-nextgen:network/v20200401:NetworkInterface"), pulumi.Alias(type_="azure-nextgen:network/v20200501:NetworkInterface"), pulumi.Alias(type_="azure-nextgen:network/v20200601:NetworkInterface"), pulumi.Alias(type_="azure-nextgen:network/v20200701:NetworkInterface"), pulumi.Alias(type_="azure-nextgen:network/v20200801:NetworkInterface")])
opts = pulumi.ResourceOptions.merge(opts, alias_opts)
super(NetworkInterface, __self__).__init__(
'azure-nextgen:network/v20180701:NetworkInterface',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None) -> 'NetworkInterface':
"""
Get an existing NetworkInterface resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = dict()
return NetworkInterface(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter(name="dnsSettings")
def dns_settings(self) -> pulumi.Output[Optional['outputs.NetworkInterfaceDnsSettingsResponse']]:
"""
The DNS settings in network interface.
"""
return pulumi.get(self, "dns_settings")
@property
@pulumi.getter(name="enableAcceleratedNetworking")
def enable_accelerated_networking(self) -> pulumi.Output[Optional[bool]]:
"""
If the network interface is accelerated networking enabled.
"""
return pulumi.get(self, "enable_accelerated_networking")
@property
@pulumi.getter(name="enableIPForwarding")
def enable_ip_forwarding(self) -> pulumi.Output[Optional[bool]]:
"""
Indicates whether IP forwarding is enabled on this network interface.
"""
return pulumi.get(self, "enable_ip_forwarding")
@property
@pulumi.getter
def etag(self) -> pulumi.Output[Optional[str]]:
"""
A unique read-only string that changes whenever the resource is updated.
"""
return pulumi.get(self, "etag")
@property
@pulumi.getter(name="ipConfigurations")
def ip_configurations(self) -> pulumi.Output[Optional[Sequence['outputs.NetworkInterfaceIPConfigurationResponse']]]:
"""
A list of IPConfigurations of the network interface.
"""
return pulumi.get(self, "ip_configurations")
@property
@pulumi.getter
def location(self) -> pulumi.Output[Optional[str]]:
"""
Resource location.
"""
return pulumi.get(self, "location")
@property
@pulumi.getter(name="macAddress")
def mac_address(self) -> pulumi.Output[Optional[str]]:
"""
The MAC address of the network interface.
"""
return pulumi.get(self, "mac_address")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
"""
Resource name.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="networkSecurityGroup")
def network_security_group(self) -> pulumi.Output[Optional['outputs.NetworkSecurityGroupResponse']]:
"""
The reference of the NetworkSecurityGroup resource.
"""
return pulumi.get(self, "network_security_group")
@property
@pulumi.getter
def primary(self) -> pulumi.Output[Optional[bool]]:
"""
Gets whether this is a primary network interface on a virtual machine.
"""
return pulumi.get(self, "primary")
@property
@pulumi.getter(name="provisioningState")
def provisioning_state(self) -> pulumi.Output[Optional[str]]:
"""
The provisioning state of the public IP resource. Possible values are: 'Updating', 'Deleting', and 'Failed'.
"""
return pulumi.get(self, "provisioning_state")
@property
@pulumi.getter(name="resourceGuid")
def resource_guid(self) -> pulumi.Output[Optional[str]]:
"""
The resource GUID property of the network interface resource.
"""
return pulumi.get(self, "resource_guid")
@property
@pulumi.getter
def tags(self) -> pulumi.Output[Optional[Mapping[str, str]]]:
"""
Resource tags.
"""
return pulumi.get(self, "tags")
@property
@pulumi.getter
def type(self) -> pulumi.Output[str]:
"""
Resource type.
"""
return pulumi.get(self, "type")
@property
@pulumi.getter(name="virtualMachine")
def virtual_machine(self) -> pulumi.Output[Optional['outputs.SubResourceResponse']]:
"""
The reference of a virtual machine.
"""
return pulumi.get(self, "virtual_machine")
def translate_output_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
def translate_input_property(self, prop):
return _tables.SNAKE_TO_CAMEL_CASE_TABLE.get(prop) or prop
| 54.286853
| 2,639
| 0.690445
|
3a31e8259e821dac3c003fb6f4d1178dc29b1349
| 28,226
|
py
|
Python
|
lib/python3.8/site-packages/ansible_collections/cisco/meraki/plugins/modules/meraki_mr_rf_profile.py
|
cjsteel/python3-venv-ansible-2.10.5
|
c95395c4cae844dc66fddde9b4343966f4b2ecd5
|
[
"Apache-1.1"
] | null | null | null |
lib/python3.8/site-packages/ansible_collections/cisco/meraki/plugins/modules/meraki_mr_rf_profile.py
|
cjsteel/python3-venv-ansible-2.10.5
|
c95395c4cae844dc66fddde9b4343966f4b2ecd5
|
[
"Apache-1.1"
] | null | null | null |
lib/python3.8/site-packages/ansible_collections/cisco/meraki/plugins/modules/meraki_mr_rf_profile.py
|
cjsteel/python3-venv-ansible-2.10.5
|
c95395c4cae844dc66fddde9b4343966f4b2ecd5
|
[
"Apache-1.1"
] | null | null | null |
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2018, Kevin Breit (@kbreit) <kevin.breit@kevinbreit.net>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {
'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'
}
DOCUMENTATION = r'''
---
module: meraki_mr_rf_profile
short_description: Manage RF profiles for Meraki wireless networks
description:
- Allows for configuration of radio frequency (RF) profiles in Meraki MR wireless networks.
options:
state:
description:
- Query, edit, or delete wireless RF profile settings.
type: str
choices: [ present, query, absent]
default: present
net_name:
description:
- Name of network.
type: str
net_id:
description:
- ID of network.
type: str
profile_id:
description:
- Unique identifier of existing RF profile.
type: str
aliases: [ id ]
band_selection_type:
description:
- Sets whether band selection is assigned per access point or SSID.
- This param is required on creation.
choices: [ ssid, ap ]
type: str
min_bitrate_type:
description:
- Type of minimum bitrate.
choices: [ band, ssid ]
type: str
name:
description:
- The unique name of the new profile.
- This param is required on creation.
type: str
client_balancing_enabled:
description:
- Steers client to best available access point.
type: bool
ap_band_settings:
description:
- Settings that will be enabled if selectionType is set to 'ap'.
type: dict
suboptions:
mode:
description:
- Sets which RF band the AP will support.
choices: [ 2.4ghz, 5ghz, dual ]
aliases: [ band_operation_mode ]
type: str
band_steering_enabled:
description:
- Steers client to most open band.
type: bool
five_ghz_settings:
description:
- Settings related to 5Ghz band.
type: dict
suboptions:
max_power:
description:
- Sets max power (dBm) of 5Ghz band.
- Can be integer between 8 and 30.
type: int
min_power:
description:
          - Sets minimum power (dBm) of 5Ghz band.
- Can be integer between 8 and 30.
type: int
min_bitrate:
description:
- Sets minimum bitrate (Mbps) of 5Ghz band.
choices: [ 6, 9, 12, 18, 24, 36, 48, 54 ]
type: int
rxsop:
description:
- The RX-SOP level controls the sensitivity of the radio.
- It is strongly recommended to use RX-SOP only after consulting a wireless expert.
- RX-SOP can be configured in the range of -65 to -95 (dBm).
type: int
channel_width:
description:
- Sets channel width (MHz) for 5Ghz band.
choices: [ auto, 20, 40, 80 ]
type: str
valid_auto_channels:
description:
- Sets valid auto channels for 5Ghz band.
type: list
elements: int
choices: [36,
40,
44,
48,
52,
56,
60,
64,
100,
104,
108,
112,
116,
120,
124,
128,
132,
136,
140,
144,
149,
153,
157,
161,
165]
two_four_ghz_settings:
description:
      - Settings related to 2.4Ghz band.
type: dict
suboptions:
max_power:
description:
- Sets max power (dBm) of 2.4Ghz band.
- Can be integer between 5 and 30.
type: int
min_power:
description:
          - Sets minimum power (dBm) of 2.4Ghz band.
- Can be integer between 5 and 30.
type: int
min_bitrate:
description:
- Sets minimum bitrate (Mbps) of 2.4Ghz band.
choices: [ 1, 2, 5.5, 6, 9, 11, 12, 18, 24, 36, 48, 54 ]
type: float
rxsop:
description:
- The RX-SOP level controls the sensitivity of the radio.
- It is strongly recommended to use RX-SOP only after consulting a wireless expert.
- RX-SOP can be configured in the range of -65 to -95 (dBm).
type: int
ax_enabled:
description:
- Determines whether ax radio on 2.4Ghz band is on or off.
type: bool
valid_auto_channels:
description:
- Sets valid auto channels for 2.4Ghz band.
choices: [ 1, 6, 11 ]
type: list
elements: int
author:
- Kevin Breit (@kbreit)
extends_documentation_fragment: cisco.meraki.meraki
'''
EXAMPLES = r'''
- name: Create RF profile in check mode
meraki_mr_rf_profile:
auth_key: abc123
org_name: YourOrg
net_name: YourNet
state: present
name: Test Profile
band_selection_type: ap
client_balancing_enabled: True
ap_band_settings:
mode: dual
band_steering_enabled: true
five_ghz_settings:
max_power: 10
min_bitrate: 12
min_power: 8
rxsop: -65
channel_width: 20
valid_auto_channels:
- 36
- 40
- 44
two_four_ghz_settings:
max_power: 10
min_bitrate: 12
min_power: 8
rxsop: -65
ax_enabled: false
valid_auto_channels:
- 1
delegate_to: localhost
- name: Query all RF profiles
meraki_mr_rf_profile:
auth_key: abc123
org_name: YourOrg
net_name: YourNet
state: query
delegate_to: localhost
- name: Query one RF profile by ID
meraki_mr_rf_profile:
auth_key: abc123
org_name: YourOrg
net_name: YourNet
state: query
profile_id: '{{ profile_id }}'
delegate_to: localhost
- name: Update profile
meraki_mr_rf_profile:
auth_key: abc123
org_name: YourOrg
net_name: YourNet
state: present
profile_id: 12345
band_selection_type: ap
client_balancing_enabled: True
ap_band_settings:
mode: dual
band_steering_enabled: true
five_ghz_settings:
max_power: 10
min_bitrate: 12
min_power: 8
rxsop: -65
channel_width: 20
valid_auto_channels:
- 36
- 44
two_four_ghz_settings:
max_power: 10
min_bitrate: 12
min_power: 8
rxsop: -75
ax_enabled: false
valid_auto_channels:
- 1
delegate_to: localhost
- name: Delete RF profile
meraki_mr_rf_profile:
auth_key: abc123
org_name: YourOrg
net_name: YourNet
state: absent
profile_id: 12345
delegate_to: localhost
'''
RETURN = r'''
data:
description: List of wireless RF profile settings.
returned: success
type: complex
contains:
id:
description:
- Unique identifier of existing RF profile.
type: str
returned: success
sample: 12345
band_selection_type:
description:
- Sets whether band selection is assigned per access point or SSID.
- This param is required on creation.
type: str
returned: success
sample: ap
min_bitrate_type:
description:
- Type of minimum bitrate.
type: str
returned: success
sample: ssid
name:
description:
- The unique name of the new profile.
- This param is required on creation.
type: str
returned: success
sample: Guest RF profile
client_balancing_enabled:
description:
- Steers client to best available access point.
type: bool
returned: success
sample: true
ap_band_settings:
description:
- Settings that will be enabled if selectionType is set to 'ap'.
type: complex
returned: success
contains:
mode:
description:
- Sets which RF band the AP will support.
type: str
returned: success
sample: dual
band_steering_enabled:
description:
- Steers client to most open band.
type: bool
returned: success
sample: true
five_ghz_settings:
description:
- Settings related to 5Ghz band.
type: complex
returned: success
contains:
max_power:
description:
- Sets max power (dBm) of 5Ghz band.
- Can be integer between 8 and 30.
type: int
returned: success
sample: 12
min_power:
description:
            - Sets minimum power (dBm) of 5Ghz band.
- Can be integer between 8 and 30.
type: int
returned: success
sample: 12
min_bitrate:
description:
- Sets minimum bitrate (Mbps) of 5Ghz band.
type: int
returned: success
sample: 6
rxsop:
description:
- The RX-SOP level controls the sensitivity of the radio.
type: int
returned: success
sample: -70
channel_width:
description:
- Sets channel width (MHz) for 5Ghz band.
type: str
returned: success
sample: auto
valid_auto_channels:
description:
- Sets valid auto channels for 5Ghz band.
type: list
returned: success
two_four_ghz_settings:
description:
        - Settings related to 2.4Ghz band.
type: complex
returned: success
contains:
max_power:
description:
- Sets max power (dBm) of 2.4Ghz band.
type: int
returned: success
sample: 12
min_power:
description:
            - Sets minimum power (dBm) of 2.4Ghz band.
type: int
returned: success
sample: 12
min_bitrate:
description:
- Sets minimum bitrate (Mbps) of 2.4Ghz band.
type: float
returned: success
sample: 5.5
rxsop:
description:
- The RX-SOP level controls the sensitivity of the radio.
type: int
returned: success
sample: -70
ax_enabled:
description:
- Determines whether ax radio on 2.4Ghz band is on or off.
type: bool
returned: success
sample: true
valid_auto_channels:
description:
- Sets valid auto channels for 2.4Ghz band.
type: list
returned: success
sample: 6
'''
from ansible.module_utils.basic import AnsibleModule, json
from ansible.module_utils.common.dict_transformations import snake_dict_to_camel_dict
from ansible_collections.cisco.meraki.plugins.module_utils.network.meraki.meraki import MerakiModule, meraki_argument_spec
from re import sub
def get_profile(meraki, profiles, name):
for profile in profiles:
if profile['name'] == name:
return profile
return None
def construct_payload(meraki):
payload = {}
if meraki.params['name'] is not None:
payload['name'] = meraki.params['name']
if meraki.params['band_selection_type'] is not None:
payload['bandSelectionType'] = meraki.params['band_selection_type']
if meraki.params['min_bitrate_type'] is not None:
payload['minBitrateType'] = meraki.params['min_bitrate_type']
if meraki.params['client_balancing_enabled'] is not None:
payload['clientBalancingEnabled'] = meraki.params['client_balancing_enabled']
if meraki.params['ap_band_settings'] is not None:
payload['apBandSettings'] = {}
if meraki.params['ap_band_settings']['mode'] is not None:
payload['apBandSettings']['bandOperationMode'] = meraki.params['ap_band_settings']['mode']
if meraki.params['ap_band_settings']['band_steering_enabled'] is not None:
payload['apBandSettings']['bandSteeringEnabled'] = meraki.params['ap_band_settings']['band_steering_enabled']
if meraki.params['five_ghz_settings'] is not None:
payload['fiveGhzSettings'] = {}
if meraki.params['five_ghz_settings']['max_power'] is not None:
payload['fiveGhzSettings']['maxPower'] = meraki.params['five_ghz_settings']['max_power']
if meraki.params['five_ghz_settings']['min_bitrate'] is not None:
payload['fiveGhzSettings']['minBitrate'] = meraki.params['five_ghz_settings']['min_bitrate']
if meraki.params['five_ghz_settings']['min_power'] is not None:
payload['fiveGhzSettings']['minPower'] = meraki.params['five_ghz_settings']['min_power']
if meraki.params['five_ghz_settings']['rxsop'] is not None:
payload['fiveGhzSettings']['rxsop'] = meraki.params['five_ghz_settings']['rxsop']
if meraki.params['five_ghz_settings']['channel_width'] is not None:
payload['fiveGhzSettings']['channelWidth'] = meraki.params['five_ghz_settings']['channel_width']
if meraki.params['five_ghz_settings']['valid_auto_channels'] is not None:
payload['fiveGhzSettings']['validAutoChannels'] = meraki.params['five_ghz_settings']['valid_auto_channels']
if meraki.params['two_four_ghz_settings'] is not None:
payload['twoFourGhzSettings'] = {}
if meraki.params['two_four_ghz_settings']['max_power'] is not None:
payload['twoFourGhzSettings']['maxPower'] = meraki.params['two_four_ghz_settings']['max_power']
if meraki.params['two_four_ghz_settings']['min_bitrate'] is not None:
payload['twoFourGhzSettings']['minBitrate'] = meraki.params['two_four_ghz_settings']['min_bitrate']
if meraki.params['two_four_ghz_settings']['min_power'] is not None:
payload['twoFourGhzSettings']['minPower'] = meraki.params['two_four_ghz_settings']['min_power']
if meraki.params['two_four_ghz_settings']['rxsop'] is not None:
payload['twoFourGhzSettings']['rxsop'] = meraki.params['two_four_ghz_settings']['rxsop']
if meraki.params['two_four_ghz_settings']['ax_enabled'] is not None:
payload['twoFourGhzSettings']['axEnabled'] = meraki.params['two_four_ghz_settings']['ax_enabled']
if meraki.params['two_four_ghz_settings']['valid_auto_channels'] is not None:
payload['twoFourGhzSettings']['validAutoChannels'] = meraki.params['two_four_ghz_settings']['valid_auto_channels']
return payload
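# For illustration: module params such as
#   name='Office RF', band_selection_type='ap',
#   five_ghz_settings={'max_power': 20}  (other sub-options default to None)
# are mapped by construct_payload() to the camelCase body the Meraki API
# expects, e.g. {'name': 'Office RF', 'bandSelectionType': 'ap',
# 'fiveGhzSettings': {'maxPower': 20}}; None-valued options are omitted.
# (The values here are hypothetical.)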
def main():
# define the available arguments/parameters that a user can pass to
# the module
band_arg_spec = dict(mode=dict(type='str', aliases=['band_operation_mode'], choices=['2.4ghz', '5ghz', 'dual']),
band_steering_enabled=dict(type='bool'),
)
five_arg_spec = dict(max_power=dict(type='int'),
min_bitrate=dict(type='int', choices=[6, 9, 12, 18, 24, 36, 48, 54]),
min_power=dict(type='int'),
rxsop=dict(type='int'),
channel_width=dict(type='str', choices=['auto', '20', '40', '80']),
valid_auto_channels=dict(type='list', elements='int', choices=[36,
40,
44,
48,
52,
56,
60,
64,
100,
104,
108,
112,
116,
120,
124,
128,
132,
136,
140,
144,
149,
153,
157,
161,
165]),
)
two_arg_spec = dict(max_power=dict(type='int'),
min_bitrate=dict(type='float', choices=[1,
2,
5.5,
6,
9,
11,
12,
18,
24,
36,
48,
54]),
min_power=dict(type='int'),
rxsop=dict(type='int'),
ax_enabled=dict(type='bool'),
valid_auto_channels=dict(type='list', elements='int', choices=[1, 6, 11]),
)
argument_spec = meraki_argument_spec()
argument_spec.update(state=dict(type='str', choices=['present', 'query', 'absent'], default='present'),
org_name=dict(type='str', aliases=['organization']),
org_id=dict(type='str'),
net_name=dict(type='str'),
net_id=dict(type='str'),
profile_id=dict(type='str', aliases=['id']),
band_selection_type=dict(type='str', choices=['ssid', 'ap']),
min_bitrate_type=dict(type='str', choices=['band', 'ssid']),
name=dict(type='str'),
client_balancing_enabled=dict(type='bool'),
ap_band_settings=dict(type='dict', options=band_arg_spec),
five_ghz_settings=dict(type='dict', options=five_arg_spec),
two_four_ghz_settings=dict(type='dict', options=two_arg_spec),
)
# the AnsibleModule object will be our abstraction working with Ansible
# this includes instantiation, a couple of common attr would be the
# args/params passed to the execution, as well as if the module
# supports check mode
module = AnsibleModule(argument_spec=argument_spec,
supports_check_mode=True,
)
meraki = MerakiModule(module, function='mr_rf_profile')
meraki.params['follow_redirects'] = 'all'
query_all_urls = {'mr_rf_profile': '/networks/{net_id}/wireless/rfProfiles'}
query_urls = {'mr_rf_profile': '/networks/{net_id}/wireless/rfProfiles/{profile_id}'}
create_urls = {'mr_rf_profile': '/networks/{net_id}/wireless/rfProfiles'}
update_urls = {'mr_rf_profile': '/networks/{net_id}/wireless/rfProfiles/{profile_id}'}
delete_urls = {'mr_rf_profile': '/networks/{net_id}/wireless/rfProfiles/{profile_id}'}
meraki.url_catalog['get_all'].update(query_all_urls)
meraki.url_catalog['get_one'].update(query_urls)
meraki.url_catalog['create'] = create_urls
meraki.url_catalog['update'] = update_urls
meraki.url_catalog['delete'] = delete_urls
if meraki.params['five_ghz_settings'] is not None:
if meraki.params['five_ghz_settings']['max_power'] is not None:
if meraki.params['five_ghz_settings']['max_power'] < 8 or meraki.params['five_ghz_settings']['max_power'] > 30:
meraki.fail_json(msg="5ghz max power must be between 8 and 30.")
if meraki.params['five_ghz_settings']['min_power'] is not None:
if meraki.params['five_ghz_settings']['min_power'] < 8 or meraki.params['five_ghz_settings']['min_power'] > 30:
meraki.fail_json(msg="5ghz min power must be between 8 and 30.")
if meraki.params['five_ghz_settings']['rxsop'] is not None:
if meraki.params['five_ghz_settings']['rxsop'] < -95 or meraki.params['five_ghz_settings']['rxsop'] > -65:
meraki.fail_json(msg="5ghz min power must be between 8 and 30.")
if meraki.params['two_four_ghz_settings'] is not None:
if meraki.params['two_four_ghz_settings']['max_power'] is not None:
if meraki.params['two_four_ghz_settings']['max_power'] < 5 or meraki.params['two_four_ghz_settings']['max_power'] > 30:
meraki.fail_json(msg="5ghz max power must be between 5 and 30.")
if meraki.params['two_four_ghz_settings']['min_power'] is not None:
if meraki.params['two_four_ghz_settings']['min_power'] < 5 or meraki.params['two_four_ghz_settings']['min_power'] > 30:
meraki.fail_json(msg="5ghz min power must be between 5 and 30.")
if meraki.params['two_four_ghz_settings']['rxsop'] is not None:
if meraki.params['two_four_ghz_settings']['rxsop'] < -95 or meraki.params['two_four_ghz_settings']['rxsop'] > -65:
meraki.fail_json(msg="5ghz min power must be between 8 and 30.")
org_id = meraki.params['org_id']
net_id = meraki.params['net_id']
profile_id = meraki.params['profile_id']
profile = None
profiles = None
if org_id is None:
org_id = meraki.get_org_id(meraki.params['org_name'])
if net_id is None:
nets = meraki.get_nets(org_id=org_id)
net_id = meraki.get_net_id(org_id, meraki.params['net_name'], data=nets)
if profile_id is None:
path = meraki.construct_path('get_all', net_id=net_id)
profiles = meraki.request(path, method='GET')
profile = get_profile(meraki, profiles, meraki.params['name'])
if meraki.params['state'] == 'query':
if profile_id is not None:
path = meraki.construct_path('get_one', net_id=net_id, custom={'profile_id': profile_id})
result = meraki.request(path, method='GET')
meraki.result['data'] = result
meraki.exit_json(**meraki.result)
if profiles is None:
path = meraki.construct_path('get_all', net_id=net_id)
profiles = meraki.request(path, method='GET')
meraki.result['data'] = profiles
meraki.exit_json(**meraki.result)
elif meraki.params['state'] == 'present':
payload = construct_payload(meraki)
if profile_id is None: # Create a new RF profile
if meraki.check_mode is True:
meraki.result['data'] = payload
meraki.result['changed'] = True
meraki.exit_json(**meraki.result)
path = meraki.construct_path('create', net_id=net_id)
response = meraki.request(path, method='POST', payload=json.dumps(payload))
meraki.result['data'] = response
meraki.result['changed'] = True
meraki.exit_json(**meraki.result)
else:
path = meraki.construct_path('get_one', net_id=net_id, custom={'profile_id': profile_id})
original = meraki.request(path, method='GET')
if meraki.is_update_required(original, payload) is True:
if meraki.check_mode is True:
meraki.result['data'] = payload
meraki.result['changed'] = True
meraki.exit_json(**meraki.result)
path = meraki.construct_path('update', net_id=net_id, custom={'profile_id': profile_id})
response = meraki.request(path, method='PUT', payload=json.dumps(payload))
meraki.result['data'] = response
meraki.result['changed'] = True
meraki.exit_json(**meraki.result)
else:
meraki.result['data'] = original
meraki.exit_json(**meraki.result)
elif meraki.params['state'] == 'absent':
if meraki.check_mode is True:
meraki.result['data'] = {}
meraki.result['changed'] = True
meraki.exit_json(**meraki.result)
path = meraki.construct_path('delete', net_id=net_id, custom={'profile_id': profile_id})
response = meraki.request(path, method='DELETE')
meraki.result['data'] = {}
meraki.result['changed'] = True
meraki.exit_json(**meraki.result)
# in the event of a successful module execution, you will want to
# simple AnsibleModule.exit_json(), passing the key/value results
meraki.exit_json(**meraki.result)
if __name__ == '__main__':
main()
| 42.509036
| 131
| 0.501346
|
4495b6fc53b4473f1c062d55368705173714b041
| 17,198
|
py
|
Python
|
src/azul/service/responseobjects/cart_item_manager.py
|
VIIgit/azul
|
bb61965f625c667979a2f255f6bc39dcafaaf40b
|
[
"Apache-2.0"
] | null | null | null |
src/azul/service/responseobjects/cart_item_manager.py
|
VIIgit/azul
|
bb61965f625c667979a2f255f6bc39dcafaaf40b
|
[
"Apache-2.0"
] | null | null | null |
src/azul/service/responseobjects/cart_item_manager.py
|
VIIgit/azul
|
bb61965f625c667979a2f255f6bc39dcafaaf40b
|
[
"Apache-2.0"
] | null | null | null |
import base64
import hashlib
import json
import logging
import uuid
from azul import config
from azul.es import ESClientFactory
from azul.service.responseobjects.dynamo_data_access import DynamoDataAccessor
from azul.service.responseobjects.elastic_request_builder import ElasticTransformDump
from azul.service.step_function_helper import StepFunctionHelper
from azul.service.user_service import UserService, UpdateError
logger = logging.getLogger(__name__)
class CartItemManager:
"""
Helper functions to handle read/write/update of carts and cart items
"""
step_function_helper = StepFunctionHelper()
def __init__(self):
self.dynamo_accessor = DynamoDataAccessor()
self.user_service = UserService()
@staticmethod
def encode_params(params):
return base64.urlsafe_b64encode(bytes(json.dumps(params), encoding='utf-8')).decode('utf-8')
@staticmethod
def decode_token(token):
return json.loads(base64.urlsafe_b64decode(token).decode('utf-8'))
@staticmethod
def convert_resume_token_to_exclusive_start_key(resume_token:str):
if resume_token is None:
return None
return json.loads(base64.b64decode(resume_token).decode('utf-8'))
@staticmethod
def convert_last_evaluated_key_to_resume_token(last_evaluated_key):
if last_evaluated_key is None:
return None
return base64.b64encode(json.dumps(last_evaluated_key).encode('utf-8')).decode('utf-8')
def create_cart(self, user_id:str, cart_name:str, default:bool) -> str:
"""
Add a cart to the cart table and return the ID of the created cart
An error will be raised if the user already has a cart of the same name or
if a default cart is being created while one already exists.
"""
query_dict = {'UserId': user_id, 'CartName': cart_name}
if self.dynamo_accessor.count(table_name=config.dynamo_cart_table_name,
key_conditions=query_dict,
index_name='UserCartNameIndex') > 0:
raise DuplicateItemError(f'Cart `{cart_name}` already exists')
cart_id = str(uuid.uuid4())
if default:
try:
self.user_service.update(user_id, default_cart_id=cart_id)
except UpdateError:
                # The DynamoDB client doesn't differentiate errors caused by
                # failing the key condition ("Key") from those caused by the
                # condition expression ("ConditionExpression"), so ensure the
                # user object exists and then attempt the update again.
self.user_service.get_or_create(user_id)
try:
self.user_service.update(user_id, default_cart_id=cart_id)
except UpdateError:
# At this point, the user already has a default cart.
return self.get_default_cart(user_id)['CartId']
self.dynamo_accessor.insert_item(config.dynamo_cart_table_name,
item={'CartId': cart_id, **query_dict})
return cart_id
def get_cart(self, user_id, cart_id):
cart = self.dynamo_accessor.get_item(config.dynamo_cart_table_name,
keys={'UserId': user_id, 'CartId': cart_id})
if cart is None:
raise ResourceAccessError('Cart does not exist')
return cart
def get_default_cart(self, user_id):
user = self.user_service.get_or_create(user_id)
if user['DefaultCartId'] is None:
raise ResourceAccessError('Cart does not exist')
cart = self.dynamo_accessor.get_item(config.dynamo_cart_table_name,
keys={'UserId': user_id, 'CartId': user['DefaultCartId']})
if cart is None:
raise ResourceAccessError('Cart does not exist')
return cart
def get_or_create_default_cart(self, user_id):
user = self.user_service.get_or_create(user_id)
cart_id = user['DefaultCartId'] or self.create_cart(user_id, 'Default Cart', default=True)
return self.dynamo_accessor.get_item(config.dynamo_cart_table_name,
keys={'UserId': user_id, 'CartId': cart_id})
def get_user_carts(self, user_id):
return list(self.dynamo_accessor.query(table_name=config.dynamo_cart_table_name,
key_conditions={'UserId': user_id},
index_name='UserIndex'))
def delete_cart(self, user_id, cart_id):
default_cart_id = self.user_service.get_or_create(user_id)['DefaultCartId']
if default_cart_id == cart_id:
self.user_service.update(user_id, default_cart_id=None)
self.dynamo_accessor.delete_by_key(config.dynamo_cart_item_table_name,
{'CartId': cart_id})
return self.dynamo_accessor.delete_item(config.dynamo_cart_table_name,
{'UserId': user_id, 'CartId': cart_id})
def update_cart(self, user_id, cart_id, update_attributes, validate_attributes=True):
"""
Update the attributes of a cart and return the updated item
Only accepted attributes will be updated and any others will be ignored
"""
if cart_id is None:
cart = self.get_or_create_default_cart(user_id)
else:
cart = self.get_cart(user_id, cart_id)
real_cart_id = cart['CartId']
if validate_attributes:
accepted_attributes = {'CartName', 'Description'}
for key in list(update_attributes.keys()):
if key not in accepted_attributes:
del update_attributes[key]
if 'CartName' in update_attributes.keys():
matching_carts = list(self.dynamo_accessor.query(table_name=config.dynamo_cart_table_name,
key_conditions={
'UserId': user_id,
'CartName': update_attributes['CartName']
},
index_name='UserCartNameIndex'))
# There cannot be more than one matching cart because of the index's keys
if len(matching_carts) > 0 and matching_carts[0]['CartId'] != real_cart_id:
raise DuplicateItemError(f'Cart `{update_attributes["CartName"]}` already exists')
return self.dynamo_accessor.update_item(config.dynamo_cart_table_name,
{'UserId': user_id, 'CartId': real_cart_id},
update_values=update_attributes)
def create_cart_item_id(self, cart_id, entity_id, entity_type, bundle_uuid, bundle_version):
return hashlib.sha256(f'{cart_id}/{entity_id}/{bundle_uuid}/{bundle_version}/{entity_type}'.encode('utf-8')).hexdigest()
def add_cart_item(self, user_id, cart_id, entity_id, entity_type, entity_version):
"""
Add an item to a cart and return the created item ID
An error will be raised if the cart does not exist or does not belong to the user
"""
# TODO: Cart item should have some user readable name
if cart_id is None:
cart = self.get_or_create_default_cart(user_id)
else:
cart = self.get_cart(user_id, cart_id)
real_cart_id = cart['CartId']
if not entity_version:
# When entity_version is not given, this method will check the data integrity and retrieve the version.
entity = ESClientFactory.get().get(index=config.es_index_name(entity_type, True),
id=entity_id,
_source=True,
_source_include=['contents.files.uuid', # data file UUID
'contents.files.version', # data file version
'contents.projects.document_id', # metadata file UUID
'contents.samples.document_id', # metadata file UUID
]
)['_source']
normalized_entity = self.extract_entity_info(entity_type, entity)
entity_version = normalized_entity['version']
new_item = self.transform_entity_to_cart_item(real_cart_id, entity_type, entity_id, entity_version)
self.dynamo_accessor.insert_item(config.dynamo_cart_item_table_name, new_item)
return new_item['CartItemId']
@staticmethod
def extract_entity_info(entity_type:str, entity):
normalized_entity = dict(uuid=None, version=None)
content = entity['contents'][entity_type][0]
if entity_type == 'files':
normalized_entity.update(dict(uuid=content['uuid'],
version=content['version']))
elif entity_type in ('samples', 'projects'):
normalized_entity['uuid'] = content['document_id']
else:
raise ValueError('entity_type must be one of files, samples, or projects')
return normalized_entity
@staticmethod
def transform_entity_to_cart_item(cart_id:str, entity_type:str, entity_id:str, entity_version:str):
return {
'CartItemId': f'{entity_id}:{entity_version or ""}', # Range Key
'CartId': cart_id, # Hash Key
'EntityId': entity_id,
'EntityVersion': entity_version,
'EntityType': entity_type
}
def get_cart_items(self, user_id, cart_id):
"""
Get all items in a cart
An error will be raised if the cart does not exist or does not belong to the user
"""
if cart_id is None:
cart = self.get_or_create_default_cart(user_id)
else:
cart = self.get_cart(user_id, cart_id)
real_cart_id = cart['CartId']
return list(self.dynamo_accessor.query(table_name=config.dynamo_cart_item_table_name,
key_conditions={'CartId': real_cart_id}))
def get_cart_item_count(self, user_id, cart_id):
if cart_id is None:
cart = self.get_or_create_default_cart(user_id)
else:
cart = self.get_cart(user_id, cart_id)
real_cart_id = cart['CartId']
return self.dynamo_accessor.count(table_name=config.dynamo_cart_item_table_name,
key_conditions={'CartId': real_cart_id},
select=['EntityType'])
def get_paginable_cart_items(self, user_id, cart_id, page_size:int=20, exclusive_start_key=None, resume_token=None):
"""
Get cart items (with pagination).
:param user_id: User ID
:param cart_id: Cart ID (UUID)
:param page_size: Requested Query Limit
:param exclusive_start_key: the exclusive start key (like an offset in
MySQL), recommended for in-code operations
:param resume_token: the base64-encoded string of exclusive_start_key
recommended for using with external clients
        :return: A dictionary of search results with ``items`` (cart
                 items), ``last_evaluated_key`` (the last evaluated key,
                 null if it is the last page), ``resume_token`` (the
                 base64-encoded string of ``last_evaluated_key``) and
                 ``page_length`` (the number of items in this page)
        The ``page_length`` attribute is provided because DynamoDB may
        return fewer items than requested due to the 1 MB limit per
        query. See https://docs.aws.amazon.com/amazondynamodb/latest/developerguide/Limits.html.
        ``exclusive_start_key`` and ``resume_token`` must not be defined
        at the same time; otherwise, the method will throw ``ValueError``.
        """
if exclusive_start_key and resume_token:
            raise ValueError('exclusive_start_key and resume_token must not be defined at the same time.')
if resume_token is not None:
exclusive_start_key = self.convert_resume_token_to_exclusive_start_key(resume_token)
if cart_id is None:
cart = self.get_or_create_default_cart(user_id)
else:
cart = self.get_cart(user_id, cart_id)
real_cart_id = cart['CartId']
page_query = dict(
table_name=config.dynamo_cart_item_table_name,
key_conditions={'CartId': real_cart_id},
exclusive_start_key=exclusive_start_key,
select=['CartItemId',
'EntityId',
'EntityVersion',
'EntityType'],
limit=page_size
)
page = next(self.dynamo_accessor.make_query(**page_query))
        items = list(page.items)
last_evaluated_key = page.last_evaluated_key
return dict(items=items,
last_evaluated_key=last_evaluated_key,
resume_token=self.convert_last_evaluated_key_to_resume_token(last_evaluated_key),
page_length=len(items))
def delete_cart_item(self, user_id, cart_id, item_id):
"""
Delete an item from a cart and return the deleted item if it exists, None otherwise
An error will be raised if the cart does not exist or does not belong to the user
"""
if cart_id is None:
cart = self.get_or_create_default_cart(user_id)
else:
cart = self.get_cart(user_id, cart_id)
real_cart_id = cart['CartId']
return self.dynamo_accessor.delete_item(config.dynamo_cart_item_table_name,
keys={'CartId': real_cart_id, 'CartItemId': item_id})
def transform_hit_to_cart_item(self, hit, entity_type, cart_id):
"""
Transform a hit from ES to the schema for the cart item table
"""
entity = self.extract_entity_info(entity_type, hit)
return self.transform_entity_to_cart_item(cart_id, entity_type, entity['uuid'], entity['version'])
def start_batch_cart_item_write(self, user_id, cart_id, entity_type, filters, item_count, batch_size):
"""
Trigger the job that will write the cart items and return a token to be used to check the job status
"""
if cart_id is None:
cart = self.get_or_create_default_cart(user_id)
else:
cart = self.get_cart(user_id, cart_id)
real_cart_id = cart['CartId']
execution_id = str(uuid.uuid4())
execution_input = {
'filters': filters,
'entity_type': entity_type,
'cart_id': real_cart_id,
'item_count': item_count,
'batch_size': batch_size
}
self.step_function_helper.start_execution(config.cart_item_state_machine_name,
execution_name=execution_id,
execution_input=execution_input)
return self.encode_params({'execution_id': execution_id})
def get_batch_cart_item_write_status(self, token):
params = self.decode_token(token)
execution_id = params['execution_id']
return self.step_function_helper.describe_execution(config.cart_item_state_machine_name, execution_id)['status']
def write_cart_item_batch(self, entity_type, filters, cart_id, batch_size, search_after):
"""
Query ES for one page of items matching the entity type and filters and return
the number of items written and the search_after for the next page
"""
es_td = ElasticTransformDump()
hits, next_search_after = es_td.transform_cart_item_request(entity_type=entity_type,
filters=filters,
search_after=search_after,
size=batch_size)
self.dynamo_accessor.batch_write(config.dynamo_cart_item_table_name,
[self.transform_hit_to_cart_item(hit, entity_type, cart_id) for hit in hits])
return len(hits), next_search_after
class ResourceAccessError(Exception):
def __init__(self, msg):
self.msg = msg
class DuplicateItemError(Exception):
def __init__(self, msg):
self.msg = msg
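# A minimal pagination sketch over get_paginable_cart_items(), assuming a
# configured environment and an existing user/cart (identifiers below are
# hypothetical; this loop is not part of the service code):
#
#   manager = CartItemManager()
#   token = None
#   while True:
#       page = manager.get_paginable_cart_items('user-1', 'cart-1',
#                                               page_size=100,
#                                               resume_token=token)
#       for item in page['items']:
#           ...  # process one cart item
#       token = page['resume_token']
#       if token is None:  # last page reached
#           break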
| 49.705202
| 128
| 0.603152
|
fb58a82b8623838488aabb614315603fe0024ba4
| 1,661
|
py
|
Python
|
simpledecorators/Async.py
|
shaddyx/simpleDecorators
|
4d4b042c956a3b6d11a03937e44ce89c4d7fc4ea
|
[
"MIT"
] | 1
|
2016-10-15T19:03:03.000Z
|
2016-10-15T19:03:03.000Z
|
simpledecorators/Async.py
|
shaddyx/simpleDecorators
|
4d4b042c956a3b6d11a03937e44ce89c4d7fc4ea
|
[
"MIT"
] | null | null | null |
simpledecorators/Async.py
|
shaddyx/simpleDecorators
|
4d4b042c956a3b6d11a03937e44ce89c4d7fc4ea
|
[
"MIT"
] | null | null | null |
from functools import wraps
from threading import Thread
class AsyncFuture(object):
complete=False
working=False
error=None
result=None
def Async(executor=None):
"""
@type executor: simpledecorators.ThreadPool
@rtype: AsyncFuture
"""
    def asyncDecorator(func):
        @wraps(func)
        def wrapped(*args, **kwargs):
            future = AsyncFuture()

            def threadWrapper():
                future.working = True
                try:
                    future.result = func(*args, **kwargs)
                    future.complete = True
                except Exception as e:
                    # store the exception itself so callers can inspect it,
                    # not just a truthy flag
                    future.error = e
                finally:
                    future.working = False
if not executor:
thread = Thread(target=threadWrapper)
thread.daemon = True
thread.start()
else:
executor.add_task(threadWrapper)
return future
return wrapped
return asyncDecorator
if __name__ == "__main__":
from time import sleep
from ThreadPool import *
try:
xrange
except NameError:
xrange = range
class TestClass():
@Async()
def testDecorated(self):
print (345)
testClass = TestClass()
testClass.testDecorated()
@Async(executor=ThreadPool(5))
def func(a, b):
print ("func called")
sleep(1)
print ("func exit:" + str(a))
@Async()
def funcWithoutExecutor(a):
print (a)
for x in xrange(1, 10):
funcWithoutExecutor("noExecutor:" + str(x))
for x in xrange(1, 15):
func(x, 2)
| 25.553846
| 55
| 0.539434
|
0ca36eb996eb76aac9ad93040d269f55631a4c65
| 2,301
|
py
|
Python
|
tests/common/markers.py
|
ravi-mosaicml/ravi-composer
|
d100053198524672f628c3959a8c4e51a9302e2d
|
[
"Apache-2.0"
] | 1
|
2021-11-09T22:58:46.000Z
|
2021-11-09T22:58:46.000Z
|
tests/common/markers.py
|
ravi-mosaicml/ravi-composer
|
d100053198524672f628c3959a8c4e51a9302e2d
|
[
"Apache-2.0"
] | null | null | null |
tests/common/markers.py
|
ravi-mosaicml/ravi-composer
|
d100053198524672f628c3959a8c4e51a9302e2d
|
[
"Apache-2.0"
] | null | null | null |
"""Pytest marker helpers."""
from typing import Callable
import pytest
from composer.core import Precision
def device(*args, precision=False):
"""Decorator for device and optionally precision.
Input choices are ('cpu', 'gpu'), or if precision=True,
also accept ('gpu-amp', 'gpu-fp32', and 'cpu-fp32').
Returns the parameter "device", or if precision=True,
also returns the parameter "precision".
"""
# convert cpu-fp32 and gpu-fp32 to cpu, gpu
    if not precision and any('-' in arg for arg in args):
raise ValueError('-fp32 and -amp tags must be removed if precision=False')
args = [arg.replace('-fp32', '') for arg in args]
if precision:
devices = {
'cpu': pytest.param('cpu', Precision.FP32, id="cpu-fp32"),
'gpu': pytest.param('gpu', Precision.FP32, id="gpu-fp32", marks=pytest.mark.gpu),
'gpu-amp': pytest.param('gpu', Precision.AMP, id='gpu-amp', marks=pytest.mark.gpu)
}
name = "device,precision"
else:
devices = {
'cpu': pytest.param('cpu', id="cpu"),
'gpu': pytest.param('gpu', id="gpu", marks=pytest.mark.gpu),
}
name = "device"
parameters = [devices[arg] for arg in args]
def decorator(test):
if not parameters:
return test
return pytest.mark.parametrize(name, parameters)(test)
return decorator
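# Example usage, mirroring the docstring above (test bodies elided):
#
#   @device('cpu', 'gpu')
#   def test_foo(device: str):
#       ...
#
#   @device('cpu', 'gpu-amp', 'gpu-fp32', precision=True)
#   def test_bar(device: str, precision: Precision):
#       ...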
def world_size(*world_sizes: int, param_name: str = "world_size"):
"""Decorator to mark tests with a given world size. This helper automatically sets the `pytest.mark.world_size`
marker.
Args:
world_sizes (int): The world sizes.
param_name (str, optional): The parameter name for the `world_size` parameter. Defaults to ``'world_size'``.
Example:
>>> @world_size(1, 2)
def test_something(world_size: int):
...
"""
parameters = []
for world_size in world_sizes:
if world_size == 1:
parameters.append(pytest.param(1))
else:
            parameters.append(pytest.param(world_size, marks=pytest.mark.world_size(world_size)))
def decorator(test: Callable):
if len(parameters) == 0:
return test
return pytest.mark.parametrize(param_name, parameters)(test)
return decorator
| 30.276316
| 116
| 0.617123
|
80a7973dfc29efa502748396e20a92b6a34bc74b
| 12,215
|
py
|
Python
|
plotSpeed.py
|
dib-lab/2020-paper-mqf-benchmarks
|
29245836d142b4912c120f3e3899042e972e959c
|
[
"BSD-3-Clause"
] | 1
|
2020-07-15T20:27:53.000Z
|
2020-07-15T20:27:53.000Z
|
plotSpeed.py
|
dib-lab/2020-paper-mqf-benchmarks
|
29245836d142b4912c120f3e3899042e972e959c
|
[
"BSD-3-Clause"
] | null | null | null |
plotSpeed.py
|
dib-lab/2020-paper-mqf-benchmarks
|
29245836d142b4912c120f3e3899042e972e959c
|
[
"BSD-3-Clause"
] | 1
|
2021-03-22T01:09:08.000Z
|
2021-03-22T01:09:08.000Z
|
import matplotlib.pyplot as plt
import numpy as np
from matplotlib.patches import Patch
import sys
inputFile = open(sys.argv[1]).readlines()
opType = sys.argv[2]
inputFile = [list(map(float, x.split("\t"))) for x in inputFile]
def prepare(arr2):
arr=arr2.copy()
bottoms=[]
for l in arr:
bottoms.append([0]*len(l))
for i in range(len(arr[0])):
tmp=sorted(range(len(arr)),key=lambda x:arr[x][i])
for k in range(1,4):
for j in range(k):
arr[tmp[k]][i]-=arr[tmp[j]][i]
bottoms[tmp[k]][i]+=arr[tmp[j]][i]
return (arr,bottoms)
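# Worked example for prepare(): given one column whose four series values
# are 4, 1, 3, 2, the rows are ranked ascending (1, 2, 3, 4) and each bar
# keeps only its increment over the bars drawn beneath it:
#   heights -> 1, 1, 1, 1   bottoms -> 0, 1, 2, 3
# so every stacked bar still tops out at its original value and shorter
# bars are never hidden behind taller ones.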
mqf_res=prepare([inputFile[0],inputFile[1],inputFile[2],inputFile[3]])
# mqf_fpr_0_01 = np.array(inputFile[0])
# mqf_fpr_0_001 = np.array(inputFile[1])
# mqf_fpr_0_0001 = np.array(inputFile[2])
# mqf_fpr_0_001-=mqf_fpr_0_0001
# mqf_fpr_0_01-=mqf_fpr_0_001+mqf_fpr_0_0001
cqf_res=prepare([inputFile[4],inputFile[5],inputFile[6],inputFile[7]])
# cqf_fpr_0_01 = np.array(inputFile[3])
# cqf_fpr_0_001 = np.array(inputFile[4])
# cqf_fpr_0_0001 = np.array(inputFile[5])
# cqf_fpr_0_001-=cqf_fpr_0_0001
# cqf_fpr_0_01-=cqf_fpr_0_001+cqf_fpr_0_0001
bmqf_res=prepare([inputFile[8],inputFile[9],inputFile[10],inputFile[11]])
# bmqf_fpr_0_01 = np.array(inputFile[6])
# bmqf_fpr_0_001 = np.array(inputFile[7])
# bmqf_fpr_0_0001 = np.array(inputFile[8])
# bmqf_fpr_0_001-=bmqf_fpr_0_0001
# bmqf_fpr_0_01-=bmqf_fpr_0_001+bmqf_fpr_0_0001
# there is a problem here because bmqf_fpr_0_01 > bmqf_fpr_0_001
CountminKhmer_res=prepare([inputFile[12],inputFile[13],inputFile[14],inputFile[15]])
# CountminKhmer_fpr_0_01 = np.array(inputFile[9])
# CountminKhmer_fpr_0_001 = np.array(inputFile[10])
# CountminKhmer_fpr_0_0001 = np.array(inputFile[11])
# CountminKhmer_fpr_0_001-=CountminKhmer_fpr_0_0001
# CountminKhmer_fpr_0_01-=CountminKhmer_fpr_0_001+CountminKhmer_fpr_0_0001
Countmin_res=prepare([inputFile[16],inputFile[17],inputFile[18],inputFile[19]])
# Countmin_fpr_0_01 = np.array(inputFile[12])
# Countmin_fpr_0_001 = np.array(inputFile[13])
# Countmin_fpr_0_0001 = np.array(inputFile[14])
# Countmin_fpr_0_001-=Countmin_fpr_0_0001
# Countmin_fpr_0_01-=Countmin_fpr_0_001+Countmin_fpr_0_0001
distributions = ['Zipfian Z=2', 'Zipfian Z=3', 'Zipfian Z=5','Real Kmers']
fig, ax = plt.subplots()
bar_width = 0.35
epsilon = .035
line_width = 1
opacity = 1
mqf_bar_positions = np.arange(len(mqf_res[0][0]))*2.5
cqf_bar_positions = mqf_bar_positions + bar_width
bmqf_bar_positions = mqf_bar_positions + 2*bar_width
CountminKhmer_bar_positions = mqf_bar_positions + 3*bar_width
Countmin_bar_positions = mqf_bar_positions + 4*bar_width
mqfColor='#d73027'
cqfColor='#fc8d59'
bmqfColor='#fee090'
CountminKhmerColor='#91bfdb'
CountminColor='#4575b4'
# make bar plots
mqf_fpr_0_0001_bar = plt.bar(mqf_bar_positions, mqf_res[0][3], bar_width-epsilon,
color=mqfColor,
edgecolor=mqfColor,
linewidth=line_width,
bottom=mqf_res[1][3],
label='MQF FPR 0.0001')
mqf_fpr_0_001_bar = plt.bar(mqf_bar_positions, mqf_res[0][2], bar_width-epsilon,
bottom=mqf_res[1][2],
alpha=opacity,
color='white',
edgecolor=mqfColor,
linewidth=line_width,
hatch='//',
label='MQF FPR 0.001')
mqf_fpr_0_01_bar = plt.bar(mqf_bar_positions, mqf_res[0][1], bar_width-epsilon,
bottom=mqf_res[1][1],
alpha=opacity,
color='white',
edgecolor=mqfColor,
linewidth=line_width,
hatch='0',
label='MQF FPR 0.01')
mqf_fpr_0_1_bar = plt.bar(mqf_bar_positions, mqf_res[0][0], bar_width-epsilon,
bottom=mqf_res[1][0],
alpha=opacity,
color='white',
edgecolor=mqfColor,
linewidth=line_width,
hatch='.',
label='MQF FPR 0.1')
cqf_fpr_0_0001_bar = plt.bar(cqf_bar_positions, cqf_res[0][3], bar_width- epsilon,
color=cqfColor,
bottom=cqf_res[1][3],
linewidth=line_width,
edgecolor=cqfColor,
ecolor="#0000DD",
label='CQF FPR 0.0001')
cqf_fpr_0_001_bar = plt.bar(cqf_bar_positions, cqf_res[0][2], bar_width-epsilon,
bottom=cqf_res[1][2],
color="white",
hatch='//',
edgecolor=cqfColor,
ecolor="#0000DD",
linewidth=line_width,
label='CQF FPR 0.001')
cqf_fpr_0_01_bar = plt.bar(cqf_bar_positions, cqf_res[0][1], bar_width-epsilon,
bottom=cqf_res[1][1],
color="white",
hatch='0',
edgecolor=cqfColor,
linewidth=line_width,
label='CQF FPR 0.01')
cqf_fpr_0_1_bar = plt.bar(cqf_bar_positions, cqf_res[0][0], bar_width-epsilon,
bottom=cqf_res[1][0],
color="white",
hatch='.',
edgecolor=cqfColor,
linewidth=line_width,
label='CQF FPR 0.1')
CountminKhmer_fpr_0_0001_bar = plt.bar(CountminKhmer_bar_positions, CountminKhmer_res[0][3], bar_width- epsilon,
color=CountminKhmerColor,
bottom=CountminKhmer_res[1][3],
edgecolor=CountminKhmerColor,
linewidth=line_width,
label='CMS Khmer FPR 0.0001')
CountminKhmer_fpr_0_001_bar = plt.bar(CountminKhmer_bar_positions, CountminKhmer_res[0][2], bar_width-epsilon,
bottom=CountminKhmer_res[1][2],
alpha=opacity,
color='white',
edgecolor=CountminKhmerColor,
linewidth=line_width,
hatch='//',
label='CMS Khmer FPR 0.001')
CountminKhmer_fpr_0_01_bar = plt.bar(CountminKhmer_bar_positions, CountminKhmer_res[0][1], bar_width-epsilon,
bottom=CountminKhmer_res[1][1],
alpha=opacity,
color='white',
edgecolor=CountminKhmerColor,
linewidth=line_width,
hatch='0',
label='CMS Khmer FPR 0.01')
CountminKhmer_fpr_0_1_bar = plt.bar(CountminKhmer_bar_positions, CountminKhmer_res[0][0], bar_width-epsilon,
bottom=CountminKhmer_res[1][0],
alpha=opacity,
color='white',
edgecolor=CountminKhmerColor,
linewidth=line_width,
hatch='.',
label='CMS Khmer FPR 0.1')
bmqf_fpr_0_0001_bar = plt.bar(bmqf_bar_positions, bmqf_res[0][3], bar_width- epsilon,
bottom=bmqf_res[1][3],
color=bmqfColor,
edgecolor=bmqfColor,
linewidth=line_width,
label='Buffered MQF FPR 0.0001')
bmqf_fpr_0_001_bar = plt.bar(bmqf_bar_positions, bmqf_res[0][2], bar_width-epsilon,
bottom=bmqf_res[1][2],
alpha=opacity,
color='white',
edgecolor=bmqfColor,
linewidth=line_width,
hatch='//',
label='Buffered MQF FPR 0.001')
bmqf_fpr_0_01_bar = plt.bar(bmqf_bar_positions, bmqf_res[0][1], bar_width-epsilon,
bottom=bmqf_res[1][1],
alpha=opacity,
color='white',
edgecolor=bmqfColor,
linewidth=line_width,
hatch='0',
label='Buffered MQF FPR 0.01')
bmqf_fpr_0_1_bar = plt.bar(bmqf_bar_positions, bmqf_res[0][0], bar_width-epsilon,
bottom=bmqf_res[1][0],
alpha=opacity,
color='white',
edgecolor=bmqfColor,
linewidth=line_width,
hatch='.',
label='Buffered MQF FPR 0.1')
Countmin_fpr_0_0001_bar = plt.bar(Countmin_bar_positions, Countmin_res[0][3], bar_width- epsilon,
color=CountminColor,
bottom=Countmin_res[1][3],
edgecolor=CountminColor,
linewidth=line_width,
label='CMS FPR 0.0001')
Countmin_fpr_0_001_bar = plt.bar(Countmin_bar_positions, Countmin_res[0][2], bar_width-epsilon,
bottom=Countmin_res[1][2],
alpha=opacity,
color='white',
edgecolor=CountminColor,
linewidth=line_width,
hatch='//',
label='CMS FPR 0.001')
Countmin_fpr_0_01_bar = plt.bar(Countmin_bar_positions, Countmin_res[0][1], bar_width-epsilon,
bottom=Countmin_res[1][1],
alpha=opacity,
color='white',
edgecolor=CountminColor,
linewidth=line_width,
hatch='0',
label='CMS FPR 0.01')
Countmin_fpr_0_1_bar = plt.bar(Countmin_bar_positions, Countmin_res[0][0], bar_width-epsilon,
bottom=Countmin_res[1][0],
alpha=opacity,
color='white',
edgecolor=CountminColor,
linewidth=line_width,
hatch='.',
label='CMS FPR 0.1')
plt.xticks(bmqf_bar_positions, distributions, rotation=45)
plt.ylabel('Millions of %s Per Second' % opType)
legend_elements = [
Patch(facecolor=mqfColor,label='MQF',linewidth=0.5,edgecolor='black'),
Patch(facecolor=cqfColor,label='CQF',linewidth=0.5,edgecolor='black'),
Patch(facecolor=bmqfColor,label='Buffered MQF',linewidth=0.5,edgecolor='black'),
Patch(facecolor=CountminKhmerColor,label='CMS Khmer',linewidth=0.5,edgecolor='black'),
Patch(facecolor=CountminColor,label='CMS',linewidth=0.5,edgecolor='black')
]
fpr_leged=[Patch(facecolor="black",label='0.0001',linewidth=0.5,edgecolor='black'),
Patch(facecolor="white",label='0.001',hatch='//',linewidth=0.5,edgecolor='black'),
Patch(facecolor="white",label='0.01',hatch='0',linewidth=0.5,edgecolor='black'),
Patch(facecolor="white",label='0.1',hatch='.',linewidth=0.5,edgecolor='black')
]
#l1=plt.legend(handles=legend_elements, bbox_to_anchor=(1.19, 0.95),
# fancybox=True,title='Data Structures')
#l2=plt.legend(handles=fpr_leged, bbox_to_anchor=(1.171, 0.650),
# fancybox=True,title='False Positive Rates')
l1=plt.legend(handles=legend_elements, bbox_to_anchor=(1., 0.95),
fancybox=True,title='Data Structures')
l2=plt.legend(handles=fpr_leged, bbox_to_anchor=(1., 0.450),
fancybox=True,title='False Positive Rates')
ax.add_artist(l1)
ax.add_artist(l2)
# plt.legend(loc='best')
#ax.legend()
# sns.despine()
#plt.show()
fig.set_size_inches(5.5, 3.5)
fig.savefig(opType+'.png',bbox_inches='tight', dpi=fig.dpi)
| 43.625
| 112
| 0.541056
|
6a12090c051c51e6f1d51469a92f046ffec58ad8
| 1,694
|
py
|
Python
|
nicos_mlz/biodiff/setups/special/watchdog.py
|
jkrueger1/nicos
|
5f4ce66c312dedd78995f9d91e8a6e3c891b262b
|
[
"CC-BY-3.0",
"Apache-2.0",
"CC-BY-4.0"
] | null | null | null |
nicos_mlz/biodiff/setups/special/watchdog.py
|
jkrueger1/nicos
|
5f4ce66c312dedd78995f9d91e8a6e3c891b262b
|
[
"CC-BY-3.0",
"Apache-2.0",
"CC-BY-4.0"
] | null | null | null |
nicos_mlz/biodiff/setups/special/watchdog.py
|
jkrueger1/nicos
|
5f4ce66c312dedd78995f9d91e8a6e3c891b262b
|
[
"CC-BY-3.0",
"Apache-2.0",
"CC-BY-4.0"
] | null | null | null |
description = 'setup for the NICOS watchdog'
group = 'special'
# watch_conditions:
# The entries in this list are dictionaries. Possible keys:
#
# 'setup' -- setup that must be loaded (default '' to mean all setups)
# 'condition' -- condition for warning (a Python expression where cache keys
#               can be used: t_value stands for t/value etc.)
# 'gracetime' -- time in sec allowed for the condition to be true without
# emitting a warning (default 5 sec)
# 'message' -- warning message to display
# 'type' -- for defining different types of warnings; this corresponds to the
# configured notifiers (default 'default')
# type '' does not emit warnings (useful together with scriptaction)
# 'scriptaction' -- 'pausecount' to pause the count loop on the condition
# or 'stop' or 'immediatestop' to cancel script execution
# (default '')
# 'action' -- code to execute if condition is true (default no code is executed)
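# An illustrative (commented-out) entry that also uses 'gracetime',
# 'scriptaction' and 'action'; the cache key and device names below are
# placeholders, not part of this instrument setup:
#    dict(condition = 't_value > 300',
#         gracetime = 600,
#         message = 'Temperature above 300 K for more than 10 minutes',
#         scriptaction = 'pausecount',
#         action = 'maw(T, 290)',
#        ),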
watch_conditions = [
dict(condition = '(sixfold_value == \'closed\' or nl1_value == \'closed\') '
'and reactorpower_value > 19.1',
message = 'NL1 or sixfold shutter closed',
type = 'critical',
),
dict(condition = 'selector_speed_status[0] == ERROR',
message = 'Selector in error status; check Windows software!',
type = 'critical',
),
]
includes = ['notifiers']
notifiers = {
'default': ['email'],
'critical': ['email', 'smser'],
}
devices = dict(
Watchdog = device('nicos.services.watchdog.Watchdog',
cache = 'phys.biodiff.frm2:14869',
notifiers = notifiers,
mailreceiverkey = 'email/receivers',
watch = watch_conditions,
),
)
| 36.042553
| 80
| 0.654664
|
e9bad51532d88d14b28c85f70b8a687f26586af2
| 478
|
py
|
Python
|
top/api/rest/WdtExtStatRefundQueryAogouRequest.py
|
SAMZONG/taobao-sdk-python3
|
202a9df2085229838541713bd24433a90d07c7fc
|
[
"MIT"
] | null | null | null |
top/api/rest/WdtExtStatRefundQueryAogouRequest.py
|
SAMZONG/taobao-sdk-python3
|
202a9df2085229838541713bd24433a90d07c7fc
|
[
"MIT"
] | null | null | null |
top/api/rest/WdtExtStatRefundQueryAogouRequest.py
|
SAMZONG/taobao-sdk-python3
|
202a9df2085229838541713bd24433a90d07c7fc
|
[
"MIT"
] | null | null | null |
'''
Created by auto_sdk on 2021.06.25
'''
from top.api.base import RestApi
class WdtExtStatRefundQueryAogouRequest(RestApi):
def __init__(self,domain='gw.api.taobao.com',port=80):
RestApi.__init__(self,domain, port)
self.is_retail = None
self.page_no = None
self.page_size = None
self.shop_no = None
self.sid = None
self.stockin_date = None
self.warehouse_no = None
def getapiname(self):
return 'hu3cgwt0tc.wdt.ext.stat.refund.query.aogou'
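# A minimal usage sketch, untested, assuming the stock top-sdk helpers
# (top.appinfo, RestApi.set_app_info, RestApi.getResponse); appkey,
# secret and session are placeholders the caller must supply:
#
#   import top
#   req = WdtExtStatRefundQueryAogouRequest()
#   req.set_app_info(top.appinfo(appkey, secret))
#   req.page_no = 1
#   req.page_size = 50
#   resp = req.getResponse(session)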
| 26.555556
| 56
| 0.717573
|
c71d8e0f2949b23baf91343a66943791f37adb19
| 5,669
|
py
|
Python
|
pysnmp-with-texts/ChrComPmAtmATM-VC-Day-MIB.py
|
agustinhenze/mibs.snmplabs.com
|
1fc5c07860542b89212f4c8ab807057d9a9206c7
|
[
"Apache-2.0"
] | 8
|
2019-05-09T17:04:00.000Z
|
2021-06-09T06:50:51.000Z
|
pysnmp-with-texts/ChrComPmAtmATM-VC-Day-MIB.py
|
agustinhenze/mibs.snmplabs.com
|
1fc5c07860542b89212f4c8ab807057d9a9206c7
|
[
"Apache-2.0"
] | 4
|
2019-05-31T16:42:59.000Z
|
2020-01-31T21:57:17.000Z
|
pysnmp-with-texts/ChrComPmAtmATM-VC-Day-MIB.py
|
agustinhenze/mibs.snmplabs.com
|
1fc5c07860542b89212f4c8ab807057d9a9206c7
|
[
"Apache-2.0"
] | 10
|
2019-04-30T05:51:36.000Z
|
2022-02-16T03:33:41.000Z
|
#
# PySNMP MIB module ChrComPmAtmATM-VC-Day-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/ChrComPmAtmATM-VC-Day-MIB
# Produced by pysmi-0.3.4 at Wed May 1 12:35:11 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
OctetString, ObjectIdentifier, Integer = mibBuilder.importSymbols("ASN1", "OctetString", "ObjectIdentifier", "Integer")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ValueSizeConstraint, ConstraintsUnion, ConstraintsIntersection, SingleValueConstraint, ValueRangeConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ValueSizeConstraint", "ConstraintsUnion", "ConstraintsIntersection", "SingleValueConstraint", "ValueRangeConstraint")
atmVclVpi, atmVclVci = mibBuilder.importSymbols("ATM-MIB", "atmVclVpi", "atmVclVci")
chrComIfifIndex, = mibBuilder.importSymbols("ChrComIfifTable-MIB", "chrComIfifIndex")
TruthValue, = mibBuilder.importSymbols("ChrTyp-MIB", "TruthValue")
chrComPmAtm, = mibBuilder.importSymbols("Chromatis-MIB", "chrComPmAtm")
NotificationGroup, ModuleCompliance = mibBuilder.importSymbols("SNMPv2-CONF", "NotificationGroup", "ModuleCompliance")
Counter32, Integer32, MibIdentifier, Bits, Counter64, Gauge32, ModuleIdentity, Unsigned32, ObjectIdentity, MibScalar, MibTable, MibTableRow, MibTableColumn, IpAddress, TimeTicks, NotificationType, iso = mibBuilder.importSymbols("SNMPv2-SMI", "Counter32", "Integer32", "MibIdentifier", "Bits", "Counter64", "Gauge32", "ModuleIdentity", "Unsigned32", "ObjectIdentity", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "IpAddress", "TimeTicks", "NotificationType", "iso")
TextualConvention, DisplayString = mibBuilder.importSymbols("SNMPv2-TC", "TextualConvention", "DisplayString")
chrComPmAtmATM_VC_DayTable = MibTable((1, 3, 6, 1, 4, 1, 3695, 1, 10, 4, 9), ).setLabel("chrComPmAtmATM-VC-DayTable")
if mibBuilder.loadTexts: chrComPmAtmATM_VC_DayTable.setStatus('current')
if mibBuilder.loadTexts: chrComPmAtmATM_VC_DayTable.setDescription('')
chrComPmAtmATM_VC_DayEntry = MibTableRow((1, 3, 6, 1, 4, 1, 3695, 1, 10, 4, 9, 1), ).setLabel("chrComPmAtmATM-VC-DayEntry").setIndexNames((0, "ChrComIfifTable-MIB", "chrComIfifIndex"), (0, "ATM-MIB", "atmVclVpi"), (0, "ATM-MIB", "atmVclVci"), (0, "ChrComPmAtmATM-VC-Day-MIB", "chrComPmAtmDayNumber"))
if mibBuilder.loadTexts: chrComPmAtmATM_VC_DayEntry.setStatus('current')
if mibBuilder.loadTexts: chrComPmAtmATM_VC_DayEntry.setDescription('')
chrComPmAtmDayNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 3695, 1, 10, 4, 9, 1, 1), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 2))).setMaxAccess("readonly")
if mibBuilder.loadTexts: chrComPmAtmDayNumber.setStatus('current')
if mibBuilder.loadTexts: chrComPmAtmDayNumber.setDescription('')
chrComPmAtmSuspectedInterval = MibTableColumn((1, 3, 6, 1, 4, 1, 3695, 1, 10, 4, 9, 1, 2), TruthValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: chrComPmAtmSuspectedInterval.setStatus('current')
if mibBuilder.loadTexts: chrComPmAtmSuspectedInterval.setDescription('')
chrComPmAtmElapsedTime = MibTableColumn((1, 3, 6, 1, 4, 1, 3695, 1, 10, 4, 9, 1, 3), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess("readonly")
if mibBuilder.loadTexts: chrComPmAtmElapsedTime.setStatus('current')
if mibBuilder.loadTexts: chrComPmAtmElapsedTime.setDescription('')
chrComPmAtmSuppressedIntrvls = MibTableColumn((1, 3, 6, 1, 4, 1, 3695, 1, 10, 4, 9, 1, 4), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess("readonly")
if mibBuilder.loadTexts: chrComPmAtmSuppressedIntrvls.setStatus('current')
if mibBuilder.loadTexts: chrComPmAtmSuppressedIntrvls.setDescription('')
chrComPmAtmReceivedCells = MibTableColumn((1, 3, 6, 1, 4, 1, 3695, 1, 10, 4, 9, 1, 5), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess("readonly")
if mibBuilder.loadTexts: chrComPmAtmReceivedCells.setStatus('current')
if mibBuilder.loadTexts: chrComPmAtmReceivedCells.setDescription('')
chrComPmAtmTransmittedCells = MibTableColumn((1, 3, 6, 1, 4, 1, 3695, 1, 10, 4, 9, 1, 6), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess("readonly")
if mibBuilder.loadTexts: chrComPmAtmTransmittedCells.setStatus('current')
if mibBuilder.loadTexts: chrComPmAtmTransmittedCells.setDescription('')
chrComPmAtmThresholdProfIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 3695, 1, 10, 4, 9, 1, 7), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: chrComPmAtmThresholdProfIndex.setStatus('current')
if mibBuilder.loadTexts: chrComPmAtmThresholdProfIndex.setDescription('')
chrComPmAtmResetPmCountersAction = MibTableColumn((1, 3, 6, 1, 4, 1, 3695, 1, 10, 4, 9, 1, 8), TruthValue()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: chrComPmAtmResetPmCountersAction.setStatus('current')
if mibBuilder.loadTexts: chrComPmAtmResetPmCountersAction.setDescription('')
mibBuilder.exportSymbols("ChrComPmAtmATM-VC-Day-MIB", chrComPmAtmReceivedCells=chrComPmAtmReceivedCells, chrComPmAtmThresholdProfIndex=chrComPmAtmThresholdProfIndex, chrComPmAtmTransmittedCells=chrComPmAtmTransmittedCells, chrComPmAtmSuppressedIntrvls=chrComPmAtmSuppressedIntrvls, chrComPmAtmATM_VC_DayTable=chrComPmAtmATM_VC_DayTable, chrComPmAtmElapsedTime=chrComPmAtmElapsedTime, chrComPmAtmATM_VC_DayEntry=chrComPmAtmATM_VC_DayEntry, chrComPmAtmResetPmCountersAction=chrComPmAtmResetPmCountersAction, chrComPmAtmDayNumber=chrComPmAtmDayNumber, chrComPmAtmSuspectedInterval=chrComPmAtmSuspectedInterval)
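# A minimal loading sketch, assuming this generated module is placed on a
# pysnmp MIB search path (the directory below is a placeholder):
#
#   from pysnmp.smi import builder
#   mibBuilder = builder.MibBuilder()
#   mibBuilder.addMibSources(builder.DirMibSource('/path/to/compiled/mibs'))
#   mibBuilder.loadModules('ChrComPmAtmATM-VC-Day-MIB')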
| 115.693878
| 607
| 0.800141
|
d1cbd5d5b30706fdbd9b7a1f536043887efb0d54
| 1,333
|
py
|
Python
|
test/snow/engine/common/test_subnet.py
|
jgeofil/avax-python
|
b09e78e3d7e1c35db5ae42e3918e960e775f2d45
|
[
"MIT"
] | 25
|
2021-05-16T23:43:47.000Z
|
2022-03-29T03:08:30.000Z
|
test/snow/engine/common/test_subnet.py
|
zefonseca/ava-python
|
9c72af7c720edfab9c73379a102cf6a11d864ebd
|
[
"MIT"
] | 2
|
2021-04-26T11:43:22.000Z
|
2021-06-04T07:55:22.000Z
|
test/snow/engine/common/test_subnet.py
|
jgeofil/avax-python
|
b09e78e3d7e1c35db5ae42e3918e960e775f2d45
|
[
"MIT"
] | 4
|
2021-08-06T10:55:58.000Z
|
2022-03-29T08:03:05.000Z
|
# avax-python : Python tools for the exploration of the Avalanche AVAX network.
#
# Documentation at https://crypto.bi
"""
Copyright © 2021 ojrdev
Support this Open Source project!
Donate to X-avax1qr6yzjykcjmeflztsgv6y88dl0xnlel3chs3r4
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the “Software”), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
# --#--#--
from avaxpython.snow.engine.common import subnet
| 60.590909
| 463
| 0.793698
|
9c279e5049de1d10c432ee6ee3cd0cfd436fe6f5
| 17,333
|
py
|
Python
|
src/memory_saving_gradients.py
|
gonzzza007/Russian-gpt-2
|
22bc186d6320b315cd0066bd21bff9c5c9457c77
|
[
"MIT"
] | null | null | null |
src/memory_saving_gradients.py
|
gonzzza007/Russian-gpt-2
|
22bc186d6320b315cd0066bd21bff9c5c9457c77
|
[
"MIT"
] | null | null | null |
src/memory_saving_gradients.py
|
gonzzza007/Russian-gpt-2
|
22bc186d6320b315cd0066bd21bff9c5c9457c77
|
[
"MIT"
] | null | null | null |
from toposort import toposort
import contextlib
import numpy as np
import tensorflow as tf
import tensorflow.contrib.graph_editor as ge
import time
import sys
sys.setrecursionlimit(10000)
# refers back to current module if we decide to split helpers out
util = sys.modules[__name__]
# getting rid of "WARNING:tensorflow:VARIABLES collection name is deprecated"
setattr(tf.compat.v1.GraphKeys, "VARIABLES", "variables")
# save original gradients since tf.gradient could be monkey-patched to point
# to our version
from tensorflow.python.ops import gradients as tf_gradients_lib
tf_gradients = tf_gradients_lib.gradients
MIN_CHECKPOINT_NODE_SIZE=1024 # use lower value during testing
# specific versions we can use to do process-wide replacement of tf.gradients
def gradients_speed(ys, xs, grad_ys=None, **kwargs):
return gradients(ys, xs, grad_ys, checkpoints='speed', **kwargs)
def gradients_memory(ys, xs, grad_ys=None, **kwargs):
return gradients(ys, xs, grad_ys, checkpoints='memory', **kwargs)
def gradients_collection(ys, xs, grad_ys=None, **kwargs):
return gradients(ys, xs, grad_ys, checkpoints='collection', **kwargs)
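# A minimal usage sketch (kept commented so importing this module stays
# side-effect free); monkey-patching tf.gradients is the usual way to
# enable these versions, and build_model() is a placeholder for your own
# graph construction:
#
#   import tensorflow as tf
#   import memory_saving_gradients
#   tf.__dict__["gradients"] = memory_saving_gradients.gradients_memory
#   loss = build_model()
#   grads = tf.gradients(loss, tf.compat.v1.trainable_variables())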
def gradients(ys, xs, grad_ys=None, checkpoints='collection', **kwargs):
'''
Authors: Tim Salimans & Yaroslav Bulatov
memory efficient gradient implementation inspired by "Training Deep Nets with Sublinear Memory Cost"
by Chen et al. 2016 (https://arxiv.org/abs/1604.06174)
ys,xs,grad_ys,kwargs are the arguments to standard tensorflow tf.gradients
(https://www.tensorflow.org/versions/r0.12/api_docs/python/train.html#gradients)
'checkpoints' can either be
- a list consisting of tensors from the forward pass of the neural net
that we should re-use when calculating the gradients in the backward pass
all other tensors that do not appear in this list will be re-computed
- a string specifying how this list should be determined. currently we support
- 'speed': checkpoint all outputs of convolutions and matmuls. these ops are usually the most expensive,
so checkpointing them maximizes the running speed
(this is a good option if nonlinearities, concats, batchnorms, etc are taking up a lot of memory)
- 'memory': try to minimize the memory usage
(currently using a very simple strategy that identifies a number of bottleneck tensors in the graph to checkpoint)
- 'collection': look for a tensorflow collection named 'checkpoints', which holds the tensors to checkpoint
'''
# print("Calling memsaving gradients with", checkpoints)
if not isinstance(ys,list):
ys = [ys]
if not isinstance(xs,list):
xs = [xs]
bwd_ops = ge.get_backward_walk_ops([y.op for y in ys],
inclusive=True)
debug_print("bwd_ops: %s", bwd_ops)
# forward ops are all ops that are candidates for recomputation
fwd_ops = ge.get_forward_walk_ops([x.op for x in xs],
inclusive=True,
within_ops=bwd_ops)
debug_print("fwd_ops: %s", fwd_ops)
# exclude ops with no inputs
fwd_ops = [op for op in fwd_ops if op.inputs]
# don't recompute xs, remove variables
xs_ops = _to_ops(xs)
fwd_ops = [op for op in fwd_ops if not op in xs_ops]
fwd_ops = [op for op in fwd_ops if not '/assign' in op.name]
fwd_ops = [op for op in fwd_ops if not '/Assign' in op.name]
fwd_ops = [op for op in fwd_ops if not '/read' in op.name]
ts_all = ge.filter_ts(fwd_ops, True) # get the tensors
ts_all = [t for t in ts_all if '/read' not in t.name]
ts_all = set(ts_all) - set(xs) - set(ys)
# construct list of tensors to checkpoint during forward pass, if not
# given as input
if type(checkpoints) is not list:
if checkpoints == 'collection':
checkpoints = tf.compat.v1.get_collection('checkpoints')
elif checkpoints == 'speed':
# checkpoint all expensive ops to maximize running speed
checkpoints = ge.filter_ts_from_regex(fwd_ops, 'conv2d|Conv|MatMul')
elif checkpoints == 'memory':
# remove very small tensors and some weird ops
def fixdims(t): # tf.Dimension values are not compatible with int, convert manually
try:
return [int(e if e.value is not None else 64) for e in t]
except:
return [0] # unknown shape
ts_all = [t for t in ts_all if np.prod(fixdims(t.shape)) > MIN_CHECKPOINT_NODE_SIZE]
ts_all = [t for t in ts_all if 'L2Loss' not in t.name]
ts_all = [t for t in ts_all if 'entropy' not in t.name]
ts_all = [t for t in ts_all if 'FusedBatchNorm' not in t.name]
ts_all = [t for t in ts_all if 'Switch' not in t.name]
ts_all = [t for t in ts_all if 'dropout' not in t.name]
# DV: FP16_FIX - need to add 'Cast' layer here to make it work for FP16
ts_all = [t for t in ts_all if 'Cast' not in t.name]
# filter out all tensors that are inputs of the backward graph
with util.capture_ops() as bwd_ops:
tf_gradients(ys, xs, grad_ys, **kwargs)
bwd_inputs = [t for op in bwd_ops for t in op.inputs]
# list of tensors in forward graph that is in input to bwd graph
ts_filtered = list(set(bwd_inputs).intersection(ts_all))
debug_print("Using tensors %s", ts_filtered)
# try two slightly different ways of getting bottlenecks tensors
# to checkpoint
for ts in [ts_filtered, ts_all]:
# get all bottlenecks in the graph
bottleneck_ts = []
for t in ts:
b = set(ge.get_backward_walk_ops(t.op, inclusive=True, within_ops=fwd_ops))
f = set(ge.get_forward_walk_ops(t.op, inclusive=False, within_ops=fwd_ops))
# check that there are not shortcuts
b_inp = set([inp for op in b for inp in op.inputs]).intersection(ts_all)
f_inp = set([inp for op in f for inp in op.inputs]).intersection(ts_all)
if not set(b_inp).intersection(f_inp) and len(b_inp)+len(f_inp) >= len(ts_all):
bottleneck_ts.append(t) # we have a bottleneck!
else:
debug_print("Rejected bottleneck candidate and ops %s", [t] + list(set(ts_all) - set(b_inp) - set(f_inp)))
# success? or try again without filtering?
if len(bottleneck_ts) >= np.sqrt(len(ts_filtered)): # yes, enough bottlenecks found!
break
if not bottleneck_ts:
raise Exception('unable to find bottleneck tensors! please provide checkpoint nodes manually, or use checkpoints="speed".')
# sort the bottlenecks
bottlenecks_sorted_lists = tf_toposort(bottleneck_ts, within_ops=fwd_ops)
sorted_bottlenecks = [t for ts in bottlenecks_sorted_lists for t in ts]
# save an approximately optimal number ~ sqrt(N)
N = len(ts_filtered)
if len(bottleneck_ts) <= np.ceil(np.sqrt(N)):
checkpoints = sorted_bottlenecks
else:
step = int(np.ceil(len(bottleneck_ts) / np.sqrt(N)))
checkpoints = sorted_bottlenecks[step::step]
else:
raise Exception('%s is unsupported input for "checkpoints"' % (checkpoints,))
checkpoints = list(set(checkpoints).intersection(ts_all))
# at this point automatic selection happened and checkpoints is list of nodes
assert isinstance(checkpoints, list)
debug_print("Checkpoint nodes used: %s", checkpoints)
# better error handling of special cases
# xs are already handled as checkpoint nodes, so no need to include them
xs_intersect_checkpoints = set(xs).intersection(set(checkpoints))
if xs_intersect_checkpoints:
debug_print("Warning, some input nodes are also checkpoint nodes: %s",
xs_intersect_checkpoints)
ys_intersect_checkpoints = set(ys).intersection(set(checkpoints))
debug_print("ys: %s, checkpoints: %s, intersect: %s", ys, checkpoints,
ys_intersect_checkpoints)
# saving an output node (ys) gives no benefit in memory while creating
# new edge cases, exclude them
if ys_intersect_checkpoints:
debug_print("Warning, some output nodes are also checkpoints nodes: %s",
format_ops(ys_intersect_checkpoints))
# remove initial and terminal nodes from checkpoints list if present
checkpoints = list(set(checkpoints) - set(ys) - set(xs))
# check that we have some nodes to checkpoint
# if not checkpoints:
# raise Exception('no checkpoints nodes found or given as input! ')
# disconnect dependencies between checkpointed tensors
checkpoints_disconnected = {}
for x in checkpoints:
if x.op and x.op.name is not None:
grad_node = tf.stop_gradient(x, name=x.op.name+"_sg")
else:
grad_node = tf.stop_gradient(x)
checkpoints_disconnected[x] = grad_node
# partial derivatives to the checkpointed tensors and xs
ops_to_copy = fast_backward_ops(seed_ops=[y.op for y in ys],
stop_at_ts=checkpoints, within_ops=fwd_ops)
debug_print("Found %s ops to copy within fwd_ops %s, seed %s, stop_at %s",
len(ops_to_copy), fwd_ops, [r.op for r in ys], checkpoints)
debug_print("ops_to_copy = %s", ops_to_copy)
debug_print("Processing list %s", ys)
copied_sgv, info = ge.copy_with_input_replacements(ge.sgv(ops_to_copy), {})
for origin_op, op in info._transformed_ops.items():
op._set_device(origin_op.node_def.device)
copied_ops = info._transformed_ops.values()
debug_print("Copied %s to %s", ops_to_copy, copied_ops)
ge.reroute_ts(checkpoints_disconnected.values(), checkpoints_disconnected.keys(), can_modify=copied_ops)
debug_print("Rewired %s in place of %s restricted to %s",
checkpoints_disconnected.values(), checkpoints_disconnected.keys(), copied_ops)
# get gradients with respect to current boundary + original x's
copied_ys = [info._transformed_ops[y.op]._outputs[0] for y in ys]
boundary = list(checkpoints_disconnected.values())
dv = tf_gradients(ys=copied_ys, xs=boundary+xs, grad_ys=grad_ys, **kwargs)
debug_print("Got gradients %s", dv)
debug_print("for %s", copied_ys)
debug_print("with respect to %s", boundary+xs)
inputs_to_do_before = [y.op for y in ys]
if grad_ys is not None:
inputs_to_do_before += grad_ys
wait_to_do_ops = list(copied_ops) + [g.op for g in dv if g is not None]
my_add_control_inputs(wait_to_do_ops, inputs_to_do_before)
# partial derivatives to the checkpointed nodes
# dictionary of "node: backprop" for nodes in the boundary
d_checkpoints = {r: dr for r,dr in zip(checkpoints_disconnected.keys(),
dv[:len(checkpoints_disconnected)])}
# partial derivatives to xs (usually the params of the neural net)
d_xs = dv[len(checkpoints_disconnected):]
# incorporate derivatives flowing through the checkpointed nodes
checkpoints_sorted_lists = tf_toposort(checkpoints, within_ops=fwd_ops)
for ts in checkpoints_sorted_lists[::-1]:
debug_print("Processing list %s", ts)
checkpoints_other = [r for r in checkpoints if r not in ts]
checkpoints_disconnected_other = [checkpoints_disconnected[r] for r in checkpoints_other]
# copy part of the graph below current checkpoint node, stopping at
# other checkpoints nodes
ops_to_copy = fast_backward_ops(within_ops=fwd_ops, seed_ops=[r.op for r in ts], stop_at_ts=checkpoints_other)
debug_print("Found %s ops to copy within %s, seed %s, stop_at %s",
len(ops_to_copy), fwd_ops, [r.op for r in ts],
checkpoints_other)
debug_print("ops_to_copy = %s", ops_to_copy)
if not ops_to_copy: # we're done!
break
copied_sgv, info = ge.copy_with_input_replacements(ge.sgv(ops_to_copy), {})
for origin_op, op in info._transformed_ops.items():
op._set_device(origin_op.node_def.device)
copied_ops = info._transformed_ops.values()
debug_print("Copied %s to %s", ops_to_copy, copied_ops)
ge.reroute_ts(checkpoints_disconnected_other, checkpoints_other, can_modify=copied_ops)
debug_print("Rewired %s in place of %s restricted to %s",
checkpoints_disconnected_other, checkpoints_other, copied_ops)
# gradient flowing through the checkpointed node
boundary = [info._transformed_ops[r.op]._outputs[0] for r in ts]
substitute_backprops = [d_checkpoints[r] for r in ts]
dv = tf_gradients(boundary,
checkpoints_disconnected_other+xs,
grad_ys=substitute_backprops, **kwargs)
debug_print("Got gradients %s", dv)
debug_print("for %s", boundary)
debug_print("with respect to %s", checkpoints_disconnected_other+xs)
debug_print("with boundary backprop substitutions %s", substitute_backprops)
inputs_to_do_before = [d_checkpoints[r].op for r in ts]
wait_to_do_ops = list(copied_ops) + [g.op for g in dv if g is not None]
my_add_control_inputs(wait_to_do_ops, inputs_to_do_before)
# partial derivatives to the checkpointed nodes
for r, dr in zip(checkpoints_other, dv[:len(checkpoints_other)]):
if dr is not None:
if d_checkpoints[r] is None:
d_checkpoints[r] = dr
else:
d_checkpoints[r] += dr
def _unsparsify(x):
if not isinstance(x, tf.IndexedSlices):
return x
assert x.dense_shape is not None, "memory_saving_gradients encountered sparse gradients of unknown shape"
indices = x.indices
while indices.shape.ndims < x.values.shape.ndims:
indices = tf.expand_dims(indices, -1)
return tf.scatter_nd(indices, x.values, x.dense_shape)
# partial derivatives to xs (usually the params of the neural net)
d_xs_new = dv[len(checkpoints_other):]
for j in range(len(xs)):
if d_xs_new[j] is not None:
if d_xs[j] is None:
d_xs[j] = _unsparsify(d_xs_new[j])
else:
d_xs[j] += _unsparsify(d_xs_new[j])
return d_xs
def tf_toposort(ts, within_ops=None):
all_ops = ge.get_forward_walk_ops([x.op for x in ts], within_ops=within_ops)
deps = {}
for op in all_ops:
for o in op.outputs:
deps[o] = set(op.inputs)
sorted_ts = toposort(deps)
# only keep the tensors from our original list
ts_sorted_lists = []
for l in sorted_ts:
keep = list(set(l).intersection(ts))
if keep:
ts_sorted_lists.append(keep)
return ts_sorted_lists
def fast_backward_ops(within_ops, seed_ops, stop_at_ts):
bwd_ops = set(ge.get_backward_walk_ops(seed_ops, stop_at_ts=stop_at_ts))
ops = bwd_ops.intersection(within_ops).difference([t.op for t in stop_at_ts])
return list(ops)
@contextlib.contextmanager
def capture_ops():
"""Decorator to capture ops created in the block.
with capture_ops() as ops:
# create some ops
print(ops) # => prints ops created.
"""
micros = int(time.time()*10**6)
scope_name = str(micros)
op_list = []
with tf.name_scope(scope_name):
yield op_list
    g = tf.compat.v1.get_default_graph()
op_list.extend(ge.select_ops(scope_name+"/.*", graph=g))
def _to_op(tensor_or_op):
if hasattr(tensor_or_op, "op"):
return tensor_or_op.op
return tensor_or_op
def _to_ops(iterable):
if not _is_iterable(iterable):
return iterable
return [_to_op(i) for i in iterable]
def _is_iterable(o):
try:
_ = iter(o)
except Exception:
return False
return True
DEBUG_LOGGING=False
def debug_print(s, *args):
"""Like logger.log, but also replaces all TensorFlow ops/tensors with their
names. Sensitive to value of DEBUG_LOGGING, see enable_debug/disable_debug
Usage:
debug_print("see tensors %s for %s", tensorlist, [1,2,3])
"""
if DEBUG_LOGGING:
formatted_args = [format_ops(arg) for arg in args]
print("DEBUG "+s % tuple(formatted_args))
def format_ops(ops, sort_outputs=True):
"""Helper method for printing ops. Converts Tensor/Operation op to op.name,
rest to str(op)."""
if hasattr(ops, '__iter__') and not isinstance(ops, str):
l = [(op.name if hasattr(op, "name") else str(op)) for op in ops]
if sort_outputs:
return sorted(l)
return l
else:
return ops.name if hasattr(ops, "name") else str(ops)
def my_add_control_inputs(wait_to_do_ops, inputs_to_do_before):
for op in wait_to_do_ops:
ci = [i for i in inputs_to_do_before if op.control_inputs is None or i not in op.control_inputs]
ge.add_control_inputs(op, ci)
| 44.67268
| 139
| 0.656205
|
c750cc3ed9765378186d3d91464bfbb3d6420303
| 724
|
py
|
Python
|
src/features/inversion/__init__.py
|
dimitrymindlin/xray
|
0476d2ac950a118b9182e5cc3b077ccd32b8d722
|
[
"MIT"
] | 31
|
2020-01-29T12:45:41.000Z
|
2022-03-01T14:07:02.000Z
|
src/features/inversion/__init__.py
|
dimitrymindlin/xray
|
0476d2ac950a118b9182e5cc3b077ccd32b8d722
|
[
"MIT"
] | 5
|
2020-12-07T04:32:34.000Z
|
2022-01-23T14:39:01.000Z
|
src/features/inversion/__init__.py
|
dimitrymindlin/xray
|
0476d2ac950a118b9182e5cc3b077ccd32b8d722
|
[
"MIT"
] | 12
|
2020-08-03T12:20:19.000Z
|
2022-02-18T06:48:05.000Z
|
import glob
import cv2
import matplotlib.pyplot as plt
import numpy as np
from src import DATA_PATH
# Saves data to the same folder
XR_HAND_CENTRED_PATH = f'{DATA_PATH}/XR_HAND_CENTRED'
path_to_data = f'{XR_HAND_CENTRED_PATH}/*/*/*'
paths = glob.glob(path_to_data)
threshold = 255 / 2
for path in paths:
img = cv2.imread(path)
    img = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)  # cv2.imread returns BGR, not RGB
# Mean color of corners
color = np.array([img[0:50, 0:50].mean(), img[-50:, -50:].mean(),
img[:50, -50:].mean(), img[-50:, :50].mean()]).mean()
if img.mean() > threshold or color > threshold:
plt.imshow(img, cmap='gray', vmin=0, vmax=255)
plt.show()
cv2.imwrite(path, 255 - img)
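# Note: the corner-mean heuristic above assumes radiographs on a bright
# background; e.g. corners averaging ~240 on a 0-255 scale push `color`
# past the 127.5 threshold, so the file is rewritten as its negative
# (255 - img), normalising the dataset to dark backgrounds.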
| 24.965517
| 75
| 0.635359
|
2969bcd5a971f4f7167322b9efe5c35f49e373a2
| 10,091
|
py
|
Python
|
core/targets.py
|
Prodject/OWASP-Nettacker
|
b6fb4a1fe6b3cf1a0a49872480fd387b77ed9b44
|
[
"Apache-2.0"
] | 35
|
2019-10-17T17:42:50.000Z
|
2020-10-06T12:08:29.000Z
|
core/targets.py
|
Prodject/OWASP-Nettacker
|
b6fb4a1fe6b3cf1a0a49872480fd387b77ed9b44
|
[
"Apache-2.0"
] | 1
|
2022-03-29T22:02:36.000Z
|
2022-03-29T22:02:36.000Z
|
core/targets.py
|
Prodject/OWASP-Nettacker
|
b6fb4a1fe6b3cf1a0a49872480fd387b77ed9b44
|
[
"Apache-2.0"
] | 7
|
2019-10-17T21:46:09.000Z
|
2021-12-15T04:56:29.000Z
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import socket
import json
import netaddr.ip
import re
from core.ip import *
from core.alert import *
from core._die import __die_failure
from lib.scan.subdomain.engine import __get_subs
from core.log import __log_into_file
def target_to_host(target):
"""
convert a target to host, example http://owasp.org to owasp.org or http://127.0.0.1 to 127.0.0.1
Args:
target: the target
Returns:
the host target
"""
if target_type(target) == 'HTTP':
target = target.lower().replace(
'http://', '').replace('https://', '').rsplit('/')[0]
if ':' in target:
target = target.rsplit(':')[0]
return target
def target_type(target):
"""
define the target type
Args:
target: the target
Returns:
the target type (SINGLE_IPv4, SINGLE_IPv6, RANGE_IPv4, DOMAIN, HTTP, CIDR_IPv4, UNKNOWN)
"""
if isIP(target):
return 'SINGLE_IPv4'
elif isIP6(target):
return 'SINGLE_IPv6'
    elif len(target.rsplit('.')) == 7 and '-' in target and '/' not in target:
        start_ip, stop_ip = target.rsplit('-')
        if isIP(start_ip) and isIP(stop_ip):
            return 'RANGE_IPv4'
    elif re.match(r'^([a-z0-9]+(-[a-z0-9]+)*\.)+[a-z]{2,}$', target):
        return 'DOMAIN'
    elif target.lower().startswith('http://') or target.lower().startswith('https://'):
        t = target.rsplit("://")[1].rsplit("/")[0].rsplit(":")[0]
        if isIP(t) or isIP6(t) or re.match(r'^([a-z0-9]+(-[a-z0-9]+)*\.)+[a-z]{2,}$', t):
            return 'HTTP'
    elif len(target.rsplit('.')) == 4 and '-' not in target and '/' in target:
        IP, CIDR = target.rsplit('/')
        if isIP(IP) and 0 <= int(CIDR) <= 32:
            return 'CIDR_IPv4'
    return 'UNKNOWN'
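# A few illustrative calls (addresses are documentation examples, not
# values used by the project):
#   target_type('192.0.2.1')              -> 'SINGLE_IPv4'
#   target_type('192.0.2.1-192.0.2.50')   -> 'RANGE_IPv4'
#   target_type('192.0.2.0/24')           -> 'CIDR_IPv4'
#   target_type('owasp.org')              -> 'DOMAIN'
#   target_type('https://owasp.org/')     -> 'HTTP'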
def analysis(targets, check_ranges, check_subdomains, subs_temp, range_temp, log_in_file, time_sleep,
language, verbose_level, retries, socks_proxy, enumerate_flag):
"""
    analyze and calculate targets.
Args:
targets: targets
check_ranges: check IP range flag
check_subdomains: check subdomain flag
subs_temp: subdomain temp filename
range_temp: IP range tmp filename
log_in_file: output filename
time_sleep: time to sleep
language: language
verbose_level: verbose level number
retries: retries number
socks_proxy: socks proxy
enumerate_flag: enumerate flag
Returns:
a generator
"""
__log_into_file(range_temp, 'a', '', language)
__log_into_file(subs_temp, 'a', '', language)
for target in targets:
if target_type(target) == 'SINGLE_IPv4':
if check_ranges:
if not enumerate_flag:
info(messages(language, "checking_range").format(target))
IPs = IPRange(getIPRange(target), range_temp, language)
if type(IPs) == netaddr.ip.IPNetwork:
for IPm in IPs:
yield IPm
elif type(IPs) == list:
for IPm in IPs:
for IP in IPm:
yield IP
else:
if not enumerate_flag:
info(messages(language, "target_submitted").format(target))
yield target
elif target_type(target) == 'SINGLE_IPv6':
yield target
elif target_type(target) == 'RANGE_IPv4' or target_type(target) == 'CIDR_IPv4':
IPs = IPRange(target, range_temp, language)
if not enumerate_flag:
info(messages(language, "checking").format(target))
if type(IPs) == netaddr.ip.IPNetwork:
for IPm in IPs:
yield IPm
elif type(IPs) == list:
for IPm in IPs:
for IP in IPm:
yield IP
elif target_type(target) == 'DOMAIN':
if check_subdomains:
if check_ranges:
if enumerate_flag:
info(messages(language, "checking").format(target))
sub_domains = json.loads(open(subs_temp).read()) if len(open(subs_temp).read()) > 2 else \
__get_subs(target, 3, '', 0, language,
0, socks_proxy, 3, 0, 0)
                    if len(open(subs_temp).read()) == 0:
__log_into_file(subs_temp, 'a', json.dumps(
sub_domains), language)
if target not in sub_domains:
sub_domains.append(target)
for target in sub_domains:
if not enumerate_flag:
info(messages(language, "target_submitted").format(target))
yield target
n = 0
err = 0
IPs = []
while True:
try:
IPs.append(socket.gethostbyname(target))
err = 0
n += 1
                        if n == 12:
break
except:
err += 1
                        if err == 3 or n == 12:
break
IPz = list(set(IPs))
for IP in IPz:
if not enumerate_flag:
info(messages(language, "checking_range").format(IP))
IPs = IPRange(getIPRange(IP), range_temp, language)
if type(IPs) == netaddr.ip.IPNetwork:
for IPm in IPs:
yield IPm
elif type(IPs) == list:
for IPm in IPs:
for IPn in IPm:
yield IPn
else:
if enumerate_flag:
info(messages(language, "checking").format(target))
sub_domains = json.loads(open(subs_temp).read()) if len(open(subs_temp).read()) > 2 else \
__get_subs(target, 3, '', 0, language,
0, socks_proxy, 3, 0, 0)
                if len(open(subs_temp).read()) == 0:
__log_into_file(subs_temp, 'a', json.dumps(
sub_domains), language)
if target not in sub_domains:
sub_domains.append(target)
for target in sub_domains:
if not enumerate_flag:
info(messages(language, "target_submitted").format(target))
yield target
else:
if check_ranges:
if not enumerate_flag:
info(messages(language, "checking").format(target))
yield target
n = 0
err = 0
IPs = []
while True:
try:
IPs.append(socket.gethostbyname(target))
err = 0
n += 1
                        if n == 12:
break
except:
err += 1
                        if err == 3 or n == 12:
break
IPz = list(set(IPs))
for IP in IPz:
if not enumerate_flag:
info(messages(language, "checking_range").format(IP))
IPs = IPRange(getIPRange(IP), range_temp, language)
if type(IPs) == netaddr.ip.IPNetwork:
for IPm in IPs:
yield IPm
elif type(IPs) == list:
for IPm in IPs:
for IPn in IPm:
yield IPn
else:
if not enumerate_flag:
info(messages(language, "target_submitted").format(target))
yield target
elif target_type(target) == 'HTTP':
if not enumerate_flag:
info(messages(language, "checking").format(target))
yield target
if check_ranges:
if 'http://' == target[:7].lower():
target = target[7:].rsplit('/')[0]
if 'https://' == target[:8].lower():
target = target[8:].rsplit('/')[0]
yield target
                n = 0
                err = 0
                IPs = []
                while True:
                    try:
                        IPs.append(socket.gethostbyname(target))
                        err = 0
                        n += 1
                        if n == 12:
                            break
                    except:
                        err += 1
                        if err == 3 or n == 12:
                            break
IPz = list(set(IPs))
for IP in IPz:
if not enumerate_flag:
info(messages(language, "checking_range").format(IP))
IPs = IPRange(getIPRange(IP), range_temp, language)
if type(IPs) == netaddr.ip.IPNetwork:
for IPm in IPs:
yield IPm
elif type(IPs) == list:
for IPm in IPs:
for IPn in IPm:
yield IPn
else:
__die_failure(messages(language, "unknown_target").format(target))
| 40.203187
| 110
| 0.440591
|
844542e302f02a2c57d4dd9620f574b9fd7f750e
| 1,541
|
py
|
Python
|
items.py
|
q6806161/-_scrapy-
|
281463018530fd519843a7994c9219f126194675
|
[
"MIT"
] | null | null | null |
items.py
|
q6806161/-_scrapy-
|
281463018530fd519843a7994c9219f126194675
|
[
"MIT"
] | null | null | null |
items.py
|
q6806161/-_scrapy-
|
281463018530fd519843a7994c9219f126194675
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# Define here the models for your scraped items
#
# See documentation in:
# https://doc.scrapy.org/en/latest/topics/items.html
import scrapy
class HouseInfoItem(scrapy.Item):
# define the fields for your item here like:
    City = scrapy.Field() # city
    Item_Url = scrapy.Field() # listing URL
    House_Title = scrapy.Field() # rental listing title
    Rent_Style = scrapy.Field() # whole rent or shared rent
    Rent_Salary = scrapy.Field() # rent
    House_Type = scrapy.Field() # floor plan
    House_Area = scrapy.Field() # floor area
    Rouse_Direction = scrapy.Field() # orientation
    Floor = scrapy.Field() # floor
    Decoration = scrapy.Field() # decoration
    House_Kind = scrapy.Field() # property type
    Community = scrapy.Field() # residential community
    House_Equipment = scrapy.Field() # room facilities
    House_Description = scrapy.Field() # listing overview
    Agent_Name = scrapy.Field() # agent name
    Agent_Level = scrapy.Field() # agent rating (percent of peers outperformed)
    House_Score = scrapy.Field() # listing score
    Service_Score = scrapy.Field() # agent service score
    Evaluation_Score = scrapy.Field() # user reviews
    Agent_Company = scrapy.Field() # agency company
    Branch_Office = scrapy.Field() # branch office
    Company_License = scrapy.Field() # company business license number
    Publish_Date = scrapy.Field() # publish date
    Font_Url = scrapy.Field() # character-substitution font URL
    Elevator = scrapy.Field() # elevator available or not
    Subway = scrapy.Field() # subway access
    District = scrapy.Field() # city district
pass
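# A hedged population sketch; the field values are invented and would
# normally be extracted from a parsed response inside a Spider callback:
#
#   item = HouseInfoItem()
#   item['City'] = 'Beijing'
#   item['Rent_Style'] = 'whole rent'
#   item['Rent_Salary'] = '3500'
#   yield item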
| 37.585366
| 63
| 0.5756
|
e7a912cb274237c7758bb4d36156942f1cda107f
| 149
|
py
|
Python
|
mypower/matpower_ported/most/lib/t/test_most.py
|
yasirroni/mypower
|
123c2d3380bf5f753a479c35e7b5cbafc82a8ebc
|
[
"Apache-2.0"
] | 2
|
2020-08-08T15:13:49.000Z
|
2021-01-04T07:21:29.000Z
|
mypower/matpower_ported/most/lib/t/test_most.py
|
yasirroni/mypower
|
123c2d3380bf5f753a479c35e7b5cbafc82a8ebc
|
[
"Apache-2.0"
] | null | null | null |
mypower/matpower_ported/most/lib/t/test_most.py
|
yasirroni/mypower
|
123c2d3380bf5f753a479c35e7b5cbafc82a8ebc
|
[
"Apache-2.0"
] | 1
|
2020-08-08T15:14:17.000Z
|
2020-08-08T15:14:17.000Z
|
def test_most(*args,nout=1,oc=None):
    if oc is None:
from .....oc_api import oc_matpower
oc = oc_matpower()
return oc.test_most(*args,nout=nout)
| 24.833333
| 37
| 0.697987
|
5e6153f130a7b9bde23586c05ceef1cf7ce241c1
| 3,785
|
py
|
Python
|
unterwegs/utils/pages.py
|
mountain/unterwegs
|
fe84ef366b278382f7589fc21d3442ffd9db530f
|
[
"MIT"
] | 7
|
2020-11-03T11:28:22.000Z
|
2021-01-11T04:10:55.000Z
|
unterwegs/utils/pages.py
|
mountain/unterwegs
|
fe84ef366b278382f7589fc21d3442ffd9db530f
|
[
"MIT"
] | 1
|
2020-11-06T01:59:36.000Z
|
2021-04-29T13:51:56.000Z
|
unterwegs/utils/pages.py
|
mountain/unterwegs
|
fe84ef366b278382f7589fc21d3442ffd9db530f
|
[
"MIT"
] | null | null | null |
import orjson as json
from unterwegs.utils.db import ts, rd, rn, rc
from zlib import decompress, compress
def search_result(q):
ckey = 'search:%s' % q
result = rc.get(ckey)
if result is not None:
result = json.loads(decompress(result))
else:
result = ts.collections['pages'].documents.search({
'q': q,
'per_page': 200,
'query_by': 'content',
'sort_by': '_text_match:desc',
'include_fields': 'id,article',
'drop_tokens_threshold': 0,
'typo_tokens_threshold': 0,
'highlight_affix_num_tokens': 50,
})
rc.set(ckey, compress(json.dumps(result)))
rc.expire(ckey, 3600)
return result
def frequency_analyze(q, hits):
ckey = 'frequency:%s' % q
result = rc.get(ckey)
if result is not None:
result = json.loads(decompress(result))
else:
nkey = 'freq:%s' % q
keys = ['bow:%s' % doc['document']['id'] for doc in hits]
rn.zunionstore(nkey, keys, 'SUM')
rn.expire(nkey, 3600)
freq = rn.zrange(nkey, 0, -1, desc=True, withscores=True)
total = sum([v for k, v in freq])
result = [{"index": ix, "term": f[0].decode('utf-8'), "total": f[1] / total} for ix, f in enumerate(freq) if ix < 300]
rc.set(ckey, compress(json.dumps(result)))
rc.expire(ckey, 3600)
return result
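# The list returned above is what callers consume; its shape, with
# invented numbers: [{"index": 0, "term": "alps", "total": 0.042}, ...]
# -- the top 300 terms of the ZUNIONSTORE across all hits, normalised by
# the summed scores.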
def frequency_of(q, pid, hits):
ckey = 'bagofwords:%s:%s' % (q, pid)
result = rc.get(ckey)
if result is not None:
result = json.loads(decompress(result))
else:
nkey = 'bow:%s' % pid
freq = rn.zrange(nkey, 0, -1, desc=True, withscores=True)
total = sum([v for k, v in freq])
freq = {k.decode('utf-8'): v / total for k, v in freq}
        base = frequency_analyze(q, hits)
result = []
for item in base:
ix, term, total = item['index'], item['term'], item['total']
if term in freq:
result.append({"index": ix, "term": term, "total": total, "page": freq[term]})
rc.set(ckey, compress(json.dumps(result)))
rc.expire(ckey, 3600)
return result
def coocurrence_nodes(q, hits):
ckey = 'nodes:%s' % q
result = rc.get(ckey)
if result is not None:
result = json.loads(decompress(result))
else:
result = []
for ix, hit in enumerate(hits):
pid = hit['document']['id']
fid = rd.get('articleOf:page:%s' % pid)
fid = fid.decode('utf-8') if fid else 'None'
result.append({"name": pid, "group": fid, "index": ix, 'highlight': hit['highlights'][0]['snippet']})
rc.set(ckey, compress(json.dumps(result)))
rc.expire(ckey, 3600)
return result
def coocurrence_links(q, hits):
ckey = 'links:%s' % q
result = rc.get(ckey)
if result is not None:
result = json.loads(decompress(result))
else:
result = []
total, avg = 0, 0
for ix, ih in enumerate(hits):
src = ih['document']['id']
for jx, jh in enumerate(hits):
tgt = jh['document']['id']
if ix != jx:
skey, tkey, dkey = 'bow:%s' % src, 'bow:%s' % tgt, 'coocur:%s:%s' % (src, tgt)
if not rn.exists(dkey):
cnt = rn.zinterstore(dkey, [skey, tkey], 'MIN')
rn.expire(dkey, 3600 * 24 * 7)
else:
cnt = rn.zcard(dkey)
if cnt > 0:
total += cnt
result.append({"source": ix, "target": jx, "value": int(cnt)})
rc.set(ckey, compress(json.dumps(result)))
rc.expire(ckey, 3600)
return result
| 33.201754
| 126
| 0.523118
|
74ec62f02b7abe41ce56cce60ee3184b3163d19f
| 2,530
|
py
|
Python
|
app/recipe/tests/test_tags_api.py
|
bl4ck4ndbr0wn/recipe-api
|
0ff03ea2c62a6ec47396c3aaccb25279b4375edf
|
[
"MIT"
] | null | null | null |
app/recipe/tests/test_tags_api.py
|
bl4ck4ndbr0wn/recipe-api
|
0ff03ea2c62a6ec47396c3aaccb25279b4375edf
|
[
"MIT"
] | null | null | null |
app/recipe/tests/test_tags_api.py
|
bl4ck4ndbr0wn/recipe-api
|
0ff03ea2c62a6ec47396c3aaccb25279b4375edf
|
[
"MIT"
] | null | null | null |
from django.contrib.auth import get_user_model
from django.urls import reverse
from django.test import TestCase
from rest_framework import status
from rest_framework.test import APIClient
from core.models import Tag
from recipe.serializers import TagSerializer
TAGS_URL = reverse('recipe:tag-list')
class PublicTagsApiTests(TestCase):
"""Test thae publicly available tags API"""
def setUp(self):
self.client = APIClient()
def test_login_required(self):
"""Test tha login is required for retrieving tags"""
res = self.client.get(TAGS_URL)
self.assertEqual(res.status_code, status.HTTP_401_UNAUTHORIZED)
class PrivateTagsApiTests(TestCase):
"""Test the authorized tags API"""
def setUp(self):
self.user = get_user_model().objects.create_user(
'test@gmail.com',
'testPass'
)
self.client = APIClient()
self.client.force_authenticate(self.user)
    def test_retrieve_tags(self):
        """Test retrieving tags"""
Tag.objects.create(user=self.user, name='Vegan')
Tag.objects.create(user=self.user, name='Dessert')
res = self.client.get(TAGS_URL)
tags = Tag.objects.all().order_by('-name')
serializer = TagSerializer(tags, many=True)
self.assertEqual(res.status_code, status.HTTP_200_OK)
self.assertEqual(res.data, serializer.data)
def test_tags_limited_to_user(self):
"""Test that that tag returned are for the authenticated user"""
user2 = get_user_model().objects.create_user(
'other@gmail.com',
'testPass'
)
Tag.objects.create(user=user2, name='Fruity')
tag = Tag.objects.create(user=self.user, name='Comfort Food')
res = self.client.get(TAGS_URL)
self.assertEqual(res.status_code, status.HTTP_200_OK)
self.assertEqual(len(res.data), 1)
self.assertEqual(res.data[0]['name'], tag.name)
    def test_create_tag_successful(self):
        """Test creating a new tag"""
payload = {"name": "test tag"}
self.client.post(TAGS_URL, payload)
        exist = Tag.objects.filter(
            user=self.user,
            name=payload['name']
        ).exists()
self.assertTrue(exist)
def test_create_tag_invalid(self):
"""Test creating anew tag with invalid payload"""
payload = {'name':''}
res = self.client.post(TAGS_URL, payload)
self.assertEqual(res.status_code, status.HTTP_400_BAD_REQUEST)
| 29.418605
| 72
| 0.651383
|
d100fa9a32b5e8b1cc478e2734d1a018dfa2831f
| 181
|
py
|
Python
|
blog/urls.py
|
Ellena45/template-python-django
|
87819a83ba256f9277610df4b878bd73ae52febc
|
[
"MIT"
] | null | null | null |
blog/urls.py
|
Ellena45/template-python-django
|
87819a83ba256f9277610df4b878bd73ae52febc
|
[
"MIT"
] | null | null | null |
blog/urls.py
|
Ellena45/template-python-django
|
87819a83ba256f9277610df4b878bd73ae52febc
|
[
"MIT"
] | null | null | null |
from django.urls import path
from . import views
app_name = 'blog'
urlpatterns = [
    path('', views.index, name='index'),
    path('timeline/', views.timeline_view, name='timeline'),
]
| 22.625
| 58
| 0.701657
|
ce1bf58a602fe07f8df73be8fbe01c8b343c569d
| 4,869
|
py
|
Python
|
built-in/TensorFlow/Research/reinforcement-learning/ModelZoo_PPO_TensorFlow/rl/xt/environment/dst/digital_sky.py
|
Huawei-Ascend/modelzoo
|
df51ed9c1d6dbde1deef63f2a037a369f8554406
|
[
"Apache-2.0"
] | null | null | null |
built-in/TensorFlow/Research/reinforcement-learning/ModelZoo_PPO_TensorFlow/rl/xt/environment/dst/digital_sky.py
|
Huawei-Ascend/modelzoo
|
df51ed9c1d6dbde1deef63f2a037a369f8554406
|
[
"Apache-2.0"
] | 3
|
2021-03-31T20:15:40.000Z
|
2022-02-09T23:50:46.000Z
|
built-in/TensorFlow/Research/reinforcement-learning/ModelZoo_PPO_TensorFlow/rl/xt/environment/dst/digital_sky.py
|
Huawei-Ascend/modelzoo
|
df51ed9c1d6dbde1deef63f2a037a369f8554406
|
[
"Apache-2.0"
] | null | null | null |
"""
digital sky game environment
"""
import numpy as np
from gym import spaces
from xt.environment.dst.external_env import ExternalEnv
from xt.environment.dst.state_transform import get_preprocessor
from xt.framework.register import Registers
observation_space = spaces.Dict({
"self_health": spaces.Box(0, 1000, (1, )),
"self_shield": spaces.Box(0, 100, (1, )),
"self_shield_cd": spaces.Box(0, 100, (1, )),
"self_shield_state": spaces.Box(0, 10, (4, )),
"self_parry_range": spaces.Box(0, 100, (1, )),
"self_x": spaces.Box(-np.inf, np.inf, (1, )),
"self_y": spaces.Box(-np.inf, np.inf, (1, )),
"self_z": spaces.Box(-np.inf, np.inf, (1, )),
"self_heading_x": spaces.Box(-np.inf, np.inf, (1, )),
"self_heading_y": spaces.Box(-np.inf, np.inf, (1, )),
"self_heading_z": spaces.Box(-np.inf, np.inf, (1, )),
"self_state": spaces.Discrete(101),
"self_CurrentHurtCount": spaces.Box(0, 100, (1, )),
"self_MaxHurtCount": spaces.Box(0, 100, (1, )),
"self_CurrentParryCountInDefence": spaces.Box(0, 100, (1, )),
"self_ParryCountInDefence": spaces.Box(0, 100, (1, )),
"self_Teleport_cd": spaces.Box(0, 100, (1, )),
"self_in_air": spaces.Discrete(2),
"opponent_health": spaces.Box(0, 1000, (1, )),
"opponent_shield": spaces.Box(0, 100, (1, )),
"opponent_shield_cd": spaces.Box(0, 100, (1, )),
"opponent_shield_state": spaces.Box(0, 10, (4, )),
"opponent_parry_range": spaces.Box(0, 100, (1, )),
"opponent_x": spaces.Box(-np.inf, np.inf, (1, )),
"opponent_y": spaces.Box(-np.inf, np.inf, (1, )),
"opponent_z": spaces.Box(-np.inf, np.inf, (1, )),
"opponent_heading_x": spaces.Box(-np.inf, np.inf, (1, )),
"opponent_heading_y": spaces.Box(-np.inf, np.inf, (1, )),
"opponent_heading_z": spaces.Box(-np.inf, np.inf, (1, )),
"opponent_state": spaces.Discrete(101),
"opponent_CurrentHurtCount": spaces.Box(0, 200, (1, )),
"opponent_MaxHurtCount": spaces.Box(0, 100, (1, )),
"opponent_CurrentParryCountInDefence": spaces.Box(0, 100, (1, )),
"opponent_ParryCountInDefence": spaces.Box(0, 100, (1, )),
"opponent_Teleport_cd": spaces.Box(0, 100, (1, )),
"opponent_in_air": spaces.Discrete(2),
})
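# transfer_state() below consumes the raw flat state vector in exactly
# this order: indices 0-20 carry the "self_*" fields and 21-41 the
# "opponent_*" fields, with the two *_in_air flags arriving as the
# strings 'true'/'false'.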
@Registers.env
class DigitalSky(ExternalEnv):
"""
DigitalSky server class
"""
def transfer_state(self, state, *args):
"""
transform state
"""
state_dict = {}
state_dict["self_health"] = np.array([state[0]])
state_dict["self_shield"] = np.array([state[1]])
state_dict["self_shield_cd"] = np.array([state[2]])
state_dict["self_shield_state"] = np.array(state[3:7])
state_dict["self_parry_range"] = np.array([state[7]])
state_dict["self_x"] = np.array([state[8]])
state_dict["self_y"] = np.array([state[9]])
state_dict["self_z"] = np.array([state[10]])
state_dict["self_heading_x"] = np.array([state[11]])
state_dict["self_heading_y"] = np.array([state[12]])
state_dict["self_heading_z"] = np.array([state[13]])
state_dict["self_state"] = np.array(state[14])
state_dict["self_CurrentHurtCount"] = np.array([state[15]])
state_dict["self_MaxHurtCount"] = np.array([state[16]])
state_dict["self_CurrentParryCountInDefence"] = np.array([state[17]])
state_dict["self_ParryCountInDefence"] = np.array([state[18]])
state_dict["self_Teleport_cd"] = np.array([state[19]])
state_dict["self_in_air"] = np.array(int(state[20] == 'true'))
state_dict["opponent_health"] = np.array([state[21]])
state_dict["opponent_shield"] = np.array([state[22]])
state_dict["opponent_shield_cd"] = np.array([state[23]])
state_dict["opponent_shield_state"] = np.array(state[24:28])
state_dict["opponent_parry_range"] = np.array([state[28]])
state_dict["opponent_x"] = np.array([state[29]])
state_dict["opponent_y"] = np.array([state[30]])
state_dict["opponent_z"] = np.array([state[31]])
state_dict["opponent_heading_x"] = np.array([state[32]])
state_dict["opponent_heading_y"] = np.array([state[33]])
state_dict["opponent_heading_z"] = np.array([state[34]])
state_dict["opponent_state"] = np.array(state[35])
state_dict["opponent_CurrentHurtCount"] = np.array([state[36]])
state_dict["opponent_MaxHurtCount"] = np.array([state[37]])
state_dict["opponent_CurrentParryCountInDefence"] = np.array([state[38]])
state_dict["opponent_ParryCountInDefence"] = np.array([state[39]])
state_dict["opponent_Teleport_cd"] = np.array([state[40]])
state_dict["opponent_in_air"] = np.array(int(state[41] == 'true'))
processor = get_preprocessor(observation_space)(observation_space)
state = processor.transform(state_dict)
return state
| 47.735294
| 81
| 0.633395
|
09119ac8c2c3831ba156ab2e3bb2b28ab116c6d1
| 5,683
|
py
|
Python
|
comp0037_explorer/src/comp0037_explorer/explorer_node_WFD_base.py
|
Yun5141/0037-assignment2
|
b95627e181ee4d46ec6193aad950094de0628de4
|
[
"BSD-3-Clause"
] | null | null | null |
comp0037_explorer/src/comp0037_explorer/explorer_node_WFD_base.py
|
Yun5141/0037-assignment2
|
b95627e181ee4d46ec6193aad950094de0628de4
|
[
"BSD-3-Clause"
] | null | null | null |
comp0037_explorer/src/comp0037_explorer/explorer_node_WFD_base.py
|
Yun5141/0037-assignment2
|
b95627e181ee4d46ec6193aad950094de0628de4
|
[
"BSD-3-Clause"
] | null | null | null |
import rospy
from explorer_node_base import ExplorerNodeBase
from nav_msgs.msg import Odometry
# Part 2.3
# This class is a base class implementing wave front detection,
# referring to the pseudocode in the suggested paper 'Frontier Based Exploration for Autonomous Robot'
class ExplorerNodeWFDBase(ExplorerNodeBase):
def __init__(self):
self.blackList = []
# to get self position for searching the frontiers
self.searchStartCell = None # search start cell / current cell coords
self.position = None
# for wave frontier detection
self.frontierList = []
self.initFrontierInfo()
ExplorerNodeBase.__init__(self) # not sure but has to be put here, or it can't find the above two attributes
self.current_pose_subscriber = rospy.Subscriber('/robot0/odom', Odometry, self.current_pose_callback)
# pose callback to get self cell position
def current_pose_callback(self, data):
pose = data.pose.pose
pos = pose.position
try:
self.occupancyGrid
        except AttributeError:
return
position = self.occupancyGrid.getCellCoordinatesFromWorldCoordinates((pos.x,pos.y))
self.position = position
# if a goal is found unreachable, add it to the blacklist
def destinationReached(self, goal, goalReached):
if goalReached is False:
# print 'Adding ' + str(goal) + ' to the naughty step'
self.blackList.append(goal)
# ------------------------------------
def initFrontierInfo(self):
rospy.loginfo("Clearing old frontier info")
self.frontierList = []
def isInBoundary(self, cell):
width, height = self.occupancyGrid.getWidthInCells(), self.occupancyGrid.getHeightInCells()
return cell[0] in range(0,width) and cell[1] in range(0,height)
def isEmptyCell(self, cell):
return self.occupancyGrid.getCell(cell[0], cell[1]) == 0.0
def getNeighbours(self, centerCell):
offset = [-1,0,1]
offset2 = [[offsetX,offsetY] for offsetX in offset for offsetY in offset]
l = [[centerCell[0] + offsetX, centerCell[1] + offsetY] \
for offsetX, offsetY in offset2 \
if not [offsetX, offsetY] == [0,0]]
l = filter(lambda x:self.isInBoundary(x),l)
return l
    def hasAtLeastOneOpenNeighbours(self, cell):
        for neighbours in self.getNeighbours(cell):
            if self.isInBoundary(neighbours) and self.isEmptyCell(neighbours):
                return True
        return False
# breadth first search, starting with self position
def searchFrontiers(self,searchStartCell,frontierList):
currentCell = searchStartCell
waitingList = [searchStartCell]
visitedList = []
while len(waitingList) != 0:
currentCell = waitingList.pop(0)
if currentCell in visitedList or currentCell in self.blackList:
continue
if self.isFrontierCell(currentCell[0], currentCell[1]):
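                # frontier cell hit: grow the connected frontier region
                # around it with a second, inner BFS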
currentPotentialFrontier = currentCell
waitingPotentialFrontierList = [currentCell]
while len(waitingPotentialFrontierList) != 0:
currentPotentialFrontier = waitingPotentialFrontierList.pop(0)
if currentPotentialFrontier in visitedList or currentPotentialFrontier in self.blackList:
continue
if self.isFrontierCell(currentPotentialFrontier[0], currentPotentialFrontier[1]):
frontierList.append(currentPotentialFrontier)
for neighbours in self.getNeighbours(currentPotentialFrontier):
if neighbours not in visitedList:
waitingPotentialFrontierList.append(neighbours)
visitedList.append(currentPotentialFrontier)
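            # continue the outer search through neighbours that border
            # at least one open (free) cell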
for neighbours in self.getNeighbours(currentCell):
if neighbours not in waitingList and neighbours not in visitedList and self.hasAtLeastOneOpenNeighbours(neighbours):
waitingList.append(neighbours)
visitedList.append(currentCell)
return frontierList
def getArbitraryFreeCell(self):
rospy.loginfo("initial search start cell is None\n")
width, height = self.occupancyGrid.getWidthInCells(), \
self.occupancyGrid.getHeightInCells()
cellMatrix = [[x,y] for x in range(0, width) for y in range(0, height)]
return filter(lambda cell : self.isEmptyCell(cell), cellMatrix)[0]
def checkSelfPosition(self):
if not self.searchStartCell:
self.searchStartCell = self.getArbitraryFreeCell()
if not self.position:
self.position = self.searchStartCell
return self.position
    # wave front detection, seeded from the robot's current cell position
def updateFrontiers(self):
rospy.loginfo("Update frontiers")
rospy.loginfo("clearing old info")
self.initFrontierInfo()
searchStartCell = self.checkSelfPosition()
rospy.loginfo("search start cell: (%d, %d)\n",searchStartCell[0],searchStartCell[1])
frontierList = self.searchFrontiers(searchStartCell, [])
if len(frontierList) != 0:
# remove unwanted entry
self.frontierList = filter(lambda x : x not in self.blackList, frontierList)
return True
else:
return False
| 36.664516
| 132
| 0.626782
|
e11d7b213469d833c5031bbadcb293907ca8d01e
| 3,390
|
py
|
Python
|
django/tiantian/tiantian/settings.py
|
zhang15780/web_project
|
820708ae68f4d1bc06cdde4a86e40a5457c11df8
|
[
"Apache-2.0"
] | null | null | null |
django/tiantian/tiantian/settings.py
|
zhang15780/web_project
|
820708ae68f4d1bc06cdde4a86e40a5457c11df8
|
[
"Apache-2.0"
] | null | null | null |
django/tiantian/tiantian/settings.py
|
zhang15780/web_project
|
820708ae68f4d1bc06cdde4a86e40a5457c11df8
|
[
"Apache-2.0"
] | null | null | null |
"""
Django settings for tiantian project.
Generated by 'django-admin startproject' using Django 1.11.
For more information on this file, see
https://docs.djangoproject.com/en/1.11/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.11/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.11/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'e1wy)!ga02f#fd^=%8drrc*dhfy%*02=01jv86*=jjd=k7w(5m'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = ['*']
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'goods',
'carts',
'users',
'order',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
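    # project-specific middleware; judging by the name, it enforces authentication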
'utils.usermiddleware.AuthMiddleware',
]
ROOT_URLCONF = 'tiantian.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [os.path.join(BASE_DIR, 'templates')],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'tiantian.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.11/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.mysql',
'NAME': 'tiantian',
'HOST': '47.106.81.203',
'USER': 'root',
'PASSWORD': 'admin@123',
'PORT': '3306',
}
}
# Password validation
# https://docs.djangoproject.com/en/1.11/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.11/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'Asia/Shanghai'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.11/howto/static-files/
STATIC_URL = '/static/'
STATICFILES_DIRS = [
os.path.join(BASE_DIR, 'static')
]
| 26.904762
| 91
| 0.682596
|
4a111e481264ea2f2f7c1097aa1a982cdab129e2
| 473
|
py
|
Python
|
platform/core/polyaxon/administration/__init__.py
|
hackerwins/polyaxon
|
ff56a098283ca872abfbaae6ba8abba479ffa394
|
[
"Apache-2.0"
] | null | null | null |
platform/core/polyaxon/administration/__init__.py
|
hackerwins/polyaxon
|
ff56a098283ca872abfbaae6ba8abba479ffa394
|
[
"Apache-2.0"
] | null | null | null |
platform/core/polyaxon/administration/__init__.py
|
hackerwins/polyaxon
|
ff56a098283ca872abfbaae6ba8abba479ffa394
|
[
"Apache-2.0"
] | null | null | null |
from hestia.service_interface import LazyServiceWrapper
from django.conf import settings
from administration.service import AdminService
def get_admin_backend():
return settings.ADMIN_BACKEND or 'administration.service.AdminService'
def get_admin_options():
return {'models': settings.ADMIN_MODELS}
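# lazily construct the configured admin backend and re-export its public
# interface into this module's namespace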
backend = LazyServiceWrapper(
backend_base=AdminService,
backend_path=get_admin_backend(),
options=get_admin_options()
)
backend.expose(locals())
| 21.5
| 74
| 0.79704
|
422e7024e12141476d31715f4b192b7dc6feab18
| 6,253
|
py
|
Python
|
tests/test_routes_artist.py
|
purwin/Parks-Database
|
98cb06dbfacf73c300f32d55f0872fb63ff4a906
|
[
"MIT"
] | null | null | null |
tests/test_routes_artist.py
|
purwin/Parks-Database
|
98cb06dbfacf73c300f32d55f0872fb63ff4a906
|
[
"MIT"
] | 2
|
2021-03-09T19:47:01.000Z
|
2022-02-10T19:41:33.000Z
|
tests/test_routes_artist.py
|
purwin/Parks-Database
|
98cb06dbfacf73c300f32d55f0872fb63ff4a906
|
[
"MIT"
] | null | null | null |
import unittest
from flask import request
from app import db
from app.parks_db import Artist
from base import BaseTests
class TestRoutesArtist(BaseTests):
default_artist = dict(
pName='Person',
fName='Cool',
email='cool_person@website.com',
phone='555-345-5678',
website='www.party.com'
)
@staticmethod
def create_artist(**kwargs):
"""
Static method to add artist class object to database
Takes the following string args: pName, fName, email, phone, website
Adds class to Artist database, commits session, and flushes to get id val
Returns the created class instance
"""
artist = Artist(**kwargs)
db.session.add(artist)
db.session.commit()
db.session.flush()
return artist
# Test artists page not logged in
def test_invalid_artists_not_logged_in(self):
with self.app:
response = self.app.get('/artists', follow_redirects=True)
req = request.url
            self.assertIn('/login', req)
self.assertEqual(response.status_code, 200)
# Test artists page logged in
def test_valid_artists_logged_in(self):
with self.app as c:
with c.session_transaction() as sess:
sess['url'] = '/'
self.login()
response = self.app.get('/artists', follow_redirects=True)
req = request.url
            self.assertIn('/artists', req)
self.assertEqual(response.status_code, 200)
# Test artist page not logged in
def test_invalid_artist_not_logged_in(self):
artist = self.default_artist
# Add artist to database
self.create_artist(**artist)
with self.app:
response = self.app.get('/artists/1', follow_redirects=True)
req = request.url
            self.assertIn('/login', req)
self.assertEqual(response.status_code, 200)
# Test artist page logged in
def test_valid_artist_logged_in(self):
artist = self.default_artist
# Add artist to database
self.create_artist(**artist)
with self.app as c:
with c.session_transaction() as sess:
sess['url'] = '/'
self.login()
response = self.app.get('/artists/1', follow_redirects=True)
req = request.url
            self.assertIn('/artists/1', req)
self.assertEqual(response.status_code, 200)
# Test artist page with no artists
def test_invalid_artist_no_artists(self):
with self.app as c:
with c.session_transaction() as sess:
sess['url'] = '/'
self.login()
response = self.app.get('/artists/1', follow_redirects=True)
req = request.url
            self.assertIn('/artists/1', req)
self.assertEqual(response.status_code, 404)
# Test GET artist CREATE page
def test_invalid_artist_create_get(self):
with self.app:
response = self.app.get('/artists/create', follow_redirects=True)
self.assertIn('Method Not Allowed', response.data)
self.assertEqual(response.status_code, 405)
# Test artist CREATE page logged in
def test_valid_artist_create_post(self):
artist = self.default_artist
with self.app as c:
with c.session_transaction() as sess:
sess['url'] = '/'
self.login()
response = self.app.post(
'/artists/create',
data=artist,
follow_redirects=True
)
self.assertIn('"success": true', response.data)
self.assertEqual(response.status_code, 200)
# Test artist CREATE page not logged in
def test_invalid_artist_create_post(self):
artist = self.default_artist
with self.app as c:
response = self.app.post(
'/artists/create',
data=artist,
follow_redirects=True
)
req = request.url
            self.assertIn('/login', req)
self.assertEqual(response.status_code, 200)
# Test POST artist EDIT page logged in
def test_valid_artist_edit_post(self):
artist = self.default_artist
new_artist_fName = 'Cooler'
# Add artist to database
self.create_artist(**artist)
with self.app as c:
with c.session_transaction() as sess:
sess['url'] = '/'
self.login()
response = self.app.post(
'/artists/1/edit',
data=dict(
fName=new_artist_fName,
pName=artist['pName'],
email=artist['email'],
phone=artist['phone'],
website=artist['website']
),
follow_redirects=True
)
self.assertIn('"success": true', response.data)
self.assertIn(new_artist_fName, response.data)
self.assertEqual(response.status_code, 200)
# Test POST artist EDIT page not logged in
def test_invalid_artist_edit_post(self):
artist = self.default_artist
new_artist_fName = 'Cooler'
# Add artist to database
self.create_artist(**artist)
with self.app as c:
response = self.app.post(
'/artists/1/edit',
data=dict(
fName=new_artist_fName,
pName=artist['pName'],
email=artist['email'],
phone=artist['phone'],
website=artist['website']
),
follow_redirects=True
)
req = request.url
            self.assertIn('/login', req)
self.assertEqual(response.status_code, 200)
# Test artist DELETE page logged in
def test_valid_artist_delete_post(self):
artist = self.default_artist
# Add artist to database
self.create_artist(**artist)
with self.app as c:
with c.session_transaction() as sess:
sess['url'] = '/'
self.login()
response = self.app.post(
'/artists/1/delete',
follow_redirects=True
)
req = request.url
retry = self.app.get(
'/artists/1',
follow_redirects=True
)
self.assertIn('/artists', req)
self.assertEqual(response.status_code, 200)
self.assertEqual(retry.status_code, 404)
# Test artist DELETE page not logged in
def test_invalid_artist_delete_post(self):
artist = self.default_artist
# Add artist to database
self.create_artist(**artist)
with self.app as c:
response = self.app.post(
'/artists/1/delete',
follow_redirects=True
)
req = request.url
            self.assertIn('/login', req)
self.assertEqual(response.status_code, 200)
| 25.73251
| 77
| 0.641132
|
aea4b7b422ea022c7bda6c88084cad070e832762
| 1,134
|
py
|
Python
|
samples/openapi3/client/features/dynamic-servers/python-experimental/setup.py
|
therockstorm/openapi-generator
|
01d0b5d4780ebe2d6025e2b443ec136c6ce16c45
|
[
"Apache-2.0"
] | 3
|
2021-04-09T01:04:32.000Z
|
2022-02-02T11:02:22.000Z
|
samples/openapi3/client/features/dynamic-servers/python-experimental/setup.py
|
therockstorm/openapi-generator
|
01d0b5d4780ebe2d6025e2b443ec136c6ce16c45
|
[
"Apache-2.0"
] | 10
|
2021-03-09T14:12:46.000Z
|
2022-02-27T11:42:16.000Z
|
samples/openapi3/client/features/dynamic-servers/python-experimental/setup.py
|
therockstorm/openapi-generator
|
01d0b5d4780ebe2d6025e2b443ec136c6ce16c45
|
[
"Apache-2.0"
] | 5
|
2020-11-26T05:13:41.000Z
|
2021-04-09T15:58:18.000Z
|
# coding: utf-8
"""
OpenAPI Extension with dynamic servers
This specification shows how to use dynamic servers. # noqa: E501
The version of the OpenAPI document: 1.0.0
Generated by: https://openapi-generator.tech
"""
from setuptools import setup, find_packages # noqa: H301
NAME = "dynamic-servers"
VERSION = "1.0.0"
# To install the library, run the following
#
# python setup.py install
#
# prerequisite: setuptools
# http://pypi.python.org/pypi/setuptools
REQUIRES = [
"urllib3 >= 1.15",
"certifi",
"python-dateutil",
"nulltype",
]
setup(
name=NAME,
version=VERSION,
description="OpenAPI Extension with dynamic servers",
author="OpenAPI Generator community",
author_email="team@openapitools.org",
url="",
keywords=["OpenAPI", "OpenAPI-Generator", "OpenAPI Extension with dynamic servers"],
python_requires=">=3.5",
install_requires=REQUIRES,
packages=find_packages(exclude=["test", "tests"]),
include_package_data=True,
license="Apache-2.0",
long_description="""\
This specification shows how to use dynamic servers. # noqa: E501
"""
)
| 23.625
| 88
| 0.687831
|
8be087faead54fa09f244c7fca0b6fef62da54d5
| 5,604
|
py
|
Python
|
test/test_simulate.py
|
bmkramer/automated-systematic-review
|
f99079926f381bc7895ff6fefa9e6e729a2c26b8
|
[
"Apache-2.0"
] | 1
|
2021-01-22T15:18:33.000Z
|
2021-01-22T15:18:33.000Z
|
test/test_simulate.py
|
bmkramer/automated-systematic-review
|
f99079926f381bc7895ff6fefa9e6e729a2c26b8
|
[
"Apache-2.0"
] | null | null | null |
test/test_simulate.py
|
bmkramer/automated-systematic-review
|
f99079926f381bc7895ff6fefa9e6e729a2c26b8
|
[
"Apache-2.0"
] | null | null | null |
import os
from shutil import copyfile
import numpy as np
from asreview.logging import open_logger
from asreview.review.factory import get_reviewer
data_fp = os.path.join("test", "demo_data", "generic_labels.csv")
embedding_fp = os.path.join("test", "demo_data", "generic.vec")
cfg_dir = os.path.join("test", "cfg_files")
log_dir = os.path.join("test", "log_files")
h5_log_file = os.path.join(log_dir, "test.h5")
json_log_file = os.path.join(log_dir, "test.json")
def test_log_continue_json():
inter_file = os.path.join(log_dir, "test_1_inst.json")
if not os.path.isfile(inter_file):
reviewer = get_reviewer(
data_fp, mode="simulate", model="nb", embedding_fp=embedding_fp,
prior_included=[1, 3], prior_excluded=[2, 4], log_file=inter_file,
n_instances=1, n_queries=1)
reviewer.review()
copyfile(inter_file, json_log_file)
check_model(mode="simulate", model="nb", log_file=json_log_file,
continue_from_log=True, n_instances=1, n_queries=2)
def test_log_continue_h5():
inter_file = os.path.join(log_dir, "test_1_inst.h5")
if not os.path.isfile(inter_file):
reviewer = get_reviewer(
data_fp, mode="simulate", model="nb", embedding_fp=embedding_fp,
prior_included=[1, 3], prior_excluded=[2, 4], log_file=inter_file,
n_instances=1, n_queries=1)
reviewer.review()
copyfile(inter_file, h5_log_file)
check_model(mode="simulate", model="nb", log_file=h5_log_file,
continue_from_log=True, n_instances=1, n_queries=2)
def test_lstm_base():
check_model(mode="simulate",
config_file=os.path.join(cfg_dir, "lstm_base.ini"),
log_file=h5_log_file)
def test_lstm_pool():
check_model(mode="simulate",
config_file=os.path.join(cfg_dir, "lstm_pool.ini"),
log_file=json_log_file)
def test_nb():
check_model(mode="simulate",
model="nb",
log_file=None,
use_granular=True,
n_instances=1, n_queries=1)
def test_svm():
check_model(mode="simulate",
model="svm",
log_file=json_log_file,
use_granular=False,
n_instances=1, n_queries=2)
def test_rf():
check_model(mode="simulate",
model="rf",
log_file=json_log_file,
use_granular=False,
n_instances=1, n_queries=2)
def test_nn_2_layer():
check_model(mode="simulate",
model="nn-2-layer",
log_file=json_log_file,
n_instances=1, n_queries=2)
def test_logistic():
check_model(mode="simulate",
model="logistic",
log_file=json_log_file,
n_instances=1, n_queries=2)
def check_label_methods(label_methods, n_labels, methods):
assert len(label_methods) == n_labels
for method in label_methods:
assert method in methods
def check_log(logger):
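    # expected shape: four initial prior labels, then one new label per query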
check_label_methods(logger.get("label_methods", 0), 4, ["initial"])
check_label_methods(logger.get("label_methods", 1), 1, ["max", "random"])
check_label_methods(logger.get("label_methods", 2), 1, ["max", "random"])
assert len(logger.get("inclusions", 0)) == 4
assert len(logger.get("inclusions", 1)) == 1
assert len(logger.get("inclusions", 2)) == 1
assert len(logger.get("train_idx", 1)) == 4
assert len(logger.get("pool_idx", 1)) == 2
assert len(logger.get("train_idx", 2)) == 5
assert len(logger.get("pool_idx", 2)) == 1
assert len(logger.get("labels")) == 6
def check_model(monkeypatch=None, use_granular=False, log_file=h5_log_file,
continue_from_log=False, mode="oracle", **kwargs):
if not continue_from_log:
try:
if log_file is not None:
os.unlink(log_file)
except OSError:
pass
if monkeypatch is not None:
monkeypatch.setattr('builtins.input', lambda _: "0")
# start the review process.
reviewer = get_reviewer(data_fp, mode=mode, embedding_fp=embedding_fp,
prior_included=[1, 3], prior_excluded=[2, 4],
log_file=log_file,
**kwargs)
if use_granular:
with open_logger(log_file) as logger:
# Two loops of training and classification.
reviewer.train()
reviewer.log_probabilities(logger)
query_idx = reviewer.query(1)
inclusions = reviewer._get_labels(query_idx)
reviewer.classify(query_idx, inclusions, logger)
reviewer.train()
reviewer.log_probabilities(logger)
query_idx = reviewer.query(1)
inclusions = reviewer._get_labels(query_idx)
reviewer.classify(query_idx, inclusions, logger)
else:
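        # non-granular mode: seed with the prior knowledge, then let the
        # reviewer drive the remaining query loop itself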
with open_logger(log_file) as logger:
if log_file is None:
logger.set_labels(reviewer.y)
init_idx, init_labels = reviewer._prior_knowledge()
reviewer.query_i = 0
reviewer.train_idx = np.array([], dtype=np.int)
reviewer.classify(init_idx, init_labels, logger,
method="initial")
reviewer._do_review(logger)
if log_file is None:
print(logger._log_dict)
check_log(logger)
if log_file is not None:
with open_logger(log_file, read_only=True) as logger:
check_log(logger)
| 33.556886
| 78
| 0.611171
|