hexsha
stringlengths
40
40
size
int64
7
1.04M
ext
stringclasses
10 values
lang
stringclasses
1 value
max_stars_repo_path
stringlengths
4
247
max_stars_repo_name
stringlengths
4
125
max_stars_repo_head_hexsha
stringlengths
40
78
max_stars_repo_licenses
listlengths
1
10
max_stars_count
int64
1
368k
max_stars_repo_stars_event_min_datetime
stringlengths
24
24
max_stars_repo_stars_event_max_datetime
stringlengths
24
24
max_issues_repo_path
stringlengths
4
247
max_issues_repo_name
stringlengths
4
125
max_issues_repo_head_hexsha
stringlengths
40
78
max_issues_repo_licenses
listlengths
1
10
max_issues_count
int64
1
116k
max_issues_repo_issues_event_min_datetime
stringlengths
24
24
max_issues_repo_issues_event_max_datetime
stringlengths
24
24
max_forks_repo_path
stringlengths
4
247
max_forks_repo_name
stringlengths
4
125
max_forks_repo_head_hexsha
stringlengths
40
78
max_forks_repo_licenses
listlengths
1
10
max_forks_count
int64
1
105k
max_forks_repo_forks_event_min_datetime
stringlengths
24
24
max_forks_repo_forks_event_max_datetime
stringlengths
24
24
content
stringlengths
1
1.04M
avg_line_length
float64
1.77
618k
max_line_length
int64
1
1.02M
alphanum_fraction
float64
0
1
original_content
stringlengths
7
1.04M
filtered:remove_function_no_docstring
int64
-102
942k
filtered:remove_class_no_docstring
int64
-354
977k
filtered:remove_delete_markers
int64
0
60.1k
9d9dad60fb7a4a12ac3627ec22f153d2fd0908f8
789
py
Python
v32_3/app/route/route.py
thangpxph/python_training
e05eceba9b39dd0b23076be1eb3b85dee24fcdaa
[ "MIT" ]
null
null
null
v32_3/app/route/route.py
thangpxph/python_training
e05eceba9b39dd0b23076be1eb3b85dee24fcdaa
[ "MIT" ]
null
null
null
v32_3/app/route/route.py
thangpxph/python_training
e05eceba9b39dd0b23076be1eb3b85dee24fcdaa
[ "MIT" ]
null
null
null
from flask import Blueprint, request as flask_request, jsonify from cartmigration.libs.utils import * route_path = Blueprint('route_path', __name__) @route_path.route("/action/<string:method>", methods = ['post'])
30.346154
64
0.730038
from flask import Blueprint, request as flask_request, jsonify from cartmigration.libs.utils import * route_path = Blueprint('route_path', __name__) @route_path.route("/action/<string:method>", methods = ['post']) def action(method): request_data = flask_request.data if isinstance(request_data, bytes): request_data = request_data.decode() request_data = json_decode(request_data) buffer = dict() if not buffer.get('controller'): buffer['controller'] = 'migration' buffer['action'] = to_str(method).replace('-', '_') buffer['data'] = request_data res = start_subprocess(None, buffer, True) if isinstance(res, dict) and 'next' in res: migration_id = res['next'].get('migration_id') start_subprocess(migration_id, res['next']) del (res['next']) return jsonify(res)
549
0
22
70567d2c43a47fc18436aa805eb67644efa687b1
26,350
py
Python
tensorflow_federated/python/simulation/training_loop_test.py
alessiomora/federated
3b501067ed7062aaec3cc8830aaec0a7cf8f0942
[ "Apache-2.0" ]
1
2021-05-10T10:49:34.000Z
2021-05-10T10:49:34.000Z
tensorflow_federated/python/simulation/training_loop_test.py
alessiomora/federated
3b501067ed7062aaec3cc8830aaec0a7cf8f0942
[ "Apache-2.0" ]
null
null
null
tensorflow_federated/python/simulation/training_loop_test.py
alessiomora/federated
3b501067ed7062aaec3cc8830aaec0a7cf8f0942
[ "Apache-2.0" ]
null
null
null
# Copyright 2021, The TensorFlow Federated Authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import collections from unittest import mock from absl.testing import absltest from absl.testing import parameterized from tensorflow_federated.python.core.templates import iterative_process from tensorflow_federated.python.simulation import checkpoint_manager from tensorflow_federated.python.simulation import metrics_manager from tensorflow_federated.python.simulation import training_loop if __name__ == '__main__': absltest.main()
46.390845
80
0.731917
# Copyright 2021, The TensorFlow Federated Authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import collections from unittest import mock from absl.testing import absltest from absl.testing import parameterized from tensorflow_federated.python.core.templates import iterative_process from tensorflow_federated.python.simulation import checkpoint_manager from tensorflow_federated.python.simulation import metrics_manager from tensorflow_federated.python.simulation import training_loop class LoadInitialCheckpointTest(parameterized.TestCase): def test_returns_input_state_and_zero_if_checkpoint_is_none(self): file_checkpoint_manager = mock.create_autospec( checkpoint_manager.FileCheckpointManager) file_checkpoint_manager.load_latest_checkpoint.return_value = (None, 10) input_state = 'input_state' state, round_num = training_loop._load_initial_checkpoint( input_state, file_checkpoint_manager) file_checkpoint_manager.load_latest_checkpoint.assert_called_once_with( input_state) self.assertEqual(input_state, state) self.assertEqual(round_num, 0) @parameterized.named_parameters( ('checkpoint_round_1', 'state', 0), ('checkpoint_round_2', {}, 5), ('checkpoint_round_3', '0.12', 10), ('checkpoint_round_4', 2, 2), ) def test_checkpoint_not_none(self, state, round_num): file_checkpoint_manager = mock.create_autospec( checkpoint_manager.FileCheckpointManager) file_checkpoint_manager.load_latest_checkpoint.return_value = (state, round_num - 1) input_state = 'input_state' actual_state, 
actual_round = training_loop._load_initial_checkpoint( input_state, file_checkpoint_manager) file_checkpoint_manager.load_latest_checkpoint.assert_called_once_with( input_state) self.assertEqual(actual_state, state) self.assertEqual(actual_round, round_num) class ComputeValidationMetricsTest(absltest.TestCase): def test_validation_function_called_once(self): validation_fn = mock.MagicMock() input_state = 'state' round_num = 0 training_loop._compute_validation_metrics(input_state, round_num, validation_fn) validation_fn.assert_called_once_with(input_state, round_num) def test_runs_with_empty_dict(self): validation_fn = lambda x, y: {} actual_metrics = training_loop._compute_validation_metrics( 'state', 0, validation_fn) self.assertIn(training_loop.VALIDATION_TIME_KEY, actual_metrics.keys()) actual_metrics.pop(training_loop.VALIDATION_TIME_KEY) expected_metrics = {} self.assertDictEqual(actual_metrics, expected_metrics) def test_prefixes_keys_with_validation_string(self): metrics = {'metric_1': 0, 'metric_2': 1.0, 'metric_3': 'metric_3'} validation_fn = lambda x, y: metrics actual_metrics = training_loop._compute_validation_metrics( 'state', 0, validation_fn) self.assertIn(training_loop.VALIDATION_TIME_KEY, actual_metrics.keys()) actual_metrics.pop(training_loop.VALIDATION_TIME_KEY) expected_metrics = {} for (key, value) in metrics.items(): expected_metrics[training_loop.VALIDATION_METRICS_PREFIX + key] = value self.assertDictEqual(actual_metrics, expected_metrics) class BuildOnLoopStartFnTest(parameterized.TestCase): @mock.patch('tensorflow_federated.python.simulation.' 'training_loop._load_initial_checkpoint') @mock.patch('tensorflow_federated.python.simulation.' 
'training_loop._compute_validation_metrics') def test_calls_with_no_input_args(self, mock_compute_validation, mock_initialize): on_loop_start_fn = training_loop._create_on_loop_start_fn() on_loop_start_input = 'input' actual_state, actual_round = on_loop_start_fn(on_loop_start_input) mock_initialize.assert_not_called() mock_compute_validation.assert_not_called() expected_state = on_loop_start_input expected_round = 1 self.assertEqual(actual_state, expected_state) self.assertEqual(actual_round, expected_round) @mock.patch('tensorflow_federated.python.simulation.' 'training_loop._load_initial_checkpoint') @mock.patch('tensorflow_federated.python.simulation.' 'training_loop._compute_validation_metrics') def test_calls_with_only_checkpoint_manager_and_zero_checkpoint_round( self, mock_compute_validation, mock_initialize): file_checkpoint_manager = mock.create_autospec( checkpoint_manager.FileCheckpointManager) expected_state = 'state' expected_round = 1 mock_initialize.return_value = (expected_state, expected_round - 1) on_loop_start_fn = training_loop._create_on_loop_start_fn( file_checkpoint_manager=file_checkpoint_manager) on_loop_start_input = 'input' actual_state, actual_round = on_loop_start_fn(on_loop_start_input) mock_initialize.assert_called_once_with(on_loop_start_input, file_checkpoint_manager) mock_compute_validation.assert_not_called() file_checkpoint_manager.save_checkpoint.assert_called_once_with( expected_state, expected_round - 1) self.assertEqual(actual_state, expected_state) self.assertEqual(actual_round, expected_round) @mock.patch('tensorflow_federated.python.simulation.' 'training_loop._load_initial_checkpoint') @mock.patch('tensorflow_federated.python.simulation.' 
'training_loop._compute_validation_metrics') def test_calls_with_only_checkpoint_manager_and_non_zero_checkpoint_round( self, mock_compute_validation, mock_initialize): file_checkpoint_manager = mock.create_autospec( checkpoint_manager.FileCheckpointManager) expected_state = 'state' expected_round = 3 mock_initialize.return_value = (expected_state, expected_round) on_loop_start_fn = training_loop._create_on_loop_start_fn( file_checkpoint_manager=file_checkpoint_manager) on_loop_start_input = 'input' actual_state, actual_round = on_loop_start_fn(on_loop_start_input) mock_initialize.assert_called_once_with(on_loop_start_input, file_checkpoint_manager) mock_compute_validation.assert_not_called() file_checkpoint_manager.save_checkpoint.assert_not_called() self.assertEqual(actual_state, expected_state) self.assertEqual(actual_round, expected_round) @mock.patch('tensorflow_federated.python.simulation.' 'training_loop._load_initial_checkpoint') @mock.patch('tensorflow_federated.python.simulation.' 'training_loop._compute_validation_metrics') def test_calls_with_only_metrics_managers(self, mock_compute_validation, mock_initialize): metric_manager1 = mock.create_autospec(metrics_manager.MetricsManager) metric_manager2 = mock.create_autospec(metrics_manager.MetricsManager) metrics_managers = [metric_manager1, metric_manager2] on_loop_start_fn = training_loop._create_on_loop_start_fn( metrics_managers=metrics_managers) on_loop_start_input = 'input' actual_state, actual_round = on_loop_start_fn(on_loop_start_input) mock_initialize.assert_not_called() mock_compute_validation.assert_not_called() expected_state = on_loop_start_input expected_round = 1 for metr_mngr in metrics_managers: metr_mngr.clear_metrics.assert_called_once_with(expected_round - 1) metr_mngr.save_metrics.assert_not_called() self.assertEqual(actual_state, expected_state) self.assertEqual(actual_round, expected_round) @mock.patch('tensorflow_federated.python.simulation.' 
'training_loop._load_initial_checkpoint') @mock.patch('tensorflow_federated.python.simulation.' 'training_loop._compute_validation_metrics') def test_calls_with_only_validation_fn(self, mock_compute_validation, mock_initialize): validation_fn = mock.MagicMock() on_loop_start_fn = training_loop._create_on_loop_start_fn( validation_fn=validation_fn) on_loop_start_input = 'input' actual_state, actual_round = on_loop_start_fn(on_loop_start_input) mock_initialize.assert_not_called() expected_state = on_loop_start_input expected_round = 1 mock_compute_validation.assert_called_once_with(expected_state, expected_round - 1, validation_fn) self.assertEqual(actual_state, expected_state) self.assertEqual(actual_round, expected_round) @mock.patch('tensorflow_federated.python.simulation.' 'training_loop._load_initial_checkpoint') @mock.patch('tensorflow_federated.python.simulation.' 'training_loop._compute_validation_metrics') def test_calls_with_metrics_managers_and_validation_fn( self, mock_compute_validation, mock_initialize): metric_manager1 = mock.create_autospec(metrics_manager.MetricsManager) metric_manager2 = mock.create_autospec(metrics_manager.MetricsManager) metrics_managers = [metric_manager1, metric_manager2] validation_fn = mock.MagicMock() metrics = {'metric1': 2} mock_compute_validation.return_value = metrics on_loop_start_fn = training_loop._create_on_loop_start_fn( metrics_managers=metrics_managers, validation_fn=validation_fn) on_loop_start_input = 'input' actual_state, actual_round = on_loop_start_fn(on_loop_start_input) mock_initialize.assert_not_called() expected_state = on_loop_start_input expected_round = 1 mock_compute_validation.assert_called_once_with(expected_state, expected_round - 1, validation_fn) for metr_mngr in metrics_managers: metr_mngr.clear_metrics.assert_called_once_with(0) metr_mngr.save_metrics.assert_called_once_with(metrics, 0) self.assertEqual(actual_state, expected_state) self.assertEqual(actual_round, expected_round) 
@mock.patch('tensorflow_federated.python.simulation.' 'training_loop._load_initial_checkpoint') @mock.patch('tensorflow_federated.python.simulation.' 'training_loop._compute_validation_metrics') def test_calls_with_non_zero_checkpoint_and_validation_fn( self, mock_compute_validation, mock_initialize): file_checkpoint_manager = mock.create_autospec( checkpoint_manager.FileCheckpointManager) validation_fn = mock.MagicMock() expected_state = 'state' expected_round = 2 mock_initialize.return_value = (expected_state, expected_round) on_loop_start_fn = training_loop._create_on_loop_start_fn( file_checkpoint_manager=file_checkpoint_manager, validation_fn=validation_fn) on_loop_start_input = 'input' actual_state, actual_round = on_loop_start_fn(on_loop_start_input) mock_initialize.assert_called_once_with(on_loop_start_input, file_checkpoint_manager) mock_compute_validation.assert_not_called() file_checkpoint_manager.save_checkpoint.assert_not_called() self.assertEqual(actual_state, expected_state) self.assertEqual(actual_round, expected_round) class CreateOnRoundEndTest(absltest.TestCase): @mock.patch('tensorflow_federated.python.simulation.' 'training_loop._compute_validation_metrics') def test_calls_with_no_input_args(self, mock_compute_validation): on_round_end_fn = training_loop._create_on_round_end_fn() state = 'state' round_num = 1 metrics = {'metric': 1} actual_state, actual_metrics = on_round_end_fn(state, round_num, metrics) mock_compute_validation.assert_not_called() self.assertEqual(actual_state, state) self.assertEqual(actual_metrics, metrics) @mock.patch('tensorflow_federated.python.simulation.' 
'training_loop._compute_validation_metrics') def test_calls_with_only_checkpoint_manager(self, mock_compute_validation): file_checkpoint_manager = mock.create_autospec( checkpoint_manager.FileCheckpointManager) on_round_end_fn = training_loop._create_on_round_end_fn( file_checkpoint_manager=file_checkpoint_manager) state = 'state' round_num = 1 metrics = {'metric': 1} actual_state, actual_metrics = on_round_end_fn(state, round_num, metrics) mock_compute_validation.assert_not_called() file_checkpoint_manager.load_latest_checkpoint.assert_not_called() file_checkpoint_manager.save_checkpoint.assert_called_once_with( state, round_num) self.assertEqual(actual_state, state) self.assertEqual(actual_metrics, metrics) @mock.patch('tensorflow_federated.python.simulation.' 'training_loop._compute_validation_metrics') def test_calls_with_only_metrics_managers(self, mock_compute_validation): mock_metrics_manager1 = mock.create_autospec(metrics_manager.MetricsManager) mock_metrics_manager2 = mock.create_autospec(metrics_manager.MetricsManager) metrics_managers = [mock_metrics_manager1, mock_metrics_manager2] on_round_end_fn = training_loop._create_on_round_end_fn( metrics_managers=metrics_managers) state = 'state' round_num = 1 metrics = {'metric': 1} actual_state, actual_metrics = on_round_end_fn(state, round_num, metrics) mock_compute_validation.assert_not_called() for mock_metrics_manager in metrics_managers: mock_metrics_manager.clear_metrics.assert_not_called() mock_metrics_manager.save_metrics.assert_called_once_with( metrics, round_num) self.assertEqual(actual_state, state) self.assertEqual(actual_metrics, metrics) @mock.patch('tensorflow_federated.python.simulation.' 
'training_loop._compute_validation_metrics') def test_calls_with_only_validation_fn(self, mock_compute_validation): validation_fn = mock.MagicMock() mock_compute_validation.return_value = {'validation_metric': 2} on_round_end_fn = training_loop._create_on_round_end_fn( validation_fn=validation_fn) state = 'state' round_num = 1 metrics = {'metric': 1} actual_state, actual_metrics = on_round_end_fn(state, round_num, metrics) mock_compute_validation.assert_called_once_with(state, round_num, validation_fn) self.assertEqual(actual_state, state) expected_metrics = {'metric': 1, 'validation_metric': 2} self.assertDictEqual(actual_metrics, expected_metrics) @mock.patch('tensorflow_federated.python.simulation.' 'training_loop._compute_validation_metrics') def test_calls_with_validation_fn_and_metrics_managers( self, mock_compute_validation): mock_metrics_manager1 = mock.create_autospec(metrics_manager.MetricsManager) mock_metrics_manager2 = mock.create_autospec(metrics_manager.MetricsManager) metrics_managers = [mock_metrics_manager1, mock_metrics_manager2] validation_fn = mock.MagicMock() mock_compute_validation.return_value = {'validation_metric': 2} on_round_end_fn = training_loop._create_on_round_end_fn( metrics_managers=metrics_managers, validation_fn=validation_fn) state = 'input_state' round_num = 1 metrics = collections.OrderedDict(metric=1) actual_state, actual_metrics = on_round_end_fn(state, round_num, metrics) mock_compute_validation.assert_called_once_with(state, round_num, validation_fn) expected_metrics = {'metric': 1, 'validation_metric': 2} for mock_metrics_manager in metrics_managers: mock_metrics_manager.clear_metrics.assert_not_called() mock_metrics_manager.save_metrics.assert_called_once_with( expected_metrics, round_num) self.assertEqual(actual_state, state) self.assertEqual(actual_metrics, expected_metrics) class RunSimulationTest(parameterized.TestCase): @mock.patch('tensorflow_federated.python.simulation.' 
'training_loop.run_simulation_with_callbacks') @mock.patch('tensorflow_federated.python.simulation.' 'training_loop._create_on_round_end_fn') @mock.patch('tensorflow_federated.python.simulation.' 'training_loop._create_on_loop_start_fn') def test_run_simulation_passes_correctly_with_no_optional_arguments( self, mock_create_on_loop_start, mock_create_on_round_end, mock_run_simulation_with_callbacks): process = mock.create_autospec(iterative_process.IterativeProcess) client_selection_fn = lambda x: () total_rounds = 10 on_loop_start = 'on_loop_start' mock_create_on_loop_start.return_value = on_loop_start on_round_end = 'on_round_end' mock_create_on_round_end.return_value = on_round_end training_loop.run_simulation(process, client_selection_fn, total_rounds) mock_create_on_loop_start.assert_called_once_with(None, None, None) mock_create_on_round_end.assert_called_once_with(None, None, None) mock_run_simulation_with_callbacks.assert_called_once_with( process, client_selection_fn, total_rounds, on_loop_start, on_round_end) @parameterized.named_parameters( ('optional_inputs_0', None, None, None), ('optional_inputs_1', 'arg1', None, None), ('optional_inputs_2', None, 'arg2', None), ('optional_inputs_3', None, None, 'arg3'), ('optional_inputs_4', 'arg1', 'arg2', None), ('optional_inputs_5', 'arg1', None, 'arg3'), ('optional_inputs_6', None, 'arg2', 'arg3'), ('optional_inputs_7', 'arg1', 'arg2', 'arg3'), ) @mock.patch('tensorflow_federated.python.simulation.' 'training_loop.run_simulation_with_callbacks') @mock.patch('tensorflow_federated.python.simulation.' 'training_loop._create_on_round_end_fn') @mock.patch('tensorflow_federated.python.simulation.' 
'training_loop._create_on_loop_start_fn') def test_run_simulation_passes_unnamed_optional_arguments_correctly( self, file_checkpoint_manager, metrics_managers, validation_fn, mock_create_on_loop_start, mock_create_on_round_end, mock_run_simulation_with_callbacks): process = mock.create_autospec(iterative_process.IterativeProcess) client_selection_fn = lambda x: () total_rounds = 10 on_loop_start = 'on_loop_start' mock_create_on_loop_start.return_value = on_loop_start on_round_end = 'on_round_end' mock_create_on_round_end.return_value = on_round_end training_loop.run_simulation(process, client_selection_fn, total_rounds, file_checkpoint_manager, metrics_managers, validation_fn) mock_create_on_loop_start.assert_called_once_with(file_checkpoint_manager, metrics_managers, validation_fn) mock_create_on_round_end.assert_called_once_with(file_checkpoint_manager, metrics_managers, validation_fn) mock_run_simulation_with_callbacks.assert_called_once_with( process, client_selection_fn, total_rounds, on_loop_start, on_round_end) @parameterized.named_parameters( ('optional_inputs_0', None, None, None), ('optional_inputs_1', 'arg1', None, None), ('optional_inputs_2', None, 'arg2', None), ('optional_inputs_3', None, None, 'arg3'), ('optional_inputs_4', 'arg1', 'arg2', None), ('optional_inputs_5', 'arg1', None, 'arg3'), ('optional_inputs_6', None, 'arg2', 'arg3'), ('optional_inputs_7', 'arg1', 'arg2', 'arg3'), ) @mock.patch('tensorflow_federated.python.simulation.' 'training_loop.run_simulation_with_callbacks') @mock.patch('tensorflow_federated.python.simulation.' 'training_loop._create_on_round_end_fn') @mock.patch('tensorflow_federated.python.simulation.' 
'training_loop._create_on_loop_start_fn') def test_run_simulation_passes_named_optional_arguments_correctly( self, file_checkpoint_manager, metrics_managers, validation_fn, mock_create_on_loop_start, mock_create_on_round_end, mock_run_simulation_with_callbacks): process = mock.create_autospec(iterative_process.IterativeProcess) client_selection_fn = lambda x: () total_rounds = 10 on_loop_start = 'on_loop_start' mock_create_on_loop_start.return_value = on_loop_start on_round_end = 'on_round_end' mock_create_on_round_end.return_value = on_round_end training_loop.run_simulation( process, client_selection_fn, total_rounds, file_checkpoint_manager=file_checkpoint_manager, metrics_managers=metrics_managers, validation_fn=validation_fn) mock_create_on_loop_start.assert_called_once_with(file_checkpoint_manager, metrics_managers, validation_fn) mock_create_on_round_end.assert_called_once_with(file_checkpoint_manager, metrics_managers, validation_fn) mock_run_simulation_with_callbacks.assert_called_once_with( process, client_selection_fn, total_rounds, on_loop_start, on_round_end) class RunSimulationWithCallbacksTest(parameterized.TestCase): @parameterized.named_parameters( ('rounds_1', 1), ('rounds_2', 2), ('rounds_3', 3), ('rounds_0', 0), ) def test_next_calls_total_rounds_times(self, total_rounds): process = mock.create_autospec(iterative_process.IterativeProcess) process.next.return_value = ('0', {}) client_selection_fn = mock.MagicMock() training_loop.run_simulation_with_callbacks(process, client_selection_fn, total_rounds) self.assertEqual(process.next.call_count, total_rounds) self.assertEqual(client_selection_fn.call_count, total_rounds) @parameterized.named_parameters( ('rounds_1', 1), ('rounds_2', 2), ('rounds_3', 3), ('rounds_5', 5), ) def test_round_num_is_passed_to_client_selection_fn(self, total_rounds): process = mock.create_autospec(iterative_process.IterativeProcess) process.next.return_value = ('0', {}) client_selection_fn = mock.MagicMock() 
training_loop.run_simulation_with_callbacks(process, client_selection_fn, total_rounds) expected_calls = [mock.call(i) for i in range(1, total_rounds + 1)] self.assertEqual(expected_calls, client_selection_fn.mock_calls) @parameterized.named_parameters( ('rounds_1', 1), ('rounds_2', 2), ('rounds_3', 3), ('rounds_5', 5), ) def test_on_round_end_called_after_each_round(self, total_rounds): process = mock.create_autospec(iterative_process.IterativeProcess) mock_state = 2.0 mock_metrics = {'mock_train_metric': 1} process.next.return_value = (mock_state, mock_metrics) client_selection_fn = mock.MagicMock() on_round_end = mock.MagicMock() on_round_end.return_value = (3.0, {'validation/metric': 5}) training_loop.run_simulation_with_callbacks( process, client_selection_fn, total_rounds, on_round_end=on_round_end) for i in range(1, total_rounds + 1): round_end_call_args = on_round_end.call_args_list[i - 1][0] self.assertEqual(round_end_call_args[0], mock_state) self.assertEqual(round_end_call_args[1], i) self.assertDictContainsSubset({ 'round_num': i, 'mock_train_metric': 1 }, round_end_call_args[2]) @parameterized.named_parameters( ('rounds_1', 1), ('rounds_2', 2), ('rounds_3', 3), ('rounds_5', 5), ) def test_on_loop_start_only_called_once(self, total_rounds): process = mock.create_autospec(iterative_process.IterativeProcess) process.next.return_value = (0, {}) initialize_return_value = 'initial_state' process.initialize.return_value = initialize_return_value client_selection_fn = mock.MagicMock() on_loop_start = mock.MagicMock() on_loop_start.return_value = (0, 0) training_loop.run_simulation_with_callbacks( process, client_selection_fn, total_rounds, on_loop_start=on_loop_start) on_loop_start.assert_called_once_with(initialize_return_value) @mock.patch('time.time') def test_train_step_timing_metrics_correctly_added(self, mock_time): process = mock.create_autospec(iterative_process.IterativeProcess) mock_state = 2.0 mock_metrics = {'mock_train_metric': 1} 
process.next.return_value = (mock_state, mock_metrics) client_selection_fn = lambda x: () mock_time.return_value = 0 # We use `on_round_end` to pass the metrics as an output on_round_end = mock.MagicMock() on_round_end.return_value = ((), {}) training_loop.run_simulation_with_callbacks( process, client_selection_fn, 1, on_round_end=on_round_end) expected_metrics_passed_to_round_end = { 'round_num': 1, 'mock_train_metric': 1, training_loop.TRAIN_STEP_TIME_KEY: 0, training_loop.TRAIN_STEPS_PER_HOUR_KEY: None } actual_metrics_passed_to_round_end = on_round_end.call_args_list[0][0][-1] self.assertDictEqual(actual_metrics_passed_to_round_end, expected_metrics_passed_to_round_end) if __name__ == '__main__': absltest.main()
19,538
5,551
213
730fc158f3d29e266101a018adaa55044281c82d
1,167
py
Python
src/quart_rate_limiter/redis_store.py
DebugOwO/quart-rate-limiter
cd3cb703f05d983a80d9a78486887af4f3a57e47
[ "MIT" ]
null
null
null
src/quart_rate_limiter/redis_store.py
DebugOwO/quart-rate-limiter
cd3cb703f05d983a80d9a78486887af4f3a57e47
[ "MIT" ]
null
null
null
src/quart_rate_limiter/redis_store.py
DebugOwO/quart-rate-limiter
cd3cb703f05d983a80d9a78486887af4f3a57e47
[ "MIT" ]
null
null
null
from datetime import datetime from typing import Any, Optional import aioredis from .store import RateLimiterStoreABC class RedisStore(RateLimiterStoreABC): """An redis backed store of rate limits. Arguments: address: The address of the redis instance. kwargs: Any keyword arguments to pass to the redis client on creation, see the aioredis documentation. """
30.710526
99
0.663239
from datetime import datetime from typing import Any, Optional import aioredis from .store import RateLimiterStoreABC class RedisStore(RateLimiterStoreABC): """An redis backed store of rate limits. Arguments: address: The address of the redis instance. kwargs: Any keyword arguments to pass to the redis client on creation, see the aioredis documentation. """ def __init__(self, address: str, **kwargs: Any) -> None: self._redis: Optional[aioredis.Redis] = None self._redis_arguments = (address, kwargs) async def before_serving(self) -> None: self._redis = await aioredis.from_url(self._redis_arguments[0], **self._redis_arguments[1]) async def get(self, key: str, default: datetime) -> datetime: result = await self._redis.get(key) if result is None: return default else: return datetime.fromtimestamp(float(result)) async def set(self, key: str, tat: datetime) -> None: await self._redis.set(key, tat.timestamp()) async def after_serving(self) -> None: await self._redis.close() self._redis = None
627
0
135
acb41a4176be094274e70012ad8343911c4182ad
30
py
Python
src/keyrings/envvars/tests/__init__.py
wwuck/keyrings.envvars
ca8ddf39cd8162d6b5bb51623a8cfd5cbf37b547
[ "MIT" ]
null
null
null
src/keyrings/envvars/tests/__init__.py
wwuck/keyrings.envvars
ca8ddf39cd8162d6b5bb51623a8cfd5cbf37b547
[ "MIT" ]
33
2021-11-30T11:43:54.000Z
2022-01-29T20:05:52.000Z
src/keyrings/envvars/tests/__init__.py
wwuck/keyrings.envvars
ca8ddf39cd8162d6b5bb51623a8cfd5cbf37b547
[ "MIT" ]
null
null
null
"""keyrings.envvars tests."""
15
29
0.666667
"""keyrings.envvars tests."""
0
0
0
cf607c0b33f530d85d4a08f867beda70ad09ffa0
1,018
py
Python
python/LogFileEntryTable.py
ferguman/OpenAg-MVP-II
600ce329f373ef3dc867163cdd09a424b49cd007
[ "MIT" ]
2
2019-03-18T05:47:55.000Z
2019-05-30T13:08:13.000Z
python/LogFileEntryTable.py
ferguman/OpenAg-MVP-II
600ce329f373ef3dc867163cdd09a424b49cd007
[ "MIT" ]
14
2018-06-27T14:02:23.000Z
2020-02-16T19:47:43.000Z
python/LogFileEntryTable.py
ferguman/OpenAg-MVP-II
600ce329f373ef3dc867163cdd09a424b49cd007
[ "MIT" ]
null
null
null
from time import time
39.153846
108
0.659136
from time import time class LogFileEntryTable(object): #TODO Need to figure out a way to keep the log_entries table from over flowing. def __init__(self, interval): self.interval = interval log_entries = {} def add_log_entry(self, func, entry_val): ''' This function adds values to the log dictionary and creates a log entry if the logging interval has been exceeded. The idea is to use it for situations where one does not one to flood the logs with entries. Typically this occurs when an error occurs such that the error is to be logged but the system is not to stop. Here is a typical call -> add_log_entry(logger_error, 'hey this thing is failing every second') ''' if entry_val not in self.log_entries.keys() or self.log_entries[entry_val] < time() - self.interval: self.log_entries[entry_val] = time() func('({} sec. throttle) '.format(self.interval) + entry_val)
41
932
23
e075b7a83faeb41e24824072dfdc5caff4bca598
130
py
Python
qproject/__init__.py
KirovVerst/YAQueueProject
d232267f67230dc54223c4b1019b6f66b9f5e249
[ "MIT" ]
null
null
null
qproject/__init__.py
KirovVerst/YAQueueProject
d232267f67230dc54223c4b1019b6f66b9f5e249
[ "MIT" ]
null
null
null
qproject/__init__.py
KirovVerst/YAQueueProject
d232267f67230dc54223c4b1019b6f66b9f5e249
[ "MIT" ]
null
null
null
from __future__ import absolute_import, unicode_literals from qproject.celery import app as celery_app __all__ = ['celery_app']
21.666667
56
0.823077
from __future__ import absolute_import, unicode_literals from qproject.celery import app as celery_app __all__ = ['celery_app']
0
0
0
8dedf326c8351e9a175aadaaf4e5c00e01f7b8c7
3,247
py
Python
bermuda/tests/test_bbox.py
glue-viz/bermuda
0bc26bac376d4f08a4964481d1f737f6deb86270
[ "BSD-3-Clause" ]
1
2018-07-20T21:09:46.000Z
2018-07-20T21:09:46.000Z
bermuda/tests/test_bbox.py
glue-viz/bermuda
0bc26bac376d4f08a4964481d1f737f6deb86270
[ "BSD-3-Clause" ]
null
null
null
bermuda/tests/test_bbox.py
glue-viz/bermuda
0bc26bac376d4f08a4964481d1f737f6deb86270
[ "BSD-3-Clause" ]
1
2018-07-20T21:15:41.000Z
2018-07-20T21:15:41.000Z
from numpy.testing import assert_allclose import pytest from ..bbox import AnchorPoint, FrozenError, BBox
28.234783
85
0.540807
from numpy.testing import assert_allclose import pytest from ..bbox import AnchorPoint, FrozenError, BBox class TestAnchor(object): def setup_method(self, method): self.a = AnchorPoint(x=1, y=2) def test_anchor_init(self): assert self.a.x == 1 assert self.a.y == 2 assert self.a.frozen == False assert self.a.visible == True def test_anchor_change(self): assert self.a.x == 1 assert self.a.y == 2 self.a.x = 3 self.a.y = 4 assert self.a.x == 3 assert self.a.y == 4 def test_anchor_freeze(self): assert self.a.x == 1 assert self.a.y == 2 self.a.frozen = True with pytest.raises(FrozenError): self.a.x = 3 with pytest.raises(FrozenError): self.a.y = 4 self.a.frozen = False self.a.x = 3 self.a.y = 4 assert self.a.x == 3 assert self.a.y == 4 def test_anchor_toggle_visibility(self): self.a.visible = False self.a.visible = True class TestBBox(object): def setup_method(self, method): self.bbox = BBox(center=(3,4), width=2, height=3, theta=0) def test_vertices(self): assert_allclose(self.bbox.vertices, [(2, 5.5), (4, 5.5), (4, 2.5), (2, 2.5)]) def test_center(self): self.bbox.center = (4,5) assert self.bbox.center == (4,5) assert_allclose(self.bbox.vertices, [(3, 6.5), (5, 6.5), (5, 3.5), (3, 3.5)]) def test_width(self): self.bbox.width = 4 assert self.bbox.width == 4 assert_allclose(self.bbox.vertices, [(1, 5.5), (5, 5.5), (5, 2.5), (1, 2.5)]) def test_height(self): self.bbox.height = 4 assert self.bbox.height == 4 assert_allclose(self.bbox.vertices, [(2, 6), (4, 6), (4, 2), (2, 2)]) def test_rotate_pos(self): self.bbox.theta = 90. assert self.bbox.theta == 90. assert_allclose(self.bbox.vertices, [(1.5, 3), (1.5, 5), (4.5, 5), (4.5, 3)]) def test_rotate_neg(self): self.bbox.theta = -90. assert self.bbox.theta == -90. assert_allclose(self.bbox.vertices, [(4.5, 5), (4.5, 3), (1.5, 3), (1.5, 5)]) def test_aspect(self): assert_allclose(self.bbox.aspect, 2./3.) self.bbox.width = 3 assert_allclose(self.bbox.aspect, 1.) self.bbox.width = 4 assert_allclose(self.bbox.aspect, 4./3.) 
def test_aspect_set(self): self.bbox.aspect = 1 assert_allclose(self.bbox.height, 2.) class TestMoveAnchorUnrotated(object): def setup_method(self, method): self.bb = BBox(center=(1.0, 2.0), width=2, height=4, theta=0) @pytest.mark.xfail @pytest.mark.parametrize('id', range(1)) def test_resize_upper_left(self, id): #lower left at origin, upper right at (3,4) bb = self.bb bb.move_anchor(-1, 1, id) answers = {0: (-1, 0, 3, 1), } l, b, w, h = answers[id] assert bbox.left == l assert bbox.bottom == b assert bbox.width == w assert bbox.height == h
2,462
157
495
f1a4c916269fa93d57546166ac49a6863a3bbc75
541
py
Python
demonstrations/synthesize_data/send_markers.py
stfnrpplngr/rteeg
e73331ef46a269cf0dda1b04333784ce3cf44247
[ "MIT" ]
30
2017-03-17T02:15:45.000Z
2022-02-21T06:20:37.000Z
demonstrations/synthesize_data/send_markers.py
stfnrpplngr/rteeg
e73331ef46a269cf0dda1b04333784ce3cf44247
[ "MIT" ]
10
2016-12-29T21:09:47.000Z
2017-03-28T18:05:06.000Z
demonstrations/synthesize_data/send_markers.py
stfnrpplngr/rteeg
e73331ef46a269cf0dda1b04333784ce3cf44247
[ "MIT" ]
12
2017-03-14T07:09:40.000Z
2021-01-06T06:22:27.000Z
"""Example program to demonstrate how to send markers into LSL.""" import random import time from pylsl import StreamInfo, StreamOutlet info = StreamInfo(name='markers', type='Markers', channel_count=1, channel_format='int32', source_id='markers_test1234') # next make an outlet outlet = StreamOutlet(info) trigger = 0 print("now sending markers...") while True: # pick a sample to send an wait for a bit outlet.push_sample([trigger]) print(trigger) trigger += 1 time.sleep(2.0)
25.761905
72
0.674677
"""Example program to demonstrate how to send markers into LSL.""" import random import time from pylsl import StreamInfo, StreamOutlet info = StreamInfo(name='markers', type='Markers', channel_count=1, channel_format='int32', source_id='markers_test1234') # next make an outlet outlet = StreamOutlet(info) trigger = 0 print("now sending markers...") while True: # pick a sample to send an wait for a bit outlet.push_sample([trigger]) print(trigger) trigger += 1 time.sleep(2.0)
0
0
0
54f7145a40819c4d3fb5da72f1515717ad7d7027
971
py
Python
models/Iris/score.py
cghat/pipelines-azureml
296349847f1d151af2e5366dad3b117d8e84ec67
[ "CC-BY-4.0", "MIT" ]
null
null
null
models/Iris/score.py
cghat/pipelines-azureml
296349847f1d151af2e5366dad3b117d8e84ec67
[ "CC-BY-4.0", "MIT" ]
null
null
null
models/Iris/score.py
cghat/pipelines-azureml
296349847f1d151af2e5366dad3b117d8e84ec67
[ "CC-BY-4.0", "MIT" ]
null
null
null
import json import numpy as np import pickle from sklearn.linear_model import Ridge from azureml.core.model import Model from inference_schema.schema_decorators import input_schema, output_schema from inference_schema.parameter_types.numpy_parameter_type import NumpyParameterType from utils import mylib input_sample = np.array([[11, 0, 0, 0, 8, 5, 0, 0, 6]]) output_sample = np.array([0.95]) @input_schema('data', NumpyParameterType(input_sample)) @output_schema(NumpyParameterType(output_sample))
27.742857
84
0.722966
import json import numpy as np import pickle from sklearn.linear_model import Ridge from azureml.core.model import Model from inference_schema.schema_decorators import input_schema, output_schema from inference_schema.parameter_types.numpy_parameter_type import NumpyParameterType from utils import mylib def init(): global model model_path = Model.get_model_path('Iris-model') with open(model_path, 'rb') as file: model = pickle.load(file) # For demonstration purposes only print(mylib.get_alphas()) input_sample = np.array([[11, 0, 0, 0, 8, 5, 0, 0, 6]]) output_sample = np.array([0.95]) @input_schema('data', NumpyParameterType(input_sample)) @output_schema(NumpyParameterType(output_sample)) def run(data): try: result = model.predict(data) # you can return any datatype as long as it is JSON-serializable return result.tolist() except Exception as e: error = str(e) return error
422
0
45
f5442fda543ea143316e891a5b0389f7115be0b9
789
py
Python
baekjoon/11292.py
GihwanKim/Baekjoon
52eb2bf80bb1243697858445e5b5e2d50d78be4e
[ "MIT" ]
null
null
null
baekjoon/11292.py
GihwanKim/Baekjoon
52eb2bf80bb1243697858445e5b5e2d50d78be4e
[ "MIT" ]
null
null
null
baekjoon/11292.py
GihwanKim/Baekjoon
52eb2bf80bb1243697858445e5b5e2d50d78be4e
[ "MIT" ]
null
null
null
""" 11292 : 키 큰 사람 URL : https://www.acmicpc.net/problem/11292 Input : 3 John 1.75 Mary 1.64 Sam 1.81 2 Jose 1.62 Miguel 1.58 5 John 1.75 Mary 1.75 Sam 1.74 Jose 1.75 Miguel 1.75 0 Output : Sam Jose John Mary Jose Miguel """ while True: n = int(input()) if n == 0: break high_height = 0 high_students = [] for i in range(n): name, height = input().split() height = float(height) if height > high_height: high_height = height high_students = [name] elif height == high_height: high_students.append(name) print(' '.join(high_students))
18.785714
47
0.474018
""" 11292 : 키 큰 사람 URL : https://www.acmicpc.net/problem/11292 Input : 3 John 1.75 Mary 1.64 Sam 1.81 2 Jose 1.62 Miguel 1.58 5 John 1.75 Mary 1.75 Sam 1.74 Jose 1.75 Miguel 1.75 0 Output : Sam Jose John Mary Jose Miguel """ while True: n = int(input()) if n == 0: break high_height = 0 high_students = [] for i in range(n): name, height = input().split() height = float(height) if height > high_height: high_height = height high_students = [name] elif height == high_height: high_students.append(name) print(' '.join(high_students))
0
0
0
864726b1c8e9989ced807885a138a4c2e08ae26a
1,459
py
Python
dev/local/notebook/core.py
LaurenSpiegel/fastai_docs
4fe6b62116d88dea9610548133e6cadb6b260a73
[ "Apache-2.0" ]
null
null
null
dev/local/notebook/core.py
LaurenSpiegel/fastai_docs
4fe6b62116d88dea9610548133e6cadb6b260a73
[ "Apache-2.0" ]
null
null
null
dev/local/notebook/core.py
LaurenSpiegel/fastai_docs
4fe6b62116d88dea9610548133e6cadb6b260a73
[ "Apache-2.0" ]
null
null
null
#AUTOGENERATED! DO NOT EDIT! File to edit: dev/90_notebook_core.ipynb (unless otherwise specified). __all__ = ['in_ipython', 'IN_IPYTHON', 'in_colab', 'IN_COLAB', 'in_notebook', 'IN_NOTEBOOK'] from ..imports import * def in_ipython(): "Check if the code is running in the ipython environment (jupyter including)" program_name = os.path.basename(os.getenv('_', '')) if ('jupyter-notebook' in program_name or # jupyter-notebook 'ipython' in program_name or # ipython 'JPY_PARENT_PID' in os.environ): # ipython-notebook return True else: return False IN_IPYTHON = in_ipython() def in_colab(): "Check if the code is running in Google Colaboratory" if not IN_IPYTHON: return False try: from google import colab return True except: return False IN_COLAB = in_colab() def in_notebook(): "Check if the code is running in a jupyter notebook" try: from google import colab return True except: pass try: shell = get_ipython().__class__.__name__ if shell == 'ZMQInteractiveShell': return True # Jupyter notebook, Spyder or qtconsole elif shell == 'TerminalInteractiveShell': return False # Terminal running IPython else: return False # Other type (?) except NameError: return False # Probably standard Python interpreter IN_NOTEBOOK = in_notebook()
31.717391
99
0.655243
#AUTOGENERATED! DO NOT EDIT! File to edit: dev/90_notebook_core.ipynb (unless otherwise specified). __all__ = ['in_ipython', 'IN_IPYTHON', 'in_colab', 'IN_COLAB', 'in_notebook', 'IN_NOTEBOOK'] from ..imports import * def in_ipython(): "Check if the code is running in the ipython environment (jupyter including)" program_name = os.path.basename(os.getenv('_', '')) if ('jupyter-notebook' in program_name or # jupyter-notebook 'ipython' in program_name or # ipython 'JPY_PARENT_PID' in os.environ): # ipython-notebook return True else: return False IN_IPYTHON = in_ipython() def in_colab(): "Check if the code is running in Google Colaboratory" if not IN_IPYTHON: return False try: from google import colab return True except: return False IN_COLAB = in_colab() def in_notebook(): "Check if the code is running in a jupyter notebook" try: from google import colab return True except: pass try: shell = get_ipython().__class__.__name__ if shell == 'ZMQInteractiveShell': return True # Jupyter notebook, Spyder or qtconsole elif shell == 'TerminalInteractiveShell': return False # Terminal running IPython else: return False # Other type (?) except NameError: return False # Probably standard Python interpreter IN_NOTEBOOK = in_notebook()
0
0
0
4d127e1cd3fe18cacf11291a387628624ffb1c7b
5,685
py
Python
requestsdata.py
saulcosta18/hackvt2016
c4aa26b71079422ffbb7e15f1dfe41235c93a220
[ "BSD-3-Clause" ]
null
null
null
requestsdata.py
saulcosta18/hackvt2016
c4aa26b71079422ffbb7e15f1dfe41235c93a220
[ "BSD-3-Clause" ]
null
null
null
requestsdata.py
saulcosta18/hackvt2016
c4aa26b71079422ffbb7e15f1dfe41235c93a220
[ "BSD-3-Clause" ]
null
null
null
import requests import random from hackvt2016.app import create_app from hackvt2016.resource.models import Resource from hackvt2016.category.models import Category def load_seeds(): """ max longitudes and latitudes: Coordinates [5:55] lat: 42.777 - 44.953 long: (-72.632) - (-73.132) lat: 44.452 - 44.953 long: (-71.739) - (-72.632) """ for index in xrange(10): resources = [ ('Sports', 'Softball Practice', 'Weekly softball practice - bring gear!', 'Theo Fido', 'tfo@softball.com'), ('Event', 'Calligraphy Lesson', 'Workshop for Calligraphy Lessons on Tuesdays', 'Tanner Riley', 'triley@clriley.com'), ('Nature Site', 'Geocache', 'Placed in 1971.', '', ''), ('Event', 'Oliver Twist Auditions', 'Auditions for the Oliver Twist play begin at 3PM.', 'Julia Reynolds', 'middleproductions@yahoo.com'), ('Sports', 'Soccer Game', 'Everyone is invited to a quick soccer game this weekend.', 'Saul Costa', 'saulcosta18222@gmail.com'), ('Resource', 'AndreWorks Studio', 'Available for reservations!', 'Andrew Minor', 'aminor@andreworks.org'), ('Event', 'Gymnastics Open Hours', 'Open to all age ranges!', 'Lydia Kiles', 'opengym@opengym.org'), ('Museum', 'Middle Age Weapons Musuem', 'Open 10-5 Daily!', 'Brianna Wright', 'mawm@museums.org'), ('Nature Site', 'Hunter Trail', 'Requires appropriate footwear.', '', ''), ('Cool Stuff', 'Alden Partridge Monument', 'In memory of the Norwich University Founder.', '', ''), ('Sports', 'Ski Range', 'Bring your skis!', '', ''), ('Sports', 'Karate Lessons', 'Tae Kwon Do', 'Sensei Vivian', 'themaster@thekaratestudio.com'), ('Nature Site', 'Crystal Mine Lake', 'No lifeguard on duty!', 'Trevor Daniels', 'fri@.mns.org'), ('Resource', 'School Supplies and Book Store', 'For all your education needs!', '', 'theotherstaples@bookstores.com'), ('Musuem', 'Stone House Historical Center', 'With live-in actors!', 'Manny Curtis', 'mcurtis@stonehouse.org'), ('Musuem', 'VT Historical Archives', 'Open 8 to 4 on weekdays.', '', 'admin@vtarchives.org'), ('Resource', 'Musical 
Studio', 'Instruments and soundrooms available to reserve!', '', 'mstudio@vtmusic.net') ('Museum', 'Black Manor Historical House', 'Provides historical reenactments on Mondays and Thursdays!', 'Tia Ramirez', 'tramirez@blackmanor.edu') ('Nature Site', 'Red Fern Bike Trail', 'Maps are available at all entrances.', '', 'infotrails@parksandrecvt.gov') ('Nature Site', 'Westerman Bird Viewing Platform', '', '', 'infobirds@parksandrecvt.gov') ('Nature Site', 'Holmes Stargazing Platform', 'Parking is available down the road.', '', 'jweng@vtastronomy.org') ('Event', 'Ski Race', 'Open to grades 1-5', 'Scoville J Danis"' 'scjda12@yahoos.org') ('Event', 'Guitar Lessons', 'Specialty: acoustic', 'Wren Fido', 'fido@onestopguitar.com') ('Event', 'Stargazing', 'Bring a coat and blanket for the Fall stargazing event!', 'Julian Weng', 'jweng@vtastronomy.org') ('Event', 'Band Tryouts', 'Instrument rentals can be arranged beforehand with the contact person.' 'Valerie Collins', 'vcollins@themusicstudio.org') ('Cool Stuff', 'Sundial', 'Built in 1834', '', '') ('Cool Stuff', 'Geodome', '', '', '') ('Cool Stuff', 'Historical Cemetery', 'Established in 1782', '', '') ('Cool Stuff', 'Old Weeping Willow Tree', 'Planted 1900', '', '') ('Sports', 'Horseback Riding Lessons', 'Beginner to Advanced lessons provided!', 'Leonard McGarth', 'horseback@horsebackvt.info') ('Sports', 'Hockey Tryouts', 'Open to grades 5-8, gear provided.', 'Olivia Olsen', 'olsen@watervillemiddle.com') ('Sports', 'Swimming Lessons', 'Group classes and one-on-one mentoring offered.', '', 'lessons@swimvt.com') ('Sports', 'Cross Country Practice', 'We will be starting with a 3 mile run on Tuesday.', 'Gina Woo', 'gwoo@tritonhs.com') ] for (category, title, description, host, email) in resources: category = Category.query.filter_by(name=category).first() if not category: continue Resource.create( category_id=category.id, title=title, description=description, host=host, email=email, longitude=random.uniform(-73.132, -72.632) if 
index <= 7 else random.uniform(-72.632, -71.739), latitude=random.uniform(42.777, 44.953) if index <= 7 else random.uniform(44.452, 44.953)) if __name__ == '__main__': main()
50.758929
160
0.619173
import requests import random from hackvt2016.app import create_app from hackvt2016.resource.models import Resource from hackvt2016.category.models import Category def main(): create_app().app_context().push() Resource.query.delete() load_libraries() load_seeds() def load_seeds(): """ max longitudes and latitudes: Coordinates [5:55] lat: 42.777 - 44.953 long: (-72.632) - (-73.132) lat: 44.452 - 44.953 long: (-71.739) - (-72.632) """ for index in xrange(10): resources = [ ('Sports', 'Softball Practice', 'Weekly softball practice - bring gear!', 'Theo Fido', 'tfo@softball.com'), ('Event', 'Calligraphy Lesson', 'Workshop for Calligraphy Lessons on Tuesdays', 'Tanner Riley', 'triley@clriley.com'), ('Nature Site', 'Geocache', 'Placed in 1971.', '', ''), ('Event', 'Oliver Twist Auditions', 'Auditions for the Oliver Twist play begin at 3PM.', 'Julia Reynolds', 'middleproductions@yahoo.com'), ('Sports', 'Soccer Game', 'Everyone is invited to a quick soccer game this weekend.', 'Saul Costa', 'saulcosta18222@gmail.com'), ('Resource', 'AndreWorks Studio', 'Available for reservations!', 'Andrew Minor', 'aminor@andreworks.org'), ('Event', 'Gymnastics Open Hours', 'Open to all age ranges!', 'Lydia Kiles', 'opengym@opengym.org'), ('Museum', 'Middle Age Weapons Musuem', 'Open 10-5 Daily!', 'Brianna Wright', 'mawm@museums.org'), ('Nature Site', 'Hunter Trail', 'Requires appropriate footwear.', '', ''), ('Cool Stuff', 'Alden Partridge Monument', 'In memory of the Norwich University Founder.', '', ''), ('Sports', 'Ski Range', 'Bring your skis!', '', ''), ('Sports', 'Karate Lessons', 'Tae Kwon Do', 'Sensei Vivian', 'themaster@thekaratestudio.com'), ('Nature Site', 'Crystal Mine Lake', 'No lifeguard on duty!', 'Trevor Daniels', 'fri@.mns.org'), ('Resource', 'School Supplies and Book Store', 'For all your education needs!', '', 'theotherstaples@bookstores.com'), ('Musuem', 'Stone House Historical Center', 'With live-in actors!', 'Manny Curtis', 'mcurtis@stonehouse.org'), ('Musuem', 'VT 
Historical Archives', 'Open 8 to 4 on weekdays.', '', 'admin@vtarchives.org'), ('Resource', 'Musical Studio', 'Instruments and soundrooms available to reserve!', '', 'mstudio@vtmusic.net') ('Museum', 'Black Manor Historical House', 'Provides historical reenactments on Mondays and Thursdays!', 'Tia Ramirez', 'tramirez@blackmanor.edu') ('Nature Site', 'Red Fern Bike Trail', 'Maps are available at all entrances.', '', 'infotrails@parksandrecvt.gov') ('Nature Site', 'Westerman Bird Viewing Platform', '', '', 'infobirds@parksandrecvt.gov') ('Nature Site', 'Holmes Stargazing Platform', 'Parking is available down the road.', '', 'jweng@vtastronomy.org') ('Event', 'Ski Race', 'Open to grades 1-5', 'Scoville J Danis"' 'scjda12@yahoos.org') ('Event', 'Guitar Lessons', 'Specialty: acoustic', 'Wren Fido', 'fido@onestopguitar.com') ('Event', 'Stargazing', 'Bring a coat and blanket for the Fall stargazing event!', 'Julian Weng', 'jweng@vtastronomy.org') ('Event', 'Band Tryouts', 'Instrument rentals can be arranged beforehand with the contact person.' 
'Valerie Collins', 'vcollins@themusicstudio.org') ('Cool Stuff', 'Sundial', 'Built in 1834', '', '') ('Cool Stuff', 'Geodome', '', '', '') ('Cool Stuff', 'Historical Cemetery', 'Established in 1782', '', '') ('Cool Stuff', 'Old Weeping Willow Tree', 'Planted 1900', '', '') ('Sports', 'Horseback Riding Lessons', 'Beginner to Advanced lessons provided!', 'Leonard McGarth', 'horseback@horsebackvt.info') ('Sports', 'Hockey Tryouts', 'Open to grades 5-8, gear provided.', 'Olivia Olsen', 'olsen@watervillemiddle.com') ('Sports', 'Swimming Lessons', 'Group classes and one-on-one mentoring offered.', '', 'lessons@swimvt.com') ('Sports', 'Cross Country Practice', 'We will be starting with a 3 mile run on Tuesday.', 'Gina Woo', 'gwoo@tritonhs.com') ] for (category, title, description, host, email) in resources: category = Category.query.filter_by(name=category).first() if not category: continue Resource.create( category_id=category.id, title=title, description=description, host=host, email=email, longitude=random.uniform(-73.132, -72.632) if index <= 7 else random.uniform(-72.632, -71.739), latitude=random.uniform(42.777, 44.953) if index <= 7 else random.uniform(44.452, 44.953)) def load_libraries(): thisRequest = requests.get('https://data.vermont.gov/resource/g5rt-gwwe') jsonObject = thisRequest.json() for entry in jsonObject: category = 'library' latitude = entry.get('location_1').get('latitude') longitude = entry.get('location_1').get('longitude') title = entry.get('library') + ' Library' description = 'Local library.' email = None if entry.get('web_location'): email = entry.get('web_location') categoryID = Category.query.filter_by(name='Library').first().id if latitude and longitude: Resource.create(title=title, description=description, category_id=categoryID, latitude=latitude, longitude=longitude, email=email) if __name__ == '__main__': main()
833
0
46
d11452d1558b0f5076cec42d82d74dea95d013aa
3,528
py
Python
[OPMan]/Seasonals [TV]/2011-4 - Fall/[a8292] Ben-To/BentoBD_NCOP1v01.py
LightArrowsEXE/Encoding-Projects
4ea96a5b25a7710f615ada5ff25949c496492b53
[ "MIT" ]
57
2019-01-31T17:32:46.000Z
2022-03-23T05:46:51.000Z
[OPMan]/Seasonals [TV]/2011-4 - Fall/[a8292] Ben-To/BentoBD_NCOP1v01.py
LightArrowsEXE/Encoding-Projects
4ea96a5b25a7710f615ada5ff25949c496492b53
[ "MIT" ]
null
null
null
[OPMan]/Seasonals [TV]/2011-4 - Fall/[a8292] Ben-To/BentoBD_NCOP1v01.py
LightArrowsEXE/Encoding-Projects
4ea96a5b25a7710f615ada5ff25949c496492b53
[ "MIT" ]
12
2019-04-30T06:16:13.000Z
2022-03-14T16:15:07.000Z
import subprocess from typing import List, Tuple import vapoursynth as vs from lvsfunc.misc import source from lvsfunc.types import Range from vardautomation import (FSRCNNX_56_16_4_1, JAPANESE, AudioCutter, AudioStream, BasicTool, FileInfo, FlacEncoder, Mux, PresetBD, PresetFLAC, RunnerConfig, SelfRunner, VideoStream, VPath, X265Encoder) from vardefunc.misc import get_bicubic_params from vsutil import get_w from bento_filters import flt core = vs.core core.num_threads = 16 EPNUM = __file__[-5:-3] # Sources JPBD = FileInfo(r'BDMV/Vol.1/BDMV/STREAM/00003.m2ts', 0, -24, idx=lambda x: source(x, cachedir=''), preset=[PresetBD, PresetFLAC]) JPBD.name_file_final = VPath(fr"premux/{JPBD.name} (Premux).mkv") JPBD.do_qpfile = True JPBD.a_src = VPath(f"{JPBD.name}.wav") JPBD.a_src_cut = VPath(f"{JPBD.name}_cut.wav") JPBD.a_enc_cut = VPath(f"{JPBD.name}_cut.flac") # Common variables op_aisle: List[Range] = [(281, 373)] red_circle: List[Range] = [(1934, 1951), (1956, 1979), (1984, 2054)] def main() -> vs.VideoNode: """Vapoursynth filtering""" from adptvgrnMod import adptvgrnMod from havsfunc import FastLineDarkenMOD from lvsfunc.misc import replace_ranges from vsutil import depth src = JPBD.clip_cut scaled = flt.rescaler(src, 720) denoised = flt.denoiser(scaled, bm3d_sigma=[0.8, 0.6], bm3d_rad=1) aa_rep = flt.clamped_aa(denoised) trans_sraa = flt.transpose_sraa(denoised) aa_ranges = replace_ranges(aa_rep, trans_sraa, red_circle) darken = FastLineDarkenMOD(aa_ranges, strength=48, protection=6, luma_cap=255, threshold=2) deband = flt.masked_deband(darken, denoised=True, deband_args={'iterations': 2, 'threshold': 5.0, 'radius': 8, 'grain': 6}) pdeband = flt.placebo_debander(darken, grain=4, deband_args={'iterations': 2, 'threshold': 8.0, 'radius': 10}) deband = replace_ranges(deband, pdeband, op_aisle) grain = adptvgrnMod(deband, strength=0.3, luma_scaling=10, size=1.25, sharp=80, grain_chroma=False, seed=42069) return depth(grain, 10).std.Limiter(16 << 2, [235 << 2, 240 << 2], 
[0, 1, 2]) if __name__ == '__main__': filtered = main() filtered = filtered Encoding(JPBD, filtered).run() else: JPBD.clip_cut.set_output(0) FILTERED = main() FILTERED.set_output(1)
31.5
127
0.642574
import subprocess from typing import List, Tuple import vapoursynth as vs from lvsfunc.misc import source from lvsfunc.types import Range from vardautomation import (FSRCNNX_56_16_4_1, JAPANESE, AudioCutter, AudioStream, BasicTool, FileInfo, FlacEncoder, Mux, PresetBD, PresetFLAC, RunnerConfig, SelfRunner, VideoStream, VPath, X265Encoder) from vardefunc.misc import get_bicubic_params from vsutil import get_w from bento_filters import flt core = vs.core core.num_threads = 16 EPNUM = __file__[-5:-3] # Sources JPBD = FileInfo(r'BDMV/Vol.1/BDMV/STREAM/00003.m2ts', 0, -24, idx=lambda x: source(x, cachedir=''), preset=[PresetBD, PresetFLAC]) JPBD.name_file_final = VPath(fr"premux/{JPBD.name} (Premux).mkv") JPBD.do_qpfile = True JPBD.a_src = VPath(f"{JPBD.name}.wav") JPBD.a_src_cut = VPath(f"{JPBD.name}_cut.wav") JPBD.a_enc_cut = VPath(f"{JPBD.name}_cut.flac") # Common variables op_aisle: List[Range] = [(281, 373)] red_circle: List[Range] = [(1934, 1951), (1956, 1979), (1984, 2054)] def main() -> vs.VideoNode: """Vapoursynth filtering""" from adptvgrnMod import adptvgrnMod from havsfunc import FastLineDarkenMOD from lvsfunc.misc import replace_ranges from vsutil import depth src = JPBD.clip_cut scaled = flt.rescaler(src, 720) denoised = flt.denoiser(scaled, bm3d_sigma=[0.8, 0.6], bm3d_rad=1) aa_rep = flt.clamped_aa(denoised) trans_sraa = flt.transpose_sraa(denoised) aa_ranges = replace_ranges(aa_rep, trans_sraa, red_circle) darken = FastLineDarkenMOD(aa_ranges, strength=48, protection=6, luma_cap=255, threshold=2) deband = flt.masked_deband(darken, denoised=True, deband_args={'iterations': 2, 'threshold': 5.0, 'radius': 8, 'grain': 6}) pdeband = flt.placebo_debander(darken, grain=4, deband_args={'iterations': 2, 'threshold': 8.0, 'radius': 10}) deband = replace_ranges(deband, pdeband, op_aisle) grain = adptvgrnMod(deband, strength=0.3, luma_scaling=10, size=1.25, sharp=80, grain_chroma=False, seed=42069) return depth(grain, 10).std.Limiter(16 << 2, [235 << 2, 240 << 2], 
[0, 1, 2]) class Encoding: def __init__(self, file: FileInfo, clip: vs.VideoNode) -> None: self.file = file self.clip = clip def run(self) -> None: assert self.file.a_src assert self.file.a_enc_cut v_encoder = X265Encoder('x265', 'settings/x265_settings_BD_NCOP1v01') a_extracters = [ BasicTool( 'eac3to', [self.file.path.to_str(), '2:', self.file.a_src.format(1).to_str()] ) ] a_cutters = [AudioCutter(self.file, track=1)] a_encoders = [FlacEncoder(self.file, track=1)] muxer = Mux( self.file, streams=( VideoStream(self.file.name_clip_output, 'HEVC BDRip by LightArrowsEXE@Kaleido', JAPANESE), AudioStream(self.file.a_enc_cut.format(1), 'FLAC 2.0', JAPANESE), None ) ) config = RunnerConfig(v_encoder, None, a_extracters, a_cutters, a_encoders, muxer) runner = SelfRunner(self.clip, self.file, config) runner.run() runner.do_cleanup() if __name__ == '__main__': filtered = main() filtered = filtered Encoding(JPBD, filtered).run() else: JPBD.clip_cut.set_output(0) FILTERED = main() FILTERED.set_output(1)
1,049
-6
76
ee5b6fba5eb91f07b96eece0fb24f71f34b641c3
390
py
Python
ribosome/test/klk/matchers/window.py
tek/ribosome-py
8bd22e549ddff1ee893d6e3a0bfba123a09e96c6
[ "MIT" ]
null
null
null
ribosome/test/klk/matchers/window.py
tek/ribosome-py
8bd22e549ddff1ee893d6e3a0bfba123a09e96c6
[ "MIT" ]
null
null
null
ribosome/test/klk/matchers/window.py
tek/ribosome-py
8bd22e549ddff1ee893d6e3a0bfba123a09e96c6
[ "MIT" ]
null
null
null
from kallikrein import Expectation from kallikrein.matchers.comparison import eq from ribosome.test.klk.expectation import await_k_with from ribosome.nvim.io.compute import NvimIO from ribosome.nvim.api.ui import current_cursor __all__ = ('current_cursor_is',)
27.857143
66
0.8
from kallikrein import Expectation from kallikrein.matchers.comparison import eq from ribosome.test.klk.expectation import await_k_with from ribosome.nvim.io.compute import NvimIO from ribosome.nvim.api.ui import current_cursor def current_cursor_is(line: int, col: int) -> NvimIO[Expectation]: return await_k_with(eq((line, col)), current_cursor) __all__ = ('current_cursor_is',)
102
0
23
a2f6168f0742f4589ed364cb98a691fe3a86d64b
903
py
Python
Exception Case/304. Range Sum Query 2D - Immutable.py
Into-Y0u/Github-Baby
5e4e6b02f49c2c99533289be9d49911006cad919
[ "MIT" ]
null
null
null
Exception Case/304. Range Sum Query 2D - Immutable.py
Into-Y0u/Github-Baby
5e4e6b02f49c2c99533289be9d49911006cad919
[ "MIT" ]
null
null
null
Exception Case/304. Range Sum Query 2D - Immutable.py
Into-Y0u/Github-Baby
5e4e6b02f49c2c99533289be9d49911006cad919
[ "MIT" ]
null
null
null
# Your NumMatrix object will be instantiated and called as such: # obj = NumMatrix(matrix) # param_1 = obj.sumRegion(row1,col1,row2,col2)
30.1
75
0.528239
class NumMatrix: def __init__(self, mat: List[List[int]]): row = len(mat) col = len(mat[0]) self.dp = [[0]*(col+1) for _ in range(row+1)] for i in range(row): prefix = 0 for j in range(col): prefix += mat[i][j] above = self.dp[i][j+1] self.dp[i+1][j+1] = prefix + above def sumRegion(self, row1: int, col1: int, row2: int, col2: int) -> int: row1,row2,col1,col2 = row1+1,row2+1,col1+1,col2+1 bottomRight = self.dp[row2][col2] topLeft = self.dp[row1-1][col1-1] above = self.dp[row1-1][col2] left = self.dp[row2][col1-1] return bottomRight - above - left + topLeft # Your NumMatrix object will be instantiated and called as such: # obj = NumMatrix(matrix) # param_1 = obj.sumRegion(row1,col1,row2,col2)
673
-5
77
78b02f8a4c005f33a699cb1d5f31e0d92ffae42d
6,033
py
Python
tests/test_varma_lingam.py
Koji-Kurihara/lingam
880561f619d2d185614df4a97b6bc38917f9e901
[ "MIT" ]
159
2019-08-22T05:17:19.000Z
2022-03-28T23:41:27.000Z
tests/test_varma_lingam.py
Koji-Kurihara/lingam
880561f619d2d185614df4a97b6bc38917f9e901
[ "MIT" ]
14
2020-04-26T17:25:42.000Z
2022-02-14T08:05:05.000Z
tests/test_varma_lingam.py
Koji-Kurihara/lingam
880561f619d2d185614df4a97b6bc38917f9e901
[ "MIT" ]
27
2020-01-19T07:31:08.000Z
2021-12-26T06:23:35.000Z
import os import numpy as np import pandas as pd from lingam.varma_lingam import VARMALiNGAM def randnetbalanced(dims, samples, indegree, parminmax, errminmax): """ この関数は以前頂いたmatlabのスクリプトを移植したものですのでご確認不要です。 create a more balanced random network Parameter --------- dims : int number of variables samples : int number of samples indegree : int or float('inf') number of parents of each node (float('inf') = fully connected) parminmax : dictionary standard deviation owing to parents errminmax : dictionary standard deviation owing to error variable Return ------ B : array, shape (dims, dims) the strictly lower triangular network matrix errstd : array, shape (dims, 1) the vector of error (disturbance) standard deviations """ # First, generate errstd errstd = np.random.uniform(low=errminmax['min'], high=errminmax['max'], size=(dims, 1)) # Initializations X = np.empty(shape=[dims, samples]) B = np.zeros([dims, dims]) # Go trough each node in turn for i in range(dims): # If indegree is finite, randomly pick that many parents, # else, all previous variables are parents if indegree == float('inf'): if i <= indegree: par = np.arange(i) else: par = np.random.permutation(i)[:indegree] else: par = np.arange(i) if len(par) == 0: # if node has no parents # Increase errstd to get it to roughly same variance parent_std = np.random.uniform(low=parminmax['min'], high=parminmax['max']) errstd[i] = np.sqrt(errstd[i]**2 + parent_std**2) # Set data matrix to empty X[i] = np.zeros(samples) else: # If node has parents, do the following w = np.random.normal(size=[1, len(par)]) # Randomly pick weights wfull = np.zeros([1, i]) wfull[0, par] = w # Calculate contribution of parents X[i] = np.dot(wfull, X[:i, :]) # Randomly select a 'parents std' parstd = np.random.uniform(low=parminmax['min'], high=parminmax['max']) # Scale w so that the combination of parents has 'parstd' std scaling = parstd / np.sqrt(np.mean(X[i] ** 2)) w = w * scaling # Recalculate contribution of parents wfull = np.zeros([1, 
i]) wfull[0, par] = w X[i] = np.dot(wfull, X[:i, :]) # Fill in B B[i, par] = w # Update data matrix X[i] = X[i] + np.random.normal(size=samples) * errstd[i] return B, errstd
32.262032
133
0.554119
import os import numpy as np import pandas as pd from lingam.varma_lingam import VARMALiNGAM def randnetbalanced(dims, samples, indegree, parminmax, errminmax): """ この関数は以前頂いたmatlabのスクリプトを移植したものですのでご確認不要です。 create a more balanced random network Parameter --------- dims : int number of variables samples : int number of samples indegree : int or float('inf') number of parents of each node (float('inf') = fully connected) parminmax : dictionary standard deviation owing to parents errminmax : dictionary standard deviation owing to error variable Return ------ B : array, shape (dims, dims) the strictly lower triangular network matrix errstd : array, shape (dims, 1) the vector of error (disturbance) standard deviations """ # First, generate errstd errstd = np.random.uniform(low=errminmax['min'], high=errminmax['max'], size=(dims, 1)) # Initializations X = np.empty(shape=[dims, samples]) B = np.zeros([dims, dims]) # Go trough each node in turn for i in range(dims): # If indegree is finite, randomly pick that many parents, # else, all previous variables are parents if indegree == float('inf'): if i <= indegree: par = np.arange(i) else: par = np.random.permutation(i)[:indegree] else: par = np.arange(i) if len(par) == 0: # if node has no parents # Increase errstd to get it to roughly same variance parent_std = np.random.uniform(low=parminmax['min'], high=parminmax['max']) errstd[i] = np.sqrt(errstd[i]**2 + parent_std**2) # Set data matrix to empty X[i] = np.zeros(samples) else: # If node has parents, do the following w = np.random.normal(size=[1, len(par)]) # Randomly pick weights wfull = np.zeros([1, i]) wfull[0, par] = w # Calculate contribution of parents X[i] = np.dot(wfull, X[:i, :]) # Randomly select a 'parents std' parstd = np.random.uniform(low=parminmax['min'], high=parminmax['max']) # Scale w so that the combination of parents has 'parstd' std scaling = parstd / np.sqrt(np.mean(X[i] ** 2)) w = w * scaling # Recalculate contribution of parents wfull = np.zeros([1, 
i]) wfull[0, par] = w X[i] = np.dot(wfull, X[:i, :]) # Fill in B B[i, par] = w # Update data matrix X[i] = X[i] + np.random.normal(size=samples) * errstd[i] return B, errstd def generate_data(n=5, T=800, initial_data=None): head = 100 T = T + head if initial_data is None: # psi0 indegree = float('inf') psi0, _ = randnetbalanced(n, n, indegree, {'min':0.05, 'max':0.5}, {'min':0.05, 'max':0.5}) permutation = np.random.permutation(n) psi0 = psi0[permutation][:, permutation] # causal order causal_order = np.empty(len(permutation)) causal_order[permutation] = np.arange(len(permutation)) causal_order = causal_order.astype(int) # phi1 value = np.random.uniform(low=0.01, high=0.5, size=(n, n)) sign = np.random.choice([-1, 1], size=(n, n)) phi1 = np.multiply(value, sign) # theta1 value = np.random.uniform(low=0.01, high=0.5, size=(n, n)) sign = np.random.choice([-1, 1], size=(n, n)) theta1 = np.multiply(value, sign) else: psi0 = initial_data['psi0'] phi1 = initial_data['phi1'] theta1 = initial_data['theta1'] causal_order = initial_data['causal_order'] # psi1, omega1 psi1 = np.dot(np.eye(n) - psi0, phi1) omega1 = np.dot(np.eye(n) - psi0, theta1, np.linalg.inv(np.eye(n) - psi0)) # external influence expon = 0.1 ext = np.empty((n, T)) for i in range(n): ext[i, :] = np.random.normal(size=(1, T)) ext[i, :] = np.multiply(np.sign(ext[i, :]), abs(ext[i, :]) ** expon) ext[i, :] = ext[i, :] - np.mean(ext[i, :]) ext[i, :] = ext[i, :] / np.std(ext[i, :]) # observed signals y y = np.zeros((n, T)) y[:, 0] = np.random.normal(loc=0.1, scale=1, size=(n, )) * np.random.choice([-1, 1], size=(n, )) for t in range(1, T): for i in causal_order: y[i, t] = np.dot(psi0[i, :], y[:, t]) + np.dot(psi1[i, :], y[:, t - 1]) + ext[i, t] + np.dot(omega1[i, :], ext[:, t - 1]) return y[:, head:].T, psi0, psi1, omega1, causal_order def test_fit_success(): initial_data = {} initial_data['psi0'] = np.array([ [0, 0.2669171, -0.16719712], [0, 0, 0], [0, -0.92769185, 0], ]) initial_data['phi1'] = np.array([ 
[-0.50941033, -0.01429937, 0.09002112], [0.09321691, -0.44028983, -0.05818995], [-0.12986617, -0.88781915, 0.21726865], ]) initial_data['omega1'] = np.array([ [0.02264769, 0.29487095, 0.29243977], [-0.15626269, 0.22860591, 0.11884103], [-0.09901518, 0.45200271, 0.05312345], ]) initial_data['theta1'] = np.array([ [-0.02674394, 0.31577469, 0.33371151], [-0.15626269, 0.22860591, 0.11884103], [0.04594844, 0.23992688, -0.05712441], ]) initial_data['causal_order'] = [1, 2, 0] X, psi0, phi1, omega1, causal_order = generate_data(n=3, T=500, initial_data=initial_data) model = VARMALiNGAM(order=(1, 1), criterion=None) model.fit(X) # check the causal ordering co = model.causal_order_ assert co.index(1) < co.index(2) < co.index(0) # check the adjacency matrix psi0 = model.adjacency_matrices_[0][0] assert psi0[0, 1] > 0.2 and psi0[0, 2] < -0.1 and psi0[2, 1] < -0.8 psi0[0, 1] = 0.0 psi0[0, 2] = 0.0 psi0[2, 1] = 0.0 assert np.sum(psi0) < 0.1
3,226
0
46
76fa268006028c6c1e1382c0093069282b0af4b7
2,295
py
Python
examples/pxScene2d/external/libnode-v6.9.0/tools/comtypes/test/test_outparam.py
madanagopaltcomcast/pxCore
c4a3a40a190521c8b6383d126c87612eca5b3c42
[ "Apache-2.0" ]
212
2015-01-13T18:24:17.000Z
2022-03-28T07:52:48.000Z
examples/pxScene2d/external/libnode-v6.9.0/tools/comtypes/test/test_outparam.py
madanagopaltcomcast/pxCore
c4a3a40a190521c8b6383d126c87612eca5b3c42
[ "Apache-2.0" ]
1,432
2017-06-21T04:08:48.000Z
2020-08-25T16:21:15.000Z
examples/pxScene2d/external/libnode-v6.9.0/tools/comtypes/test/test_outparam.py
madanagopaltcomcast/pxCore
c4a3a40a190521c8b6383d126c87612eca5b3c42
[ "Apache-2.0" ]
317
2017-06-20T19:57:17.000Z
2020-09-16T10:28:30.000Z
from ctypes import * import unittest import comtypes.test comtypes.test.requires("devel") from comtypes import BSTR, IUnknown, GUID, COMMETHOD, HRESULT malloc = POINTER(IMalloc)() oledll.ole32.CoGetMalloc(1, byref(malloc)) assert bool(malloc) c_wchar_p.__ctypes_from_outparam__ = from_outparm ## print comstring("Hello, World", c_wchar_p).__ctypes_from_outparam__() ## print comstring("Hello, World", c_wchar_p).__ctypes_from_outparam__() ## print comstring("Hello, World", c_wchar_p).__ctypes_from_outparam__() ## print comstring("Hello, World", c_wchar_p).__ctypes_from_outparam__() if __name__ == "__main__": unittest.main()
32.785714
81
0.606536
from ctypes import * import unittest import comtypes.test comtypes.test.requires("devel") from comtypes import BSTR, IUnknown, GUID, COMMETHOD, HRESULT class IMalloc(IUnknown): _iid_ = GUID("{00000002-0000-0000-C000-000000000046}") _methods_ = [ COMMETHOD([], c_void_p, "Alloc", ([], c_ulong, "cb")), COMMETHOD([], c_void_p, "Realloc", ([], c_void_p, "pv"), ([], c_ulong, "cb")), COMMETHOD([], None, "Free", ([], c_void_p, "py")), COMMETHOD([], c_ulong, "GetSize", ([], c_void_p, "pv")), COMMETHOD([], c_int, "DidAlloc", ([], c_void_p, "pv")), COMMETHOD([], None, "HeapMinimize") # 25 ] malloc = POINTER(IMalloc)() oledll.ole32.CoGetMalloc(1, byref(malloc)) assert bool(malloc) def from_outparm(self): if not self: return None result = wstring_at(self) if not malloc.DidAlloc(self): raise ValueError("memory was NOT allocated by CoTaskMemAlloc") windll.ole32.CoTaskMemFree(self) return result c_wchar_p.__ctypes_from_outparam__ = from_outparm def comstring(text, typ=c_wchar_p): text = unicode(text) size = (len(text) + 1) * sizeof(c_wchar) mem = windll.ole32.CoTaskMemAlloc(size) print "malloc'd 0x%x, %d bytes" % (mem, size) ptr = cast(mem, typ) memmove(mem, text, size) return ptr class Test(unittest.TestCase): def test_c_char(self): ## ptr = c_wchar_p("abc") ## self.failUnlessEqual(ptr.__ctypes_from_outparam__(), ## "abc") ## p = BSTR("foo bar spam") x = comstring("Hello, World") y = comstring("foo bar") z = comstring("spam, spam, and spam") ## (x.__ctypes_from_outparam__(), x.__ctypes_from_outparam__()) print (x.__ctypes_from_outparam__(), None) #x.__ctypes_from_outparam__()) ## print comstring("Hello, World", c_wchar_p).__ctypes_from_outparam__() ## print comstring("Hello, World", c_wchar_p).__ctypes_from_outparam__() ## print comstring("Hello, World", c_wchar_p).__ctypes_from_outparam__() ## print comstring("Hello, World", c_wchar_p).__ctypes_from_outparam__() if __name__ == "__main__": unittest.main()
919
594
117
6f4549892c4096edd9c43c705922b69c8f1d0a6c
2,458
py
Python
scripts/logreg_multiclass_demo2.py
vipavlovic/pyprobml
59a2edc682d0163955db5e2f27491ad772b60141
[ "MIT" ]
4,895
2016-08-17T22:28:34.000Z
2022-03-31T17:07:15.000Z
scripts/logreg_multiclass_demo2.py
vipavlovic/pyprobml
59a2edc682d0163955db5e2f27491ad772b60141
[ "MIT" ]
446
2016-09-17T14:35:29.000Z
2022-03-31T19:59:33.000Z
scripts/logreg_multiclass_demo2.py
vipavlovic/pyprobml
59a2edc682d0163955db5e2f27491ad772b60141
[ "MIT" ]
1,160
2016-08-18T23:19:27.000Z
2022-03-31T12:44:07.000Z
# Fit logistic regression models to 3 classs 2d data. import superimport import matplotlib.pyplot as plt import numpy as np from sklearn.preprocessing import PolynomialFeatures from scipy.stats import multivariate_normal as mvn from sklearn.linear_model import LogisticRegression import matplotlib.colors as mcol import os figdir = "../figures" X, y = create_data(100) nclasses = len(np.unique(y)) degrees = [1, 2, 10, 20] for i, degree in enumerate(degrees): transformer = PolynomialFeatures(degree) name = 'Degree{}'.format(degree) XX = transformer.fit_transform(X)[:, 1:] # skip the first column of 1s model = LogisticRegression(C=1.0) model = model.fit(XX, y) #xx, yy = np.meshgrid(np.linspace(-1, 1, 150), np.linspace(-1, 1, 150)) xx, yy = np.meshgrid(np.linspace(-1, 1, 250), np.linspace(-1, 1, 250)) grid = np.c_[xx.ravel(), yy.ravel()] grid2 = transformer.transform(grid)[:, 1:] Z = model.predict(grid2).reshape(xx.shape) fig, ax = plt.subplots() # uses gray background for black dots plt.pcolormesh(xx, yy, Z, cmap=plt.cm.coolwarm) # https://stackoverflow.com/questions/40601997/setting-discrete-colormap-corresponding-to-specific-data-range-in-matplotlib #cmap = plt.cm.get_cmap("jet", lut=nclasses) #cmap_bounds = np.arange(nclasses+1) - 0.5 #norm = mcol.BoundaryNorm(cmap_bounds, cmap.N) #plt.pcolormesh(xx, yy, Z, cmap=cmap, norm=norm) plot_data(X[:, 0], X[:, 1], y) #plt.scatter(X[:,0], X[:,1], y) plt.title(name) fname = 'logregMulti-{}.png'.format(name) save_fig(fname) plt.draw() plt.show()
30.725
127
0.615948
# Fit logistic regression models to 3 classs 2d data. import superimport import matplotlib.pyplot as plt import numpy as np from sklearn.preprocessing import PolynomialFeatures from scipy.stats import multivariate_normal as mvn from sklearn.linear_model import LogisticRegression import matplotlib.colors as mcol import os figdir = "../figures" def save_fig(fname): plt.savefig(os.path.join(figdir, fname)) def create_data(N): np.random.seed(234) # np.random.RandomState(0) C = 0.05*np.eye(2) Gs = [mvn(mean=[0.5, 0.5], cov=C), mvn(mean=[-0.5, -0.5], cov=C), mvn(mean=[0.5, -0.5], cov=C), mvn(mean=[-0.5, 0.5], cov=C), mvn(mean=[0, 0], cov=C)] X = np.concatenate([G.rvs(size=N) for G in Gs]) y = np.concatenate((1*np.ones(N), 1*np.ones(N), 2*np.ones(N), 2*np.ones(N), 3*np.ones(N))) return X, y def plot_data(X0, X1, y): for x0, x1, cls in zip(X0, X1, y): colors = ['blue', 'black', 'red'] markers = ['x', 'o', '*'] color = colors[int(cls)-1] marker = markers[int(cls)-1] plt.scatter(x0, x1, marker=marker, color=color) X, y = create_data(100) nclasses = len(np.unique(y)) degrees = [1, 2, 10, 20] for i, degree in enumerate(degrees): transformer = PolynomialFeatures(degree) name = 'Degree{}'.format(degree) XX = transformer.fit_transform(X)[:, 1:] # skip the first column of 1s model = LogisticRegression(C=1.0) model = model.fit(XX, y) #xx, yy = np.meshgrid(np.linspace(-1, 1, 150), np.linspace(-1, 1, 150)) xx, yy = np.meshgrid(np.linspace(-1, 1, 250), np.linspace(-1, 1, 250)) grid = np.c_[xx.ravel(), yy.ravel()] grid2 = transformer.transform(grid)[:, 1:] Z = model.predict(grid2).reshape(xx.shape) fig, ax = plt.subplots() # uses gray background for black dots plt.pcolormesh(xx, yy, Z, cmap=plt.cm.coolwarm) # https://stackoverflow.com/questions/40601997/setting-discrete-colormap-corresponding-to-specific-data-range-in-matplotlib #cmap = plt.cm.get_cmap("jet", lut=nclasses) #cmap_bounds = np.arange(nclasses+1) - 0.5 #norm = mcol.BoundaryNorm(cmap_bounds, cmap.N) #plt.pcolormesh(xx, yy, 
Z, cmap=cmap, norm=norm) plot_data(X[:, 0], X[:, 1], y) #plt.scatter(X[:,0], X[:,1], y) plt.title(name) fname = 'logregMulti-{}.png'.format(name) save_fig(fname) plt.draw() plt.show()
769
0
68
3dfcfa26b3f03c211f08b2bc132c5c031fdace8e
568
py
Python
Sirius/roles.py
VladimirARodionov/sirius
1c901935c65d7d02cbcf1f172b1f0ebd346bea95
[ "MIT" ]
10
2018-12-21T13:42:13.000Z
2022-02-08T20:27:52.000Z
Sirius/roles.py
VladimirARodionov/sirius
1c901935c65d7d02cbcf1f172b1f0ebd346bea95
[ "MIT" ]
14
2018-11-23T10:02:14.000Z
2022-03-11T23:35:02.000Z
Sirius/roles.py
VladimirARodionov/sirius
1c901935c65d7d02cbcf1f172b1f0ebd346bea95
[ "MIT" ]
2
2018-11-23T12:29:55.000Z
2020-07-01T16:33:06.000Z
from rolepermissions.roles import AbstractUserRole
15.351351
50
0.727113
from rolepermissions.roles import AbstractUserRole class AdminRole(AbstractUserRole): available_permissions = {} class UserRole(AbstractUserRole): available_permissions = { } class EditRole(AbstractUserRole): available_permissions = { } class ReportsRole(AbstractUserRole): available_permissions = { } class ActionsRole(AbstractUserRole): available_permissions = { } class UserListRole(AbstractUserRole): available_permissions = { } class UserDetailRole(AbstractUserRole): available_permissions = { }
0
348
161
7756c016ce3496559cfa61adcacf9fdb4304fee4
14,346
py
Python
python/presched.py
lsst/rtn-014
773e470c06371fffb3c9844923065a9f0e0e70bf
[ "CC-BY-4.0" ]
null
null
null
python/presched.py
lsst/rtn-014
773e470c06371fffb3c9844923065a9f0e0e70bf
[ "CC-BY-4.0" ]
null
null
null
python/presched.py
lsst/rtn-014
773e470c06371fffb3c9844923065a9f0e0e70bf
[ "CC-BY-4.0" ]
null
null
null
#!/usr/bin/env python """Pre-schedule DDF sequences """ # pylint: disable=no-member # imports import sys import logging from argparse import ArgumentParser import yaml import numpy as np import pandas as pd import astropy.coordinates import astropy.units as u import lsst.sims.utils # constants # exception classes # interface functions def schedule_all(mag_limit, location, config): """Schedule one field on one band. Parameters ---------- m5 : `pandas.DataFrame` Has a multilevel index with the following levels: field_name : `str` the field name band : `str` the band Includes the following columns: mjd : `float` MJD of candidate time m5 : `float` 5-sigma limiting magnitude of the field if observed at that time `location` : `astropy.coordinates.EarthLocation` the location of the observatory config : `dict` Configuration parameters Return ------ schedule : `pandas.DataFrame` includes three columns: mjd : `float` the best time at which to start the sequence of exposures why : `str` an indicator of why this sequence was scheduled night : `int` the MJD of the night (at midnight) on which the sequence is to be scheduled sequence : `str` which sequence this is """ seq_schedules = [] for seq_config in config["sequences"]: logger.info(f'Scheduling {seq_config["label"]}') seq_schedule = schedule_sequence(mag_limit, location, seq_config) seq_schedule["sequence"] = seq_config["label"] logger.info(f'Computing scheduled for {seq_config["label"]}') mag_limit["scheduled"] = _compute_scheduled( mag_limit, seq_schedule, seq_config["sequence_duration"] ) seq_schedules.append(seq_schedule) logger.info("Compiling full schedule") full_schedule = ( pd.concat(seq_schedules).sort_values("mjd").set_index("mjd", drop=False) ) return full_schedule def schedule_sequence(mag_limit, location, config): """Schedule one set of sequences. 
Parameters ---------- m5 : `pandas.DataFrame` Has a multilevel index with the following levels: field_name : `str` the field name band : `str` the band Includes the following columns: mjd : `float` MJD of candidate time m5 : `float` 5-sigma limiting magnitude of the field if observed at that time `location` : `astropy.coordinates.EarthLocation` the location of the observatory config : `dict` Configuration parameters, with the following contents: field_name : `str` the name of the field to schedule mag_lim_band : `str` the name of the filter to schedule sequence_duration : `astropy.units.Quantity` the duration of a block of one sequence of exposures caninocal_gap : `astropy.units.Quantity` the desired time between sequences of exposures min_gap: `astropy.units.Quantity` the minimum gap for which "bridge" exposures should be scheduled max_gap: `astropy.units.Quantity` the target maximum time between sequences of exposures season_gap : `astropy.units.Quantity` the gap time greater than which no bridges should be attempted mag_limit : `dict` of `str`: `float` target magnitude limits in each band Return ------ schedule : `pandas.DataFrame` includes three columns: mjd : `float` the best time at which to start the sequence of exposures why : `str` an indicator of why this sequence was scheduled night : `int` the MJD of the night (at midnight) on which the sequence is to be scheduled """ # pylint: disable=too-many-locals these_m5 = ( mag_limit.sort_index() .loc[(config["field_name"], config["mag_lim_band"])] .sort_index() .copy() ) min_m5 = _compute_rolling_m5(these_m5, config["sequence_duration"]).set_index( "mjd", drop=False ) min_m5["night_mjd"] = compute_night_mjd(min_m5["mjd"], location) bridge_nights = _find_bridge_nights(mag_limit, location, config) bridge_gap = config["bridge_gap"] maintain_cadence = config["maintain_cadence_in_gap"] scheduled_sequences = [] for night_mjd in range(min_m5.night_mjd.min(), min_m5.night_mjd.max()): if night_mjd in 
bridge_nights["night_before_mjd"].values: why = "pregap" attempt_tonight = True force_tonight = True elif bridge_gap and (night_mjd in bridge_nights["bridge_night_mjd"].values): why = "bridge" attempt_tonight = True force_tonight = True elif night_mjd in bridge_nights["night_after_mjd"].values: why = "postgap" attempt_tonight = True force_tonight = True elif len(scheduled_sequences) == 0: # We are just starting why = "start" attempt_tonight = True force_tonight = False elif (night_mjd - scheduled_sequences[-1]["night_mjd"]) * u.day >= config[ "canonical_gap" ]: why = "cadence" attempt_tonight = True force_tonight = maintain_cadence else: continue if not attempt_tonight: continue candidate_times = min_m5.query(f"night_mjd == {night_mjd}") if len(candidate_times) < 1: assert maintain_cadence or not force_tonight continue best_time = min_m5.loc[candidate_times["m5"].idxmax()] if isinstance(best_time, pd.DataFrame): best_time = best_time.sort_values("count", ascending=True).iloc[-1] if (not force_tonight) and (best_time.m5 < config["mag_limit"]): continue if best_time.m5 < config["gap_mag_limit"]: continue scheduled_sequences.append({"mjd": best_time.mjd, "why": why}) scheduled_sequences[-1]["night_mjd"] = compute_night_mjd( best_time.mjd, location ) schedule = pd.DataFrame(scheduled_sequences) return schedule def compute_night_mjd(mjd, location): """Convert the floating point mjd to the integer local Julian date for the night. Parameters ---------- mjd : `float`, `pandas.Series`, or `numpy.ndarray` Returns ------- jd : `int`, `pandas.Series`, or `numpy.ndarray` """ # add longitude to get into the local timezone, # round to find the nearest midnight night_mjd = np.round(mjd + (location.lon.deg / 360.0)).astype(int) return night_mjd def read_config(fname): """Read m5 configuration file Parameters ---------- fname: `str` The name of the file to read configuration from. 
Return ------ config: `dict` Dictionary of configuration values """ logger.debug("Reading configuration from %s", fname) with open(fname, "r") as config_file: config = yaml.load(config_file.read(), Loader=yaml.FullLoader) # Apply units for seq_config in config["sequences"]: seq_config["sequence_duration"] = u.Quantity( seq_config["sequence_duration"] ).to(u.second) seq_config["max_gap"] = u.Quantity(seq_config["max_gap"]).to(u.day) seq_config["min_gap"] = u.Quantity(seq_config["min_gap"]).to(u.day) seq_config["season_gap"] = u.Quantity(seq_config["season_gap"]).to(u.day) seq_config["canonical_gap"] = u.Quantity(seq_config["canonical_gap"]).to(u.day) site_name = "LSST" if config["site_name"] == "LSST" else config["site_name"] site = lsst.sims.utils.Site(site_name) config["location"] = astropy.coordinates.EarthLocation( lat=site.latitude, lon=site.longitude, height=site.height ) return config # classes # internal functions & classes def main(): """Parse command line arguments and config file, and run""" parser = ArgumentParser() parser.add_argument("config", help="configuration file") parser.add_argument("m5", help="file from which to load limiting magnitudes") parser.add_argument("output", help="file in which to write results") args = parser.parse_args() config_fname = args.config m5_fname = args.m5 output_fname = args.output config = read_config(config_fname) logger.info("Reading m5 from %s", m5_fname) m5_limits = ( pd.read_hdf(m5_fname) .reset_index() .query("sun_alt < -18") .set_index(["field_name", "band", "mjd"], drop=False) .assign(scheduled=False) ) schedule = schedule_all(m5_limits, config["location"], config) schedule.to_csv(output_fname, sep="\t", index=False, header=True) return 0 def _init_logger(log_level=logging.DEBUG): """Create the ddfpresched logger and set initial configuration""" ddfpresched_logger = logging.getLogger("ddfpresched") ddfpresched_logger.setLevel(log_level) handler = logging.StreamHandler() handler.setLevel(log_level) formatter = 
logging.Formatter("%(asctime)s\t%(name)s\t%(levelname)s\t%(message)s") handler.setFormatter(formatter) ddfpresched_logger.addHandler(handler) return ddfpresched_logger if __name__ == "__main__": logger = _init_logger() status = main() # pylint: disable=invalid-name sys.exit(status)
33.362791
88
0.640876
#!/usr/bin/env python """Pre-schedule DDF sequences """ # pylint: disable=no-member # imports import sys import logging from argparse import ArgumentParser import yaml import numpy as np import pandas as pd import astropy.coordinates import astropy.units as u import lsst.sims.utils # constants # exception classes # interface functions def schedule_all(mag_limit, location, config): """Schedule one field on one band. Parameters ---------- m5 : `pandas.DataFrame` Has a multilevel index with the following levels: field_name : `str` the field name band : `str` the band Includes the following columns: mjd : `float` MJD of candidate time m5 : `float` 5-sigma limiting magnitude of the field if observed at that time `location` : `astropy.coordinates.EarthLocation` the location of the observatory config : `dict` Configuration parameters Return ------ schedule : `pandas.DataFrame` includes three columns: mjd : `float` the best time at which to start the sequence of exposures why : `str` an indicator of why this sequence was scheduled night : `int` the MJD of the night (at midnight) on which the sequence is to be scheduled sequence : `str` which sequence this is """ seq_schedules = [] for seq_config in config["sequences"]: logger.info(f'Scheduling {seq_config["label"]}') seq_schedule = schedule_sequence(mag_limit, location, seq_config) seq_schedule["sequence"] = seq_config["label"] logger.info(f'Computing scheduled for {seq_config["label"]}') mag_limit["scheduled"] = _compute_scheduled( mag_limit, seq_schedule, seq_config["sequence_duration"] ) seq_schedules.append(seq_schedule) logger.info("Compiling full schedule") full_schedule = ( pd.concat(seq_schedules).sort_values("mjd").set_index("mjd", drop=False) ) return full_schedule def schedule_sequence(mag_limit, location, config): """Schedule one set of sequences. 
Parameters ---------- m5 : `pandas.DataFrame` Has a multilevel index with the following levels: field_name : `str` the field name band : `str` the band Includes the following columns: mjd : `float` MJD of candidate time m5 : `float` 5-sigma limiting magnitude of the field if observed at that time `location` : `astropy.coordinates.EarthLocation` the location of the observatory config : `dict` Configuration parameters, with the following contents: field_name : `str` the name of the field to schedule mag_lim_band : `str` the name of the filter to schedule sequence_duration : `astropy.units.Quantity` the duration of a block of one sequence of exposures caninocal_gap : `astropy.units.Quantity` the desired time between sequences of exposures min_gap: `astropy.units.Quantity` the minimum gap for which "bridge" exposures should be scheduled max_gap: `astropy.units.Quantity` the target maximum time between sequences of exposures season_gap : `astropy.units.Quantity` the gap time greater than which no bridges should be attempted mag_limit : `dict` of `str`: `float` target magnitude limits in each band Return ------ schedule : `pandas.DataFrame` includes three columns: mjd : `float` the best time at which to start the sequence of exposures why : `str` an indicator of why this sequence was scheduled night : `int` the MJD of the night (at midnight) on which the sequence is to be scheduled """ # pylint: disable=too-many-locals these_m5 = ( mag_limit.sort_index() .loc[(config["field_name"], config["mag_lim_band"])] .sort_index() .copy() ) min_m5 = _compute_rolling_m5(these_m5, config["sequence_duration"]).set_index( "mjd", drop=False ) min_m5["night_mjd"] = compute_night_mjd(min_m5["mjd"], location) bridge_nights = _find_bridge_nights(mag_limit, location, config) bridge_gap = config["bridge_gap"] maintain_cadence = config["maintain_cadence_in_gap"] scheduled_sequences = [] for night_mjd in range(min_m5.night_mjd.min(), min_m5.night_mjd.max()): if night_mjd in 
bridge_nights["night_before_mjd"].values: why = "pregap" attempt_tonight = True force_tonight = True elif bridge_gap and (night_mjd in bridge_nights["bridge_night_mjd"].values): why = "bridge" attempt_tonight = True force_tonight = True elif night_mjd in bridge_nights["night_after_mjd"].values: why = "postgap" attempt_tonight = True force_tonight = True elif len(scheduled_sequences) == 0: # We are just starting why = "start" attempt_tonight = True force_tonight = False elif (night_mjd - scheduled_sequences[-1]["night_mjd"]) * u.day >= config[ "canonical_gap" ]: why = "cadence" attempt_tonight = True force_tonight = maintain_cadence else: continue if not attempt_tonight: continue candidate_times = min_m5.query(f"night_mjd == {night_mjd}") if len(candidate_times) < 1: assert maintain_cadence or not force_tonight continue best_time = min_m5.loc[candidate_times["m5"].idxmax()] if isinstance(best_time, pd.DataFrame): best_time = best_time.sort_values("count", ascending=True).iloc[-1] if (not force_tonight) and (best_time.m5 < config["mag_limit"]): continue if best_time.m5 < config["gap_mag_limit"]: continue scheduled_sequences.append({"mjd": best_time.mjd, "why": why}) scheduled_sequences[-1]["night_mjd"] = compute_night_mjd( best_time.mjd, location ) schedule = pd.DataFrame(scheduled_sequences) return schedule def compute_night_mjd(mjd, location): """Convert the floating point mjd to the integer local Julian date for the night. Parameters ---------- mjd : `float`, `pandas.Series`, or `numpy.ndarray` Returns ------- jd : `int`, `pandas.Series`, or `numpy.ndarray` """ # add longitude to get into the local timezone, # round to find the nearest midnight night_mjd = np.round(mjd + (location.lon.deg / 360.0)).astype(int) return night_mjd def read_config(fname): """Read m5 configuration file Parameters ---------- fname: `str` The name of the file to read configuration from. 
Return ------ config: `dict` Dictionary of configuration values """ logger.debug("Reading configuration from %s", fname) with open(fname, "r") as config_file: config = yaml.load(config_file.read(), Loader=yaml.FullLoader) # Apply units for seq_config in config["sequences"]: seq_config["sequence_duration"] = u.Quantity( seq_config["sequence_duration"] ).to(u.second) seq_config["max_gap"] = u.Quantity(seq_config["max_gap"]).to(u.day) seq_config["min_gap"] = u.Quantity(seq_config["min_gap"]).to(u.day) seq_config["season_gap"] = u.Quantity(seq_config["season_gap"]).to(u.day) seq_config["canonical_gap"] = u.Quantity(seq_config["canonical_gap"]).to(u.day) site_name = "LSST" if config["site_name"] == "LSST" else config["site_name"] site = lsst.sims.utils.Site(site_name) config["location"] = astropy.coordinates.EarthLocation( lat=site.latitude, lon=site.longitude, height=site.height ) return config # classes # internal functions & classes def _infer_time_sampling(mag_limit): mjds = pd.Series(mag_limit["mjd"].unique()).sort_values() timestep_duration = ((mjds - mjds.shift(1)).median() * u.day).to(u.minute) return timestep_duration def _compute_rolling_m5(mag_limit, roll_window): mag_limit = mag_limit.query("not scheduled").copy().sort_index() mag_limit["datetime"] = pd.to_datetime( mag_limit.mjd + 2400000.5, origin="julian", unit="D" ) mag_limit["counter"] = 1 mag_limit.set_index("datetime", inplace=True, drop=False) roll_seconds = roll_window.to("second").value mag_limit_roll = mag_limit.rolling(f"{int(roll_seconds)}s") min_mag_limit = mag_limit_roll[["mjd", "moon_angle", "night", "m5"]].min() min_mag_limit["start_datetime"] = pd.to_datetime( min_mag_limit.mjd + 2400000.5, origin="julian", unit="D" ) min_mag_limit["count"] = mag_limit_roll["counter"].sum().astype(int) min_mag_limit = ( min_mag_limit.reset_index() .rename(columns={"datetime": "end_datetime"}) .set_index("start_datetime", drop=False) ) min_mag_limit["m5"] = min_mag_limit["m5"].fillna(-np.inf) # Infer which 
windows do not have a full set of samples, and toss them sample_dt = _infer_time_sampling(mag_limit) expected_samples = int(np.floor((roll_window.to(sample_dt.unit) / sample_dt).value)) min_mag_limit.query( f"(count == {expected_samples}) or (count == {expected_samples+1})", inplace=True, ) min_mag_limit.sort_values("count", ascending=False).groupby( level="start_datetime" ).first() return min_mag_limit def _find_gaps(mjds, min_gap, season_gap, location, night_epoch_mjd=0): gaps = pd.DataFrame({"start": np.unique(np.sort(mjds))}) gaps["end"] = gaps.start.shift(-1) gaps.dropna(inplace=True) gaps["duration"] = gaps["end"] - gaps["start"] gaps["mjd"] = 0.5 * (gaps["end"] + gaps["start"]) gaps["night_before"] = compute_night_mjd(gaps["start"], location) - night_epoch_mjd gaps["night_after"] = compute_night_mjd(gaps["end"], location) - night_epoch_mjd gaps["gap_nights"] = gaps["night_after"] - gaps["night_before"] gaps.query( f"({min_gap} <= gap_nights) and ({season_gap} > gap_nights)", inplace=True ) gaps.set_index("mjd", inplace=True) gaps.sort_index(inplace=True) return gaps def _find_bridge_nights(all_mag_limit, location, config): oversampled_mag_limit = ( all_mag_limit.sort_index() .loc[(config["field_name"], config["mag_lim_band"])] .sort_index() .copy() ) mag_limit = _compute_rolling_m5(oversampled_mag_limit, config["sequence_duration"]) good_mag_limit = mag_limit.query(f'm5>{config["mag_limit"]}') night_epoch_mjd = ( compute_night_mjd(mag_limit.iloc[0].mjd, location) - mag_limit.iloc[0].night ) gaps = _find_gaps( good_mag_limit.mjd, config["min_gap"].to(u.day).value, config["season_gap"].to(u.day).value, location, night_epoch_mjd, ) gaps["bridge_mjd"] = np.nan gaps["has_bridge"] = False max_gap = config["max_gap"].to(u.day).value for mjd, gap in gaps.iterrows(): candidate_bridges = mag_limit.query( f"(night > {gap.night_before}) and (night < {gap.night_after})" ).query(f"(mjd < {gap.start+max_gap}) and (mjd > {gap.end-max_gap})") if len(candidate_bridges) == 0: 
continue best_bridge = candidate_bridges.loc[candidate_bridges["m5"].idxmax()] # Sometimes there can be two time windows with the same starting, # differing by a sample time. if isinstance(best_bridge, pd.DataFrame): best_bridge = best_bridge.sort_values("count").iloc[-1] gaps["has_bridge"] = True gaps.loc[mjd, "bridge_mjd"] = best_bridge["mjd"] gaps["bridge_night_mjd"] = compute_night_mjd(gaps["bridge_mjd"].fillna(0), location) gaps["night_before_mjd"] = (gaps["night_before"] + night_epoch_mjd).astype(int) gaps["night_after_mjd"] = (gaps["night_after"] + night_epoch_mjd).astype(int) return gaps def _compute_scheduled(m5_limits, schedule, sequence_duration): scheduled = ( m5_limits["scheduled"] .reset_index() .set_index("mjd", drop=False) .sort_index() .copy() ) seq_days = sequence_duration.to(u.day).value for _, obs_seq in schedule.iterrows(): start_mjd = obs_seq.mjd end_mjd = obs_seq.mjd + seq_days scheduled.loc[start_mjd:end_mjd, "scheduled"] = True scheduled.set_index(m5_limits.index.names, inplace=True) return scheduled["scheduled"] def main(): """Parse command line arguments and config file, and run""" parser = ArgumentParser() parser.add_argument("config", help="configuration file") parser.add_argument("m5", help="file from which to load limiting magnitudes") parser.add_argument("output", help="file in which to write results") args = parser.parse_args() config_fname = args.config m5_fname = args.m5 output_fname = args.output config = read_config(config_fname) logger.info("Reading m5 from %s", m5_fname) m5_limits = ( pd.read_hdf(m5_fname) .reset_index() .query("sun_alt < -18") .set_index(["field_name", "band", "mjd"], drop=False) .assign(scheduled=False) ) schedule = schedule_all(m5_limits, config["location"], config) schedule.to_csv(output_fname, sep="\t", index=False, header=True) return 0 def _init_logger(log_level=logging.DEBUG): """Create the ddfpresched logger and set initial configuration""" ddfpresched_logger = logging.getLogger("ddfpresched") 
ddfpresched_logger.setLevel(log_level) handler = logging.StreamHandler() handler.setLevel(log_level) formatter = logging.Formatter("%(asctime)s\t%(name)s\t%(levelname)s\t%(message)s") handler.setFormatter(formatter) ddfpresched_logger.addHandler(handler) return ddfpresched_logger if __name__ == "__main__": logger = _init_logger() status = main() # pylint: disable=invalid-name sys.exit(status)
4,567
0
115
4ffb8db0c57128edf137b856b06f93dfc1f283a0
8,608
py
Python
mab/gd/nbody/snapshot.py
maartenbreddels/mab
112dcfbc4a74b07aff13d489b3776bca58fe9bdf
[ "MIT" ]
1
2018-12-01T04:10:34.000Z
2018-12-01T04:10:34.000Z
mab/gd/nbody/snapshot.py
maartenbreddels/mab
112dcfbc4a74b07aff13d489b3776bca58fe9bdf
[ "MIT" ]
null
null
null
mab/gd/nbody/snapshot.py
maartenbreddels/mab
112dcfbc4a74b07aff13d489b3776bca58fe9bdf
[ "MIT" ]
null
null
null
# -*- coding: utf-8 -*- from numpy import * from mab.binningtools import bingrid, binrange import mab.gd.logging as logging logger = logging.getLogger("gd.nbody.gadget")
33.364341
132
0.646027
# -*- coding: utf-8 -*- from numpy import * from mab.binningtools import bingrid, binrange import mab.gd.logging as logging logger = logging.getLogger("gd.nbody.gadget") class Component(object): def __init__(self, name, q, p, mass, potential=None): self.name = name self.q = q self.p = p self.mass = mass self.masses = None self.potential = potential def clone(self): return Component(self.name, self.q * 1., self.p * 1., self.mass, self.potential) # TODO check ifwe need to copy mass and potential def rescale(self, scale_mass, scale_size): self.q *= 1#0.1#scale_size self.p *= 1#0.1**-0.5#10**(1./3)#(scale_mass/scale_size)**0.5 self.mass *= 1#scale_mass def rotate_xz(self, angle): x, y, z = self.q vx, vy, vz = self.p xn = x * cos(angle) + z * sin(angle) yn = y zn = x * sin(angle) - z * cos(angle) vxn = vx * cos(angle) + vz * sin(angle) vyn = vy vzn = vx * sin(angle) - vz * cos(angle) self.q = array([xn, yn, zn]) self.p = array([vxn, vyn, vzn]) def translate(self, q, p): self.q = self.q + array(q).reshape(3,1) self.p = self.p + array(p).reshape(3,1) def moments_polar(self, N=250): xp, yp, zp = self.q vx, vy, vz = self.p Nperbin = N r3d = sqrt(xp**2+yp**2+zp**2) #print "r3d min", r3d.min() #scale = float(i%5)/4. #color = Color(scale, 0, 0) #color = #r = sqrt(xp*xp+yp*yp+zp*zp).astype(float64) rhosq = (xp*xp+yp*yp); rho = sqrt(rhosq); vR = (vx*xp + vy*yp)/rho; vphi = (vy*xp - vx*yp)/rho; #vtheta = (vx*zp*xp/rho + vy*zp*yp/rho - vz*rho)/r; #vr[isnan(vr)] = 0 #vphi[isnan(vphi)] = 0 #vtheta[isnan(vtheta)] = 0 #vr[isinf(vr)] = 0 #vphi[isinf(vphi)] = 0 #vtheta[isinf(vtheta)] = 0 logrho, vR, vRvar = self.moment1(log10(rho), vR, N) logrho, vphi, vphivar = self.moment1(log10(rho), vphi, N) logrho, vz, vzvar = self.moment1(log10(rho), vz, N) return logrho, vR, vRvar, vphi, vphivar, vz, vzvar def moments_spherical(self, N=250, moments=2): xp, yp, zp = self.q vx, vy, vz = self.p Nperbin = N r3d = sqrt(xp**2+yp**2+zp**2) #print "r3d min", r3d.min() #scale = float(i%5)/4. 
#color = Color(scale, 0, 0) #color = r = sqrt(xp*xp+yp*yp+zp*zp).astype(float64) + 1e-10 rhosq = (xp*xp+yp*yp); rho = sqrt(rhosq); vr = (vx*xp + vy*yp + vz*zp)/r; vphi = (vy*xp - vx*yp)/rho; vtheta = (vx*zp*xp/rho + vy*zp*yp/rho - vz*rho)/r; vr[isnan(vr)] = 0 vphi[isnan(vphi)] = 0 vtheta[isnan(vtheta)] = 0 vr[isinf(vr)] = 0 vphi[isinf(vphi)] = 0 vtheta[isinf(vtheta)] = 0 logrs, mr = self.moment1(log10(r), vr, N, moments) logrs, mtheta = self.moment1(log10(r), vtheta, N, moments) logrs, mphi = self.moment1(log10(r), vphi, N, moments) return logrs, mr, mphi, mtheta def moment1(self, x, v, Nperbin, moments): xbins = [] momentbins = [] def moment(x, n): if n == 0: return len(x) if n == 1: return mean(x) xc = x - mean(x) return sum(xc**n)/len(xc) for n, xbin, vbin in binrange(Nperbin, x, v): xbins.append(mean(xbin)) momentbins.append([moment(vbin, i) for i in range(moments+1)]) return array(xbins), array(momentbins).T class Snapshot(object): def __init__(self): self.components = [] self.componentmap = {} def add_component(self, component): self.components.append(component) self.componentmap[component.name] = component def center(self, name="halo", rc=0.3): if name not in self.componentmap: name = "disk" component = self.componentmap[name] if component.potential is None: logger.warning("component %s has no potential information, cannot center" % name) return array([0., 0., 0.]), array([0., 0., 0.]) center_index = argmin(component.potential) center_indices = argsort(component.potential)[:10] q0 = mean(component.q[:,center_indices], axis=1) #q0 = component.q[:,center_index] * 1. 
# times 1 to avoid reference logger.debug("center: %r" % q0) for c in self.components: c.translate(-q0, [0, 0, 0]) x, y, z = component.q vx, vy, vz = component.p r = (x**2+y**2+z**2)**0.5 mask = r < rc vx0 = mean(vx[mask]) vy0 = mean(vy[mask]) vz0 = mean(vz[mask]) p0 = array([vx0, vy0, vz0]) for c in self.components: c.translate([0, 0, 0], -p0) return q0, p0 class SnapshotTransformed(Snapshot): def __init__(self, snapshot, rotate_xz_angle=0, q0=[0,0,0], p0=[0,0,0], mass_scale=1., size_scale=1.): super(SnapshotTransformed, self).__init__() self.rotate_xz_angle = rotate_xz_angle self.snapshot = snapshot self.p0 = p0 self.q0 = q0 self.mass_scale = mass_scale self.size_scale = size_scale def load(self): self.snapshot.load() for component in self.snapshot.components: component = component.clone() self.add_component(component) logger.debug("stds before: %r %r "% ([std(component.p[k]) for k in range(3)], [std(component.q[k]) for k in range(3)])) logger.debug("means before: %r %r "% ([mean(component.p[k]) for k in range(3)], [mean(component.q[k]) for k in range(3)])) component.rescale(self.mass_scale, self.size_scale) component.rotate_xz(self.rotate_xz_angle) component.translate(self.q0, self.p0) logger.debug("stds before: %r %r "% ([std(component.p[k]) for k in range(3)], [std(component.q[k]) for k in range(3)])) logger.debug("means before: %r %r "% ([mean(component.p[k]) for k in range(3)], [mean(component.q[k]) for k in range(3)])) class SnapshotFiltered(Snapshot): def __init__(self, snapshot, component_filter): super(SnapshotFiltered, self).__init__() self.snapshot = snapshot self.component_filter = component_filter def load(self): self.snapshot.load() for component in self.snapshot.components: if self.component_filter(component): logger.debug("adding component: %s" % component.name) component = component.clone() self.add_component(component) else: logger.debug("skipping component: %s" % component.name) class SnapshotCenter(Snapshot): def __init__(self, snapshot): 
super(SnapshotCenter, self).__init__() self.snapshot = snapshot def run(self, args, opts, scope): self.load() def load(self): self.snapshot.load() for component in self.snapshot.components: component = component.clone() x, y, z = component.q r = sqrt(x**2 + y**2 +z**2) mask = r < 1 print "mean q ", component.name, mean(component.q, axis=1) print "mean q (r<1kpc)", component.name, mean(component.q[:,mask], axis=1) print "mean p ", component.name, mean(component.p, axis=1) print "mean p (r<1kpc)", component.name, mean(component.p[:,mask], axis=1) component.p -= mean(component.p[:,mask], axis=1).reshape(3,1) print "mean q ", component.name, mean(component.q, axis=1) print "mean q (r<1kpc)", component.name, mean(component.q[:,mask], axis=1) print "mean p ", component.name, mean(component.p, axis=1) print "mean p (r<1kpc)", component.name, mean(component.p[:,mask], axis=1) print self.add_component(component) class SnapshotsMerge(Snapshot): def __init__(self, snapshots, output_snapshot): super(SnapshotsMerge, self).__init__() self.snapshots = snapshots self.output_snapshot = output_snapshot def load(self): for snapshot in self.snapshots: snapshot.load() component_names = {} for snapshot in self.snapshots: for component in snapshot.components: component_names[component.name] = None names = component_names.keys() for name in names: q = zeros((3,0)) p = zeros((3,0)) mass = None masses = zeros((0)) for snapshot in self.snapshots: if name in snapshot.componentmap: component = snapshot.componentmap[name] x = component.q[0] N = len(x) q = concatenate([q, component.q],1) #p = concatenate([p, component.p],1) p = concatenate([p, component.p],1) masses = concatenate([masses, ones(N) * component.mass]) #if mass is None: # mass = component.mass #assert mass == component.mass, "masses should be equal (there are %r %r)" % (mass, component.mass) c = Component(name, q, p, None) c.masses = masses self.add_component(c) class Convert(object): def __init__(self, input, output): self.input = 
input self.output = output def run(self, args, opts, scope): self.input.load() for component in self.input.components: logger.debug("stds before: %r %r "% ([std(component.p[k]) for k in range(3)], [std(component.q[k]) for k in range(3)])) logger.debug("means before: %r %r "% ([mean(component.p[k]) for k in range(3)], [mean(component.q[k]) for k in range(3)])) self.output.add_component(component) self.output.save()
7,669
53
707
0f9938e3d277edecec666efae97a653271e989ee
6,892
py
Python
chb/arm/ARMFunction.py
orinatic/CodeHawk-Binary
8b4fd728213e629736d5ece840ea3b43cea53f30
[ "MIT" ]
null
null
null
chb/arm/ARMFunction.py
orinatic/CodeHawk-Binary
8b4fd728213e629736d5ece840ea3b43cea53f30
[ "MIT" ]
null
null
null
chb/arm/ARMFunction.py
orinatic/CodeHawk-Binary
8b4fd728213e629736d5ece840ea3b43cea53f30
[ "MIT" ]
null
null
null
# ------------------------------------------------------------------------------ # CodeHawk Binary Analyzer # Author: Henny Sipma # ------------------------------------------------------------------------------ # The MIT License (MIT) # # Copyright (c) 2021 Aarno Labs LLC # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in all # copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. 
# ------------------------------------------------------------------------------ import xml.etree.ElementTree as ET from typing import Callable, cast, Dict, List, Mapping, Optional, Sequence from chb.api.InterfaceDictionary import InterfaceDictionary from chb.app.BasicBlock import BasicBlock from chb.app.BDictionary import BDictionary from chb.app.Function import Function from chb.app.FunctionDictionary import FunctionDictionary from chb.app.FunctionInfo import FunctionInfo from chb.app.Cfg import Cfg from chb.app.StringXRefs import StringsXRefs from chb.invariants.FnVarDictionary import FnVarDictionary from chb.invariants.FnXprDictionary import FnXprDictionary from chb.arm.ARMBlock import ARMBlock from chb.arm.ARMDictionary import ARMDictionary from chb.arm.ARMInstruction import ARMInstruction from chb.arm.ARMCfg import ARMCfg import chb.util.fileutil as UF
36.465608
82
0.606065
# ------------------------------------------------------------------------------ # CodeHawk Binary Analyzer # Author: Henny Sipma # ------------------------------------------------------------------------------ # The MIT License (MIT) # # Copyright (c) 2021 Aarno Labs LLC # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in all # copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. 
# ------------------------------------------------------------------------------ import xml.etree.ElementTree as ET from typing import Callable, cast, Dict, List, Mapping, Optional, Sequence from chb.api.InterfaceDictionary import InterfaceDictionary from chb.app.BasicBlock import BasicBlock from chb.app.BDictionary import BDictionary from chb.app.Function import Function from chb.app.FunctionDictionary import FunctionDictionary from chb.app.FunctionInfo import FunctionInfo from chb.app.Cfg import Cfg from chb.app.StringXRefs import StringsXRefs from chb.invariants.FnVarDictionary import FnVarDictionary from chb.invariants.FnXprDictionary import FnXprDictionary from chb.arm.ARMBlock import ARMBlock from chb.arm.ARMDictionary import ARMDictionary from chb.arm.ARMInstruction import ARMInstruction from chb.arm.ARMCfg import ARMCfg import chb.util.fileutil as UF class ARMFunction(Function): def __init__( self, path: str, filename: str, bd: BDictionary, ixd: InterfaceDictionary, finfo: FunctionInfo, armd: ARMDictionary, stringsxrefs: StringsXRefs, names: Sequence[str], xnode: ET.Element) -> None: Function.__init__( self, path, filename, bd, ixd, finfo, stringsxrefs, names, xnode) self._armd = armd self._cfg: Optional[ARMCfg] = None self._blocks: Dict[str, ARMBlock] = {} self._instructions: Dict[str, ARMInstruction] = {} self._armfnd: Optional[FunctionDictionary] = None @property def armdictionary(self) -> ARMDictionary: return self._armd @property def armfunctiondictionary(self) -> FunctionDictionary: if self._armfnd is None: xfnd = self.xnode.find("instr-dictionary") if xfnd is None: raise UF.CHBError("Element instr-dictionary missing from xml") self._armfnd = FunctionDictionary(self, xfnd) return self._armfnd @property def blocks(self) -> Mapping[str, ARMBlock]: if len(self._blocks) == 0: xinstrs = self.xnode.find("instructions") if xinstrs is None: raise UF.CHBError("ARM instructions element missing") for n in xinstrs.findall("bl"): baddr = n.get("ba") if baddr is 
None: raise UF.CHBError("ARM block address missing from xml") self._blocks[baddr] = ARMBlock(self, n) return self._blocks @property def instructions(self) -> Mapping[str, ARMInstruction]: if len(self._instructions) == 0: result: Dict[str, ARMInstruction] = {} def f(baddr: str, block: ARMBlock) -> None: result.update(block.instructions) self.iter_blocks(f) return result return self._instructions def iter_blocks(self, f: Callable[[str, ARMBlock], None]) -> None: for (ba, block) in self.blocks.items(): armblock = cast(ARMBlock, block) f(ba, armblock) def iter_instructions(self, f: Callable[[str, ARMInstruction], None]) -> None: for (ia, instr) in self.instructions.items(): arminstr = cast(ARMInstruction, instr) f(ia, arminstr) @property def branchconditions(self) -> Mapping[str, ARMInstruction]: result: Dict[str, ARMInstruction] = {} for b in self.blocks.values(): lastinstr = b.last_instruction if lastinstr.is_branch_instruction: ftconditions = lastinstr.ft_conditions if len(ftconditions) > 0: result[b.baddr] = cast(ARMInstruction, lastinstr) return result def set_fnvar_dictionary(self, xnode: ET.Element) -> FnVarDictionary: return FnVarDictionary(self, xnode) def strings_referenced(self) -> List[str]: result: List[str] = [] def f(iaddr: str, instr: ARMInstruction) -> None: result.extend(instr.strings_referenced) self.iter_instructions(f) return result @property def cfg(self) -> Cfg: if self._cfg is None: xcfg = self.xnode.find("cfg") if xcfg is None: raise UF.CHBError("cfg element is missing from arm function") self._cfg = ARMCfg(self, xcfg) return self._cfg def byte_string(self, chunksize: int = None) -> str: s: List[str] = [] def f(ia: str, i: ARMInstruction) -> None: s.extend(i.bytestring) self.iter_instructions(f) if chunksize is None: return "".join(s) else: result = "".join(s) size = len(s) chunks = [result[i:i+chunksize] for i in range(0, size, chunksize)] return "\n".join(chunks) def to_string( self, bytes: bool = False, bytestring: bool = False, hash: bool = 
False, opcodetxt: bool = True, opcodewidth: int = 40, sp: bool = True) -> str: lines: List[str] = [] for b in sorted(self.blocks): lines.append( self.blocks[b].to_string( bytes=bytes, opcodetxt=opcodetxt, opcodewidth=opcodewidth, sp=sp)) lines.append("-" * 80) if bytestring: lines.append(self.byte_string(chunksize=32)) return "\n".join(lines)
4,218
442
23
6ece77b6e0e3299f441d2801f483c672b6b6feb4
9,137
py
Python
KmeansCluster.py
QuKunLab/RA-OA
0672bf306a31e2e4295ec7e6d279daf34ba30b91
[ "BSD-2-Clause" ]
null
null
null
KmeansCluster.py
QuKunLab/RA-OA
0672bf306a31e2e4295ec7e6d279daf34ba30b91
[ "BSD-2-Clause" ]
null
null
null
KmeansCluster.py
QuKunLab/RA-OA
0672bf306a31e2e4295ec7e6d279daf34ba30b91
[ "BSD-2-Clause" ]
null
null
null
#!/usr/bin/env python # coding: utf-8 # In[1]: import pandas as pd import statsmodels.api as sm import numpy as np import matplotlib.pyplot as plt from sklearn.decomposition import PCA from mpl_toolkits.mplot3d import Axes3D import seaborn as sns import os import sys import scipy.stats from scipy.stats.mstats import gmean import scipy.stats as stats import math import matplotlib as mpl from sklearn.cluster import KMeans mpl.rcParams['pdf.fonttype'] = 42 mpl.rcParams["font.sans-serif"] = "Arial" #1.Z-score Normalzie DiseaseSP_DF: Cell='Monocytes' outDir=os.path.join('{}/DiffPeaks/mean3_fc2_p0.001_fdr0.05/KmeansCluster'.format(Cell)) if not os.path.exists(outDir): os.mkdir(outDir) DiseaseSP_F='{}/DiffPeaks/mean3_fc2_p0.001_fdr0.05/TwoTwoCompare_Merge.sortCol.txt'.format(Cell) DiseaseSP_DF=pd.read_table(DiseaseSP_F,sep='\t',index_col=0) DiseaseSP_DFz= DiseaseSP_DF.apply(scipy.stats.zscore,axis=1,result_type='broadcast') #decide K:1.手肘法(误差平方法SSE);2.轮廓系数法 SSE = [] # 存放每次结果的误差平方和 for k in range(1,10): estimator = KMeans(n_clusters=k) estimator.fit(DiseaseSP_DFz) SSE.append(estimator.inertia_) X = range(1,10) plt.style.use('seaborn-white') fig=plt.figure(figsize=(3.5,2)) ax=fig.add_axes([0.2,0.2,0.7,0.7]) ax.set_ylabel('Sum of the squared errors',fontsize=10) ax.set_xlabel('k number',fontsize=10) ax.tick_params(axis='y',length=7,labelsize=8,direction='out') ax.tick_params(axis='x',length=7,labelsize=8,direction='out') ax.spines['bottom'].set_linewidth(0.5) ax.spines['left'].set_linewidth(0.5) ax.spines['right'].set_linewidth(0.5) ax.spines['top'].set_linewidth(0.5) plt.plot(X,SSE,color='purple', marker='o', linestyle='dashed',linewidth=1, markersize=5) fig.savefig(outDir+'/Kvalue_SSE.pdf') #print '误差平方和:' plt.show() 2.#根据最佳K值进行KMeans聚类 (Kmeans聚类用的ZscoreNorm后的DF!!!) 
KMean_Cluster(DiseaseSP_DFz,outDir,2) KMean_Cluster(DiseaseSP_DFz,outDir,3) print ('K-means Done !') # In[5]: k='3' Cell='Monocytes' DiseaseSP_F='{}/DiffPeaks/mean3_fc2_p0.001_fdr0.05/TwoTwoCompare_Merge.sortCol.txt'.format(Cell) DiseaseSP_DF=pd.read_table(DiseaseSP_F,sep='\t',index_col=0) RAs=[i for i in list(DiseaseSP_DF) if 'RA' in i] OAs=[i for i in list(DiseaseSP_DF) if 'OA' in i] HCs=[i for i in list(DiseaseSP_DF) if 'HC' in i] BedF= '{}/RAOAHC.removeY.bed'.format(Cell) #read PeakBed BedDF=pd.read_table(BedF,sep='\t',header=None) BedDF.index=BedDF[3] # In[6]: k='3' PlotKmeanCluster_K3(k) # In[ ]: # In[ ]: # In[ ]: # In[ ]:
36.257937
246
0.700887
#!/usr/bin/env python # coding: utf-8 # In[1]: import pandas as pd import statsmodels.api as sm import numpy as np import matplotlib.pyplot as plt from sklearn.decomposition import PCA from mpl_toolkits.mplot3d import Axes3D import seaborn as sns import os import sys import scipy.stats from scipy.stats.mstats import gmean import scipy.stats as stats import math import matplotlib as mpl from sklearn.cluster import KMeans mpl.rcParams['pdf.fonttype'] = 42 mpl.rcParams["font.sans-serif"] = "Arial" #1.Z-score Normalzie DiseaseSP_DF: Cell='Monocytes' outDir=os.path.join('{}/DiffPeaks/mean3_fc2_p0.001_fdr0.05/KmeansCluster'.format(Cell)) if not os.path.exists(outDir): os.mkdir(outDir) DiseaseSP_F='{}/DiffPeaks/mean3_fc2_p0.001_fdr0.05/TwoTwoCompare_Merge.sortCol.txt'.format(Cell) DiseaseSP_DF=pd.read_table(DiseaseSP_F,sep='\t',index_col=0) DiseaseSP_DFz= DiseaseSP_DF.apply(scipy.stats.zscore,axis=1,result_type='broadcast') #decide K:1.手肘法(误差平方法SSE);2.轮廓系数法 SSE = [] # 存放每次结果的误差平方和 for k in range(1,10): estimator = KMeans(n_clusters=k) estimator.fit(DiseaseSP_DFz) SSE.append(estimator.inertia_) X = range(1,10) plt.style.use('seaborn-white') fig=plt.figure(figsize=(3.5,2)) ax=fig.add_axes([0.2,0.2,0.7,0.7]) ax.set_ylabel('Sum of the squared errors',fontsize=10) ax.set_xlabel('k number',fontsize=10) ax.tick_params(axis='y',length=7,labelsize=8,direction='out') ax.tick_params(axis='x',length=7,labelsize=8,direction='out') ax.spines['bottom'].set_linewidth(0.5) ax.spines['left'].set_linewidth(0.5) ax.spines['right'].set_linewidth(0.5) ax.spines['top'].set_linewidth(0.5) plt.plot(X,SSE,color='purple', marker='o', linestyle='dashed',linewidth=1, markersize=5) fig.savefig(outDir+'/Kvalue_SSE.pdf') #print '误差平方和:' plt.show() 2.#根据最佳K值进行KMeans聚类 (Kmeans聚类用的ZscoreNorm后的DF!!!) 
def KMean_Cluster(DF,outDirPrefix,k): #print 'Do KMean Cluster, k={}'.format(k) kmeans=KMeans(n_clusters=k) kmeans.fit(DF) Kcluster=pd.DataFrame(kmeans.labels_,index=list(DF.index),columns=['Cluster']) Kcluster.to_csv(outDir+'/TwoTwoCompareMerge_zscore_k{}.txt'.format(k),sep='\t') #return Kcluster KMean_Cluster(DiseaseSP_DFz,outDir,2) KMean_Cluster(DiseaseSP_DFz,outDir,3) print ('K-means Done !') # In[5]: k='3' Cell='Monocytes' DiseaseSP_F='{}/DiffPeaks/mean3_fc2_p0.001_fdr0.05/TwoTwoCompare_Merge.sortCol.txt'.format(Cell) DiseaseSP_DF=pd.read_table(DiseaseSP_F,sep='\t',index_col=0) RAs=[i for i in list(DiseaseSP_DF) if 'RA' in i] OAs=[i for i in list(DiseaseSP_DF) if 'OA' in i] HCs=[i for i in list(DiseaseSP_DF) if 'HC' in i] BedF= '{}/RAOAHC.removeY.bed'.format(Cell) #read PeakBed BedDF=pd.read_table(BedF,sep='\t',header=None) BedDF.index=BedDF[3] def PlotKmeanCluster_K3(k): kmeansDir=os.path.join('{}/DiffPeaks/mean3_fc2_p0.001_fdr0.05/KmeansCluster/kvalue_k{}/'.format(Cell,k)) if not os.path.exists(kmeansDir): os.mkdir(kmeansDir) KClusterF='{}/DiffPeaks/mean3_fc2_p0.001_fdr0.05/KmeansCluster/TwoTwoCompareMerge_zscore_k{}.txt'.format(Cell,k) KCluster=pd.read_table(KClusterF,sep='\t',index_col=0) k1=KCluster[KCluster['Cluster']==0] k2=KCluster[KCluster['Cluster']==1] k3=KCluster[KCluster['Cluster']==2] k1DF=DiseaseSP_DF.loc[k1.index] k2DF=DiseaseSP_DF.loc[k2.index] k3DF=DiseaseSP_DF.loc[k3.index] k1Bed=BedDF.loc[k1DF.index] k2Bed=BedDF.loc[k2DF.index] k3Bed=BedDF.loc[k3DF.index] a1=k1DF.iloc[:,-2:-1].mean(axis=0)[0] a2=k2DF.iloc[:,-2:-1].mean(axis=0)[0] a3=k3DF.iloc[:,-2:-1].mean(axis=0)[0] if (a1 < a2) & (a2 < a3): KclusterDF_c1=k1DF.copy() KclusterDF_c2=k2DF.copy() KclusterDF_c3=k3DF.copy() elif (a1 < a3) & (a3 < a2): KclusterDF_c1=k1DF.copy() KclusterDF_c2=k3DF.copy() KclusterDF_c3=k2DF.copy() elif (a2 < a1) & (a1 < a3): KclusterDF_c1=k2DF.copy() KclusterDF_c2=k1DF.copy() KclusterDF_c3=k3DF.copy() elif (a2 < a3) & (a3 < a1): KclusterDF_c1=k2DF.copy() 
KclusterDF_c2=k3DF.copy() KclusterDF_c3=k1DF.copy() elif (a3 < a1) & (a1 < a2): KclusterDF_c1=k3DF.copy() KclusterDF_c2=k1DF.copy() KclusterDF_c3=k2DF.copy() elif (a3 < a2) & (a2 < a1): KclusterDF_c1=k3DF.copy() KclusterDF_c2=k2DF.copy() KclusterDF_c3=k1DF.copy() KclusterBed_c1=BedDF.loc[KclusterDF_c1.index] KclusterBed_c2=BedDF.loc[KclusterDF_c2.index] KclusterBed_c3=BedDF.loc[KclusterDF_c3.index] KclusterBed_c1.to_csv(kmeansDir+'KmeansCluster_c1.bed',sep='\t',header=False,index=False) KclusterBed_c2.to_csv(kmeansDir+'KmeansCluster_c2.bed',sep='\t',header=False,index=False) KclusterBed_c3.to_csv(kmeansDir+'KmeansCluster_c3.bed',sep='\t',header=False,index=False) KclusterDF_c1.to_csv(kmeansDir+'KmeansCluster_c1.txt',sep='\t') KclusterDF_c2.to_csv(kmeansDir+'KmeansCluster_c2.txt',sep='\t') KclusterDF_c3.to_csv(kmeansDir+'KmeansCluster_c3.txt',sep='\t') KclusterDF_c1c2c3=pd.concat([KclusterDF_c1,KclusterDF_c2,KclusterDF_c3],axis=0) KclusterDF_c1c2c3.to_csv(kmeansDir+'KmeansCluster_all.txt',sep='\t') KclusterBed_c1c2c3=BedDF.loc[KclusterDF_c1c2c3.index] KclusterBed_c1c2c3.to_csv(kmeansDir+'KmeansCluster_all.bed',sep='\t',header=False,index=False) def DFmean(inputDF,C): Df=DiseaseSP_DF.loc[inputDF.index] hc=Df[HCs] oa=Df[OAs] ra=Df[RAs] hcmean=hc.mean(axis=1) hcmeanDF = hcmean.to_frame() hcmeanDF.rename(columns={0:'HC'}, inplace = True) oamean=oa.mean(axis=1) oameanDF = oamean.to_frame() oameanDF.rename(columns={0:'OA'}, inplace = True) ramean=ra.mean(axis=1) rameanDF = ramean.to_frame() rameanDF.rename(columns={0:'RA'}, inplace = True) MergeM = pd.concat([hcmeanDF,oameanDF,rameanDF],axis=1) MergeM.to_csv(kmeansDir+'KmeansCluster_{}.average.txt'.format(C),sep='\t') #Boxplot plt.style.use('seaborn-white') fig=plt.figure(figsize=(1.5,2)) ax=fig.add_axes([0.2,0.2,0.75,0.75]) #sns.violinplot(data=AA,ax=ax1,palette=(['steelblue','gold','orangered'])) 
sns.boxplot(data=MergeM,ax=ax,palette=(['steelblue','gold','orangered']),whis=0.5,fliersize=0.5,width=0.7,showfliers=False,medianprops={'linewidth':0.5},whiskerprops={'linewidth':0.5},boxprops={'linewidth':0.5},capprops={'linewidth':0.5}) ax.tick_params(labelsize=8,width=0.5,direction='out') #ax.set_ylim([0,10]) ax.spines['bottom'].set_linewidth(0.25) ax.spines['left'].set_linewidth(0.25) ax.spines['right'].set_linewidth(0.25) ax.spines['top'].set_linewidth(0.25) fig.savefig(kmeansDir+'KmeansCluster_{}_average.boxplot.pdf'.format(C)) plt.show() DFmean(KclusterDF_c1,'c1') DFmean(KclusterDF_c2,'c2') DFmean(KclusterDF_c3,'c3') #zcore,plot heatmap: KclusterDFall_Z=KclusterDF_c1c2c3.apply(scipy.stats.zscore,axis=1,result_type='broadcast') KclusterDFc1_Z=KclusterDF_c1.apply(scipy.stats.zscore,axis=1,result_type='broadcast') KclusterDFc2_Z=KclusterDF_c2.apply(scipy.stats.zscore,axis=1,result_type='broadcast') KclusterDFc3_Z=KclusterDF_c3.apply(scipy.stats.zscore,axis=1,result_type='broadcast') fig1=sns.clustermap(KclusterDFall_Z,figsize=(4,5),center=0,vmin=-2,vmax=2,col_cluster=False,row_cluster=False,cmap='RdYlBu_r') fig1.savefig(kmeansDir+'KmeansCluster_all.heatmap.png',dpi=200) plt.show() plt.close('all') fig2=sns.clustermap(KclusterDFc1_Z,figsize=(4,0.0009*len(KclusterDFc1_Z)),center=0,vmin=-2,vmax=2,col_cluster=False,row_cluster=False,cmap='RdYlBu_r') fig2.savefig(kmeansDir+'KmeansCluster_c1.heatmap.png',dpi=500) plt.show() plt.close('all') fig3=sns.clustermap(KclusterDFc2_Z,figsize=(4,0.0009*len(KclusterDFc2_Z)),center=0,vmin=-2,vmax=2,col_cluster=False,row_cluster=False,cmap='RdYlBu_r') fig3.savefig(kmeansDir+'KmeansCluster_c2.heatmap.png',dpi=500) plt.show() plt.close('all') fig4=sns.clustermap(KclusterDFc3_Z,figsize=(4,0.0009*len(KclusterDFc3_Z)),center=0,vmin=-2,vmax=2,col_cluster=False,row_cluster=False,cmap='RdYlBu_r') fig4.savefig(kmeansDir+'KmeansCluster_c3.heatmap.png',dpi=500) plt.show() plt.close('all') HCz=KclusterDFall_Z[HCs] OAz=KclusterDFall_Z[OAs] 
RAz=KclusterDFall_Z[RAs] HCmean=HCz.mean(axis=1) HCmeanDF = HCmean.to_frame() HCmeanDF.rename(columns={0:'HC'}, inplace = True) OAmean=OAz.mean(axis=1) OAmeanDF = OAmean.to_frame() OAmeanDF.rename(columns={0:'OA'}, inplace = True) RAmean=RAz.mean(axis=1) RAmeanDF = RAmean.to_frame() RAmeanDF.rename(columns={0:'RA'}, inplace = True) KclusterDFall_Z_average = pd.concat([HCmeanDF,OAmeanDF,RAmeanDF],axis=1) fig4=sns.clustermap(KclusterDFall_Z_average,figsize=(1,6),center=0,vmin=-2,vmax=2,col_cluster=False,row_cluster=False,cmap='RdYlBu_r') fig4.savefig(kmeansDir+'KmeansCluster_all.heatmap.average.pdf') plt.show() plt.close('all') # In[6]: k='3' PlotKmeanCluster_K3(k) # In[ ]: # In[ ]: # In[ ]: # In[ ]:
6,615
0
45
53904af3d0ccc22a36392a88bb13320439f920d6
515
py
Python
Versuch5/versuch5/task1.py
Tobias-Schoch/SSS
f8b078ca7f6482fc7c89d5f9e784a549459eefb7
[ "MIT" ]
null
null
null
Versuch5/versuch5/task1.py
Tobias-Schoch/SSS
f8b078ca7f6482fc7c89d5f9e784a549459eefb7
[ "MIT" ]
null
null
null
Versuch5/versuch5/task1.py
Tobias-Schoch/SSS
f8b078ca7f6482fc7c89d5f9e784a549459eefb7
[ "MIT" ]
1
2022-01-06T12:47:53.000Z
2022-01-06T12:47:53.000Z
import redlab as rl print("-------einzelneWerte-------------------------") print("16BitValue:" + str(rl.cbAIn(0, 0, 1))) print("VoltageValue:" + str(rl.cbVIn(0, 0, 1))) print("-------Messreihe-------------------------") print("Messreihe:" + str(rl.cbAInScan(0, 0, 0, 300, 8000, 1))) print("Messreihe:" + str(rl.cbVInScan(0, 0, 0, 300, 8000, 1))) print("Samplerate:" + str(rl.cbInScanRate(0, 0, 0, 8000))) print("Nyquist:" + str(rl.cbInScanRate(0, 0, 0, 8000) / 2)) print("-------Ausgabe-------------------------")
42.916667
62
0.528155
import redlab as rl print("-------einzelneWerte-------------------------") print("16BitValue:" + str(rl.cbAIn(0, 0, 1))) print("VoltageValue:" + str(rl.cbVIn(0, 0, 1))) print("-------Messreihe-------------------------") print("Messreihe:" + str(rl.cbAInScan(0, 0, 0, 300, 8000, 1))) print("Messreihe:" + str(rl.cbVInScan(0, 0, 0, 300, 8000, 1))) print("Samplerate:" + str(rl.cbInScanRate(0, 0, 0, 8000))) print("Nyquist:" + str(rl.cbInScanRate(0, 0, 0, 8000) / 2)) print("-------Ausgabe-------------------------")
0
0
0
d95999f3508116ce94aa9c4342d68c3a5e9948c8
1,021
py
Python
foursight_core/stage.py
4dn-dcic/foursight-core
2e5ea594d38d04ad58f63ee42e5fb4b920bfb63c
[ "MIT" ]
null
null
null
foursight_core/stage.py
4dn-dcic/foursight-core
2e5ea594d38d04ad58f63ee42e5fb4b920bfb63c
[ "MIT" ]
3
2021-08-11T07:09:24.000Z
2022-02-16T18:58:45.000Z
foursight_core/stage.py
4dn-dcic/foursight-core
2e5ea594d38d04ad58f63ee42e5fb4b920bfb63c
[ "MIT" ]
null
null
null
import os
26.868421
89
0.629775
import os class Stage(object): prod_stage_name = 'prod' def __init__(self, foursight_prefix): self.prefix = foursight_prefix @classmethod def get_stage_from_env_variable(cls): # set environmental variables in .chalice/config.json return os.environ.get('chalice_stage', 'dev') # default to dev @classmethod def get_stage(cls): stage = cls.get_stage_from_env_variable() if stage == 'test': stage = 'dev' return stage def get_queue_name(self): return '-'.join([self.prefix, self.get_stage_from_env_variable(), 'check_queue']) def get_runner_name(self): check_runner = os.environ.get('CHECK_RUNNER', None) if not check_runner: check_runner = '-'.join([self.prefix, self.get_stage(), 'check_runner']) return check_runner @classmethod def is_stage_prod(cls): if cls.get_stage() == cls.prod_stage_name: return True else: return False
745
242
23
c8e80520bc7afbfcc20d06219c5fce1dcc434c47
7,628
py
Python
Groups/Group_ID_3/Resources/dgcca_pckg/dgcca.py
gupta19avaneesh/DataScience
b37fc1208fc47187352b2066dbdca629014d92db
[ "MIT" ]
null
null
null
Groups/Group_ID_3/Resources/dgcca_pckg/dgcca.py
gupta19avaneesh/DataScience
b37fc1208fc47187352b2066dbdca629014d92db
[ "MIT" ]
null
null
null
Groups/Group_ID_3/Resources/dgcca_pckg/dgcca.py
gupta19avaneesh/DataScience
b37fc1208fc47187352b2066dbdca629014d92db
[ "MIT" ]
null
null
null
# -*- coding: utf-8 -*- """DGCCA.ipynb Automatically generated by Colaboratory. Original file is located at https://colab.research.google.com/drive/15L_7jxf0KH81UjAO6waIbQso1nqML0kD # Deep Generalized Cannonical Correlation Analysis Implementaion for 3 views cca-zoo package is used for the implemntaion of gcca (Generalized Cannonical Correlation Analysis) """ #install the cca-zoo package #pip install cca-zoo #importing required libraries import torch import cca_zoo import torch.nn as nn import torch.optim as optim import GCCA_loss #to be implemented """# Class DNN : Creates a new Deep Neural Network **Parameters** : * **layer_size** - It is the list of size of each layer in the DNN staring from the input layer * **activation** - The type of activation function to be used. Choose from 'relu' , 'tanh' , 'sigmoid' . By default, sigmoid. **Methods** * **forward(self, l)** : forward propogates input l into the DNN and returns the output """ """# Class : DGCCA_architecture - Defines the architecture for three DNNs **Parameters** * **layer_size1 , layer_size2 , layer_size3** : list of sizes of each layer of first, second and third DNN(view) respectively. **Methods** * **forward(self, x1, x2, x3)** : forward propogates x1 into the first DNN,x2 into the second DNN and x3 into the third DNN and returns the outputs. """ """# Class DGCCA : Implements the DGCCA Algorithm **Parameters** * **architecture** : object of DGCCA_architecture class. * **gcca_wrraper** : from cca-zoo package to implement gcca * **learning_rate** : learning_rate of the network * **epoch_num** :How long to train the model. * **batch_size** : Number of example per minibatch. * **reg_param** : the regularization parameter of the network * **out_size** : the size of the new space learned by the model (number of the new features) **Methods** * **fit(self, train_x1, train_x2, train_x3, test_x1, test_x2, test_x3)** - trains and tests the networks batch-wise. Also, back propogates the ggca loss. 
First three parameters are the training set for each view respectively. The last three parameters are the testing set for each view respectively * **_get_outputs(self, x1, x2, x3)** - returns gcca loss and output as both lists for given inputs x1, x2, x3 for view first, second, third respectively. * **test(self, x1, x2, x3)** - returns gcca loss mean and output as list for given inputs x1, x2, x3 for view first, second, third respectively. * **train_gcca(self, x1, x2, x3)** - uses the gcca.fit() from cca zoo on given inputs x1,x2,x3 """ #def train_gcca(self, x1, x2, x3): # self.gcca_wrapper = cca_zoo.wrapper.Wrapper(latent_dims=latent_dims, method='gcca') # self.gcca.fit(x1, x2, self.outdim_size)
37.950249
300
0.62795
# -*- coding: utf-8 -*- """DGCCA.ipynb Automatically generated by Colaboratory. Original file is located at https://colab.research.google.com/drive/15L_7jxf0KH81UjAO6waIbQso1nqML0kD # Deep Generalized Cannonical Correlation Analysis Implementaion for 3 views cca-zoo package is used for the implemntaion of gcca (Generalized Cannonical Correlation Analysis) """ #install the cca-zoo package #pip install cca-zoo #importing required libraries import torch import cca_zoo import torch.nn as nn import torch.optim as optim import GCCA_loss #to be implemented """# Class DNN : Creates a new Deep Neural Network **Parameters** : * **layer_size** - It is the list of size of each layer in the DNN staring from the input layer * **activation** - The type of activation function to be used. Choose from 'relu' , 'tanh' , 'sigmoid' . By default, sigmoid. **Methods** * **forward(self, l)** : forward propogates input l into the DNN and returns the output """ class DNN(nn.Module): def __init__(self, layer_size, activation): super(DNN, self).__init__() layers = [] self.activation = activation # Defaults to sigmoid if self.activation == 'relu': self.activation_func = nn.RelU() elif self.activation == 'tanh': self.activation_func = nn.Tanh() elif self.activation == 'sigmoid': self.activation_func = nn.Sigmoid() else: self.activation_func = nn.Sigmoid() for l_id in range(len(layer_sizes) - 1): if l_id == len(layer_sizes) - 2: #second last layer layers.append(nn.Sequential( nn.BatchNorm1d(num_features=layer_sizes[l_id], affine=False), nn.Linear(layer_sizes[l_id], layer_sizes[l_id + 1]), )) else: #all other layers layers.append(nn.Sequential( nn.Linear(layer_sizes[l_id], layer_sizes[l_id + 1]), self.activation_func, nn.BatchNorm1d(num_features=layer_sizes[l_id + 1], affine=False), )) self.layers = nn.ModuleList(layers) def forward(self, l): for layer in self.layers: l = layer(l) return l """# Class : DGCCA_architecture - Defines the architecture for three DNNs **Parameters** * **layer_size1 , 
layer_size2 , layer_size3** : list of sizes of each layer of first, second and third DNN(view) respectively. **Methods** * **forward(self, x1, x2, x3)** : forward propogates x1 into the first DNN,x2 into the second DNN and x3 into the third DNN and returns the outputs. """ class DGCCA_architecture(nn.Module): # for thee vies def __init__(self, layer_size1, layer_size2, layer_size3): #, use_all_singular_values, device=torch.device('cpu')): super(DGCCA, self).__init__() self.model1 = DNN(layer_sizes1, input_size1).double() self.model2 = DNN(layer_sizes2, input_size2).double() self.model3 = DNN(layer_sizes2, input_size2).double() def forward(self, x1, x2, x3): output1 = self.model1(x1) output2 = self.model2(x2) output3 = self.model3(x3) return output1, output2, output3 """# Class DGCCA : Implements the DGCCA Algorithm **Parameters** * **architecture** : object of DGCCA_architecture class. * **gcca_wrraper** : from cca-zoo package to implement gcca * **learning_rate** : learning_rate of the network * **epoch_num** :How long to train the model. * **batch_size** : Number of example per minibatch. * **reg_param** : the regularization parameter of the network * **out_size** : the size of the new space learned by the model (number of the new features) **Methods** * **fit(self, train_x1, train_x2, train_x3, test_x1, test_x2, test_x3)** - trains and tests the networks batch-wise. Also, back propogates the ggca loss. First three parameters are the training set for each view respectively. The last three parameters are the testing set for each view respectively * **_get_outputs(self, x1, x2, x3)** - returns gcca loss and output as both lists for given inputs x1, x2, x3 for view first, second, third respectively. * **test(self, x1, x2, x3)** - returns gcca loss mean and output as list for given inputs x1, x2, x3 for view first, second, third respectively. 
* **train_gcca(self, x1, x2, x3)** - uses the gcca.fit() from cca zoo on given inputs x1,x2,x3 """ class DGCCA(nn.Module): def __init__(self, architecture, gcca_wrapper, learning_rate, epoch_num, batch_size, reg_par, out_size): super(DGCCA, self).__init__() self.arch = nn.DataParallel(architecture) self.lr =learning_rate self.reg_par = reg_par # Stochastic Gradient Descent used as optimizer self.optimizer = torch.optim.SGD(model.parameters(), lr=lr, weight_decay=reg_par)#, momentum=0.9, weight_decay=0.5 self.epoch_num = epoch_num self.batch_size = batch_size self.out_size = out_size self.gcca = gcca_wrapper # The GCCA loss function self.loss = GCCA_loss(out_size) self.outdim_size = outdim_size def _get_outputs(self, x1, x2, x3): with torch.no_grad(): self.arch.eval() data_size = x1.size(0) batch_idxs = list(BatchSampler(SequentialSampler( range(data_size)), batch_size=self.batch_size, drop_last=False)) losses = [] outputs1 = [] outputs2 = [] outputs3 = [] for batch_idx in batch_idxs: batch_x1 = x1[batch_idx, :] batch_x2 = x2[batch_idx, :] batch_x3 = x3[batch_idx, :] o1, o2, o3 = self.model(batch_x1, batch_x2, batch_x3) outputs1.append(o1) outputs2.append(o2) outputs3.append(o3) loss = self.loss(o1, o2, o3) losses.append(loss.item()) outputs = [torch.cat(outputs1, dim=0).numpy(), torch.cat(outputs2, dim=0).numpy(), torch.cat(outputs3, dim=0).numpy()] return losses, outputs def test(self, x1, x2, x3): with torch.no_grad(): losses, outputs = self._get_outputs(x1, x2, x3) return np.mean(losses), outputs #def train_gcca(self, x1, x2, x3): # self.gcca_wrapper = cca_zoo.wrapper.Wrapper(latent_dims=latent_dims, method='gcca') # self.gcca.fit(x1, x2, self.outdim_size) def fit(self, train_x1, train_x2, train_x3, test_x1, test_x2, test_x3): train_losses = [] for epoch in range(self.epoch_num): epoch_start_time = time.time() self.model.train() batch_idxs = list(BatchSampler(RandomSampler( range(data_size)), batch_size=self.batch_size, drop_last=False)) for batch_idx in 
batch_idxs: self.optimizer.zero_grad() batch_x1 = train_x1[batch_idx, :] batch_x2 = train_x2[batch_idx, :] batch_x3 = train_x3[batch_idx, :] o1, o2, o3 = self.model(batch_x1, batch_x2, batch_x3) loss = self.loss(o1, o2, o3) train_losses.append(loss.item()) loss.backward() self.optimizer.step() train_loss = np.mean(train_losses) # train_gcca _, outputs = self._get_outputs(x1, x2, x3) self.train_gcca(outputs[0], outputs[1], outputs[2]) loss = self.test(test_x1, test_x2, test_x3) print('loss on test data: {:.4f}'.format(loss))
4,538
33
274
50d399aabd96ba4e8fc399b405e8d3519c4ea599
1,016
py
Python
Calibration/HcalIsolatedTrackReco/python/isolPixelTrackProdL1T_cfi.py
pasmuss/cmssw
566f40c323beef46134485a45ea53349f59ae534
[ "Apache-2.0" ]
null
null
null
Calibration/HcalIsolatedTrackReco/python/isolPixelTrackProdL1T_cfi.py
pasmuss/cmssw
566f40c323beef46134485a45ea53349f59ae534
[ "Apache-2.0" ]
null
null
null
Calibration/HcalIsolatedTrackReco/python/isolPixelTrackProdL1T_cfi.py
pasmuss/cmssw
566f40c323beef46134485a45ea53349f59ae534
[ "Apache-2.0" ]
null
null
null
import FWCore.ParameterSet.Config as cms #IsolatedPixelTrackCandidateProducer default configuration isolPixelTrackProd = cms.EDProducer("IsolatedPixelTrackCandidateL1TProducer", L1eTauJetsSource = cms.InputTag( 'hltGtStage2Digis','Tau' ), tauAssociationCone = cms.double( 0.0 ), tauUnbiasCone = cms.double( 1.2 ), PixelTracksSources = cms.VInputTag( "hltPixelTracks" ), ExtrapolationConeSize = cms.double(1.0), PixelIsolationConeSizeAtEC = cms.double(40), L1GTSeedLabel = cms.InputTag( "hltL1sV0SingleJet60" ), MaxVtxDXYSeed = cms.double( 101.0 ), MaxVtxDXYIsol = cms.double( 101.0 ), VertexLabel = cms.InputTag( "hltTrimmedPixelVertices" ), MagFieldRecordName = cms.string("VolumeBasedMagneticField"), minPTrack = cms.double( 5.0 ), maxPTrackForIsolation = cms.double( 3.0 ), EBEtaBoundary = cms.double(1.479) )
46.181818
80
0.629921
import FWCore.ParameterSet.Config as cms #IsolatedPixelTrackCandidateProducer default configuration isolPixelTrackProd = cms.EDProducer("IsolatedPixelTrackCandidateL1TProducer", L1eTauJetsSource = cms.InputTag( 'hltGtStage2Digis','Tau' ), tauAssociationCone = cms.double( 0.0 ), tauUnbiasCone = cms.double( 1.2 ), PixelTracksSources = cms.VInputTag( "hltPixelTracks" ), ExtrapolationConeSize = cms.double(1.0), PixelIsolationConeSizeAtEC = cms.double(40), L1GTSeedLabel = cms.InputTag( "hltL1sV0SingleJet60" ), MaxVtxDXYSeed = cms.double( 101.0 ), MaxVtxDXYIsol = cms.double( 101.0 ), VertexLabel = cms.InputTag( "hltTrimmedPixelVertices" ), MagFieldRecordName = cms.string("VolumeBasedMagneticField"), minPTrack = cms.double( 5.0 ), maxPTrackForIsolation = cms.double( 3.0 ), EBEtaBoundary = cms.double(1.479) )
0
0
0
509f3b7436d7a1154fae2d44583752b3127e2a1d
3,406
py
Python
tools/utilities/pythonlibs/procmon.py
awf/ELL
25c94a1422efc41d5560db11b136f9d8f957ad41
[ "MIT" ]
2,094
2016-09-28T05:55:24.000Z
2019-05-04T19:06:36.000Z
tools/utilities/pythonlibs/procmon.py
awesomemachinelearning/ELL
cb897e3aec148a1e9bd648012b5f53ab9d0dd20c
[ "MIT" ]
213
2017-06-30T12:53:40.000Z
2019-05-03T06:35:38.000Z
tools/utilities/pythonlibs/procmon.py
awesomemachinelearning/ELL
cb897e3aec148a1e9bd648012b5f53ab9d0dd20c
[ "MIT" ]
301
2017-03-24T08:40:00.000Z
2019-05-02T21:22:28.000Z
#!/usr/bin/env python3 #################################################################################################### # # Project: Embedded Learning Library (ELL) # File: procmon.py # Authors: Lisa Ong # # Requires: Python 3.4+, psutil (pip install psutil) # #################################################################################################### import argparse import json import psutil import statistics from time import sleep if __name__ == "__main__": parser = argparse.ArgumentParser() # required arguments parser.add_argument("process_id", type=int, help="process identifier to monitor") # options parser.add_argument("--interval", type=float, default=1, help="monitoring interval in seconds") parser.add_argument("--logfile", help="path to the output file") args = parser.parse_args() pm = ProcessMonitor(args.process_id, args.logfile, args.interval) pm.start()
36.623656
110
0.576629
#!/usr/bin/env python3 #################################################################################################### # # Project: Embedded Learning Library (ELL) # File: procmon.py # Authors: Lisa Ong # # Requires: Python 3.4+, psutil (pip install psutil) # #################################################################################################### import argparse import json import psutil import statistics from time import sleep class ProcessMonitor: def __init__(self, process_id, output_file, interval): self.output_file = output_file self.interval = interval self.process = psutil.Process(process_id) def start(self): """While the process is running, monitor its vitals (e.g. resource usage) and log the results """ stats = [] try: firstcall = True while True: stat = self.process.as_dict(attrs=[ 'cpu_times', 'cpu_percent', 'num_threads', 'memory_info']) stat['timestamp'] = psutil.boot_time() stat['system_cpu_percent'] = psutil.cpu_percent(interval=None, percpu=True) stat['system_cpu_freq'] = psutil.cpu_freq(percpu=True) if firstcall: # first call is throwaway (see documentation for cpu_percent on why) firstcall = False else: stats.append(stat) sleep(self.interval) except psutil.NoSuchProcess: print("Process has exited") finally: summary = self.summarize(stats) results = {} results["stats"] = stats results["summary"] = summary if self.output_file: # write Windows line endings with open(self.output_file, 'w', encoding='utf-8', newline='\r\n') as outfile: json.dump(results, outfile, ensure_ascii=False, indent=2, sort_keys=True) def summarize(self, stats): summary = {} # 'cpu_percent' is an aggregate across the logical CPUs in use summary["mean_cpu_percent"] = statistics.mean([x['cpu_percent'] for x in stats]) summary["mean_num_threads"] = statistics.mean([x['num_threads'] for x in stats]) summary["mean_system_cpu_percent"] = [ statistics.mean([x['system_cpu_percent'][y] for x in stats]) for y in range(0, psutil.cpu_count()) ] summary["user_cpu_time_s"] = 
stats[-1]['cpu_times'].user summary["system_cpu_time_s"] = stats[-1]['cpu_times'].system # for best portablity, stick to subset of fields that are present in all OSes summary["mean_resident_set_b"] = statistics.mean([x['memory_info'].rss for x in stats]) summary["mean_virtual_memory_b"] = statistics.mean([x['memory_info'].vms for x in stats]) return summary if __name__ == "__main__": parser = argparse.ArgumentParser() # required arguments parser.add_argument("process_id", type=int, help="process identifier to monitor") # options parser.add_argument("--interval", type=float, default=1, help="monitoring interval in seconds") parser.add_argument("--logfile", help="path to the output file") args = parser.parse_args() pm = ProcessMonitor(args.process_id, args.logfile, args.interval) pm.start()
1,040
1,402
23
a67a765a9a4713271e8e62411009037eac7253b5
2,047
py
Python
chapter5/logistic_regression_tf.py
arifmudi/Python-Machine-Learning-By-Example-Third-Edition
7bdc45df2b519e3c0a929b03f0ac6fe30e028382
[ "MIT" ]
49
2020-03-21T08:37:46.000Z
2022-02-01T12:48:23.000Z
chapter5/logistic_regression_tf.py
hmoharrer/Python-Machine-Learning-By-Example-Third-Edition
7bdc45df2b519e3c0a929b03f0ac6fe30e028382
[ "MIT" ]
2
2021-03-28T17:25:57.000Z
2021-04-05T18:14:55.000Z
chapter5/logistic_regression_tf.py
hmoharrer/Python-Machine-Learning-By-Example-Third-Edition
7bdc45df2b519e3c0a929b03f0ac6fe30e028382
[ "MIT" ]
40
2020-05-02T18:30:00.000Z
2022-02-27T09:15:16.000Z
''' Source codes for Python Machine Learning By Example 3rd Edition (Packt Publishing) Chapter 5 Predicting Online Ads Click-through with Logistic Regression Author: Yuxi (Hayden) Liu (yuxi.liu.ece@gmail.com) ''' import tensorflow as tf import pandas as pd n_rows = 300000 df = pd.read_csv("train", nrows=n_rows) X = df.drop(['click', 'id', 'hour', 'device_id', 'device_ip'], axis=1).values Y = df['click'].values n_train = int(n_rows * 0.9) X_train = X[:n_train] Y_train = Y[:n_train].astype('float32') X_test = X[n_train:] Y_test = Y[n_train:].astype('float32') from sklearn.preprocessing import OneHotEncoder enc = OneHotEncoder(handle_unknown='ignore') X_train_enc = enc.fit_transform(X_train).toarray().astype('float32') X_test_enc = enc.transform(X_test).toarray().astype('float32') batch_size = 1000 train_data = tf.data.Dataset.from_tensor_slices((X_train_enc, Y_train)) train_data = train_data.repeat().shuffle(5000).batch(batch_size).prefetch(1) n_features = int(X_train_enc.shape[1]) W = tf.Variable(tf.zeros([n_features, 1])) b = tf.Variable(tf.zeros([1])) learning_rate = 0.0008 optimizer = tf.optimizers.Adam(learning_rate) training_steps = 6000 for step, (batch_x, batch_y) in enumerate(train_data.take(training_steps), 1): run_optimization(batch_x, batch_y) if step % 500 == 0: logits = tf.add(tf.matmul(batch_x, W), b)[:, 0] loss = tf.reduce_mean(tf.nn.sigmoid_cross_entropy_with_logits(labels=batch_y, logits=logits)) print("step: %i, loss: %f" % (step, loss)) logits = tf.add(tf.matmul(X_test_enc, W), b)[:, 0] pred = tf.nn.sigmoid(logits) auc_metric = tf.keras.metrics.AUC() auc_metric.update_state(Y_test, pred) print(f'AUC on testing set: {auc_metric.result().numpy():.3f}')
32.492063
101
0.713727
''' Source codes for Python Machine Learning By Example 3rd Edition (Packt Publishing) Chapter 5 Predicting Online Ads Click-through with Logistic Regression Author: Yuxi (Hayden) Liu (yuxi.liu.ece@gmail.com) ''' import tensorflow as tf import pandas as pd n_rows = 300000 df = pd.read_csv("train", nrows=n_rows) X = df.drop(['click', 'id', 'hour', 'device_id', 'device_ip'], axis=1).values Y = df['click'].values n_train = int(n_rows * 0.9) X_train = X[:n_train] Y_train = Y[:n_train].astype('float32') X_test = X[n_train:] Y_test = Y[n_train:].astype('float32') from sklearn.preprocessing import OneHotEncoder enc = OneHotEncoder(handle_unknown='ignore') X_train_enc = enc.fit_transform(X_train).toarray().astype('float32') X_test_enc = enc.transform(X_test).toarray().astype('float32') batch_size = 1000 train_data = tf.data.Dataset.from_tensor_slices((X_train_enc, Y_train)) train_data = train_data.repeat().shuffle(5000).batch(batch_size).prefetch(1) n_features = int(X_train_enc.shape[1]) W = tf.Variable(tf.zeros([n_features, 1])) b = tf.Variable(tf.zeros([1])) learning_rate = 0.0008 optimizer = tf.optimizers.Adam(learning_rate) def run_optimization(x, y): with tf.GradientTape() as g: logits = tf.add(tf.matmul(x, W), b)[:, 0] cost = tf.reduce_mean(tf.nn.sigmoid_cross_entropy_with_logits(labels=y, logits=logits)) gradients = g.gradient(cost, [W, b]) optimizer.apply_gradients(zip(gradients, [W, b])) training_steps = 6000 for step, (batch_x, batch_y) in enumerate(train_data.take(training_steps), 1): run_optimization(batch_x, batch_y) if step % 500 == 0: logits = tf.add(tf.matmul(batch_x, W), b)[:, 0] loss = tf.reduce_mean(tf.nn.sigmoid_cross_entropy_with_logits(labels=batch_y, logits=logits)) print("step: %i, loss: %f" % (step, loss)) logits = tf.add(tf.matmul(X_test_enc, W), b)[:, 0] pred = tf.nn.sigmoid(logits) auc_metric = tf.keras.metrics.AUC() auc_metric.update_state(Y_test, pred) print(f'AUC on testing set: {auc_metric.result().numpy():.3f}')
280
0
23
7afcc3bdf1870a247dc1c1d0dbd2c4cb23972b11
1,243
py
Python
zipline/research/utils.py
zhangshoug/czipline
6bce0abd4772443547f44669c0adb2b5c63f64db
[ "Apache-2.0" ]
9
2019-05-18T10:44:48.000Z
2022-01-01T15:12:49.000Z
zipline/research/utils.py
yuanyichuangzhi/czipline
6bce0abd4772443547f44669c0adb2b5c63f64db
[ "Apache-2.0" ]
null
null
null
zipline/research/utils.py
yuanyichuangzhi/czipline
6bce0abd4772443547f44669c0adb2b5c63f64db
[ "Apache-2.0" ]
10
2019-05-18T10:58:55.000Z
2022-03-24T13:37:17.000Z
""" 辅助函数 """ from cswd.common.utils import ensure_list from .core import symbols, to_tdates def select_output_by(output, start=None, end=None, assets=None): """ 按时间及代码选择`pipeline`输出数据框 专用于研究环境下的run_pipeline输出结果分析 参数 ---- output : MultiIndex DataFrame pipeline输出结果 start : str 开始时间 end : str 结束时间 assets : 可迭代对象或str 股票代码 案例 ---- >>> # result 为运行`pipeline`输出结果 >>> select_output_by(result,'2018-04-23','2018-04-24',stock_codes=['000585','600871']) mean_10 2018-04-23 00:00:00+00:00 *ST东电(000585) 2.7900 *ST油服(600871) 2.0316 2018-04-24 00:00:00+00:00 *ST东电(000585) 2.7620 *ST油服(600871) 2.0316 """ nlevels = output.index.nlevels _, start, end = to_tdates(start, end) if nlevels != 2: raise ValueError('输入数据框只能是run_pipeline输出结果,MultiIndex DataFrame') if start: output = output.loc[start:] if end: output = output.loc[:end] if assets is not None: assets = symbols(assets) return output.loc[(slice(None), assets), :] else: return output
25.367347
90
0.552695
""" 辅助函数 """ from cswd.common.utils import ensure_list from .core import symbols, to_tdates def select_output_by(output, start=None, end=None, assets=None): """ 按时间及代码选择`pipeline`输出数据框 专用于研究环境下的run_pipeline输出结果分析 参数 ---- output : MultiIndex DataFrame pipeline输出结果 start : str 开始时间 end : str 结束时间 assets : 可迭代对象或str 股票代码 案例 ---- >>> # result 为运行`pipeline`输出结果 >>> select_output_by(result,'2018-04-23','2018-04-24',stock_codes=['000585','600871']) mean_10 2018-04-23 00:00:00+00:00 *ST东电(000585) 2.7900 *ST油服(600871) 2.0316 2018-04-24 00:00:00+00:00 *ST东电(000585) 2.7620 *ST油服(600871) 2.0316 """ nlevels = output.index.nlevels _, start, end = to_tdates(start, end) if nlevels != 2: raise ValueError('输入数据框只能是run_pipeline输出结果,MultiIndex DataFrame') if start: output = output.loc[start:] if end: output = output.loc[:end] if assets is not None: assets = symbols(assets) return output.loc[(slice(None), assets), :] else: return output
0
0
0
c9b3342238ce0d993691b8b895b75a947e901c00
3,162
py
Python
utils/data/processing/cityscapes.py
m-zayan/deeplearning_utils
cc40636f46d13a81cb1020fdcfae84e52456fa06
[ "MIT" ]
null
null
null
utils/data/processing/cityscapes.py
m-zayan/deeplearning_utils
cc40636f46d13a81cb1020fdcfae84e52456fa06
[ "MIT" ]
null
null
null
utils/data/processing/cityscapes.py
m-zayan/deeplearning_utils
cc40636f46d13a81cb1020fdcfae84e52456fa06
[ "MIT" ]
null
null
null
from typing import Callable, Dict, Any import numpy as np from . import _abstract from ...ops.reshape import aligned_with
18.172414
94
0.529096
from typing import Callable, Dict, Any import numpy as np from . import _abstract from ...ops.reshape import aligned_with def __np_as_type__(dtype): def to_nbits(func: Callable): def inner(*args, **kwargs) -> np.ndarray: return func(*args, **kwargs).astype(dtype) return inner return to_nbits class Meta(_abstract.Meta): @staticmethod def __multi_level_map__(ref: dict, _id: str, nlevels: int, sep: str = '_', _reversed=True, start=None, end=None, extra_value='') -> None: str_list = _id.split(sep) if _reversed: str_list = str_list[::-1] if start is None: start = 0 if end is None: end = len(str_list) str_list = str_list[start:end] str_list.append(extra_value) n = min(len(str_list), nlevels) def new_key(_ref, key, value): if key not in _ref: _ref[key] = value def normalize_ref(_ref, i=1): if i >= n: return k1 = str_list[i - 1] k2 = str_list[i] if i == n - 1: new_key(_ref[k1], k2, []) return new_key(_ref[k1], k2, {}) normalize_ref(_ref[k1], i + 1) def ref_value(_ref, i=0): if i >= n: return key = str_list[i] if i == n - 1: value = sep.join(str_list[i + 1:]) _ref[key].append(value) return ref_value(_ref[key], i + 1) if n > 0: new_key(ref, str_list[0], {}) normalize_ref(ref, 1) ref_value(ref, 0) @staticmethod def multi_level_id(id_list, nlevels) -> Dict[Any, Any]: ref = {} for i, iid in enumerate(id_list): Meta.__multi_level_map__(ref, iid, nlevels, start=1, end=None, extra_value=str(i)) return ref @staticmethod def ids_xy_format(data_ids, ann_ids): return data_ids, aligned_with(data_ids, ann_ids, ann_ids) class Annotation: @staticmethod def __digits_count__(a): if a == 0: return 1 return int(np.log10(a) + 1) @staticmethod @__np_as_type__(dtype=np.int32) def cv_load_fix(image): min_value = image.min() if min_value == 0: min_value = 1e-45 return image / min_value @staticmethod def segmentation_level(pixel_value): ndigits = Annotation.__digits_count__(pixel_value) if ndigits <= 2: return 0 elif ndigits <= 5: return 1 elif ndigits <= 7: return 2 else: raise ValueError('Invalid 
Annotation, ndigits > 7') @staticmethod def mask_segmentation_level(image, dtype=np.int32): mask = np.zeros_like(image, dtype=dtype) mview1d = mask.ravel() iview1d = image.ravel() size = len(iview1d) for i in range(size): mview1d[i] = Annotation.segmentation_level(iview1d[i]) return mask
2,612
353
69
0704bd67d154c320c0f8e0d4454e9c628972c408
2,068
py
Python
NodeGraphQt/widgets/actions.py
uclatommy/NodeGraphQt
aaf09fa6e7cd0745218e6039ee2befdab117daec
[ "MIT" ]
582
2018-03-04T10:25:32.000Z
2022-03-31T06:41:17.000Z
NodeGraphQt/widgets/actions.py
zhollosy/NodeGraphQt
c2ad7ce3ee31e348207f18636571bcb53ac8f5b9
[ "MIT" ]
156
2018-03-03T21:41:36.000Z
2022-03-29T02:14:42.000Z
NodeGraphQt/widgets/actions.py
zhollosy/NodeGraphQt
c2ad7ce3ee31e348207f18636571bcb53ac8f5b9
[ "MIT" ]
160
2018-03-09T10:29:42.000Z
2022-03-31T06:41:23.000Z
#!/usr/bin/python from Qt import QtCore, QtWidgets from .stylesheet import STYLE_QMENU # disable for issue #142 # def hideEvent(self, event): # super(BaseMenu, self).hideEvent(event) # for a in self.actions(): # if hasattr(a, 'node_id'): # a.node_id = None
28.722222
59
0.587524
#!/usr/bin/python from Qt import QtCore, QtWidgets from .stylesheet import STYLE_QMENU class BaseMenu(QtWidgets.QMenu): def __init__(self, *args, **kwargs): super(BaseMenu, self).__init__(*args, **kwargs) self.setStyleSheet(STYLE_QMENU) self.node_class = None self.graph = None # disable for issue #142 # def hideEvent(self, event): # super(BaseMenu, self).hideEvent(event) # for a in self.actions(): # if hasattr(a, 'node_id'): # a.node_id = None def get_menu(self, name, node_id=None): for action in self.actions(): menu = action.menu() if not menu: continue if menu.title() == name: return menu if node_id and menu.node_class: node = menu.graph.get_node_by_id(node_id) if isinstance(node, menu.node_class): return menu def get_menus(self, node_class): menus = [] for action in self.actions(): menu = action.menu() if menu.node_class: if issubclass(menu.node_class, node_class): menus.append(menu) return menus class GraphAction(QtWidgets.QAction): executed = QtCore.Signal(object) def __init__(self, *args, **kwargs): super(GraphAction, self).__init__(*args, **kwargs) self.graph = None self.triggered.connect(self._on_triggered) def _on_triggered(self): self.executed.emit(self.graph) def get_action(self, name): for action in self.qmenu.actions(): if not action.menu() and action.text() == name: return action class NodeAction(GraphAction): executed = QtCore.Signal(object, object) def __init__(self, *args, **kwargs): super(NodeAction, self).__init__(*args, **kwargs) self.node_id = None def _on_triggered(self): node = self.graph.get_node_by_id(self.node_id) self.executed.emit(self.graph, node)
1,350
255
150
18f3fbdbd98aa3ec3c80b36171574776f4dfd9c4
1,270
py
Python
src/common/status.py
cchienhao/data_collector
89546e6445f51ce29197c2bdc508d495a100ffb0
[ "Apache-2.0" ]
1
2016-02-05T06:54:15.000Z
2016-02-05T06:54:15.000Z
src/common/status.py
cchienhao/data_collector
89546e6445f51ce29197c2bdc508d495a100ffb0
[ "Apache-2.0" ]
null
null
null
src/common/status.py
cchienhao/data_collector
89546e6445f51ce29197c2bdc508d495a100ffb0
[ "Apache-2.0" ]
null
null
null
''' Created on Aug 29 2015 @author: kevin.chien@94301.ca ''' # server info status code OK = FlamesStatus(0, 'common.ok', 'OK.') # server error status code UNEXPECTED_EXCEPTION = FlamesStatus(1000001, 'common.unexpected_exception', 'Unknown Error.') UNKNOWN_RESOURCE = FlamesStatus(1000002, 'common.unknown_resource', 'Unknown Resource.') PARAMETER_VALIDATED_FAILED = FlamesStatus(1000003, 'common.parameter_validated_failed', 'Parameter validated error : {messages}') AUTH_FAILED = FlamesStatus(1000004, 'common.auth_failed', "Authorization failed : {messages}") JSON_PARSING_FAILED = FlamesStatus(1000004, 'common.json_parsing_failed', 'Parsing json string failed : {message}') USER_DUPLICATE = FlamesStatus(1080001, "user_duplicate", "'{user_id}' is existed") USER_NOT_FOUND = FlamesStatus(1080002, "user_not_found", "'{user_id}' is not found")
36.285714
115
0.687402
''' Created on Aug 29 2015 @author: kevin.chien@94301.ca ''' class FlamesStatus(object): def __init__(self, code, key, message): self.code = code self.key = key self.message = message def __eq__(self, other): if isinstance(other, FlamesStatus): return other.code == self.code return False def __ne__(self, other): return not self.__eq__(other) # server info status code OK = FlamesStatus(0, 'common.ok', 'OK.') # server error status code UNEXPECTED_EXCEPTION = FlamesStatus(1000001, 'common.unexpected_exception', 'Unknown Error.') UNKNOWN_RESOURCE = FlamesStatus(1000002, 'common.unknown_resource', 'Unknown Resource.') PARAMETER_VALIDATED_FAILED = FlamesStatus(1000003, 'common.parameter_validated_failed', 'Parameter validated error : {messages}') AUTH_FAILED = FlamesStatus(1000004, 'common.auth_failed', "Authorization failed : {messages}") JSON_PARSING_FAILED = FlamesStatus(1000004, 'common.json_parsing_failed', 'Parsing json string failed : {message}') USER_DUPLICATE = FlamesStatus(1080001, "user_duplicate", "'{user_id}' is existed") USER_NOT_FOUND = FlamesStatus(1080002, "user_not_found", "'{user_id}' is not found")
258
6
106
9bfb88480b31160b32d5aff1097a83c3f22006d7
3,125
py
Python
tests/unittests/pytorch_lightning/test_prepare.py
lf1-io/padl-extensions
f82c9591e07e30d770ea8ec4ae411d9b4838ac0a
[ "Apache-2.0" ]
1
2022-03-15T14:16:01.000Z
2022-03-15T14:16:01.000Z
tests/unittests/pytorch_lightning/test_prepare.py
lf1-io/padl-extensions
f82c9591e07e30d770ea8ec4ae411d9b4838ac0a
[ "Apache-2.0" ]
4
2022-03-07T13:54:01.000Z
2022-03-09T08:48:19.000Z
tests/unittests/pytorch_lightning/test_prepare.py
lf1-io/padl-extensions
f82c9591e07e30d770ea8ec4ae411d9b4838ac0a
[ "Apache-2.0" ]
null
null
null
import pytest import torch import tempfile import shutil import os from tests.material import utils import padl from padl import transform, identity, batch from padl_ext.pytorch_lightning.prepare import LightningModule try: import pytorch_lightning as pl from pytorch_lightning.callbacks import ModelCheckpoint except (ImportError, ModuleNotFoundError): pass @transform @transform @transform @pytest.mark.skipif((not utils.check_if_module_installed('pytorch_lightning')), reason="requires the torchserve and torch-model-archiver")
32.552083
89
0.67872
import pytest import torch import tempfile import shutil import os from tests.material import utils import padl from padl import transform, identity, batch from padl_ext.pytorch_lightning.prepare import LightningModule try: import pytorch_lightning as pl from pytorch_lightning.callbacks import ModelCheckpoint except (ImportError, ModuleNotFoundError): pass @transform class PadlEncoder(torch.nn.Module): def __init__(self): super().__init__() self.encoder = torch.nn.Sequential( torch.nn.Linear(28 * 28, 128), torch.nn.ReLU(), torch.nn.Linear(128, 3) ) def forward(self, x): embedding = self.encoder(x) return embedding @transform class PadlDecoder(torch.nn.Module): def __init__(self): super().__init__() self.decoder = torch.nn.Sequential( torch.nn.Linear(3, 128), torch.nn.ReLU(), torch.nn.Linear(128, 28 * 28) ) def forward(self, x): decoding = self.decoder(x) return decoding @transform def padl_loss(reconstruction, original): return torch.nn.functional.mse_loss(reconstruction, original) class MyModule(LightningModule): def configure_optimizers(self): optimizer = torch.optim.Adam(self.parameters(), lr=1e-4) return optimizer @pytest.mark.skipif((not utils.check_if_module_installed('pytorch_lightning')), reason="requires the torchserve and torch-model-archiver") class TestPadlLightning: @pytest.fixture(autouse=True, scope='class') def init(self, request): autoencoder = PadlEncoder() >> PadlDecoder() padl_training_model = ( identity >> batch >> transform(lambda x: x.view(x.size(0), -1)) >> autoencoder + identity >> padl_loss ) request.cls.transform_1 = padl_training_model request.cls.train_data = [torch.randn([28, 28])] * 16 request.cls.val_data = [torch.randn([28, 28])] * 8 def test_training_from_load(self): dirpath = tempfile.mkdtemp() model_dir = os.path.join(dirpath, 'model.padl') padl.save(self.transform_1, model_dir) trainer = pl.Trainer(max_epochs=4, default_root_dir=dirpath, log_every_n_steps=2) padl_lightning = MyModule(model_dir, trainer, batch_size=2, 
num_workers=0) padl_lightning.fit(train_data=self.train_data, val_data=self.val_data) shutil.rmtree(dirpath) def test_reload_checkpoint(self): dirpath = tempfile.mkdtemp() dirpath = padl.value(dirpath) model_dir = os.path.join(dirpath, 'tmp.padl') padl.save(self.transform_1, model_dir) trainer = pl.Trainer(max_epochs=4, default_root_dir=dirpath, log_every_n_steps=2) padl_lightning = MyModule(model_dir, trainer, batch_size=2, num_workers=0) padl_lightning.fit(train_data=self.train_data, val_data=self.val_data) loaded_padl_lightning = padl.load(padl_lightning.best_model_path) loaded_padl_lightning.fit(train_data=self.train_data, val_data=self.val_data) shutil.rmtree(dirpath)
2,136
171
243
a8240d96ece80865b53b5757db01662c80a211d1
1,314
py
Python
pyAnaf/console.py
agilegeeks/pyAnaf
764f7d8fb300135a3d98559b953e3ca2a4507216
[ "MIT" ]
null
null
null
pyAnaf/console.py
agilegeeks/pyAnaf
764f7d8fb300135a3d98559b953e3ca2a4507216
[ "MIT" ]
null
null
null
pyAnaf/console.py
agilegeeks/pyAnaf
764f7d8fb300135a3d98559b953e3ca2a4507216
[ "MIT" ]
null
null
null
# coding: utf-8 from __future__ import print_function import sys import os import datetime import pprint try: from pyAnaf.api import Anaf except: sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))) from pyAnaf.api import Anaf if __name__ == '__main__': main()
21.540984
86
0.614916
# coding: utf-8 from __future__ import print_function import sys import os import datetime import pprint try: from pyAnaf.api import Anaf except: sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))) from pyAnaf.api import Anaf class MyPrettyPrinter(pprint.PrettyPrinter): def format(self, object, context, maxlevels, level): #print (type(object)) #print (object) # if isinstance(object, str): # return (object.encode('utf8'), True, False) return pprint.PrettyPrinter.format(self, object, context, maxlevels, level) def print_err(*args, **kwargs): print(*args, file=sys.stderr, **kwargs) def main(): if len(sys.argv) < 2: print_err("usage: %s <cuis_separated_by_comma> <limit>\n" % sys.argv[0]) sys.exit(-255) limit = 5 cuis = sys.argv[1].split(',') try: limit = int(sys.argv[2]) except: pass today = datetime.date.today() anaf = Anaf() anaf.setLimit(limit) for cui in cuis: try: anaf.addCUI(int(cui), date=today) except Exception as e: print_err(e) anaf.Request() pp = MyPrettyPrinter(indent=4) for entry in anaf.result: pp.pprint(entry) if __name__ == '__main__': main()
884
23
95
c0ac5e48f23ca1dfac72643fbf846a6c9a2d0143
3,837
py
Python
binary_validation.py
jorgessanchez7/Global_Forecast_Validation
d3178acaa2a67801e832554a3f871b36c266fe3a
[ "MIT" ]
null
null
null
binary_validation.py
jorgessanchez7/Global_Forecast_Validation
d3178acaa2a67801e832554a3f871b36c266fe3a
[ "MIT" ]
null
null
null
binary_validation.py
jorgessanchez7/Global_Forecast_Validation
d3178acaa2a67801e832554a3f871b36c266fe3a
[ "MIT" ]
null
null
null
import pandas as pd df = pd.read_csv('/Users/student/Dropbox/PhD/2019 Summer/Dissertation_v7/Colombia/Stations_Selected_Colombia_RT.csv') IDs = df['Codigo'].tolist() COMIDs = df['COMID'].tolist() Names = df['Nombre'].tolist() Rivers = df['Corriente'].tolist() '''Get Historical Observed Water Levels''' observed_wl_dir = '/Users/student/Dropbox/PhD/2019 Summer/Dissertation_v7/Colombia/Data/Historical_Water_Level' waterLevelData = pd.read_csv('/Users/student/Dropbox/PhD/2019 Summer/Dissertation_v7/Colombia/Data/Historical_IDEAM/NIVEL.tr5.csv', index_col=1) waterLevelData.index = pd.to_datetime(waterLevelData.index) fechas = waterLevelData.index.tolist() estaciones = waterLevelData['ESTACION'].tolist() valores = waterLevelData['VALOR'].tolist() for id in IDs: waterLevel = [] dates = [] for i in range (0, len(estaciones)): if (id == estaciones[i]): waterLevel.append(valores[i]) dates.append(fechas[i]) pairs = [list(a) for a in zip(dates, waterLevel)] pd.DataFrame(pairs, columns= ['Datetime', 'oberved water level (cm)']).to_csv(observed_wl_dir + "/{}_historic_observed_water_level.csv".format(id), encoding='utf-8', header=True, index=0) print("{}_historic_observed_water_level.csv".format(id)) #Reading historic simulated and historic observed data historicSimulatedFiles = [] historicObservedFiles = [] historicWaterLevelFiles = [] for id, comid in zip(IDs, COMIDs): historicObservedFiles.append('/Users/student/Dropbox/PhD/2019 Summer/Dissertation_v7/Colombia/Data/Historical_Observed/' + str(id) + '_historic_observed.csv') historicSimulatedFiles.append('/Users/student/Dropbox/PhD/2019 Summer/Dissertation_v7/Colombia/Data/Historical_Simulated/' + str(comid) + '_historic_simulatied.csv') historicSimulatedFiles.append('/Users/student/Dropbox/PhD/2019 Summer/Dissertation_v7/Colombia/Data/Historical_Water_Level/' + str(id) + '_historic_observed_water_level.csv') for id, comid, name, rio, obsFile, simFile, wlFile in zip(IDs, COMIDs, Names, Rivers, historicObservedFiles, 
historicSimulatedFiles, historicWaterLevelFiles): print(id, comid, name, rio) '''Get Real Time Observed Water Levels''' observed_WL_dir = '/Users/student/Dropbox/PhD/2019 Summer/Dissertation_v7/Colombia/Data/Real_Time_Water_Level' daily_wl_dir = '/Users/student/Dropbox/PhD/2019 Summer/Dissertation_v7/Colombia/Data/Daily_Real_Time_Water_Level' waterLevelData = pd.read_csv('/Users/student/Dropbox/PhD/2019 Summer/Dissertation_v7/Colombia/Data/Real_Time_IDEAM/NIVEL-DHIME.csv', index_col=9) waterLevelData.index = pd.to_datetime(waterLevelData.index) fechas = waterLevelData.index.tolist() estaciones = waterLevelData['CodigoEstacion'].tolist() valores = waterLevelData['Valor'].tolist() # for id in IDs: # waterLevel = [] # dates = [] # # for i in range (0, len(estaciones)): # if (id == estaciones[i]): # waterLevel.append(valores[i]) # dates.append(fechas[i]) # # pairs = [list(a) for a in zip(dates, waterLevel)] # pd.DataFrame(pairs, columns= ['Datetime', 'oberved water level (cm)']).to_csv(observed_WL_dir + "/{}_real_time_observed_water_level.csv".format(id), encoding='utf-8', header=True, index=0) # print("{}_real_time_observed_water_level.csv".format(id)) # # data = pd.read_csv("/Users/student/Dropbox/PhD/2019 Summer/Dissertation_v7/Colombia/Data/Real_Time_Water_Level/{0}_real_time_observed_water_level.csv".format(id), index_col=0) # # data.index = pd.to_datetime(data.index) # # daily_df = data.groupby(data.index.strftime("%Y/%m/%d")).mean() # daily_df.index = pd.to_datetime(daily_df.index) # # daily_df.to_csv("/Users/student/Dropbox/PhD/2019 Summer/Dissertation_v7/Colombia/Data/Daily_Real_Time_Water_Level/{0}_real_time_observed_water_level.csv".format(id), index_label="Datetime") # # print(daily_df) #Defining the return periods for the historical Simulation
44.103448
192
0.765181
import pandas as pd df = pd.read_csv('/Users/student/Dropbox/PhD/2019 Summer/Dissertation_v7/Colombia/Stations_Selected_Colombia_RT.csv') IDs = df['Codigo'].tolist() COMIDs = df['COMID'].tolist() Names = df['Nombre'].tolist() Rivers = df['Corriente'].tolist() '''Get Historical Observed Water Levels''' observed_wl_dir = '/Users/student/Dropbox/PhD/2019 Summer/Dissertation_v7/Colombia/Data/Historical_Water_Level' waterLevelData = pd.read_csv('/Users/student/Dropbox/PhD/2019 Summer/Dissertation_v7/Colombia/Data/Historical_IDEAM/NIVEL.tr5.csv', index_col=1) waterLevelData.index = pd.to_datetime(waterLevelData.index) fechas = waterLevelData.index.tolist() estaciones = waterLevelData['ESTACION'].tolist() valores = waterLevelData['VALOR'].tolist() for id in IDs: waterLevel = [] dates = [] for i in range (0, len(estaciones)): if (id == estaciones[i]): waterLevel.append(valores[i]) dates.append(fechas[i]) pairs = [list(a) for a in zip(dates, waterLevel)] pd.DataFrame(pairs, columns= ['Datetime', 'oberved water level (cm)']).to_csv(observed_wl_dir + "/{}_historic_observed_water_level.csv".format(id), encoding='utf-8', header=True, index=0) print("{}_historic_observed_water_level.csv".format(id)) #Reading historic simulated and historic observed data historicSimulatedFiles = [] historicObservedFiles = [] historicWaterLevelFiles = [] for id, comid in zip(IDs, COMIDs): historicObservedFiles.append('/Users/student/Dropbox/PhD/2019 Summer/Dissertation_v7/Colombia/Data/Historical_Observed/' + str(id) + '_historic_observed.csv') historicSimulatedFiles.append('/Users/student/Dropbox/PhD/2019 Summer/Dissertation_v7/Colombia/Data/Historical_Simulated/' + str(comid) + '_historic_simulatied.csv') historicSimulatedFiles.append('/Users/student/Dropbox/PhD/2019 Summer/Dissertation_v7/Colombia/Data/Historical_Water_Level/' + str(id) + '_historic_observed_water_level.csv') for id, comid, name, rio, obsFile, simFile, wlFile in zip(IDs, COMIDs, Names, Rivers, historicObservedFiles, 
historicSimulatedFiles, historicWaterLevelFiles): print(id, comid, name, rio) '''Get Real Time Observed Water Levels''' observed_WL_dir = '/Users/student/Dropbox/PhD/2019 Summer/Dissertation_v7/Colombia/Data/Real_Time_Water_Level' daily_wl_dir = '/Users/student/Dropbox/PhD/2019 Summer/Dissertation_v7/Colombia/Data/Daily_Real_Time_Water_Level' waterLevelData = pd.read_csv('/Users/student/Dropbox/PhD/2019 Summer/Dissertation_v7/Colombia/Data/Real_Time_IDEAM/NIVEL-DHIME.csv', index_col=9) waterLevelData.index = pd.to_datetime(waterLevelData.index) fechas = waterLevelData.index.tolist() estaciones = waterLevelData['CodigoEstacion'].tolist() valores = waterLevelData['Valor'].tolist() # for id in IDs: # waterLevel = [] # dates = [] # # for i in range (0, len(estaciones)): # if (id == estaciones[i]): # waterLevel.append(valores[i]) # dates.append(fechas[i]) # # pairs = [list(a) for a in zip(dates, waterLevel)] # pd.DataFrame(pairs, columns= ['Datetime', 'oberved water level (cm)']).to_csv(observed_WL_dir + "/{}_real_time_observed_water_level.csv".format(id), encoding='utf-8', header=True, index=0) # print("{}_real_time_observed_water_level.csv".format(id)) # # data = pd.read_csv("/Users/student/Dropbox/PhD/2019 Summer/Dissertation_v7/Colombia/Data/Real_Time_Water_Level/{0}_real_time_observed_water_level.csv".format(id), index_col=0) # # data.index = pd.to_datetime(data.index) # # daily_df = data.groupby(data.index.strftime("%Y/%m/%d")).mean() # daily_df.index = pd.to_datetime(daily_df.index) # # daily_df.to_csv("/Users/student/Dropbox/PhD/2019 Summer/Dissertation_v7/Colombia/Data/Daily_Real_Time_Water_Level/{0}_real_time_observed_water_level.csv".format(id), index_label="Datetime") # # print(daily_df) #Defining the return periods for the historical Simulation
0
0
0
cde94d9729ccd961b34eeaae00dfaa61f34100e7
484
py
Python
solutions/python3/366.py
sm2774us/amazon_interview_prep_2021
f580080e4a6b712b0b295bb429bf676eb15668de
[ "MIT" ]
42
2020-08-02T07:03:49.000Z
2022-03-26T07:50:15.000Z
solutions/python3/366.py
ajayv13/leetcode
de02576a9503be6054816b7444ccadcc0c31c59d
[ "MIT" ]
null
null
null
solutions/python3/366.py
ajayv13/leetcode
de02576a9503be6054816b7444ccadcc0c31c59d
[ "MIT" ]
40
2020-02-08T02:50:24.000Z
2022-03-26T15:38:10.000Z
# Definition for a binary tree node. # class TreeNode: # def __init__(self, x): # self.val = x # self.left = None # self.right = None
26.888889
56
0.508264
# Definition for a binary tree node. # class TreeNode: # def __init__(self, x): # self.val = x # self.left = None # self.right = None class Solution: def findLeaves(self, root): res = [] def dfs(node): if not node: return -1 i = max(dfs(node.left), dfs(node.right)) + 1 try: res[i].append(node.val) except: res.append([node.val]) return i dfs(root) return res
280
-6
49
803db500d5206ea38a29770ec6d7410e96768f5b
15,094
py
Python
data_loader.py
AnticPan/Hierarchical-GEC
c657ed21be6c01209d0e03e8e742365c26550947
[ "MIT" ]
1
2021-08-24T06:06:38.000Z
2021-08-24T06:06:38.000Z
data_loader.py
AnticPan/Hierarchical-GEC
c657ed21be6c01209d0e03e8e742365c26550947
[ "MIT" ]
null
null
null
data_loader.py
AnticPan/Hierarchical-GEC
c657ed21be6c01209d0e03e8e742365c26550947
[ "MIT" ]
null
null
null
import random import time import numpy as np import os import difflib import torch from utils.structure import Example, Batch, Patch, lists2tensor, Token, BIO from utils.tokenizer import Tokenizer from typing import List, Union from tqdm import tqdm from collections import Counter import Levenshtein import math import copy
46.443077
112
0.567908
import random import time import numpy as np import os import difflib import torch from utils.structure import Example, Batch, Patch, lists2tensor, Token, BIO from utils.tokenizer import Tokenizer from typing import List, Union from tqdm import tqdm from collections import Counter import Levenshtein import math import copy class Dataset(object): def __init__(self, data_dir:str, batch_size:int, inference:bool, tokenizer:Tokenizer, discriminating:bool=False, detecting: bool = False, correcting:bool=False, dir_del:bool=False, only_wrong:bool=False, truncate: int = 512): # self._data_paths, self.total_example_num = devide_large_file(data_dir, cache_dir, single_pass) self.inference = inference self.truncate = truncate self.example_num = 0 self.wrong_example_ids = [] self.right_example_ids = [] self.tsv_examples, self.domain_words = self.load_tsv(data_dir) self.batch_size = batch_size self.tokenizer = tokenizer self.discriminating = discriminating self.detecting = detecting self.correcting = correcting self.dir_del = dir_del self.only_wrong = only_wrong def load_tsv(self, data_dir:str): if os.path.isdir(data_dir): data_paths = [os.path.join(data_dir, name) for name in os.listdir(data_dir)] elif os.path.isfile(data_dir): data_paths = [data_dir] else: raise ValueError(f"{data_dir} is neither a file nor a directory.") tsv_examples = [] domain_words = [] for data_path in sorted(data_paths): with open(data_path, 'r', encoding='utf-8') as f: for i, line in enumerate(f): sentences = line.strip("\n").split("\t") if self.inference: tsv_examples.append(sentences) else: assert len(sentences) >= 2, f"line-{i} error in {data_path}" is_correct = any(sentences[0] == sentence for sentence in sentences[1:]) if is_correct: self.right_example_ids.append(self.example_num) else: self.wrong_example_ids.append(self.example_num) # record the words from target sentence #domain_words.extend(sentences[1].split()) tsv_examples.append(sentences) self.example_num += 1 if self.inference: 
domain_words_freq = None else: domain_words_freq = Counter(domain_words) return tsv_examples, domain_words_freq def get_batch_num(self): return math.ceil(len(self.tsv_examples)/self.batch_size) def generator(self): if self.inference: example_ids = list(range(self.example_num)) else: # example_ids = list(range(self.example_num)) # random.shuffle(example_ids) if self.only_wrong: example_ids = copy.copy(self.wrong_example_ids) random.shuffle(example_ids) else: error_ratio = len(self.wrong_example_ids) / self.example_num batch_wrong_num = int(error_ratio * self.batch_size) batch_right_num = self.batch_size - batch_wrong_num wrong_example_ids = copy.copy(self.wrong_example_ids) right_example_ids = copy.copy(self.right_example_ids) random.shuffle(wrong_example_ids) random.shuffle(right_example_ids) example_ids = [] for ptr in range( max(math.ceil(len(wrong_example_ids) / batch_wrong_num), math.ceil(len(right_example_ids) / batch_right_num))): example_ids.extend(wrong_example_ids[ptr * batch_wrong_num:(ptr + 1) * batch_wrong_num]) example_ids.extend(right_example_ids[ptr * batch_right_num:(ptr + 1) * batch_right_num]) for ptr in range(math.ceil(len(example_ids) / self.batch_size)): ids = example_ids[ptr * self.batch_size:(ptr + 1) * self.batch_size] es = [] for idx in ids: tsv_example = self.tsv_examples[idx] source_sentence, *target_sentences = tsv_example es.append(self.make_example(source_sentence, target_sentences)) batch = self.make_batch(es) yield self.to_device(batch) @staticmethod def to_device(batch): if torch.cuda.is_available(): input_ids = batch.input_ids.cuda() attention_mask = batch.attention_mask.cuda() token_type_ids = batch.token_type_ids.cuda() if batch.target_tfs is not None: target_tfs = batch.target_tfs.cuda() else: target_tfs = None if batch.target_labels is not None: error_example_mask = batch.error_example_mask.cuda() target_labels = batch.target_labels.cuda() else: target_labels = None error_example_mask = None if batch.target_ids is not None: 
target_ids = batch.target_ids.cuda() else: target_ids = None return Batch(batch.examples, input_ids, attention_mask, token_type_ids, target_tfs, target_labels, error_example_mask, batch.target_starts, batch.target_ends, target_ids) else: return batch def make_batch(self, examples:List[Example]): pad_token_id = 0 input_ids = [] target_labels = [] target_starts = [[], []] target_ends = [[], []] target_ids = [] error_example_mask = [0] * len(examples) for i, example in enumerate(examples): input_tokens = example.tokens patches = example.patches ids = [] for token in input_tokens: ids.extend(token.ids) input_ids.append(ids) labels = [ BIO["O"]] * len(ids) if patches is not None: pre_type = None for patch in patches: if patch.start >= self.truncate or patch.end >= self.truncate: break error_example_mask[i] = 1 if patch.start == patch.end: # insert labels[patch.start] = BIO["B-M"] target_starts[0].append(i) target_starts[1].append(patch.start-1) target_ends[0].append(i) target_ends[1].append(patch.end) target_ids.append([]) for token in patch.tokens: target_ids[-1].extend(token.ids) target_ids[-1].append(self.tokenizer.PATCH_END_ID) elif patch.tokens[0].word == '': # delete labels[patch.start] = BIO["B-R"] labels[patch.start+1:patch.end] = [BIO["I-R"]]*(patch.end-patch.start-1) if self.dir_del: continue target_starts[0].append(i) target_starts[1].append(patch.start-1) target_ends[0].append(i) target_ends[1].append(patch.end) target_ids.append([]) for token in patch.tokens: target_ids[-1].extend(token.ids) target_ids[-1].append(self.tokenizer.PATCH_END_ID) else: # replace labels[patch.start] = BIO["B-WS"] labels[patch.start+1:patch.end] = [BIO["I-WS"]]*(patch.end-patch.start-1) target_starts[0].append(i) target_starts[1].append(patch.start-1) target_ends[0].append(i) target_ends[1].append(patch.end) target_ids.append([]) for token in patch.tokens: target_ids[-1].extend(token.ids) target_ids[-1].append(self.tokenizer.PATCH_END_ID) target_labels.append(labels) input_max_len 
= max([len(id_list) for id_list in input_ids]) input_ids = lists2tensor(input_ids, input_max_len, self.truncate, 0) attention_mask = torch.full( input_ids.size(), pad_token_id, dtype=torch.bool) attention_mask[torch.where(input_ids != pad_token_id)] = 1 token_type_ids = torch.zeros(input_ids.size(), dtype=torch.long) if self.discriminating: # 0 wrong, 1 correct target_tfs = torch.tensor([0 if value==1 else 1 for value in error_example_mask], dtype=torch.float) else: target_tfs = None if self.detecting: error_example_mask = torch.tensor(error_example_mask).bool() target_labels = lists2tensor( target_labels, input_max_len, self.truncate, -100) else: error_example_mask = None target_labels = None if self.correcting: if target_ids: target_max_len = max(len(id_list) for id_list in target_ids) target_ids = lists2tensor( target_ids, target_max_len, 20, -100) else: target_starts = None target_ends = None target_ids = None else: target_starts = None target_ends = None target_ids = None return Batch(examples, input_ids, attention_mask, token_type_ids, target_tfs, target_labels, error_example_mask, target_starts, target_ends, target_ids) def make_example(self, source_sentence:str, target_sentences:List[str]): source_words = list(filter(lambda x:x!='',source_sentence.strip().split(" "))) source_tokens, oovs = self.tokenizer.encode(source_words, is_patch=False) if len(target_sentences) == 0 or any(source_sentence == sentence for sentence in target_sentences): example = Example(source_tokens, None, oovs, target_sentences) else: if len(target_sentences) > 1: levenshtein_distances = [] for target_sentence in target_sentences: target_words = list(filter(lambda x:x!='',target_sentence.strip().split(" "))) distance = Levenshtein_distance_list(source_words, target_words) levenshtein_distances.append(distance) min_index = levenshtein_distances.index(min(levenshtein_distances)) target_sentence = target_sentences[min_index] else: target_sentence = target_sentences[0] patch_list = [] # 
target_words = [token.text for token in nlp(target_sentence)] target_words = list(filter(lambda x:x!='',target_sentence.strip().split(" "))) source_words = ['[CLS]'] + source_words + ['[SEP]'] target_words = ['[CLS]'] + target_words + ['[SEP]'] matcher = difflib.SequenceMatcher(None, source_words, target_words) ops = matcher.get_opcodes() # https://docs.python.org/3.8/library/difflib.html#difflib.SequenceMatcher.get_opcodes for tag, s1, s2, t1, t2 in ops: if tag == 'equal': continue start = source_tokens[s1].start end = source_tokens[s2].start if tag == 'replace': target_tokens, _ = self.tokenizer.encode(target_words[t1:t2], is_patch=True) elif tag == 'delete': target_tokens, _ = self.tokenizer.encode([], is_patch=True) elif tag == 'insert': target_tokens, _ = self.tokenizer.encode(target_words[t1:t2], is_patch=True) patch = Patch(start, end, target_tokens) # if not equal(source_tokens, patch): # patch_list.append(patch) patch_list.append(patch) example = Example(source_tokens, patch_list, oovs, target_sentences) return example def Levenshtein_distance_list(source, target): unique_elements = sorted(set(source + target)) char_list = [chr(i) for i in range(len(unique_elements))] if len(unique_elements) > len(char_list): raise Exception("too many elements") else: unique_element_map = {ele:char_list[i] for i, ele in enumerate(unique_elements)} source_str = ''.join([unique_element_map[ele] for ele in source]) target_str = ''.join([unique_element_map[ele] for ele in target]) distance = Levenshtein.distance(source_str, target_str) return distance def devide_large_file(data_dir, output_dir, no_split=False, max_line_num=100000): if os.path.isdir(data_dir): data_paths = [os.path.join(data_dir, name) for name in os.listdir(data_dir)] elif os.path.isfile(data_dir): data_paths = [data_dir] else: raise ValueError(f"{data_dir} is neither a file nor a directory.") line_nums = [] if not os.path.exists(output_dir): os.makedirs(output_dir) new_data_paths = [] for data_path in 
data_paths: line_num = 0 with open(data_path,"r",encoding="utf-8") as f: for line in f: line_num += 1 line_nums.append(line_num) if line_num == 0: raise ValueError("empty file: %s"%data_path) elif line_num <= max_line_num or no_split: new_data_paths.append(data_path) else: with open(data_path, 'r', encoding='utf-8') as fin: devide_num = line_num // max_line_num+1 for part in range(devide_num): _, file_name = os.path.split(data_path) new_path = os.path.join(output_dir, f"{file_name}.part.{part}") new_data_paths.append(new_path) with open(new_path, "w") as fout: for idx, line in enumerate(fin): fout.write(line) if idx == max_line_num-1: break return new_data_paths, sum(line_nums) def equal(source_tokens: List[Token], patch: Patch): # FIXME: what if the token is oov or something like? start = patch.start end = patch.end source_ids = [] for token in source_tokens: if token.start>=start and token.end <= end: source_ids.extend(token.ids) target_ids = [i for token in patch.tokens for i in token.ids] if source_ids == target_ids: return True return False
14,461
216
92
f431633ee4462edfe7c7f4f450d9d0fb74a2ba86
372
py
Python
game/drawing/background.py
samer25/Game-Tank
fa5b63f6b224e56205ab75b8aefcf557405e1ffe
[ "MIT" ]
null
null
null
game/drawing/background.py
samer25/Game-Tank
fa5b63f6b224e56205ab75b8aefcf557405e1ffe
[ "MIT" ]
1
2020-04-13T21:04:46.000Z
2020-04-13T22:17:12.000Z
game/drawing/background.py
samer25/Game-Tank
fa5b63f6b224e56205ab75b8aefcf557405e1ffe
[ "MIT" ]
null
null
null
from main_dir.drawing.background_loads import BackgroundLoads """getting the screen and setting background """ class BackGround: """taking the image""" view_background = BackgroundLoads().load_and_move() def redraw_game_window(self, screen): """setting background in the screen at position x y""" screen.blit(self.view_background, (0, 0))
28.615385
62
0.715054
from main_dir.drawing.background_loads import BackgroundLoads """getting the screen and setting background """ class BackGround: """taking the image""" view_background = BackgroundLoads().load_and_move() def redraw_game_window(self, screen): """setting background in the screen at position x y""" screen.blit(self.view_background, (0, 0))
0
0
0
81315fed9dd12dbca559c8e90236ab5b3cad7118
3,011
py
Python
yellowbrick/utils/decorators.py
souravsingh/yellowbrick
a5941a6c47fbe5264f3622bc15276ba618bbe1d0
[ "Apache-2.0" ]
20
2018-03-24T02:29:20.000Z
2022-03-03T05:01:40.000Z
yellowbrick/utils/decorators.py
souravsingh/yellowbrick
a5941a6c47fbe5264f3622bc15276ba618bbe1d0
[ "Apache-2.0" ]
4
2018-03-20T12:01:17.000Z
2019-04-07T16:02:19.000Z
yellowbrick/utils/decorators.py
souravsingh/yellowbrick
a5941a6c47fbe5264f3622bc15276ba618bbe1d0
[ "Apache-2.0" ]
5
2018-03-17T08:18:57.000Z
2019-11-15T02:20:20.000Z
# yellowbrick.utils.decorators # Decorators and descriptors for annotating yellowbrick library functions. # # Author: Benjamin Bengfort <bbengfort@districtdatalabs.com> # Created: Thu May 18 15:13:33 2017 -0400 # # Copyright (C) 2017 District Data Labs # For license information, see LICENSE.txt # # ID: decorators.py [79cd8cf] benjamin@bengfort.com $ """ Decorators and descriptors for annotating yellowbrick library functions. """ ########################################################################## ## Imports ########################################################################## from functools import wraps ########################################################################## ## Decorators ########################################################################## def memoized(fget): """ Return a property attribute for new-style classes that only calls its getter on the first access. The result is stored and on subsequent accesses is returned, preventing the need to call the getter any more. Parameters ---------- fget: function The getter method to memoize for subsequent access. See also -------- python-memoized-property `python-memoized-property <https://github.com/estebistec/python-memoized-property>`_ """ attr_name = '_{0}'.format(fget.__name__) @wraps(fget) return property(fget_memoized) class docutil(object): """ This decorator can be used to apply the doc string from another function to the decorated function. This is used for our single call wrapper functions who implement the visualizer API without forcing the user to jump through all the hoops. The docstring of both the visualizer and the single call wrapper should be identical, this decorator ensures that we only have to edit one doc string. 
Usage:: @docutil(Visualizer.__init__) def visualize(*args, **kwargs): pass The basic usage is that you instantiate the decorator with the function whose docstring you want to copy, then apply that decorator to the the function whose docstring you would like modified. Note that this decorator performs no wrapping of the target function. """ def __init__(self, func): """Create a decorator to document other functions with the specified function's doc string. Parameters ---------- func : function The function whose doc string we should decorate with """ self.doc = func.__doc__ def __call__(self, func): """Modify the decorated function with the stored doc string. Parameters ---------- func: function The function to apply the saved doc string to. """ func.__doc__ = self.doc return func
31.041237
92
0.608436
# yellowbrick.utils.decorators # Decorators and descriptors for annotating yellowbrick library functions. # # Author: Benjamin Bengfort <bbengfort@districtdatalabs.com> # Created: Thu May 18 15:13:33 2017 -0400 # # Copyright (C) 2017 District Data Labs # For license information, see LICENSE.txt # # ID: decorators.py [79cd8cf] benjamin@bengfort.com $ """ Decorators and descriptors for annotating yellowbrick library functions. """ ########################################################################## ## Imports ########################################################################## from functools import wraps ########################################################################## ## Decorators ########################################################################## def memoized(fget): """ Return a property attribute for new-style classes that only calls its getter on the first access. The result is stored and on subsequent accesses is returned, preventing the need to call the getter any more. Parameters ---------- fget: function The getter method to memoize for subsequent access. See also -------- python-memoized-property `python-memoized-property <https://github.com/estebistec/python-memoized-property>`_ """ attr_name = '_{0}'.format(fget.__name__) @wraps(fget) def fget_memoized(self): if not hasattr(self, attr_name): setattr(self, attr_name, fget(self)) return getattr(self, attr_name) return property(fget_memoized) class docutil(object): """ This decorator can be used to apply the doc string from another function to the decorated function. This is used for our single call wrapper functions who implement the visualizer API without forcing the user to jump through all the hoops. The docstring of both the visualizer and the single call wrapper should be identical, this decorator ensures that we only have to edit one doc string. 
Usage:: @docutil(Visualizer.__init__) def visualize(*args, **kwargs): pass The basic usage is that you instantiate the decorator with the function whose docstring you want to copy, then apply that decorator to the the function whose docstring you would like modified. Note that this decorator performs no wrapping of the target function. """ def __init__(self, func): """Create a decorator to document other functions with the specified function's doc string. Parameters ---------- func : function The function whose doc string we should decorate with """ self.doc = func.__doc__ def __call__(self, func): """Modify the decorated function with the stored doc string. Parameters ---------- func: function The function to apply the saved doc string to. """ func.__doc__ = self.doc return func
133
0
26
47aea7edbb1624ac0e6d24aad6bfb86d4c710c29
1,169
py
Python
manage.py
AymanKandil/OHAS
1f19a790a8e8c6a864c0dcb75cf127f591121d3b
[ "MIT" ]
null
null
null
manage.py
AymanKandil/OHAS
1f19a790a8e8c6a864c0dcb75cf127f591121d3b
[ "MIT" ]
null
null
null
manage.py
AymanKandil/OHAS
1f19a790a8e8c6a864c0dcb75cf127f591121d3b
[ "MIT" ]
null
null
null
import argparse from werkzeug.security import generate_password_hash import secrets import string from modules.Auth.auth import auth from modules.Auth.user_db import UserDatabase from app import app main()
29.974359
86
0.640719
import argparse from werkzeug.security import generate_password_hash import secrets import string from modules.Auth.auth import auth from modules.Auth.user_db import UserDatabase from app import app def main(): parser = argparse.ArgumentParser(description="Archive posters from impawards.com") parser.add_argument("--init", help="Starting pages number", action="store_true") args = parser.parse_args() if args.init: print(app.config["USERS_DB"]) db = UserDatabase(app.config["USERS_DB"]) resp = db.create_tables() if resp: print("Created database tables") else: print("Failed to insert database tables") return False alphabet = string.ascii_letters + string.digits password = ''.join(secrets.choice(alphabet) for i in range(12)) resp = db.insert_user( "admin", "admin", "user", "admin@ohas.com", generate_password_hash(password, method="sha256"), "", 24, 1 ) if resp: print(f"Inserted admin user with password {password}") else: print("Failed to insert admin user") main()
938
0
23
6d7e33ce60a96fcf9572c9733c365e3a8e958a32
12,085
py
Python
version/database/db.py
rabaarabaa/happypanda
e35fe4b32ea4fd5f373f226c4d6026e0d6d11e80
[ "Apache-2.0" ]
null
null
null
version/database/db.py
rabaarabaa/happypanda
e35fe4b32ea4fd5f373f226c4d6026e0d6d11e80
[ "Apache-2.0" ]
4
2020-11-10T01:43:50.000Z
2021-01-14T21:14:38.000Z
version/database/db.py
rabaarabaa/happypanda
e35fe4b32ea4fd5f373f226c4d6026e0d6d11e80
[ "Apache-2.0" ]
null
null
null
# """ # This file is part of Happypanda. # Happypanda is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 2 of the License, or # any later version. # Happypanda is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # You should have received a copy of the GNU General Public License # along with Happypanda. If not, see <http://www.gnu.org/licenses/>. # """ import logging import os import sqlite3 from sqlite3 import Connection from typing import Tuple, List, Callable, Union, Optional from . import db_constants log = logging.getLogger(__name__) log_i = log.info log_d = log.debug log_w = log.warning log_e = log.error log_c = log.critical STRUCTURE_SCRIPT_FUNCS: List[Callable[[], Tuple[str, List[str]]]] STRUCTURE_SCRIPT_FUNCS = [series_sql, chapters_sql, namespaces_sql, tags_sql, tags_mappings_sql, series_tags_mappings_sql, hashes_sql, list_sql, series_list_map_sql] STRUCTURE_SCRIPT = ''.join(f()[0] for f in STRUCTURE_SCRIPT_FUNCS) def global_db_convert(conn: sqlite3.dbapi2.Connection) -> sqlite3.dbapi2.Cursor: """ Takes care of converting tables and columns. Don't use this method directly. Use the add_db_revisions instead. 
""" log_i('Converting tables') c = conn.cursor() series, series_cols = series_sql() chapters, chapters_cols = chapters_sql() namespaces, namespaces_cols = namespaces_sql() tags, tags_cols = tags_sql() tags_mappings, tags_mappings_cols = tags_mappings_sql() series_tags_mappings, series_tags_mappings_cols = series_tags_mappings_sql() hashes, hashes_cols = hashes_sql() _list, list_cols = list_sql() series_list_map, series_list_map_cols = series_list_map_sql() t_d = { 'series': series_cols, 'chapters': chapters_cols, 'namespaces': namespaces_cols, 'tags': tags_cols, 'tags_mappings': tags_mappings_cols, 'series_tags_mappings': series_tags_mappings_cols, 'hashes': hashes_cols, 'list': list_cols, 'series_list_map': series_list_map_cols } log_d('Checking table structures') c.executescript(STRUCTURE_SCRIPT) conn.commit() log_d('Checking columns') for table in t_d: for col in t_d[table]: try: c.execute('ALTER TABLE {} ADD COLUMN {}'.format(table, col)) log_d('Added new column: {}'.format(col)) except sqlite3.OperationalError: log_d('Skipped column: {}'.format(col)) conn.commit() log_d('Committed DB changes') return c def add_db_revisions(old_db: Union[str, 'os.PathLike']) -> None: """ Adds specific DB revisions items. Note: pass a path to db """ log_i('Converting DB') conn = sqlite3.connect(old_db, check_same_thread=False) conn.row_factory = sqlite3.Row log_i('Converting tables and columns') c = global_db_convert(conn) log_d('Updating DB version') c.execute('UPDATE version SET version=? WHERE 1', (db_constants.CURRENT_DB_VERSION,)) conn.commit() conn.close() return def check_db_version(conn: sqlite3.dbapi2.Connection) -> bool: """Checks if DB version is allowed. 
Raises dialog if not.""" vs = "SELECT version FROM version" c = conn.cursor() c.execute(vs) log_d('Checking DB Version') db_vs = c.fetchone() db_constants.REAL_DB_VERSION = db_vs[0] if db_vs[0] not in db_constants.DB_VERSION: msg = "Incompatible database" log_c(msg) log_d('Local database version: {}\nProgram database version:{}'.format(db_vs[0], db_constants.CURRENT_DB_VERSION)) # ErrorQueue.put(msg) return False return True def init_db(path: Union[str, 'os.PathLike'] = db_constants.DB_PATH) -> Optional[sqlite3.dbapi2.Connection]: """Initialises the DB. Returns a sqlite3 connection, which will be passed to the db thread. """ # TODO: change saving version from float to string if os.path.isfile(path): conn = new_db(path) if path == db_constants.DB_PATH and not check_db_version(conn): return None else: create_db_path() conn = new_db(path, True) conn.isolation_level = None conn.execute("PRAGMA foreign_keys = on") return conn class DBBase: """The base DB class. _DB_CONN should be set at runtime on startup""" _DB_CONN: Optional[Connection] = None _AUTO_COMMIT = True _STATE = {'active': False} @classmethod def begin(cls) -> None: """Useful when modifying for a large amount of data""" if not cls._STATE['active']: cls._AUTO_COMMIT = False cls.execute("BEGIN TRANSACTION") cls._STATE['active'] = True # print("STARTED DB OPTIMIZE") @classmethod def end(cls) -> None: """Called to commit and end transaction""" if cls._STATE['active']: try: cls.execute("COMMIT") except sqlite3.OperationalError: pass cls._AUTO_COMMIT = True cls._STATE['active'] = False # print("ENDED DB OPTIMIZE") @classmethod def execute(cls, *args): """Same as cursor.execute""" if not cls._DB_CONN: raise db_constants.NoDatabaseConnection log_d('DB Query: {}'.format(args).encode(errors='ignore')) if cls._AUTO_COMMIT: try: with cls._DB_CONN: return cls._DB_CONN.execute(*args) except sqlite3.InterfaceError: return cls._DB_CONN.execute(*args) else: return cls._DB_CONN.execute(*args) @classmethod def executemany(cls, 
*args): """Same as cursor.executemany""" if not cls._DB_CONN: raise db_constants.NoDatabaseConnection log_d('DB Query: {}'.format(args).encode(errors='ignore')) if cls._AUTO_COMMIT: with cls._DB_CONN: return cls._DB_CONN.executemany(*args) else: c = cls._DB_CONN.executemany(*args) return c @classmethod @classmethod @classmethod if __name__ == '__main__': raise RuntimeError("Unit tests not yet implemented") # unit tests here!
31.146907
112
0.629458
# """ # This file is part of Happypanda. # Happypanda is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 2 of the License, or # any later version. # Happypanda is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # You should have received a copy of the GNU General Public License # along with Happypanda. If not, see <http://www.gnu.org/licenses/>. # """ import logging import os import sqlite3 from sqlite3 import Connection from typing import Tuple, List, Callable, Union, Optional from . import db_constants log = logging.getLogger(__name__) log_i = log.info log_d = log.debug log_w = log.warning log_e = log.error log_c = log.critical def hashes_sql() -> Tuple[str, List[str]]: col_list = [ 'hash_id INTEGER PRIMARY KEY', 'hash BLOB', 'series_id INTEGER', 'chapter_id INTEGER', 'page INTEGER', 'FOREIGN KEY(series_id) REFERENCES series(series_id) ON DELETE CASCADE', 'FOREIGN KEY(chapter_id) REFERENCES chapters(chapter_id) ON DELETE CASCADE', 'UNIQUE(hash, series_id, chapter_id, page)' ] sql = "CREATE TABLE IF NOT EXISTS hashes({});".format(",".join(col_list)) return sql, col_list def series_sql() -> Tuple[str, List[str]]: col_list = [ 'series_id INTEGER PRIMARY KEY', 'title TEXT', 'artist TEXT', 'profile BLOB', 'series_path BLOB', 'is_archive INTEGER', 'path_in_archive BLOB', 'info TEXT', 'fav INTEGER', 'type TEXT', 'link BLOB', 'language TEXT', 'rating INTEGER NOT NULL DEFAULT 0', 'status TEXT', 'pub_date TEXT', 'date_added TEXT', 'last_read TEXT', 'times_read INTEGER', 'exed INTEGER NOT NULL DEFAULT 0', 'db_v REAL', 'view INTEGER DEFAULT 1' ] sql = "CREATE TABLE IF NOT EXISTS series({});".format(",".join(col_list)) return sql, col_list def chapters_sql() -> Tuple[str, List[str]]: 
col_list = [ 'chapter_id INTEGER PRIMARY KEY', 'series_id INTEGER', "chapter_title TEXT NOT NULL DEFAULT ''", 'chapter_number INTEGER', 'chapter_path BLOB', 'pages INTEGER', 'in_archive INTEGER', 'FOREIGN KEY(series_id) REFERENCES series(series_id) ON DELETE CASCADE' ] sql = "CREATE TABLE IF NOT EXISTS chapters({});".format(",".join(col_list)) return sql, col_list def namespaces_sql() -> Tuple[str, List[str]]: col_list = [ 'namespace_id INTEGER PRIMARY KEY', 'namespace TEXT NOT NULL UNIQUE' ] sql = "CREATE TABLE IF NOT EXISTS namespaces({});".format(",".join(col_list)) return sql, col_list def tags_sql() -> Tuple[str, List[str]]: col_list = [ 'tag_id INTEGER PRIMARY KEY', 'tag TEXT NOT NULL UNIQUE' ] sql = "CREATE TABLE IF NOT EXISTS tags({});".format(",".join(col_list)) return sql, col_list def tags_mappings_sql() -> Tuple[str, List[str]]: col_list = [ 'tags_mappings_id INTEGER PRIMARY KEY', 'namespace_id INTEGER', 'tag_id INTEGER', 'FOREIGN KEY(namespace_id) REFERENCES namespaces(namespace_id) ON DELETE CASCADE', 'FOREIGN KEY(tag_id) REFERENCES tags(tag_id) ON DELETE CASCADE', 'UNIQUE(namespace_id, tag_id)' ] sql = "CREATE TABLE IF NOT EXISTS tags_mappings({});".format(",".join(col_list)) return sql, col_list def series_tags_mappings_sql() -> Tuple[str, List[str]]: col_list = [ 'series_id INTEGER', 'tags_mappings_id INTEGER', 'FOREIGN KEY(series_id) REFERENCES series(series_id) ON DELETE CASCADE', 'FOREIGN KEY(tags_mappings_id) REFERENCES tags_mappings(tags_mappings_id) ON DELETE CASCADE', 'UNIQUE(series_id, tags_mappings_id)' ] sql = "CREATE TABLE IF NOT EXISTS series_tags_map({});".format(",".join(col_list)) return sql, col_list def list_sql() -> Tuple[str, List[str]]: col_list = [ 'list_id INTEGER PRIMARY KEY', "list_name TEXT NOT NULL DEFAULT ''", 'list_filter TEXT', "profile BLOB", "type INTEGER DEFAULT 0", "enforce INTEGER DEFAULT 0", "regex INTEGER DEFAULT 0", "l_case INTEGER DEFAULT 0", "strict INTEGER DEFAULT 0", ] sql = "CREATE TABLE IF NOT EXISTS 
list({});".format(",".join(col_list)) return sql, col_list def series_list_map_sql() -> Tuple[str, List[str]]: col_list = [ 'list_id INTEGER NOT NULL', 'series_id INTEGER INTEGER NOT NULL', 'FOREIGN KEY(list_id) REFERENCES list(list_id) ON DELETE CASCADE', 'FOREIGN KEY(series_id) REFERENCES series(series_id) ON DELETE CASCADE', 'UNIQUE(list_id, series_id)' ] sql = "CREATE TABLE IF NOT EXISTS series_list_map({});".format(",".join(col_list)) return sql, col_list STRUCTURE_SCRIPT_FUNCS: List[Callable[[], Tuple[str, List[str]]]] STRUCTURE_SCRIPT_FUNCS = [series_sql, chapters_sql, namespaces_sql, tags_sql, tags_mappings_sql, series_tags_mappings_sql, hashes_sql, list_sql, series_list_map_sql] STRUCTURE_SCRIPT = ''.join(f()[0] for f in STRUCTURE_SCRIPT_FUNCS) def global_db_convert(conn: sqlite3.dbapi2.Connection) -> sqlite3.dbapi2.Cursor: """ Takes care of converting tables and columns. Don't use this method directly. Use the add_db_revisions instead. """ log_i('Converting tables') c = conn.cursor() series, series_cols = series_sql() chapters, chapters_cols = chapters_sql() namespaces, namespaces_cols = namespaces_sql() tags, tags_cols = tags_sql() tags_mappings, tags_mappings_cols = tags_mappings_sql() series_tags_mappings, series_tags_mappings_cols = series_tags_mappings_sql() hashes, hashes_cols = hashes_sql() _list, list_cols = list_sql() series_list_map, series_list_map_cols = series_list_map_sql() t_d = { 'series': series_cols, 'chapters': chapters_cols, 'namespaces': namespaces_cols, 'tags': tags_cols, 'tags_mappings': tags_mappings_cols, 'series_tags_mappings': series_tags_mappings_cols, 'hashes': hashes_cols, 'list': list_cols, 'series_list_map': series_list_map_cols } log_d('Checking table structures') c.executescript(STRUCTURE_SCRIPT) conn.commit() log_d('Checking columns') for table in t_d: for col in t_d[table]: try: c.execute('ALTER TABLE {} ADD COLUMN {}'.format(table, col)) log_d('Added new column: {}'.format(col)) except sqlite3.OperationalError: 
log_d('Skipped column: {}'.format(col)) conn.commit() log_d('Committed DB changes') return c def add_db_revisions(old_db: Union[str, 'os.PathLike']) -> None: """ Adds specific DB revisions items. Note: pass a path to db """ log_i('Converting DB') conn = sqlite3.connect(old_db, check_same_thread=False) conn.row_factory = sqlite3.Row log_i('Converting tables and columns') c = global_db_convert(conn) log_d('Updating DB version') c.execute('UPDATE version SET version=? WHERE 1', (db_constants.CURRENT_DB_VERSION,)) conn.commit() conn.close() return def create_db_path(db_path: Union[str, 'os.PathLike'] = db_constants.DB_PATH): head = os.path.split(db_path)[0] os.makedirs(head, exist_ok=True) if not os.path.isfile(db_path): with open(db_path, 'x') as _f: pass return db_path def check_db_version(conn: sqlite3.dbapi2.Connection) -> bool: """Checks if DB version is allowed. Raises dialog if not.""" vs = "SELECT version FROM version" c = conn.cursor() c.execute(vs) log_d('Checking DB Version') db_vs = c.fetchone() db_constants.REAL_DB_VERSION = db_vs[0] if db_vs[0] not in db_constants.DB_VERSION: msg = "Incompatible database" log_c(msg) log_d('Local database version: {}\nProgram database version:{}'.format(db_vs[0], db_constants.CURRENT_DB_VERSION)) # ErrorQueue.put(msg) return False return True def init_db(path: Union[str, 'os.PathLike'] = db_constants.DB_PATH) -> Optional[sqlite3.dbapi2.Connection]: """Initialises the DB. Returns a sqlite3 connection, which will be passed to the db thread. 
""" # TODO: change saving version from float to string def db_layout(cursor: sqlite3.dbapi2.Cursor) -> None: c = cursor # version c.execute(""" CREATE TABLE IF NOT EXISTS version(version REAL) """) c.execute("""INSERT INTO version(version) VALUES(?)""", (db_constants.CURRENT_DB_VERSION,)) log_i("Constructing database layout") log_d("Database Layout:\n\t{}".format(STRUCTURE_SCRIPT)) c.executescript(STRUCTURE_SCRIPT) def new_db(p: Union[str, 'os.PathLike'], new: bool = False) -> sqlite3.dbapi2.Connection: connection = sqlite3.connect(p, check_same_thread=False) connection.row_factory = sqlite3.Row if new: c = connection.cursor() db_layout(c) connection.commit() return connection if os.path.isfile(path): conn = new_db(path) if path == db_constants.DB_PATH and not check_db_version(conn): return None else: create_db_path() conn = new_db(path, True) conn.isolation_level = None conn.execute("PRAGMA foreign_keys = on") return conn class DBBase: """The base DB class. _DB_CONN should be set at runtime on startup""" _DB_CONN: Optional[Connection] = None _AUTO_COMMIT = True _STATE = {'active': False} @classmethod def begin(cls) -> None: """Useful when modifying for a large amount of data""" if not cls._STATE['active']: cls._AUTO_COMMIT = False cls.execute("BEGIN TRANSACTION") cls._STATE['active'] = True # print("STARTED DB OPTIMIZE") @classmethod def end(cls) -> None: """Called to commit and end transaction""" if cls._STATE['active']: try: cls.execute("COMMIT") except sqlite3.OperationalError: pass cls._AUTO_COMMIT = True cls._STATE['active'] = False # print("ENDED DB OPTIMIZE") @classmethod def execute(cls, *args): """Same as cursor.execute""" if not cls._DB_CONN: raise db_constants.NoDatabaseConnection log_d('DB Query: {}'.format(args).encode(errors='ignore')) if cls._AUTO_COMMIT: try: with cls._DB_CONN: return cls._DB_CONN.execute(*args) except sqlite3.InterfaceError: return cls._DB_CONN.execute(*args) else: return cls._DB_CONN.execute(*args) @classmethod def 
executemany(cls, *args): """Same as cursor.executemany""" if not cls._DB_CONN: raise db_constants.NoDatabaseConnection log_d('DB Query: {}'.format(args).encode(errors='ignore')) if cls._AUTO_COMMIT: with cls._DB_CONN: return cls._DB_CONN.executemany(*args) else: c = cls._DB_CONN.executemany(*args) return c @classmethod def commit(cls) -> None: cls._DB_CONN.commit() @classmethod def analyze(cls) -> None: cls._DB_CONN.execute('ANALYZE') @classmethod def close(cls) -> None: cls._DB_CONN.close() if __name__ == '__main__': raise RuntimeError("Unit tests not yet implemented") # unit tests here!
5,093
0
361
ed614a563d311fbd9e62fbe065c720a65f267c7e
2,788
py
Python
DestroyerIGN/captchaServer.py
Ravenclaw-OIer/ISML_auto_voter
9c53bd87530697d374163f571186542c3fc9734b
[ "MIT" ]
128
2020-11-16T09:28:17.000Z
2022-03-14T10:38:52.000Z
DestroyerIGN/captchaServer.py
Ravenclaw-OIer/ISML_auto_voter
9c53bd87530697d374163f571186542c3fc9734b
[ "MIT" ]
7
2020-11-27T14:45:19.000Z
2022-02-15T09:47:12.000Z
DestroyerIGN/captchaServer.py
Ravenclaw-OIer/ISML_auto_voter
9c53bd87530697d374163f571186542c3fc9734b
[ "MIT" ]
11
2020-12-11T12:24:38.000Z
2022-02-20T12:42:31.000Z
#coding:utf-8 portList=(8888,8889)#本服务器监听端口 import tornado.ioloop import tornado.web import numpy as np from time import sleep #import shutil #import os from random import random from io import BytesIO from PIL import Image from base64 import b64decode import utils model = utils.loadmodel('Model.json', 'Weights.h5') REFSTR = '0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ' ## def get(self): ## #允许浏览器直接访问,手动上传图片并识别。此功能仅用于测试和娱乐 ## self.write(''' ##<html> ## <head><title>Upload File</title></head> ## <body> ## <form action='file' enctype="multipart/form-data" method='post'> ## <input type='file' name='file'/><br/> ## <input type='submit' value='submit'/> ## </form> ## </body> ##</html> ##''') if __name__ == '__main__': from multiprocessing import Process length=len(portList) for port in range(length-1): p=Process(target=run_proc, args=(portList[port],)) p.start() run_proc(portList[length-1])
31.325843
79
0.581062
#coding:utf-8 portList=(8888,8889)#本服务器监听端口 import tornado.ioloop import tornado.web import numpy as np from time import sleep #import shutil #import os from random import random from io import BytesIO from PIL import Image from base64 import b64decode import utils model = utils.loadmodel('Model.json', 'Weights.h5') REFSTR = '0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ' def decaptcha(img): try: #upload_path=os.path.join(os.path.dirname(__file__),'files') #文件的暂存路径 #file_metas=self.request.files['file'] #提取表单中‘name’为‘file’的文件元数据 #print(file_metas) #print(self) #img0 = b64decode(self.get_argument('file')) #img = Image.open(BytesIO(img0)) img = Image.open(BytesIO(img)) img = 255-np.array(img.convert('L') ) #转化为灰度图 cnt,img = utils.splitimage(img) img = np.expand_dims(img, axis=-1)#到此result还是个图片 img = model.predict(img) img = np.argmax(img, axis=-1) img = ''.join(REFSTR[ch] for ch in img) ## for meta in file_metas: ## filename=meta['filename'] ## filepath=os.path.join(upload_path,filename) ## with open(filepath,'wb') as up: #有些文件需要已二进制的形式存储,实际中可以更改 ## up.write(meta['body']) if(random()<0.8568): img = img.lower() #self.write(img) #print(img) ## try: ## with open('%s.txt'%('未执行投票'+img),'wb') as imgSave: ## #imgSave.write(img0) ## pass ## except Exception: ## print ('存验证码出错 可能硬盘过载!') return img except Exception as e: return('!') print(e) class MainHandler(tornado.web.RequestHandler): ## def get(self): ## #允许浏览器直接访问,手动上传图片并识别。此功能仅用于测试和娱乐 ## self.write(''' ##<html> ## <head><title>Upload File</title></head> ## <body> ## <form action='file' enctype="multipart/form-data" method='post'> ## <input type='file' name='file'/><br/> ## <input type='submit' value='submit'/> ## </form> ## </body> ##</html> ##''') def get(self): self.write('Captcha server is ready!') async def post(self): #img = b64decode(self.get_argument('file')) img=self.request.body self.write(decaptcha(img)) def run_proc(port): app=tornado.web.Application([ (r'/',MainHandler), ]) app.listen(port) 
print('CaptchaServer@localhost:%d'%(port)) tornado.ioloop.IOLoop.instance().start() if __name__ == '__main__': from multiprocessing import Process length=len(portList) for port in range(length-1): p=Process(target=run_proc, args=(portList[port],)) p.start() run_proc(portList[length-1])
1,790
25
129
07acdd890b5325633a96917335e808b0db43398b
2,344
py
Python
Emall/base_api.py
syz247179876/e_mall
f94e39e091e098242342f532ae371b8ff127542f
[ "Apache-2.0" ]
7
2021-04-10T13:20:56.000Z
2022-03-29T15:00:29.000Z
Emall/base_api.py
syz247179876/E_mall
f94e39e091e098242342f532ae371b8ff127542f
[ "Apache-2.0" ]
9
2021-05-11T03:53:31.000Z
2022-03-12T00:58:03.000Z
Emall/base_api.py
syz247179876/E_mall
f94e39e091e098242342f532ae371b8ff127542f
[ "Apache-2.0" ]
2
2020-11-24T08:59:22.000Z
2020-11-24T14:10:59.000Z
# -*- coding: utf-8 -*- # @Time : 2020/11/21 上午11:27 # @Author : 司云中 # @File : base_api.py # @Software: Pycharm """ 通用API共享函数 """ from rest_framework import status from rest_framework.response import Response from rest_framework.generics import GenericAPIView from Emall.exceptions import SqlServerError from Emall.response_code import response_code def check_code(redis, validated_data): """校验验证码""" code_status = redis.check_code(validated_data.get('phone'), validated_data.get('code')) # 验证码错误或者过期 if not code_status: return Response(response_code.verification_code_error, status=status.HTTP_400_BAD_REQUEST) class BackendGenericApiView(GenericAPIView): """用于后台操作的通用API""" serializer_class = None serializer_delete_class = None def post(self, request): """添加""" serializer = self.get_serializer(data=request.data) serializer.is_valid(raise_exception=True) serializer.add() def get(self, request): """获取单个/多个记录""" pk = request.query_params.get(self.lookup_field, None) if pk: obj = self.get_obj(pk) serializer = self.get_serializer(instance=obj) return Response(serializer.data) else: queryset = self.get_queryset() serializer = self.get_serializer(instance=queryset, many=True) return Response({ 'count': queryset.count(), 'data': serializer.data }) def put(self, request): """修改""" serializer = self.get_serializer(data=request.data) serializer.is_valid(raise_exception=True) serializer.modify() def delete(self, request): """删除""" serializer = self.serializer_delete_class(data=request.data) if self.request.query_params.get('all', None) == 'true': result_num, _ = self.get_queryset().delete() else: serializer.is_valid(raise_exception=True) result_num, _ = serializer.delete() return result_num
30.441558
98
0.65273
# -*- coding: utf-8 -*- # @Time : 2020/11/21 上午11:27 # @Author : 司云中 # @File : base_api.py # @Software: Pycharm """ 通用API共享函数 """ from rest_framework import status from rest_framework.response import Response from rest_framework.generics import GenericAPIView from Emall.exceptions import SqlServerError from Emall.response_code import response_code def check_code(redis, validated_data): """校验验证码""" code_status = redis.check_code(validated_data.get('phone'), validated_data.get('code')) # 验证码错误或者过期 if not code_status: return Response(response_code.verification_code_error, status=status.HTTP_400_BAD_REQUEST) class BackendGenericApiView(GenericAPIView): """用于后台操作的通用API""" serializer_class = None serializer_delete_class = None def get_queryset(self): return self.serializer_class.Meta.model.objects.all() def get_obj(self, pk): try: return self.serializer_class.Meta.model.objects.get(pk=pk) except self.serializer_class.Meta.model.DoesNotExist: raise SqlServerError('数据不存在') def post(self, request): """添加""" serializer = self.get_serializer(data=request.data) serializer.is_valid(raise_exception=True) serializer.add() def get(self, request): """获取单个/多个记录""" pk = request.query_params.get(self.lookup_field, None) if pk: obj = self.get_obj(pk) serializer = self.get_serializer(instance=obj) return Response(serializer.data) else: queryset = self.get_queryset() serializer = self.get_serializer(instance=queryset, many=True) return Response({ 'count': queryset.count(), 'data': serializer.data }) def put(self, request): """修改""" serializer = self.get_serializer(data=request.data) serializer.is_valid(raise_exception=True) serializer.modify() def delete(self, request): """删除""" serializer = self.serializer_delete_class(data=request.data) if self.request.query_params.get('all', None) == 'true': result_num, _ = self.get_queryset().delete() else: serializer.is_valid(raise_exception=True) result_num, _ = serializer.delete() return result_num
263
0
54
0c04a654f297ea6cc5bad97d952631d9da83c061
2,532
py
Python
commands/slowmode.py
edazpotato/bot
e2d7742b9432ea49552d2b73aed040330b1b3b0b
[ "MIT" ]
null
null
null
commands/slowmode.py
edazpotato/bot
e2d7742b9432ea49552d2b73aed040330b1b3b0b
[ "MIT" ]
null
null
null
commands/slowmode.py
edazpotato/bot
e2d7742b9432ea49552d2b73aed040330b1b3b0b
[ "MIT" ]
null
null
null
""" MIT License Copyright (c) 2020 GamingGeek Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. """ from fire.converters import TextChannel, Category from discord.ext import commands import traceback import discord import typing
43.655172
133
0.697867
""" MIT License Copyright (c) 2020 GamingGeek Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
""" from fire.converters import TextChannel, Category from discord.ext import commands import traceback import discord import typing class Slowmode(commands.Cog): def __init__(self, bot): self.bot = bot @commands.command(name='slowmode') async def slowmodecmd(self, ctx, delay: int = 0, channel: typing.Union[TextChannel, Category] = None): if not channel: channel = ctx.channel if isinstance(channel, discord.CategoryChannel): channels = channel.channels.copy() for c in channels: try: await c.edit(slowmode_delay=delay) channels.remove(c) except Exception: pass if channels: return await ctx.error(f'Failed to set slowmode for {", ".join([c.name for c in channels])}') return await ctx.success(f'Successfully set slowmode for all channels in {channel}') try: await channel.edit(slowmode_delay=delay) return await ctx.success(f'Successfully set slowmode for {channel}') except Exception: return await ctx.error(f'Failed to set slowmode for {channel}') def setup(bot): try: bot.add_cog(Slowmode(bot)) bot.logger.info(f'$GREENLoaded $CYAN"slowmode" $GREENcommand!') except Exception as e: bot.logger.error(f'$REDError while adding command $CYAN"slowmode"', exc_info=e)
1,180
100
46
f408d2dd962ede10ebbdcaff4c81acd93a0057e9
8,155
py
Python
mamba/FastSubdirData.py
dmaljovec/mamba
599c2820086b5d69df8c73a98b2a4acf5dffe0df
[ "BSD-3-Clause" ]
null
null
null
mamba/FastSubdirData.py
dmaljovec/mamba
599c2820086b5d69df8c73a98b2a4acf5dffe0df
[ "BSD-3-Clause" ]
null
null
null
mamba/FastSubdirData.py
dmaljovec/mamba
599c2820086b5d69df8c73a98b2a4acf5dffe0df
[ "BSD-3-Clause" ]
null
null
null
# -*- coding: utf-8 -*- # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause from __future__ import absolute_import, division, print_function, unicode_literals import bz2 import sys, os from collections import defaultdict from contextlib import closing from errno import EACCES, ENODEV, EPERM from genericpath import getmtime, isfile import hashlib import json from logging import DEBUG, getLogger from mmap import ACCESS_READ, mmap from os.path import dirname, isdir, join, splitext import re from time import time import warnings from io import open as io_open from conda import CondaError from conda._vendor.auxlib.ish import dals from conda._vendor.auxlib.logz import stringify from conda._vendor.toolz import concat, take from conda.base.constants import CONDA_HOMEPAGE_URL, REPODATA_FN from conda.base.context import context from conda.common.compat import (ensure_binary, ensure_text_type, ensure_unicode, iteritems, string_types, text_type, with_metaclass) from conda.common.io import ThreadLimitedThreadPoolExecutor, as_completed from conda.common.url import join_url, maybe_unquote from conda.core.package_cache_data import PackageCacheData from conda.exceptions import (CondaDependencyError, CondaHTTPError, CondaUpgradeError, NotWritableError, UnavailableInvalidChannel) from conda.gateways.connection import (ConnectionError, HTTPError, InsecureRequestWarning, InvalidSchema, SSLError) from conda.gateways.connection.session import CondaSession from conda.gateways.disk import mkdir_p, mkdir_p_sudo_safe from conda.gateways.disk.delete import rm_rf from conda.gateways.disk.update import touch from conda.models.channel import Channel, all_channel_urls from conda.models.match_spec import MatchSpec from conda.models.records import PackageRecord from conda.core.subdir_data import * log = getLogger(__name__) stderrlog = getLogger('conda.stderrlog') REPODATA_PICKLE_VERSION = 28 MAX_REPODATA_VERSION = 1 REPODATA_HEADER_RE = b'"(_etag|_mod|_cache_control)":[ 
]?"(.*?[^\\\\])"[,\}\s]' # NOQA @with_metaclass(SubdirDataType)
39.396135
104
0.638872
# -*- coding: utf-8 -*- # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause from __future__ import absolute_import, division, print_function, unicode_literals import bz2 import sys, os from collections import defaultdict from contextlib import closing from errno import EACCES, ENODEV, EPERM from genericpath import getmtime, isfile import hashlib import json from logging import DEBUG, getLogger from mmap import ACCESS_READ, mmap from os.path import dirname, isdir, join, splitext import re from time import time import warnings from io import open as io_open from conda import CondaError from conda._vendor.auxlib.ish import dals from conda._vendor.auxlib.logz import stringify from conda._vendor.toolz import concat, take from conda.base.constants import CONDA_HOMEPAGE_URL, REPODATA_FN from conda.base.context import context from conda.common.compat import (ensure_binary, ensure_text_type, ensure_unicode, iteritems, string_types, text_type, with_metaclass) from conda.common.io import ThreadLimitedThreadPoolExecutor, as_completed from conda.common.url import join_url, maybe_unquote from conda.core.package_cache_data import PackageCacheData from conda.exceptions import (CondaDependencyError, CondaHTTPError, CondaUpgradeError, NotWritableError, UnavailableInvalidChannel) from conda.gateways.connection import (ConnectionError, HTTPError, InsecureRequestWarning, InvalidSchema, SSLError) from conda.gateways.connection.session import CondaSession from conda.gateways.disk import mkdir_p, mkdir_p_sudo_safe from conda.gateways.disk.delete import rm_rf from conda.gateways.disk.update import touch from conda.models.channel import Channel, all_channel_urls from conda.models.match_spec import MatchSpec from conda.models.records import PackageRecord from conda.core.subdir_data import * log = getLogger(__name__) stderrlog = getLogger('conda.stderrlog') REPODATA_PICKLE_VERSION = 28 MAX_REPODATA_VERSION = 1 REPODATA_HEADER_RE = b'"(_etag|_mod|_cache_control)":[ 
]?"(.*?[^\\\\])"[,\}\s]' # NOQA class SubdirDataType(type): def __call__(cls, channel, channel_idx, repodata_fn): assert channel.subdir assert not channel.package_filename assert type(channel) is Channel cache_key = channel.url(with_credentials=True) if not cache_key.startswith('file://') and cache_key in FastSubdirData._cache_: return FastSubdirData._cache_[cache_key] subdir_data_instance = super(SubdirDataType, cls).__call__(channel, channel_idx, repodata_fn) FastSubdirData._cache_[cache_key] = subdir_data_instance return subdir_data_instance @with_metaclass(SubdirDataType) class FastSubdirData(object): _cache_ = {} def __init__(self, channel, channel_idx, repodata_fn): assert channel.subdir if channel.package_filename: parts = channel.dump() del parts['package_filename'] channel = Channel(**parts) self.channel = channel self.channel_idx = channel_idx self.url_w_subdir = self.channel.url(with_credentials=False) self.url_w_credentials = self.channel.url(with_credentials=True) self.repodata_fn = repodata_fn self._loaded = False # if the cache doesn't change, this stays False self.cache_content_changed = False def reload(self): self._loaded = False self.load() return self @property def cache_path_base(self): return join( create_cache_dir(), splitext(cache_fn_url(self.url_w_credentials, self.repodata_fn))[0]) @property def url_w_repodata_fn(self): return self.url_w_subdir + '/' + self.repodata_fn @property def cache_path_json(self): return self.cache_path_base + '.json' @property def cache_path_solv(self): return self.cache_path_base + '.solv' def get_loaded_file_path(self): if sys.platform == 'win32': return self.cache_path_json if os.path.exists(self.cache_path_solv) and \ self.cache_content_changed == False and \ os.path.getmtime(self.cache_path_solv) > os.path.getmtime(self.cache_path_json): return self.cache_path_solv return self.cache_path_json def load(self): self._load() self._loaded = True return self def iter_records(self): if not self._loaded: self.load() 
return iter(self._package_records) def _load(self): try: mtime = getmtime(self.cache_path_json) except (IOError, OSError): log.debug("No local cache found for %s at %s", self.url_w_repodata_fn, self.cache_path_json) if context.use_index_cache or (context.offline and not self.url_w_subdir.startswith('file://')): log.debug("Using cached data for %s at %s forced. Returning empty repodata.", self.url_w_repodata_fn, self.cache_path_json) return { '_package_records': (), '_names_index': defaultdict(list), '_track_features_index': defaultdict(list), } else: mod_etag_headers = {} else: mod_etag_headers = read_mod_and_etag(self.cache_path_json) if context.use_index_cache: log.debug("Using cached repodata for %s at %s because use_cache=True", self.url_w_repodata_fn, self.cache_path_json) return if context.local_repodata_ttl > 1: max_age = context.local_repodata_ttl elif context.local_repodata_ttl == 1: max_age = get_cache_control_max_age(mod_etag_headers.get('_cache_control', '')) else: max_age = 0 timeout = mtime + max_age - time() if (timeout > 0 or context.offline) and not self.url_w_repodata_fn.startswith('file://'): log.debug("Using cached repodata for %s at %s. Timeout in %d sec", self.url_w_repodata_fn, self.cache_path_json, timeout) return log.debug("Local cache timed out for %s at %s", self.url_w_repodata_fn, self.cache_path_json) try: raw_repodata_str = fetch_repodata_remote_request(self.url_w_credentials, mod_etag_headers.get('_etag'), mod_etag_headers.get('_mod'), repodata_fn=self.repodata_fn) if not raw_repodata_str and self.repodata_fn != REPODATA_FN: raise UnavailableInvalidChannel(self.url_w_repodata_fn, 404) except UnavailableInvalidChannel: if self.repodata_fn != REPODATA_FN: self.repodata_fn = REPODATA_FN return self._load() else: raise except Response304ContentUnchanged: log.debug("304 NOT MODIFIED for '%s'. Updating mtime and loading from disk", self.url_w_repodata_fn) # Do not touch here, so we can compare the creation date of solv vs. 
json file # for mamba, and regenerate the solv file if updated from conda. # touch(self.cache_path_json) return else: if not isdir(dirname(self.cache_path_json)): mkdir_p(dirname(self.cache_path_json)) try: with io_open(self.cache_path_json, 'w') as fh: fh.write(raw_repodata_str or '{}') self.cache_content_changed = True except (IOError, OSError) as e: if e.errno in (EACCES, EPERM): raise NotWritableError(self.cache_path_json, e.errno, caused_by=e) else: raise
5,570
357
72
1cdd6ff900ddffaaf978064ae475159f69ceb0b1
14,328
py
Python
webenmr/controllers/xplor.py
andreagia/WEBNMR
512a8cc04cf69300796585feae722614501389a9
[ "Apache-2.0" ]
null
null
null
webenmr/controllers/xplor.py
andreagia/WEBNMR
512a8cc04cf69300796585feae722614501389a9
[ "Apache-2.0" ]
null
null
null
webenmr/controllers/xplor.py
andreagia/WEBNMR
512a8cc04cf69300796585feae722614501389a9
[ "Apache-2.0" ]
null
null
null
from lxml import etree import logging import random import os import shutil import types, cgi from pylons import config from pylons import request, response, session, tmpl_context as c, url from pylons.controllers.util import abort, redirect from webenmr.model import Projects, Calculations, Jobs, CalculationTipology, Users from webenmr.model.meta import Session from webenmr.lib.base import * from webenmr.lib.base import BaseController, render from webenmr.lib.xplor_analysis import * from webenmr.lib.make_xplor import * from webenmr.lib.JobManagementSystem import * log = logging.getLogger(__name__)
43.418182
165
0.542155
from lxml import etree import logging import random import os import shutil import types, cgi from pylons import config from pylons import request, response, session, tmpl_context as c, url from pylons.controllers.util import abort, redirect from webenmr.model import Projects, Calculations, Jobs, CalculationTipology, Users from webenmr.model.meta import Session from webenmr.lib.base import * from webenmr.lib.base import BaseController, render from webenmr.lib.xplor_analysis import * from webenmr.lib.make_xplor import * from webenmr.lib.JobManagementSystem import * log = logging.getLogger(__name__) class XplorController(BaseController): def __before__(self): c.page_base = u'Xplor-NIH' c.page_title = u'Xplor-NIH' random.seed() if session.get('DIR_CACHE', None) is None: session['DIR_CACHE'] = os.path.join(config['app_conf']['xplor_data'], str(random.randint(100000000, 999999999))) session.save() elif 'xplor_data' not in session.get('DIR_CACHE'): session['DIR_CACHE'] = os.path.join(config['app_conf']['xplor_data'], str(random.randint(100000000, 999999999))) session.save() if not os.path.isdir(session.get('DIR_CACHE')): os.makedirs(session.get('DIR_CACHE')) def index(self): return render('/calculations/xplor_calc.mako') def helpinfo(self): field = request.POST.get('field') info = open(os.path.join(config['app_conf']['properties'], "xplor-info.properties")) l = 'pipppo' for line in info: if line.startswith(field): l = line.split('=')[1] break return l def isCalcExist(self): owner = Session.query(Users).get(session['REMOTE_USER']) proj = request.POST.get('proj') calc = request.POST.get('calc') print proj, calc cname = Session.query(Calculations).filter(and_(Calculations.name==calc, Calculations.removed == False, Calculations.project_id==int(proj))).first() if cname: return "no" else: return "ok" def removefile(self): f = request.POST.get("file") def uploadfile(self): filepath = os.path.join(config['app_conf']['xplor_data'], session['DIR_CACHE']) field = 
request.POST.get("namefield") f = request.POST.get(field) self.saveFile(f, filepath) def saveFile(self, fname, where, name=None): if name: namef = name else: namef = fname.filename.split('\\')[0] namef.replace(" ", "_") #f = fname.filename.split('\\') rfname = os.path.join(where, namef) permanent_file = open(rfname, 'wb') shutil.copyfileobj(fname.file, permanent_file) fname.file.close() permanent_file.close() return rfname def submitXplor(self): basepath = os.path.join(config['app_conf']['xplor_data'], session['DIR_CACHE']) root_st = etree.Element("xplor-calculation") xplor_prj_id = request.POST.get("xplor-prj_id") proj = Session.query(Projects).filter(and_(Projects.owner_id == session['REMOTE_USER'], Projects.removed == False, Projects.id == int(xplor_prj_id))).first() proj_name = proj.name te = {} te["name"] = proj_name etree.SubElement(root_st, "project", te) xplor_namecalc = request.POST.get("xplor-namecalc") te = {} te["name"] = xplor_namecalc etree.SubElement(root_st, "calculation", te) xplor_seqfile = request.POST.getall("xplor-seqfile") xplor_residuenum = request.POST.getall("xplor-residuenum") xplor_chainname = request.POST.getall("xplor-chainname") xplor_posseq = request.POST.getall("xplor-posseq") te={} idx = 0 for it in xplor_seqfile: te["path"] = self.saveFile(it, basepath) te["numfres"] = xplor_residuenum[idx] te["chain"] = xplor_chainname[idx] te["pos"] = xplor_posseq[idx] idx += 1 etree.SubElement(root_st, "sequence" , te ) if request.POST.get("nonstdres") == 'yes': xplor_nonstdres_top = request.POST.getall("xplor-nonstdrestop") xplor_nonstdres_par = request.POST.getall("xplor-nonstdrespar") if len(xplor_nonstdres_top) >= 1: te = {} for i,e in enumerate(xplor_nonstdres_top): te["parameter_path"] = self.saveFile(xplor_nonstdres_par[i], basepath) te["topology_path"] = self.saveFile(e, basepath) etree.SubElement(root_st, "nonstdresidues", te) if request.POST.get("parcenter") == 'yes': xplor_m_atomname = request.POST.getall("m_atom_name") 
xplor_m_element = request.POST.getall("m_element") xplor_m_res_name = request.POST.getall("m_res_name") xplor_m_res_num = request.POST.getall("m_res_num") xplor_m_charge = request.POST.getall("m_charge") xplor_m_rvdw = request.POST.getall("m_rvdw") xplor_m_eps = request.POST.getall("m_eps") xplor_m_bind_res_str = request.POST.getall("m_bind_res") print xplor_m_bind_res_str met_res_diz = {} for br in xplor_m_bind_res_str: br_list = br.split(":::") met_res_diz[br_list[0]] = br_list[1] te = {} idx = 0 for it in xplor_m_atomname: if xplor_m_element[idx] != 'n/a': te["type"] = "metal" else: te["type"] = "cofactor" te["atom_name"] = it te["element"] = xplor_m_element[idx] te["res_name"] = xplor_m_res_name[idx] te["res_num"] = xplor_m_res_num[idx] te["charge"] = xplor_m_charge[idx] te["rvdw"] = xplor_m_rvdw[idx] te["epsilon"] = xplor_m_eps[idx] met_el = etree.SubElement(root_st, "metal", te) if met_res_diz.has_key(xplor_m_res_num[idx]): te2 = {} xplor_m_bind_res_list = met_res_diz.get(xplor_m_res_num[idx]).split(',') for multi in xplor_m_bind_res_list: multi_splitted = multi.split() te2["resnum"] = multi_splitted[0].strip() te2["atom_name"] = multi_splitted[1].strip() if len(multi_splitted) == 4: te2["segid"] = multi_splitted[2].strip() else: te2["segid"] = "" te2["distance"] = multi_splitted[2].strip() etree.SubElement(met_el, "restraint", te2) idx += 1 if request.POST.get("cof") == 'yes': xplor_cofpdb = request.POST.getall("xplor-cofpdb") xplor_coftop = request.POST.getall("xplor-coftop") xplor_cofpar = request.POST.getall("xplor-cofpar") xplor_poscof = request.POST.getall("xplor-poscof") if 'hpatch-name' in request.params: patchname = request.POST.getall("hpatch-name") patchres1 = request.POST.getall("hpatch-res1") patchres1chain = request.POST.getall("hpatch-res1chain") patchres2 = request.POST.getall("hpatch-res2") patchres2chain = request.POST.getall("hpatch-res2chain") if len(patchres1chain) == 0: patchres1chain = ['']*len(patchname) patchres2chain = 
['']*len(patchname) for i,pn in enumerate(patchname): #print i,pn #print patchres1[i] #print patchres2[i] te = {} te["type"] = pn te["resnuma"] = patchres1[i].split(',')[0].strip() if len(patchres1[i].split(',')) > 1: te["chaina"] = patchres1chain[i] else: te["chaina"] = " " te["resnumb"] = patchres2[i].split(',')[0].strip() if len(patchres2[i].split(',')) > 1: te["chainb"] = patchres2chain[i] else: te["chainb"] = " " etree.SubElement(root_st, "patchcof", te) te = {} idx = 0 for it in xplor_cofpdb: te["pdb_path"] = self.saveFile(it, basepath) te["topology_path"] = self.saveFile(xplor_coftop[idx], basepath) te["par_path"] = self.saveFile(xplor_cofpar[idx], basepath) te["pos"] = xplor_poscof[idx] etree.SubElement(root_st, "cofactor", te) idx += 1 if request.POST.get("dis") == 'yes': #xplor_disatoma = request.POST.getall("xplor-disatoma") xplor_disresnamea = request.POST.getall("xplor-disresnuma") xplor_dischaina = request.POST.getall("chain-selector-disa") xplor_dischainb = request.POST.getall("chain-selector-disb") xplor_disresnameb = request.POST.getall("xplor-disresnumb") if len(xplor_dischaina) == 0: xplor_dischaina = ['']*len(xplor_disresnamea) xplor_dischainb = ['']*len(xplor_disresnamea) te = {} idx = 0 for it in xplor_disresnamea: #te["atoma"] = it te["resnuma"] = it te["segida"] = xplor_dischaina[idx] te["resnumb"] = xplor_disresnameb[idx] te["segidb"] = xplor_dischainb[idx] etree.SubElement(root_st, "disulfide", te) idx += 1 if request.POST.get("phis") == 'yes': xplor_phisresnum = request.POST.getall("xplor-phisresnum") xplor_phischain = request.POST.getall("chain-selector-phis") xplor_phistype = request.POST.getall("xplor-phistype") if len(xplor_phischain) == 0: xplor_phischain = ['']*len(xplor_phisresnum) te = {} idx = 0 for it in xplor_phisresnum: te["resnum"] = it te["segid"] = xplor_phischain[idx] te["type"] = xplor_phistype[idx] etree.SubElement(root_st, "histidine", te) idx += 1 xplor_noefile = request.POST.getall("xplor-noefile") if 
len(xplor_noefile) > 1 or (len(xplor_noefile) == 1 and xplor_noefile[0] != ''): te = {} for it in xplor_noefile: te["path"] = self.saveFile(it, basepath) etree.SubElement(root_st, "noe", te) xplor_dihfile = request.POST.getall("xplor-dihfile") if len(xplor_dihfile) > 1 or (len(xplor_dihfile) == 1 and xplor_dihfile[0] != ''): te = {} for it in xplor_dihfile: te["path"] = self.saveFile(it, basepath) etree.SubElement(root_st, "dih", te) xplor_rdc_metal = request.POST.getall("xplor-rdc-metal") xplor_rdc_ax = request.POST.getall("xplor-rdctnsax") xplor_rdc_rh = request.POST.getall("xplor-rdctnsrh") xplor_rdc_file = request.POST.getall("xplor-rdcfile") te = {} idx = 0; if len(xplor_rdc_file) > 1 or (len(xplor_rdc_file) == 1 and xplor_rdc_file[0] != ''): for it in xplor_rdc_file: te["res_num"] = xplor_rdc_metal[idx] te["path"] = self.saveFile(it, basepath) te["ax"] = xplor_rdc_ax[idx] te["rh"] = xplor_rdc_rh[idx] etree.SubElement(root_st, "rdc", te) idx += 1 xplor_pcs_metal = request.POST.getall("xplor-pcs-metal") xplor_pcs_ax = request.POST.getall("xplor-pcstnsax") xplor_pcs_rh = request.POST.getall("xplor-pcstnsrh") xplor_pcs_file = request.POST.getall("xplor-pcsfile") te = {} idx = 0; if len(xplor_pcs_file) > 1 or (len(xplor_pcs_file) == 1 and xplor_pcs_file[0] != ''): for it in xplor_pcs_file: te["res_num"] = xplor_pcs_metal[idx] te["path"] = self.saveFile(it, basepath) te["ax"] = xplor_pcs_ax[idx] te["rh"] = xplor_pcs_rh[idx] etree.SubElement(root_st, "pcs", te) idx += 1 xplor_nrostruct = request.POST.get("xplor-nrostruct") te = {} te["nro_struct"] = xplor_nrostruct etree.SubElement(root_st, "output", te) print etree.tostring(root_st, pretty_print=True) make_xplor(basepath, os.path.join(config['app_conf']['xplor_templ'], "template_xplor_pm.inp"), root_st) jms = JobManagementSystem(basepath, root_st, session['PORTAL']) ret = jms.setupJobs(xplor_nrostruct) if ret: h.redirect('/jobs/show/all') h.redirect('/jobs/show/all') def selectPatch(self): basepath = 
os.path.join(config['app_conf']['xplor_data'], session['DIR_CACHE']) root_st = etree.Element("xplor-patch") te = {} numpos = int(request.POST.get("pos")) - 1 pdbfile = request.POST.getall("xplor-cofpdb") te["pdb"] = self.saveFile(pdbfile[numpos], basepath) topfile = request.POST.getall("xplor-coftop") te["top"] = self.saveFile(topfile[numpos], basepath) etree.SubElement(root_st, "filetopatch", te) patch_info = extract_patch_top(root_st) ret = '' if isinstance(patch_info, types.DictType ): for k in patch_info.keys(): info_list = patch_info[k].split("-") ret += k+','+','.join(info_list) ret = ret[:-1] + '::' else: ret = patch_info return ret
13,386
17
310
9c5a68a883a88ef8d7d76077b1df4d21455cf9d3
1,633
py
Python
lib/galaxy/model/migrate/versions/0135_add_library_tags.py
yvanlebras/galaxy
6b8489ca866825bcdf033523120a8b24ea6e6342
[ "CC-BY-3.0" ]
null
null
null
lib/galaxy/model/migrate/versions/0135_add_library_tags.py
yvanlebras/galaxy
6b8489ca866825bcdf033523120a8b24ea6e6342
[ "CC-BY-3.0" ]
2
2017-05-18T16:12:55.000Z
2022-03-08T12:08:43.000Z
lib/galaxy/model/migrate/versions/0135_add_library_tags.py
yvanlebras/galaxy
6b8489ca866825bcdf033523120a8b24ea6e6342
[ "CC-BY-3.0" ]
null
null
null
""" This migration script adds support for storing tags in the context of a dataset in a library """ import logging from sqlalchemy import ( Column, ForeignKey, Integer, MetaData, Table, ) # Need our custom types, but don't import anything else from model from galaxy.model.custom_types import TrimmedString log = logging.getLogger(__name__) metadata = MetaData() LibraryDatasetDatasetAssociationTagAssociation_table = Table( "library_dataset_dataset_association_tag_association", metadata, Column("id", Integer, primary_key=True), Column( "library_dataset_dataset_association_id", Integer, ForeignKey("library_dataset_dataset_association.id"), index=True, ), Column("tag_id", Integer, ForeignKey("tag.id"), index=True), Column("user_tname", TrimmedString(255), index=True), Column("value", TrimmedString(255), index=True), Column("user_value", TrimmedString(255), index=True), Column("user_id", Integer, ForeignKey("galaxy_user.id"), index=True), )
27.677966
92
0.73117
""" This migration script adds support for storing tags in the context of a dataset in a library """ import logging from sqlalchemy import ( Column, ForeignKey, Integer, MetaData, Table, ) # Need our custom types, but don't import anything else from model from galaxy.model.custom_types import TrimmedString log = logging.getLogger(__name__) metadata = MetaData() LibraryDatasetDatasetAssociationTagAssociation_table = Table( "library_dataset_dataset_association_tag_association", metadata, Column("id", Integer, primary_key=True), Column( "library_dataset_dataset_association_id", Integer, ForeignKey("library_dataset_dataset_association.id"), index=True, ), Column("tag_id", Integer, ForeignKey("tag.id"), index=True), Column("user_tname", TrimmedString(255), index=True), Column("value", TrimmedString(255), index=True), Column("user_value", TrimmedString(255), index=True), Column("user_id", Integer, ForeignKey("galaxy_user.id"), index=True), ) def upgrade(migrate_engine): metadata.bind = migrate_engine print(__doc__) metadata.reflect() try: LibraryDatasetDatasetAssociationTagAssociation_table.create() except Exception: log.exception("Creating library_dataset_association_tag_association table failed.") def downgrade(migrate_engine): metadata.bind = migrate_engine metadata.reflect() try: LibraryDatasetDatasetAssociationTagAssociation_table.drop() except Exception: log.exception("Dropping library_dataset_association_tag_association table failed.")
537
0
46
621bf6e7e7084f4fb8c71412d057f29ec2921329
7,780
py
Python
tests/testactions.py
HeliumEdu/helium-cli
926018a09b3e868a894a32295fdfcb2d134c9d41
[ "MIT" ]
2
2019-01-23T19:08:37.000Z
2021-05-27T23:25:48.000Z
tests/testactions.py
HeliumEdu/helium-cli
926018a09b3e868a894a32295fdfcb2d134c9d41
[ "MIT" ]
17
2018-04-20T14:45:29.000Z
2020-03-16T19:03:31.000Z
tests/testactions.py
HeliumEdu/helium-cli
926018a09b3e868a894a32295fdfcb2d134c9d41
[ "MIT" ]
2
2018-05-04T21:50:29.000Z
2018-06-01T19:29:13.000Z
import os from unittest import mock from heliumcli import utils, settings from heliumcli.main import main from tests.helpers.commonhelper import given_config_exists from .helpers import testcase, commonhelper __author__ = "Alex Laird" __copyright__ = "Copyright 2018, Helium Edu" __version__ = "1.6.0"
39.492386
115
0.64473
import os from unittest import mock from heliumcli import utils, settings from heliumcli.main import main from tests.helpers.commonhelper import given_config_exists from .helpers import testcase, commonhelper __author__ = "Alex Laird" __copyright__ = "Copyright 2018, Helium Edu" __version__ = "1.6.0" class TestActionsTestCase(testcase.HeliumCLITestCase): def test_update(self): # WHEN main(["main.py", "update"]) # THEN self.mock_subprocess_call.assert_called_once_with(["pip", "install", "--upgrade", "heliumcli"]) @mock.patch("os.path.exists", return_value=False) def test_update_clone_projects(self, mock_path_exists): # GIVEN given_config_exists() # WHEN main(["main.py", "update-projects"]) # THEN self.assertEqual(self.mock_git_repo.clone_from.call_count, 2) self.mock_subprocess_call.assert_any_call( ["make", "install", "-C", os.path.join(utils.get_projects_dir(), "platform")]) self.mock_subprocess_call.assert_any_call( ["make", "install", "-C", os.path.join(utils.get_projects_dir(), "frontend")]) @mock.patch("os.path.exists", return_value=True) def test_update_projects(self, mock_path_exists): # GIVEN utils._save_config(os.environ.get("HELIUMCLI_CONFIG_PATH"), settings.get_default_settings()) # WHEN main(["main.py", "update-projects"]) # THEN self.assertEqual(self.mock_git_repo.return_value.git.pull.call_count, 3) self.mock_subprocess_call.assert_any_call( ["make", "install", "-C", os.path.join(utils.get_projects_dir(), "platform")]) self.mock_subprocess_call.assert_any_call( ["make", "install", "-C", os.path.join(utils.get_projects_dir(), "frontend")]) @mock.patch("os.path.exists", return_value=True) def test_set_build(self, mock_path_exists): # GIVEN utils._save_config(os.environ.get("HELIUMCLI_CONFIG_PATH"), settings.get_default_settings()) # WHEN main(["main.py", "set-build", "1.2.3"]) # THEN self.mock_git_repo.return_value.git.checkout.assert_has_calls([mock.call("1.2.3"), mock.call("1.2.3")]) self.mock_subprocess_call.assert_any_call( ["make", "install", "-C", 
os.path.join(utils.get_projects_dir(), "platform")]) self.mock_subprocess_call.assert_any_call( ["make", "install", "-C", os.path.join(utils.get_projects_dir(), "frontend")]) def test_start_servers(self): # GIVEN commonhelper.given_runserver_exists("platform") # WHEN main(["main.py", "start-servers"]) # THEN self.mock_subprocess_popen.assert_called_once_with( os.path.join(utils.get_projects_dir(), "platform", utils.get_config(True)["serverBinFilename"])) def test_deploy_build(self): self.subprocess_popen.stop() # GIVEN commonhelper.given_hosts_file_exists() # WHEN main(["main.py", "deploy-build", "1.2.3", "devbox"]) # THEN self.assertEqual(self.mock_subprocess_call.call_count, 2) self.mock_subprocess_call.assert_any_call( ["ssh", "-t", "vagrant@heliumedu.test", utils.get_config(True)["hostProvisionCommand"]]) self.mock_subprocess_call.assert_any_call( ["ansible-playbook", "--inventory-file={}/hosts/devbox".format(utils.get_ansible_dir()), "-v", "--extra-vars", "build_version=1.2.3", "{}/{}.yml".format(utils.get_ansible_dir(), "devbox")]) self.subprocess_popen.start() def test_deploy_build_code_limit_hosts(self): # GIVEN given_config_exists() # WHEN main(["main.py", "deploy-build", "1.2.3", "devbox", "--code", "--roles", "host1,host2"]) # THEN self.mock_subprocess_call.assert_called_once_with( ["ansible-playbook", "--inventory-file={}/hosts/devbox".format(utils.get_ansible_dir()), "-v", "--extra-vars", "build_version=1.2.3", "--tags", "code", "--limit", "host1,host2", "{}/{}.yml".format(utils.get_ansible_dir(), "devbox")]) def test_deploy_build_all_tags(self): # GIVEN given_config_exists() # WHEN main(["main.py", "deploy-build", "1.2.3", "devbox", "--code", "--migrate", "--envvars", "--conf", "--ssl"]) # THEN self.mock_subprocess_call.assert_called_once_with( ["ansible-playbook", "--inventory-file={}/hosts/devbox".format(utils.get_ansible_dir()), "-v", "--extra-vars", "build_version=1.2.3", "--tags", "code,migrate,envvars,conf,ssl", 
"{}/{}.yml".format(utils.get_ansible_dir(), "devbox")]) def test_prep_code(self): # GIVEN commonhelper.given_python_version_file_exists("1.2.3") versioned_file1_path = commonhelper.given_project_python_versioned_file_exists("platform") versioned_file2_path = commonhelper.given_project_js_versioned_file_exists("frontend") repo_instance = self.mock_git_repo.return_value latest_tag = repo_instance.tags[-1] latest_tag.commit = mock.MagicMock("git.commit.Commit") diff1 = mock.MagicMock("git.diff.Diff") diff1.b_rawpath = versioned_file1_path.encode("utf-8") diff2 = mock.MagicMock("git.diff.Diff") diff2.b_rawpath = versioned_file2_path.encode("utf-8") latest_tag.commit.diff = mock.MagicMock(side_effect=[[diff1], [diff2]]) # WHEN main(["main.py", "prep-code"]) # THEN commonhelper.verify_versioned_file_updated(self, versioned_file1_path, "1.2.3") commonhelper.verify_versioned_file_updated(self, versioned_file2_path, "1.2.3") def test_build_release(self): # GIVEN version_file_path = commonhelper.given_python_version_file_exists() package_file_path = commonhelper.given_project_package_json_exists("frontend") versioned_file_path = commonhelper.given_project_python_versioned_file_exists("platform") repo_instance = self.mock_git_repo.return_value repo_instance.untracked_files = [] repo_instance.is_dirty = mock.MagicMock(side_effect=[False, False, True, True]) latest_tag = repo_instance.tags[-1] latest_tag.commit = mock.MagicMock("git.commit.Commit") diff1 = mock.MagicMock("git.diff.Diff") diff1.b_rawpath = versioned_file_path.encode("utf-8") latest_tag.commit.diff = mock.MagicMock(side_effect=[[diff1], []]) # WHEN main(["main.py", "build-release", "1.2.3"]) # THEN self.assertEqual(self.mock_git_repo.return_value.create_tag.call_count, 2) self.assertEqual(self.mock_git_repo.return_value.git.commit.call_count, 2) commonhelper.verify_versioned_file_updated(self, version_file_path, "1.2.3") commonhelper.verify_versioned_file_updated(self, package_file_path, "1.2.3") 
commonhelper.verify_versioned_file_updated(self, versioned_file_path, "1.2.3") def test_list_builds(self): # GIVEN given_config_exists() # WHEN main(["main.py", "--silent", "list-builds"]) def test_build_release_fails_when_dirty(self): # GIVEN repo_instance = self.mock_git_repo.return_value repo_instance.is_dirty = mock.MagicMock(return_value=True) given_config_exists() # WHEN main(["main.py", "build-release", "1.2.3"]) # THEN self.mock_git_repo.return_value.create_tag.assert_not_called() self.mock_git_repo.return_value.git.commit.assert_not_called()
6,936
516
23
69cac34efbadf6fd73233a4b23e60acd1487b619
168
py
Python
tests/test_bi.py
kvh/dags-bi
2e0877b19fb0738ba384b798ad1c5c33c4b3111e
[ "BSD-3-Clause" ]
1
2020-12-22T23:22:10.000Z
2020-12-22T23:22:10.000Z
tests/test_bi.py
kvh/dags-bi
2e0877b19fb0738ba384b798ad1c5c33c4b3111e
[ "BSD-3-Clause" ]
null
null
null
tests/test_bi.py
kvh/dags-bi
2e0877b19fb0738ba384b798ad1c5c33c4b3111e
[ "BSD-3-Clause" ]
1
2021-07-11T10:07:16.000Z
2021-07-11T10:07:16.000Z
from loguru import logger logger.enable("snapflow") if __name__ == "__main__": test()
12
40
0.672619
from loguru import logger logger.enable("snapflow") def test(): from snapflow_bi import module as bi bi.run_tests() if __name__ == "__main__": test()
51
0
23
3ca576d08c7f3e8c7d9ea7df90eeae962a62ad25
3,045
py
Python
ixian_docker/modules/docker/utils/client.py
kreneskyp/ixian-docker
ce7a6cee2f961b8446dc3d9429a809ab5a235467
[ "Apache-2.0" ]
null
null
null
ixian_docker/modules/docker/utils/client.py
kreneskyp/ixian-docker
ce7a6cee2f961b8446dc3d9429a809ab5a235467
[ "Apache-2.0" ]
null
null
null
ixian_docker/modules/docker/utils/client.py
kreneskyp/ixian-docker
ce7a6cee2f961b8446dc3d9429a809ab5a235467
[ "Apache-2.0" ]
null
null
null
# Copyright [2018-2020] Peter Krenesky # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import base64 import logging import boto3 import docker from ixian.config import CONFIG from ixian.utils.decorators import cached_property logger = logging.getLogger(__name__) # Global cache of registries that are created. DOCKER_REGISTRIES = {} class UnknownRegistry(Exception): """Exception raised when registry is not configured""" pass
29
94
0.663054
# Copyright [2018-2020] Peter Krenesky # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import base64 import logging import boto3 import docker from ixian.config import CONFIG from ixian.utils.decorators import cached_property logger = logging.getLogger(__name__) # Global cache of registries that are created. DOCKER_REGISTRIES = {} def docker_client(): return docker.from_env() class UnknownRegistry(Exception): """Exception raised when registry is not configured""" pass class DockerClient: def __init__(self, registry, **options): self.registry = registry self.options = options @classmethod def for_registry(cls, registry): try: return DOCKER_REGISTRIES[registry] except KeyError: pass # Instantiate client for registry try: config = CONFIG.DOCKER.REGISTRIES[registry] except KeyError: logger.warn(f"Registry missing from DOCKER.REGISTRIES: {registry}") raise UnknownRegistry(registry) Client = config["client"] instance = Client(registry, **config.get("options", {})) DOCKER_REGISTRIES[registry] = instance return instance @cached_property def client(self): return docker_client() def login(self): # authenticate username = self.options.get("username", None) password = self.options.get("password", None) if not username: raise KeyError(f"Cannot login to {self.registry}, username not found in options.") if not password: raise KeyError(f"Cannot login to {self.registry}, password not found in options.") self.client.login(username, password, "", registry=self.registry) class 
ECRDockerClient(DockerClient): @cached_property def ecr_client(self): kwargs = dict(region_name="us-west-2") kwargs.update(self.options) return boto3.client("ecr", **kwargs) def login(self): # fetch credentials from ECR logger.debug( "Authenticating with ECR: {}".format(self.options.get("region_name", "us-west-2")) ) token = self.ecr_client.get_authorization_token() username, password = ( base64.b64decode(token["authorizationData"][0]["authorizationToken"]) .decode() .split(":") ) registry = token["authorizationData"][0]["proxyEndpoint"] # authenticate self.client.login(username, password, "", registry=registry)
1,798
232
69
5ebe2bc6d18e0027e8dc40e731b3338063ebca8f
1,011
py
Python
api/routers.py
V-Holodov/pets_accounting
300cb8748124b6f767e85404ee372b93b097098c
[ "MIT" ]
null
null
null
api/routers.py
V-Holodov/pets_accounting
300cb8748124b6f767e85404ee372b93b097098c
[ "MIT" ]
1
2021-12-22T14:08:37.000Z
2021-12-22T14:08:37.000Z
api/routers.py
V-Holodov/pets_accounting
300cb8748124b6f767e85404ee372b93b097098c
[ "MIT" ]
1
2021-12-24T11:50:26.000Z
2021-12-24T11:50:26.000Z
from rest_framework.routers import DefaultRouter, DynamicRoute, Route class CustomBulkDeleteRouter(DefaultRouter): """ A custom URL router for the Product API that correctly routes DELETE requests with multiple query parameters. """ routes = [ Route( url=r"^{prefix}$", mapping={"get": "list", "post": "create", "delete": "destroy"}, name="{basename}-list", detail=False, initkwargs={"suffix": "List"}, ), Route( url=r"^{prefix}/{lookup}$", mapping={ "get": "retrieve", "put": "update", "patch": "partial_update", }, name="{basename}-detail", detail=True, initkwargs={"suffix": "Detail"}, ), DynamicRoute( url=r"^{prefix}/{lookup}/{url_path}$", name="{basename}-{url_name}", detail=True, initkwargs={}, ), ]
28.083333
75
0.485658
from rest_framework.routers import DefaultRouter, DynamicRoute, Route class CustomBulkDeleteRouter(DefaultRouter): """ A custom URL router for the Product API that correctly routes DELETE requests with multiple query parameters. """ routes = [ Route( url=r"^{prefix}$", mapping={"get": "list", "post": "create", "delete": "destroy"}, name="{basename}-list", detail=False, initkwargs={"suffix": "List"}, ), Route( url=r"^{prefix}/{lookup}$", mapping={ "get": "retrieve", "put": "update", "patch": "partial_update", }, name="{basename}-detail", detail=True, initkwargs={"suffix": "Detail"}, ), DynamicRoute( url=r"^{prefix}/{lookup}/{url_path}$", name="{basename}-{url_name}", detail=True, initkwargs={}, ), ]
0
0
0
ff6285e126baa8fdebcfba48c1625e8679c9397b
229
py
Python
mysite/annotatorapp/forms.py
hareeshbabu82ns/smart-sanskrit-annotator
763e1dd067107334761ba66796a559c18c2a2807
[ "MIT" ]
7
2019-04-15T14:33:21.000Z
2021-12-20T10:04:30.000Z
mysite/annotatorapp/forms.py
hareeshbabu82ns/smart-sanskrit-annotator
763e1dd067107334761ba66796a559c18c2a2807
[ "MIT" ]
4
2020-07-05T17:46:45.000Z
2021-12-11T16:46:40.000Z
mysite/annotatorapp/forms.py
hareeshbabu82ns/smart-sanskrit-annotator
763e1dd067107334761ba66796a559c18c2a2807
[ "MIT" ]
6
2018-07-16T10:42:52.000Z
2021-09-26T06:54:55.000Z
#landing page inputs taken as a form input. from django import forms from . import models
22.9
45
0.759825
#landing page inputs taken as a form input. from django import forms from . import models class inputlineform(forms.Form): model = forms line = forms.CharField(max_length = 100) linetype = forms.CharField(max_length = 100)
0
114
23
ecd4753d62aad3a33f16e3c8b73b945e530c8b87
95
py
Python
upgrade_tests/conftest.py
dineshjoshi/cassandra-dtest-1
18ccbb4d308c27b67a8d81a2c849dc27fc3e2b5c
[ "Apache-2.0" ]
44
2017-07-13T14:20:42.000Z
2022-03-27T23:55:27.000Z
upgrade_tests/conftest.py
dineshjoshi/cassandra-dtest-1
18ccbb4d308c27b67a8d81a2c849dc27fc3e2b5c
[ "Apache-2.0" ]
64
2017-07-26T16:06:01.000Z
2022-03-17T22:57:03.000Z
upgrade_tests/conftest.py
dineshjoshi/cassandra-dtest-1
18ccbb4d308c27b67a8d81a2c849dc27fc3e2b5c
[ "Apache-2.0" ]
105
2017-07-13T14:28:14.000Z
2022-03-23T04:22:46.000Z
from .upgrade_manifest import set_config
19
40
0.810526
from .upgrade_manifest import set_config def pytest_configure(config): set_config(config)
31
0
23
fdf35f4f013b36b2c85286b3ae07725d2a4be6ed
1,923
py
Python
curation_portal/views/project_variants.py
macarthur-lab/variant-curation-portal
bd1e3afd609924fde727116525a00e67d433f1da
[ "MIT" ]
1
2021-03-26T19:44:19.000Z
2021-03-26T19:44:19.000Z
curation_portal/views/project_variants.py
macarthur-lab/variant-curation-portal
bd1e3afd609924fde727116525a00e67d433f1da
[ "MIT" ]
50
2019-08-23T12:07:11.000Z
2022-02-26T16:45:08.000Z
curation_portal/views/project_variants.py
macarthur-lab/variant-curation-portal
bd1e3afd609924fde727116525a00e67d433f1da
[ "MIT" ]
null
null
null
from django.db import transaction from rest_framework.exceptions import NotFound, PermissionDenied from rest_framework.generics import get_object_or_404 from rest_framework.permissions import IsAuthenticated from rest_framework.response import Response from rest_framework.serializers import ModelSerializer from rest_framework.views import APIView from curation_portal.models import Project, Variant from curation_portal.serializers import VariantSerializer as UploadedVariantSerializer
34.339286
88
0.716069
from django.db import transaction from rest_framework.exceptions import NotFound, PermissionDenied from rest_framework.generics import get_object_or_404 from rest_framework.permissions import IsAuthenticated from rest_framework.response import Response from rest_framework.serializers import ModelSerializer from rest_framework.views import APIView from curation_portal.models import Project, Variant from curation_portal.serializers import VariantSerializer as UploadedVariantSerializer class VariantSerializer(ModelSerializer): class Meta: model = Variant fields = ("variant_id",) class ProjectVariantsView(APIView): permission_classes = (IsAuthenticated,) def get_project(self): project = get_object_or_404(Project, id=self.kwargs["project_id"]) if not self.request.user.has_perm("curation_portal.change_project", project): if not self.request.user.has_perm("curation_portal.view_project", project): raise NotFound raise PermissionDenied return project def get(self, request, *args, **kwargs): # pylint: disable=unused-argument project = self.get_project() variants = project.variants.all() serializer = VariantSerializer(variants, many=True) return Response({"variants": serializer.data}) def post(self, request, *args, **kwargs): # pylint: disable=unused-argument project = self.get_project() if not request.user.has_perm("curation_portal.add_variant_to_project", project): raise PermissionDenied serializer = UploadedVariantSerializer( data=request.data, context={"project": project}, many=True ) serializer.is_valid(raise_exception=True) with transaction.atomic(): serializer.save() project.save() # Save project to set updated_at timestamp return Response({})
1,154
232
46
7013918c81c6bad76dfdcdb70889dc3508a62702
328
py
Python
URI/1 - INICIANTE/Python/1182 - ColunaNaMatriz.py
william-james-pj/LogicaProgramacao
629f746e34da2e829dc7ea2e489ac36bb1b1fb13
[ "MIT" ]
1
2020-04-14T16:48:16.000Z
2020-04-14T16:48:16.000Z
URI/1 - INICIANTE/Python/1182 - ColunaNaMatriz.py
william-james-pj/LogicaProgramacao
629f746e34da2e829dc7ea2e489ac36bb1b1fb13
[ "MIT" ]
null
null
null
URI/1 - INICIANTE/Python/1182 - ColunaNaMatriz.py
william-james-pj/LogicaProgramacao
629f746e34da2e829dc7ea2e489ac36bb1b1fb13
[ "MIT" ]
null
null
null
l = int(input()) operacao = input() matriz = [] soma = 0.0 for x in range(0, 12): linha = [] for y in range(0, 12): linha.append(float(input())) matriz.append(linha) for x in range(0, 12): soma += matriz[x][l] if operacao == 'S': print('{:.1f}'.format(soma)) else: print('{:.1f}'.format(soma/12))
21.866667
36
0.554878
l = int(input()) operacao = input() matriz = [] soma = 0.0 for x in range(0, 12): linha = [] for y in range(0, 12): linha.append(float(input())) matriz.append(linha) for x in range(0, 12): soma += matriz[x][l] if operacao == 'S': print('{:.1f}'.format(soma)) else: print('{:.1f}'.format(soma/12))
0
0
0
fb5ab216047bdc84770b95501d224e4d5a3c6122
20,802
py
Python
src/data/readers/load_hrit.py
gridl/kcl-fire-aot
2833df64b289f5ef02b8b93c3c620d0ba702f5b5
[ "MIT" ]
null
null
null
src/data/readers/load_hrit.py
gridl/kcl-fire-aot
2833df64b289f5ef02b8b93c3c620d0ba702f5b5
[ "MIT" ]
null
null
null
src/data/readers/load_hrit.py
gridl/kcl-fire-aot
2833df64b289f5ef02b8b93c3c620d0ba702f5b5
[ "MIT" ]
1
2021-03-26T02:28:33.000Z
2021-03-26T02:28:33.000Z
# open Himawari-8 standard format # output data dictionary ready to put in frp_pixel.py # the struction of the dictionary should be like this # ['ir39', 'ir12', 'saa', 'ir11', 'cloudfree', 'lat', # 'ir11rad', 'diff', 'sun_glint', 'ACQTIME', 'vza', # 'vaa', 'lon', 'cmask', 'CMa_TEST', 'pixsize', 'szen', # 'tirradratio', 'infos', 'redrad', 'visradratio', # 'tcwv', 'ir39rad', 'lcov'] import os import datetime import struct import numpy as np import scipy.ndimage from subprocess import call import bz2file as h8_bz2 import sys def sunglint(vzen, vaz, szen, saz): """ all the input in degrees calculation from Prins et al. enhanced fired detection 1998 """ vzen_r = np.radians(vzen) vaz_r = np.radians(vaz) szen_r = np.radians(szen) saz_r = np.radians(saz) raz_r = vaz_r - saz_r G = np.cos(vzen_r) * np.cos(szen_r) - np.sin(vzen_r) * np.sin(szen_r) * np.cos(raz_r) sun_glint = np.degrees(np.arccos(G)) return sun_glint def cloud_mask(data): """ A simple cloud masking for Himawari8 for fire detection """ # threshold for Albedo vis_day = 0.1 bt10_day = 290. bt10_day_vz = 290 bt4_ni = 272. bt10_ni = 268. bt11_ni = 268. Diffthresh_day = 15.0 Diffthresh_ni = 10.0 Diff2thresh = 13.7 data['cmask'] = np.zeros(data['ir39'].shape, dtype=np.int8) - 1 # work in the satellite visible area and the land mask = ((data['vza'] > 0.0) & (data['lcov'] < 20)) data['cmask'][mask] = 0. # check if day or night - day <70.degrees and night gt 70.degrees # work on day time first day = ((data['szen'] < 75.) 
& (data['szen'] > 0.0) & \ (data['lcov'] < 20) & (data['vza'] > 0.0)) # visible band vis_thresh = ((data['vis'] > vis_day) & (data['ir11'] < bt10_day) & \ (data['cmask'] < 1) & (day > 0)) data['cmask'][vis_thresh] = 1 # 10mincron threshold tir_thresh = ((data['ir11'] < bt10_day) & (data['cmask'] < 1) & (day > 0)) data['cmask'][tir_thresh] = 2 # his is the mid infrared temperature threshold used here for cloud detected # bt4_th = -0.35 * data['szen'] + 300 diff = data['ir39'] - data['ir11'] # 10mincron and 3.9um difference threshold dif_thresh = ((data['vis'] > vis_day / 3) & (diff > Diffthresh_day) & (data['ir11'] < bt10_day_vz) & \ (data['cmask'] < 1) & (day > 0)) data['cmask'][dif_thresh] = 5 # night time night = ((data['szen'] > 90.0) & (data['vza'] > 0.) & (data['lcov'] < 20)) # MIR # mir_thresh = ((data['ir39'] < bt4_ni) & (np.abs(diff) > 2) & (night > 0)) # data['cmask'][mir_thresh] = 7 # 10mincron tir_thresh = ((data['ir11'] < bt10_ni) & (np.abs(diff) > 4) & (night > 0)) data['cmask'][tir_thresh] = 8 # 10mincron and 3.9um difference threshold dif_thresh = ((diff > Diffthresh_ni) & (data['ir39'] < 275) & (night > 0)) data['cmask'][dif_thresh] = 5 # twilight time twilight = ((data['szen'] >= 75.0) & (data['szen'] < 90.0) & (data['vza'] > 0.) & (data['lcov'] < 20)) vis_thresh = ((data['vis'] > vis_day / 4) & (data['cmask'] < 1) & (twilight > 0)) data['cmask'][vis_thresh] = 1 # sun glint affest area glint = (data['sun_glint'] < 20.0) vis_thresh = ((data['vis'] > vis_day / 4) & (data['cmask'] < 1) & (glint > 0)) data['cmask'][vis_thresh] = 1 # this is clear sky data['cloudfree'] = (data['cmask'] < 1.0) & (data['cmask'] > -1.0) return data def water_mask(data): """ A simple water masking for Himawari8 for fire georeference land: 1, water: 0, background:-1 """ # threshold for Albedo sir_day = 0.05 data['wmask'] = np.zeros(data['ir39'].shape, dtype=np.int8) - 1 # work in the satellite visible area and the land mask = data['vza'] > 0.0 data['wmask'][mask] = 0. 
# check if day or night - day <70.degrees and night gt 70.degrees # work on day time first day = ((data['szen'] < 80.) & (data['szen'] > 0.0) & (data['vza'] > 0.0)) # sir band sir_thresh = ((data['sir'] > sir_day) & (data['wmask'] < 1) & \ (data['cmask'] < 1) & (day > 0) & (data['diff'] < 20)) data['wmask'][sir_thresh] = 1 return data def geo_read(f, verbose=False): """ read in the static data like view angle, landcover put them in a data dictionary """ dim = 5500 # hard coded for Himawari8 possible it is 5500 in which case we need to zoom if verbose: print 'reading file %s' % f dtype = np.float32 shape = (2, dim, dim) data = np.fromfile(f, dtype=dtype).reshape(shape) lat = data[0, :, :].astype(dtype) lon = data[1, :, :].astype(dtype) return lat, lon def static_read(file_dict, verbose=False): """ read in the static data like view angle, landcover put them in a data dictionary """ d = {} dim = 5500 # hard coded for Himawari8 for key in file_dict.keys(): file = file_dict[key][0][0] if verbose: print 'file path %s' % key print 'reading file %s' % file if key == 'landcover_path': dtype = np.int8 shape = (dim, dim) data = np.fromfile(file, dtype=dtype).reshape(shape) data_key = file_dict[key][1] d[data_key] = data.astype(dtype) elif key == 'fixed_position_path': dtype = np.float32 shape = (dim, dim) data = np.fromfile(file, dtype=dtype).reshape(shape) data_key = file_dict[key][1] d[data_key] = data.astype(dtype) else: dtype = np.float32 shape = (2, dim, dim) data = np.fromfile(file, dtype=dtype).reshape(shape) data_key = file_dict[key][1] d[data_key] = data[0, :, :].astype(dtype) data_key = file_dict[key][2] d[data_key] = data[1, :, :].astype(dtype) # pixel size d['pixsize'] = ((2.0 ** 2) * 1000.0 ** 2) * (1 / np.cos(np.radians(d['vza']))) # # adjust sampled area based on blocks used # for k in d.keys(): # d[k] = d[k][start_pix:stop_pix, :] return d def sun_angles(lat, lon, time_key): """ input: lat, np array; lon, np array time_key, string YYYYMMDDHHMM format like 
201501031100 output:szen, sun zenith angle saa, sun azimuth angle """ # Define internal constants used for conversion EQTIME1 = 229.18 EQTIME2 = 0.000075 EQTIME3 = 0.001868 EQTIME4 = 0.032077 EQTIME5 = 0.014615 EQTIME6 = 0.040849 DECL1 = 0.006918 DECL2 = 0.399912 DECL3 = 0.070257 DECL4 = 0.006758 DECL5 = 0.000907 DECL6 = 0.002697 DECL7 = 0.00148 # Evaluate the input lat and lon in radians RadLat = np.radians(lat) dt = datetime.datetime.strptime(time_key, '%Y%m%d%H%M') # get the days in the year, normal year:365; leap year:366 d1 = datetime.datetime(dt.year, 1, 1) d2 = datetime.datetime(dt.year + 1, 1, 1) days_in_year = (d2 - d1).days # Evaluate the fractional year in radians # dt.hour-12 because gamma start from local noon time gamma = 2 * np.pi * (dt.timetuple().tm_yday - 1 + \ (dt.hour - 12) / 24.0) / days_in_year # Evaluate the Equation of time in minutes eqtime = EQTIME1 * (EQTIME2 + EQTIME3 * np.cos(gamma) - \ EQTIME4 * np.sin(gamma) - EQTIME5 * np.cos(2 * gamma) - \ 0.040849 * np.sin(2 * gamma)) # Time offset in minutes time_offset = eqtime + 4.0 * lon # local solar time in minutes true_solar_time = dt.hour * 60 + dt.minute + dt.second / 60 + time_offset # Solar hour angle in degrees and in radians HaRad = np.radians((true_solar_time / 4.) - 180.) # Evaluate the solar declination angle in radians Decli = DECL1 - DECL2 * np.cos(gamma) + DECL3 * np.sin(gamma) - \ DECL4 * np.cos(2 * gamma) + DECL5 * np.sin(2 * gamma) - \ DECL6 * np.cos(3 * gamma) + DECL7 * np.sin(3 * gamma) # Evaluate the Solar local Coordinates CosZen = (np.sin(RadLat) * np.sin(Decli) + \ np.cos(RadLat) * np.cos(Decli) * np.cos(HaRad)) TmpZenRad = np.arccos(CosZen) szen = np.degrees(TmpZenRad) CosAzi = -((np.sin(RadLat) * np.cos(TmpZenRad) - np.sin(Decli)) / \ (np.cos(RadLat) * np.sin(TmpZenRad))) saa = 360. - np.degrees(np.arccos(CosAzi)) # Correct for Time < 12.00 ( -> in range 0 . 180 ) saa[(true_solar_time < 720)] = 360. 
- saa[(true_solar_time < 720)] # in minutes 12 *60 return (szen, saa) def rebin(a, newshape): '''Rebin an array to a new shape. ''' assert len(a.shape) == len(newshape) slices = [slice(0, old, float(old) / new) for old, new in zip(a.shape, newshape)] coordinates = np.mgrid[slices] indices = coordinates.astype('i') # choose the biggest smaller integer index return a[tuple(indices)] def H8_file_read(file, verbose=False): # type: (object, object) -> object ''' read in a single Himawari8 file. ''' if not os.path.exists(file): print 'can not read %s' % file fileExtension = os.path.splitext(file)[1] if fileExtension in '.bz2': fh = h8_bz2.BZ2File(file, 'rb') else: fh = open(file, 'rb') # doit = call(["bunzip2", file]) # if doit < 1: # file = file[:-4] # else: # print 'can not unzip ', file # Read in the head blocks #print "processing %s" % file total_len = 0 # read in the file as binary see python struct for help for bb in xrange(11): # for Block 1 fh.seek(total_len) Block_no = struct.unpack('b', fh.read(1))[0] if verbose: print 'Reading block %s' % Block_no fh.seek(total_len + 1) Block_len = struct.unpack('h', fh.read(2))[0] if verbose: print 'The length of block %s is %s' % (Block_no, Block_len) # from block 2 read in number of samps and lines if Block_no == 2: fh.seek(total_len + 5) samps = struct.unpack('h', fh.read(2))[0] fh.seek(total_len + 7) lines = struct.unpack('h', fh.read(2))[0] # from block 3 read projection information elif Block_no == 3: fh.seek(total_len + 3) sub_lon = struct.unpack('d', fh.read(8))[0] #print 'central longitude %r' % sub_lon fh.seek(total_len + 11) CFAC = struct.unpack('I', fh.read(4))[0] fh.seek(total_len + 15) LFAC = struct.unpack('I', fh.read(4))[0] fh.seek(total_len + 19) COFF = struct.unpack('f', fh.read(4))[0] fh.seek(total_len + 23) LOFF = struct.unpack('f', fh.read(4))[0] fh.seek(total_len + 27) # Information about satellite height, earth equatorial radius # more infor can be found on page 16 of # Himawari_D_users_guide_en 
Proj_info = struct.unpack('ddddddd', fh.read(8 * 7))[:] elif Block_no == 4: fh.seek(total_len + 3) Nav_info = struct.unpack('dddddddd', fh.read(8 * 8))[:] elif Block_no == 5: fh.seek(total_len + 3) Band_no = struct.unpack('h', fh.read(2))[0] fh.seek(total_len + 5) central_wave = struct.unpack('d', fh.read(8))[0] fh.seek(total_len + 19) Cal_info = struct.unpack('ddddddddddd', fh.read(8 * 11))[:] # Change the block length for next block total_len += Block_len if verbose: print 'Total header length %s' % total_len # Now read in image data fh.seek(total_len) dtype = 'u2' shape = [lines, samps] size = np.dtype(dtype).itemsize * samps * lines data = fh.read() data = np.frombuffer(data[:size], dtype).reshape(shape) fh.close() fileExtension = os.path.splitext(file)[1] # if fileExtension in '.DAT': # call(["bzip2", file]) if verbose: print 'slope: %f, offset: %f for radiance' % (Cal_info[0], Cal_info[1]) radiance = data * Cal_info[0] + Cal_info[1] # for infrared bands if Band_no > 6: # for Planck temperature speed_of_light = Cal_info[8] planck_constant = Cal_info[9] boltzmann_constant = Cal_info[10] # radiance = 2.5 # central_wave = 4 c1 = 2.0 * planck_constant * speed_of_light * speed_of_light c2 = planck_constant * speed_of_light / boltzmann_constant # -- Derived constant scaling factors for: # c1: W.m2 to W/(m2.um-4) => multiplier of 1.0e+24 is required. # c2: K.m to K.um a=> multiplier of 1.0e+06 is required. c1_scale = 1.0e+24 c2_scale = 1.0e+06 # -- Calculate wavelength dependent "constants" fk1 = c1_scale * c1 / (central_wave ** 5) fk2 = c2_scale * c2 / central_wave logarithm = np.log((fk1 / (radiance) + 1.0)) temperature = fk2 / logarithm BT = Cal_info[2] + Cal_info[3] * temperature + Cal_info[4] * \ temperature * temperature else: # Lets not resample the himawari data here. 
# if samps > 12000: # hard coded should find a better way later # # Resampled by a factor of 0.25 with bilinear interpolation # # sub = radiance[4300*4:4700*4,2500*4:2900*4] # # d['vis_full'] = sub # radiance = rebin(radiance, (lines / 4, samps / 4)) # elif samps > 5500: # radiance = rebin(radiance, (lines / 2, samps / 2)) # for visible band this is Albedo BT = radiance * Cal_info[2] return (radiance, BT) def Himawari_read(file_dict, verbose=False): """ Read the Himawari-8 channels for fire detection, we only need red, MIR and TIR input: file dictionary like {'red_path' : 'HS_H08_20150109_0600_B03_FLDK_R20_S0101.DAT', 'mir_path' : 'HS_H08_20150109_0600_B07_FLDK_R20_S0101.DAT', 'tir_path' : 'HS_H08_20150109_0600_B14_FLDK_R20_S0101.DAT'} output: date dictionary like {'mir_BT' : np.array(5500,55500)} reference: Himawari_D_users_guide_en from http://www.data.jma.go.jp/mscweb/en/himawari89/space_segment/hsd_sample/HS_D_users_guide_en_v11.pdf """ d = {} for key in file_dict.keys(): files = file_dict[key][0] files.sort() rad_data_list = [] BT_data_list = [] for file in files: radiance, BT = H8_file_read(file) rad_data_list.append(radiance) BT_data_list.append(BT) radiance = np.vstack(rad_data_list) BT = np.vstack(BT_data_list) data_key = file_dict[key][1] d[data_key] = BT.astype(np.float32) data_key = file_dict[key][2] d[data_key] = radiance.astype(np.float32) return d def get_path(root, band, time_key=None, path_tree=None): """Finds path for given time key and data band time_key: 201501081300, YYYYMMDDHHMM band: B07,MIR (3.9um), B14,TIR (11um), B03,red (0.6um) sometime B03 has both 500m and 2km resolution files variable: BT, brightness temperature; Radiance for static data return path path_tree: HSFD, the original japan FTP path like:# 201501/09/201501090000/00/B03 else,weidong own path tree like 201501090000 """ if time_key is not None: # EO realtime date # separate the date and time from the time_key dt_time_key = datetime.datetime.strptime(time_key, '%Y%m%d%H%M') 
dt_date = dt_time_key.strftime('%Y%m%d') dt_time = dt_time_key.strftime('%H%M') # keys = [dt_date, dt_time, band] # Realtime EO channels if path_tree in ['HSFD']: root = os.path.join(root, dt_time_key.strftime('%Y%m'), dt_time_key.strftime('%d'), dt_time_key.strftime('%Y%m%d%H') + '00', dt_time_key.strftime('%M'), band + '/') # 500m resolution data band_vis_05 = band + "_FLDK_R05_S" # 2km resolution data band = band + "_FLDK_R20_S" else: root = root + time_key + "/" band_vis_05 = band else: root = os.path.join(root, "lcov/") band_vis_05 = band #print "root: %s" % root #print "band: %s" % band # now iterate over root path if os.path.exists(root): filepath = [] filepath1 = [] for f in os.listdir(root): if band in f: file_size = os.path.getsize(root + f) if file_size > 10000: filepath.append(root + f) elif band_vis_05 in f: filepath1.append(root + f) else: continue if len(filepath) < 1: filepath = filepath1 # return # if filepath1 is not None: # return filepath1 # else: return filepath else: print root, 'does not exists' sys.exit def paths(root, time_key=None, path_tree=None, mode=0): """Constructs a dictionary for the file paths """ # path dictionary construted from here d = {} if time_key is not None: # EO realtime date # for fire detection model if mode == 0: d["red_path"] = [get_path(root, "B03", \ time_key=time_key, path_tree=path_tree), 'vis', 'redrad'] # d["nir_path"] = [get_path(root, "B04", \ # time_key=time_key,path_tree=path_tree), 'nir', 'nirrad'] # d["sir_path"] = [get_path(root, "B06", \ # time_key=time_key, path_tree=path_tree), 'sir', 'sirrad'] d["tir86_path"] = [get_path(root, "B11", \ time_key=time_key, path_tree=path_tree), 'ir86', 'ir86rad'] d["mir_path"] = [get_path(root, "B07", \ time_key=time_key, path_tree=path_tree), 'ir39', 'ir39rad'] d["tir11_path"] = [get_path(root, "B14", \ time_key=time_key, path_tree=path_tree), 'ir11', 'ir11rad'] else: d["latlon_path"] = [get_path(root, "lat_lon.img"), 'lat', 'lon'] d["sat_view_angle_path"] = 
[get_path(root, "vza_vaa.img"), 'vza', 'vaa'] d["landcover_path"] = [get_path(root, "lcov.img"), 'lcov'] # d["fixed_position_path"] = [get_path(root, "H8_tir_201501090620.img"),'fpos'] return d def load_h8(in_root, time_key, path_tree=None, mode=0): """ load all the data and put them in a dictionary """ # firstly setup the path dictionary EO_path_dict = paths(in_root, time_key=time_key, path_tree=path_tree, mode=0) # readin all the Himawari files here EO_data = Himawari_read(EO_path_dict) # construt a static data dictionary static_path_dict = paths(in_root) # readin all static data here static_data = static_read(static_path_dict) # get the sun angle szen, saa = sun_angles(static_data['lat'], static_data['lon'], time_key) # get the sun glint angle sun_glint = sunglint(static_data['vza'], static_data['vaa'], szen, saa) # combine EO and static data together EO_data.update(static_data) EO_data['szen'] = szen EO_data['sun_glint'] = sun_glint EO_data['ACQTIME'] = np.zeros(EO_data['ir39'].shape, dtype=np.int8) # for fire detection EO_data['diff'] = EO_data['ir39'] - EO_data['ir11'] EO_data['tirradratio'] = EO_data['ir39rad'] / EO_data['ir11rad'] EO_data['visradratio'] = EO_data['ir39rad'] / EO_data['redrad'] # d['ndvi'] = (d['nir'] - d['vis']) / (d['nir'] + d['vis']) # correct the navigation problem dt_time_key = datetime.datetime.strptime(time_key, '%Y%m%d%H%M') dt_time = int(dt_time_key.strftime('%H')) # if dt_time < 11: # doit = img_move(EO_data) # do the cloud masking data = cloud_mask(EO_data) # data = water_mask(EO_data) return data if __name__ == '__main__': in_root = '/Volumes/INTENSO/him_downlaod' # root for the output files time_key = "201507060000" data = load_h8(in_root, time_key, path_tree="HSFD")
36.430823
106
0.571484
# open Himawari-8 standard format # output data dictionary ready to put in frp_pixel.py # the struction of the dictionary should be like this # ['ir39', 'ir12', 'saa', 'ir11', 'cloudfree', 'lat', # 'ir11rad', 'diff', 'sun_glint', 'ACQTIME', 'vza', # 'vaa', 'lon', 'cmask', 'CMa_TEST', 'pixsize', 'szen', # 'tirradratio', 'infos', 'redrad', 'visradratio', # 'tcwv', 'ir39rad', 'lcov'] import os import datetime import struct import numpy as np import scipy.ndimage from subprocess import call import bz2file as h8_bz2 import sys def sunglint(vzen, vaz, szen, saz): """ all the input in degrees calculation from Prins et al. enhanced fired detection 1998 """ vzen_r = np.radians(vzen) vaz_r = np.radians(vaz) szen_r = np.radians(szen) saz_r = np.radians(saz) raz_r = vaz_r - saz_r G = np.cos(vzen_r) * np.cos(szen_r) - np.sin(vzen_r) * np.sin(szen_r) * np.cos(raz_r) sun_glint = np.degrees(np.arccos(G)) return sun_glint def cloud_mask(data): """ A simple cloud masking for Himawari8 for fire detection """ # threshold for Albedo vis_day = 0.1 bt10_day = 290. bt10_day_vz = 290 bt4_ni = 272. bt10_ni = 268. bt11_ni = 268. Diffthresh_day = 15.0 Diffthresh_ni = 10.0 Diff2thresh = 13.7 data['cmask'] = np.zeros(data['ir39'].shape, dtype=np.int8) - 1 # work in the satellite visible area and the land mask = ((data['vza'] > 0.0) & (data['lcov'] < 20)) data['cmask'][mask] = 0. # check if day or night - day <70.degrees and night gt 70.degrees # work on day time first day = ((data['szen'] < 75.) 
& (data['szen'] > 0.0) & \ (data['lcov'] < 20) & (data['vza'] > 0.0)) # visible band vis_thresh = ((data['vis'] > vis_day) & (data['ir11'] < bt10_day) & \ (data['cmask'] < 1) & (day > 0)) data['cmask'][vis_thresh] = 1 # 10mincron threshold tir_thresh = ((data['ir11'] < bt10_day) & (data['cmask'] < 1) & (day > 0)) data['cmask'][tir_thresh] = 2 # his is the mid infrared temperature threshold used here for cloud detected # bt4_th = -0.35 * data['szen'] + 300 diff = data['ir39'] - data['ir11'] # 10mincron and 3.9um difference threshold dif_thresh = ((data['vis'] > vis_day / 3) & (diff > Diffthresh_day) & (data['ir11'] < bt10_day_vz) & \ (data['cmask'] < 1) & (day > 0)) data['cmask'][dif_thresh] = 5 # night time night = ((data['szen'] > 90.0) & (data['vza'] > 0.) & (data['lcov'] < 20)) # MIR # mir_thresh = ((data['ir39'] < bt4_ni) & (np.abs(diff) > 2) & (night > 0)) # data['cmask'][mir_thresh] = 7 # 10mincron tir_thresh = ((data['ir11'] < bt10_ni) & (np.abs(diff) > 4) & (night > 0)) data['cmask'][tir_thresh] = 8 # 10mincron and 3.9um difference threshold dif_thresh = ((diff > Diffthresh_ni) & (data['ir39'] < 275) & (night > 0)) data['cmask'][dif_thresh] = 5 # twilight time twilight = ((data['szen'] >= 75.0) & (data['szen'] < 90.0) & (data['vza'] > 0.) & (data['lcov'] < 20)) vis_thresh = ((data['vis'] > vis_day / 4) & (data['cmask'] < 1) & (twilight > 0)) data['cmask'][vis_thresh] = 1 # sun glint affest area glint = (data['sun_glint'] < 20.0) vis_thresh = ((data['vis'] > vis_day / 4) & (data['cmask'] < 1) & (glint > 0)) data['cmask'][vis_thresh] = 1 # this is clear sky data['cloudfree'] = (data['cmask'] < 1.0) & (data['cmask'] > -1.0) return data def water_mask(data): """ A simple water masking for Himawari8 for fire georeference land: 1, water: 0, background:-1 """ # threshold for Albedo sir_day = 0.05 data['wmask'] = np.zeros(data['ir39'].shape, dtype=np.int8) - 1 # work in the satellite visible area and the land mask = data['vza'] > 0.0 data['wmask'][mask] = 0. 
# check if day or night - day <70.degrees and night gt 70.degrees # work on day time first day = ((data['szen'] < 80.) & (data['szen'] > 0.0) & (data['vza'] > 0.0)) # sir band sir_thresh = ((data['sir'] > sir_day) & (data['wmask'] < 1) & \ (data['cmask'] < 1) & (day > 0) & (data['diff'] < 20)) data['wmask'][sir_thresh] = 1 return data def geo_read(f, verbose=False): """ read in the static data like view angle, landcover put them in a data dictionary """ dim = 5500 # hard coded for Himawari8 possible it is 5500 in which case we need to zoom if verbose: print 'reading file %s' % f dtype = np.float32 shape = (2, dim, dim) data = np.fromfile(f, dtype=dtype).reshape(shape) lat = data[0, :, :].astype(dtype) lon = data[1, :, :].astype(dtype) return lat, lon def static_read(file_dict, verbose=False): """ read in the static data like view angle, landcover put them in a data dictionary """ d = {} dim = 5500 # hard coded for Himawari8 for key in file_dict.keys(): file = file_dict[key][0][0] if verbose: print 'file path %s' % key print 'reading file %s' % file if key == 'landcover_path': dtype = np.int8 shape = (dim, dim) data = np.fromfile(file, dtype=dtype).reshape(shape) data_key = file_dict[key][1] d[data_key] = data.astype(dtype) elif key == 'fixed_position_path': dtype = np.float32 shape = (dim, dim) data = np.fromfile(file, dtype=dtype).reshape(shape) data_key = file_dict[key][1] d[data_key] = data.astype(dtype) else: dtype = np.float32 shape = (2, dim, dim) data = np.fromfile(file, dtype=dtype).reshape(shape) data_key = file_dict[key][1] d[data_key] = data[0, :, :].astype(dtype) data_key = file_dict[key][2] d[data_key] = data[1, :, :].astype(dtype) # pixel size d['pixsize'] = ((2.0 ** 2) * 1000.0 ** 2) * (1 / np.cos(np.radians(d['vza']))) # # adjust sampled area based on blocks used # for k in d.keys(): # d[k] = d[k][start_pix:stop_pix, :] return d def sun_angles(lat, lon, time_key): """ input: lat, np array; lon, np array time_key, string YYYYMMDDHHMM format like 
201501031100 output:szen, sun zenith angle saa, sun azimuth angle """ # Define internal constants used for conversion EQTIME1 = 229.18 EQTIME2 = 0.000075 EQTIME3 = 0.001868 EQTIME4 = 0.032077 EQTIME5 = 0.014615 EQTIME6 = 0.040849 DECL1 = 0.006918 DECL2 = 0.399912 DECL3 = 0.070257 DECL4 = 0.006758 DECL5 = 0.000907 DECL6 = 0.002697 DECL7 = 0.00148 # Evaluate the input lat and lon in radians RadLat = np.radians(lat) dt = datetime.datetime.strptime(time_key, '%Y%m%d%H%M') # get the days in the year, normal year:365; leap year:366 d1 = datetime.datetime(dt.year, 1, 1) d2 = datetime.datetime(dt.year + 1, 1, 1) days_in_year = (d2 - d1).days # Evaluate the fractional year in radians # dt.hour-12 because gamma start from local noon time gamma = 2 * np.pi * (dt.timetuple().tm_yday - 1 + \ (dt.hour - 12) / 24.0) / days_in_year # Evaluate the Equation of time in minutes eqtime = EQTIME1 * (EQTIME2 + EQTIME3 * np.cos(gamma) - \ EQTIME4 * np.sin(gamma) - EQTIME5 * np.cos(2 * gamma) - \ 0.040849 * np.sin(2 * gamma)) # Time offset in minutes time_offset = eqtime + 4.0 * lon # local solar time in minutes true_solar_time = dt.hour * 60 + dt.minute + dt.second / 60 + time_offset # Solar hour angle in degrees and in radians HaRad = np.radians((true_solar_time / 4.) - 180.) # Evaluate the solar declination angle in radians Decli = DECL1 - DECL2 * np.cos(gamma) + DECL3 * np.sin(gamma) - \ DECL4 * np.cos(2 * gamma) + DECL5 * np.sin(2 * gamma) - \ DECL6 * np.cos(3 * gamma) + DECL7 * np.sin(3 * gamma) # Evaluate the Solar local Coordinates CosZen = (np.sin(RadLat) * np.sin(Decli) + \ np.cos(RadLat) * np.cos(Decli) * np.cos(HaRad)) TmpZenRad = np.arccos(CosZen) szen = np.degrees(TmpZenRad) CosAzi = -((np.sin(RadLat) * np.cos(TmpZenRad) - np.sin(Decli)) / \ (np.cos(RadLat) * np.sin(TmpZenRad))) saa = 360. - np.degrees(np.arccos(CosAzi)) # Correct for Time < 12.00 ( -> in range 0 . 180 ) saa[(true_solar_time < 720)] = 360. 
- saa[(true_solar_time < 720)] # in minutes 12 *60 return (szen, saa) def rebin(a, newshape): '''Rebin an array to a new shape. ''' assert len(a.shape) == len(newshape) slices = [slice(0, old, float(old) / new) for old, new in zip(a.shape, newshape)] coordinates = np.mgrid[slices] indices = coordinates.astype('i') # choose the biggest smaller integer index return a[tuple(indices)] def H8_file_read(file, verbose=False): # type: (object, object) -> object ''' read in a single Himawari8 file. ''' if not os.path.exists(file): print 'can not read %s' % file fileExtension = os.path.splitext(file)[1] if fileExtension in '.bz2': fh = h8_bz2.BZ2File(file, 'rb') else: fh = open(file, 'rb') # doit = call(["bunzip2", file]) # if doit < 1: # file = file[:-4] # else: # print 'can not unzip ', file # Read in the head blocks #print "processing %s" % file total_len = 0 # read in the file as binary see python struct for help for bb in xrange(11): # for Block 1 fh.seek(total_len) Block_no = struct.unpack('b', fh.read(1))[0] if verbose: print 'Reading block %s' % Block_no fh.seek(total_len + 1) Block_len = struct.unpack('h', fh.read(2))[0] if verbose: print 'The length of block %s is %s' % (Block_no, Block_len) # from block 2 read in number of samps and lines if Block_no == 2: fh.seek(total_len + 5) samps = struct.unpack('h', fh.read(2))[0] fh.seek(total_len + 7) lines = struct.unpack('h', fh.read(2))[0] # from block 3 read projection information elif Block_no == 3: fh.seek(total_len + 3) sub_lon = struct.unpack('d', fh.read(8))[0] #print 'central longitude %r' % sub_lon fh.seek(total_len + 11) CFAC = struct.unpack('I', fh.read(4))[0] fh.seek(total_len + 15) LFAC = struct.unpack('I', fh.read(4))[0] fh.seek(total_len + 19) COFF = struct.unpack('f', fh.read(4))[0] fh.seek(total_len + 23) LOFF = struct.unpack('f', fh.read(4))[0] fh.seek(total_len + 27) # Information about satellite height, earth equatorial radius # more infor can be found on page 16 of # Himawari_D_users_guide_en 
Proj_info = struct.unpack('ddddddd', fh.read(8 * 7))[:] elif Block_no == 4: fh.seek(total_len + 3) Nav_info = struct.unpack('dddddddd', fh.read(8 * 8))[:] elif Block_no == 5: fh.seek(total_len + 3) Band_no = struct.unpack('h', fh.read(2))[0] fh.seek(total_len + 5) central_wave = struct.unpack('d', fh.read(8))[0] fh.seek(total_len + 19) Cal_info = struct.unpack('ddddddddddd', fh.read(8 * 11))[:] # Change the block length for next block total_len += Block_len if verbose: print 'Total header length %s' % total_len # Now read in image data fh.seek(total_len) dtype = 'u2' shape = [lines, samps] size = np.dtype(dtype).itemsize * samps * lines data = fh.read() data = np.frombuffer(data[:size], dtype).reshape(shape) fh.close() fileExtension = os.path.splitext(file)[1] # if fileExtension in '.DAT': # call(["bzip2", file]) if verbose: print 'slope: %f, offset: %f for radiance' % (Cal_info[0], Cal_info[1]) radiance = data * Cal_info[0] + Cal_info[1] # for infrared bands if Band_no > 6: # for Planck temperature speed_of_light = Cal_info[8] planck_constant = Cal_info[9] boltzmann_constant = Cal_info[10] # radiance = 2.5 # central_wave = 4 c1 = 2.0 * planck_constant * speed_of_light * speed_of_light c2 = planck_constant * speed_of_light / boltzmann_constant # -- Derived constant scaling factors for: # c1: W.m2 to W/(m2.um-4) => multiplier of 1.0e+24 is required. # c2: K.m to K.um a=> multiplier of 1.0e+06 is required. c1_scale = 1.0e+24 c2_scale = 1.0e+06 # -- Calculate wavelength dependent "constants" fk1 = c1_scale * c1 / (central_wave ** 5) fk2 = c2_scale * c2 / central_wave logarithm = np.log((fk1 / (radiance) + 1.0)) temperature = fk2 / logarithm BT = Cal_info[2] + Cal_info[3] * temperature + Cal_info[4] * \ temperature * temperature else: # Lets not resample the himawari data here. 
# if samps > 12000: # hard coded should find a better way later # # Resampled by a factor of 0.25 with bilinear interpolation # # sub = radiance[4300*4:4700*4,2500*4:2900*4] # # d['vis_full'] = sub # radiance = rebin(radiance, (lines / 4, samps / 4)) # elif samps > 5500: # radiance = rebin(radiance, (lines / 2, samps / 2)) # for visible band this is Albedo BT = radiance * Cal_info[2] return (radiance, BT) def Himawari_read(file_dict, verbose=False): """ Read the Himawari-8 channels for fire detection, we only need red, MIR and TIR input: file dictionary like {'red_path' : 'HS_H08_20150109_0600_B03_FLDK_R20_S0101.DAT', 'mir_path' : 'HS_H08_20150109_0600_B07_FLDK_R20_S0101.DAT', 'tir_path' : 'HS_H08_20150109_0600_B14_FLDK_R20_S0101.DAT'} output: date dictionary like {'mir_BT' : np.array(5500,55500)} reference: Himawari_D_users_guide_en from http://www.data.jma.go.jp/mscweb/en/himawari89/space_segment/hsd_sample/HS_D_users_guide_en_v11.pdf """ d = {} for key in file_dict.keys(): files = file_dict[key][0] files.sort() rad_data_list = [] BT_data_list = [] for file in files: radiance, BT = H8_file_read(file) rad_data_list.append(radiance) BT_data_list.append(BT) radiance = np.vstack(rad_data_list) BT = np.vstack(BT_data_list) data_key = file_dict[key][1] d[data_key] = BT.astype(np.float32) data_key = file_dict[key][2] d[data_key] = radiance.astype(np.float32) return d def get_path(root, band, time_key=None, path_tree=None): """Finds path for given time key and data band time_key: 201501081300, YYYYMMDDHHMM band: B07,MIR (3.9um), B14,TIR (11um), B03,red (0.6um) sometime B03 has both 500m and 2km resolution files variable: BT, brightness temperature; Radiance for static data return path path_tree: HSFD, the original japan FTP path like:# 201501/09/201501090000/00/B03 else,weidong own path tree like 201501090000 """ if time_key is not None: # EO realtime date # separate the date and time from the time_key dt_time_key = datetime.datetime.strptime(time_key, '%Y%m%d%H%M') 
dt_date = dt_time_key.strftime('%Y%m%d') dt_time = dt_time_key.strftime('%H%M') # keys = [dt_date, dt_time, band] # Realtime EO channels if path_tree in ['HSFD']: root = os.path.join(root, dt_time_key.strftime('%Y%m'), dt_time_key.strftime('%d'), dt_time_key.strftime('%Y%m%d%H') + '00', dt_time_key.strftime('%M'), band + '/') # 500m resolution data band_vis_05 = band + "_FLDK_R05_S" # 2km resolution data band = band + "_FLDK_R20_S" else: root = root + time_key + "/" band_vis_05 = band else: root = os.path.join(root, "lcov/") band_vis_05 = band #print "root: %s" % root #print "band: %s" % band # now iterate over root path if os.path.exists(root): filepath = [] filepath1 = [] for f in os.listdir(root): if band in f: file_size = os.path.getsize(root + f) if file_size > 10000: filepath.append(root + f) elif band_vis_05 in f: filepath1.append(root + f) else: continue if len(filepath) < 1: filepath = filepath1 # return # if filepath1 is not None: # return filepath1 # else: return filepath else: print root, 'does not exists' sys.exit def paths(root, time_key=None, path_tree=None, mode=0): """Constructs a dictionary for the file paths """ # path dictionary construted from here d = {} if time_key is not None: # EO realtime date # for fire detection model if mode == 0: d["red_path"] = [get_path(root, "B03", \ time_key=time_key, path_tree=path_tree), 'vis', 'redrad'] # d["nir_path"] = [get_path(root, "B04", \ # time_key=time_key,path_tree=path_tree), 'nir', 'nirrad'] # d["sir_path"] = [get_path(root, "B06", \ # time_key=time_key, path_tree=path_tree), 'sir', 'sirrad'] d["tir86_path"] = [get_path(root, "B11", \ time_key=time_key, path_tree=path_tree), 'ir86', 'ir86rad'] d["mir_path"] = [get_path(root, "B07", \ time_key=time_key, path_tree=path_tree), 'ir39', 'ir39rad'] d["tir11_path"] = [get_path(root, "B14", \ time_key=time_key, path_tree=path_tree), 'ir11', 'ir11rad'] else: d["latlon_path"] = [get_path(root, "lat_lon.img"), 'lat', 'lon'] d["sat_view_angle_path"] = 
[get_path(root, "vza_vaa.img"), 'vza', 'vaa'] d["landcover_path"] = [get_path(root, "lcov.img"), 'lcov'] # d["fixed_position_path"] = [get_path(root, "H8_tir_201501090620.img"),'fpos'] return d def find_start_stop(EO_path_dict): mult = 550 first_path = EO_path_dict['mir_path'][0][0] last_path = EO_path_dict['mir_path'][0][-1] first_block = int(first_path.split('_')[-1][1:3]) last_block = int(last_path.split('_')[-1][1:3]) + 1 return first_block * mult, last_block * mult def load_h8(in_root, time_key, path_tree=None, mode=0): """ load all the data and put them in a dictionary """ # firstly setup the path dictionary EO_path_dict = paths(in_root, time_key=time_key, path_tree=path_tree, mode=0) # readin all the Himawari files here EO_data = Himawari_read(EO_path_dict) # construt a static data dictionary static_path_dict = paths(in_root) # readin all static data here static_data = static_read(static_path_dict) # get the sun angle szen, saa = sun_angles(static_data['lat'], static_data['lon'], time_key) # get the sun glint angle sun_glint = sunglint(static_data['vza'], static_data['vaa'], szen, saa) # combine EO and static data together EO_data.update(static_data) EO_data['szen'] = szen EO_data['sun_glint'] = sun_glint EO_data['ACQTIME'] = np.zeros(EO_data['ir39'].shape, dtype=np.int8) # for fire detection EO_data['diff'] = EO_data['ir39'] - EO_data['ir11'] EO_data['tirradratio'] = EO_data['ir39rad'] / EO_data['ir11rad'] EO_data['visradratio'] = EO_data['ir39rad'] / EO_data['redrad'] # d['ndvi'] = (d['nir'] - d['vis']) / (d['nir'] + d['vis']) # correct the navigation problem dt_time_key = datetime.datetime.strptime(time_key, '%Y%m%d%H%M') dt_time = int(dt_time_key.strftime('%H')) # if dt_time < 11: # doit = img_move(EO_data) # do the cloud masking data = cloud_mask(EO_data) # data = water_mask(EO_data) return data if __name__ == '__main__': in_root = '/Volumes/INTENSO/him_downlaod' # root for the output files time_key = "201507060000" data = load_h8(in_root, time_key, 
path_tree="HSFD")
284
0
23
668d96dc85f2dcb56b1266245486159c92348042
318
py
Python
profiles_api/urls.py
shalaka-dengale16/profiles-rest-api
fb0ea419123ffe5a6a4000b7893fe506dd5456f9
[ "MIT" ]
null
null
null
profiles_api/urls.py
shalaka-dengale16/profiles-rest-api
fb0ea419123ffe5a6a4000b7893fe506dd5456f9
[ "MIT" ]
null
null
null
profiles_api/urls.py
shalaka-dengale16/profiles-rest-api
fb0ea419123ffe5a6a4000b7893fe506dd5456f9
[ "MIT" ]
null
null
null
from django.urls import path,include from rest_framework.routers import DefaultRouter from profiles_api import views router=DefaultRouter() router.register('hello-viewset',views.HelloViewSet,basename='hello-viewset') urlpatterns=[ path('hello-api/',views.HelloApi.as_view()), path('',include(router.urls)) ]
26.5
76
0.779874
from django.urls import path,include from rest_framework.routers import DefaultRouter from profiles_api import views router=DefaultRouter() router.register('hello-viewset',views.HelloViewSet,basename='hello-viewset') urlpatterns=[ path('hello-api/',views.HelloApi.as_view()), path('',include(router.urls)) ]
0
0
0
2a78100207b1f584bd76849f055d4bad916d1dcd
897
py
Python
start.py
zxlim/pyHookDeploy
8b151dc0351b13df74555d2087daf877e6c80797
[ "Apache-2.0" ]
1
2020-08-07T04:29:02.000Z
2020-08-07T04:29:02.000Z
start.py
zxlim/pyHookDeploy
8b151dc0351b13df74555d2087daf877e6c80797
[ "Apache-2.0" ]
null
null
null
start.py
zxlim/pyHookDeploy
8b151dc0351b13df74555d2087daf877e6c80797
[ "Apache-2.0" ]
1
2021-07-15T14:01:55.000Z
2021-07-15T14:01:55.000Z
#!/usr/bin/env python3 # -*- coding: utf-8 -*- ################################################## # Copyright (c) 2019 Zhao Xiang Lim. # Distributed under the Apache License 2.0 (the "License"). # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # You should have received a copy of the Apache License 2.0 # along with this program. # If not, see <http://www.apache.org/licenses/LICENSE-2.0>. ################################################## from pyHookDeploy import init_app app = init_app() if __name__ == "__main__": main()
27.181818
74
0.639911
#!/usr/bin/env python3 # -*- coding: utf-8 -*- ################################################## # Copyright (c) 2019 Zhao Xiang Lim. # Distributed under the Apache License 2.0 (the "License"). # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # You should have received a copy of the Apache License 2.0 # along with this program. # If not, see <http://www.apache.org/licenses/LICENSE-2.0>. ################################################## from pyHookDeploy import init_app app = init_app() def main(): # Run instance on 0.0.0.0 on port 8080. app.run(host="0.0.0.0", port=8080) if __name__ == "__main__": main()
67
0
23
6867d19bdffc28c622d54d9758507091ad086596
925
py
Python
keanu-python/tests/test_traceplot.py
rs992214/keanu
f7f9b877aaaf9c9f732604f17da238e15dfdad13
[ "MIT" ]
153
2018-04-06T13:30:31.000Z
2022-01-31T10:05:27.000Z
keanu-python/tests/test_traceplot.py
shinnlok/keanu
c75b2a00571a0da93c6b1d5e9f0cbe09aebdde4d
[ "MIT" ]
168
2018-04-06T16:37:33.000Z
2021-09-27T21:43:54.000Z
keanu-python/tests/test_traceplot.py
shinnlok/keanu
c75b2a00571a0da93c6b1d5e9f0cbe09aebdde4d
[ "MIT" ]
46
2018-04-10T10:46:01.000Z
2022-02-24T02:53:50.000Z
from collections import OrderedDict import pytest from numpy.testing import assert_array_equal from keanu.plots import traceplot from keanu.vartypes import sample_types @pytest.fixture
28.90625
89
0.697297
from collections import OrderedDict import pytest from numpy.testing import assert_array_equal from keanu.plots import traceplot from keanu.vartypes import sample_types @pytest.fixture def trace() -> sample_types: return OrderedDict([("gamma", [1., 2., 3., 4.]), ("gaussian", [0.1, 0.2, 0.3, 0.4])]) def test_traceplot_returns_axesplot_with_correct_data(trace: sample_types) -> None: ax = traceplot(trace, labels=['gamma', 'gaussian']) gamma_ax = ax[0][0] gaussian_ax = ax[1][0] assert gamma_ax.get_title() == 'gamma' assert gaussian_ax.get_title() == 'gaussian' gamma_lines = gamma_ax.get_lines() gaussian_lines = gaussian_ax.get_lines() gamma_ax_data = [l.get_ydata() for l in gamma_lines] gaussian_ax_data = [l.get_ydata() for l in gaussian_lines] assert_array_equal(gamma_ax_data, [[1., 2., 3., 4.]]) assert_array_equal(gaussian_ax_data, [[0.1, 0.2, 0.3, 0.4]])
690
0
45
26b636a86c604a98c671236be20e5c67f52404ba
2,189
py
Python
lexical_analyzer.py
Tenchi88/lexical_analyzer
ef7b5d6af12de602c6f50c899928d36cb6408646
[ "Apache-2.0" ]
null
null
null
lexical_analyzer.py
Tenchi88/lexical_analyzer
ef7b5d6af12de602c6f50c899928d36cb6408646
[ "Apache-2.0" ]
null
null
null
lexical_analyzer.py
Tenchi88/lexical_analyzer
ef7b5d6af12de602c6f50c899928d36cb6408646
[ "Apache-2.0" ]
null
null
null
# -*- coding: utf-8 -*- import collections from lexical_analyzer_helpers import *
36.483333
110
0.745089
# -*- coding: utf-8 -*- import collections from lexical_analyzer_helpers import * def get_top_words_in_path(path, name_type, speech_part, top_size=10): if name_type not in ["variable", "function_definition"]: raise ValueError(name_type + " not supported. Available types: [variable, function_definition]") if speech_part not in ["noun", "verb"]: raise ValueError(speech_part + " not supported. Available types: [noun, verb]") if name_type == "variable": names = get_all_words_in_path(path, get_all_variables_names) else: names = get_all_words_in_path(path, get_all_function_names) if speech_part == "noun": words = flat([split_snake_case_name_to_words(name, is_noun) for name in names]) else: words = flat([split_snake_case_name_to_words(name, is_verb) for name in names]) return collections.Counter(words).most_common(top_size) def get_top_verbs_in_function_names(path, top_size=10): return get_top_words_in_path(path, name_type="function_definition", speech_part="verb", top_size=top_size) def get_top_nouns_in_function_names(path, top_size=10): return get_top_words_in_path(path, name_type="function_definition", speech_part="noun", top_size=top_size) def get_top_verbs_in_variable_names(path, top_size=10): return get_top_words_in_path(path, name_type="variable", speech_part="verb", top_size=top_size) def get_top_nouns_in_variable_names(path, top_size=10): return get_top_words_in_path(path, name_type="variable", speech_part="noun", top_size=top_size) def get_top_for_projects_in_path(projects_path, projects, top_function, top_size=10): top_words = {} for project in projects: full_path = os.path.join(projects_path, project) top = top_function(full_path, top_size=top_size) top_words = sum_occurrence(top_words, top) top_words_list = [] for word in top_words: top_words_list.append((word, top_words[word])) return top_words_list def speech_part_is_supported(speech_part): return speech_part in ["verb", "noun"] def name_type_is_supported(names_type): return names_type in 
["function_definition", "variable"]
1,913
0
184
7797d8f5bfbc1aa24b9f92677ad4b1d2bac9ebb0
869
py
Python
build.py
ryanc414/pytest_runner
11681fea458de1761e808684f578e183bddc40ef
[ "MIT" ]
21
2020-09-19T02:19:51.000Z
2022-03-05T16:01:48.000Z
build.py
ryanc414/pytest_runner
11681fea458de1761e808684f578e183bddc40ef
[ "MIT" ]
12
2020-08-25T17:41:31.000Z
2021-08-17T21:57:32.000Z
build.py
ryanc414/pytest_runner
11681fea458de1761e808684f578e183bddc40ef
[ "MIT" ]
8
2020-09-19T02:20:12.000Z
2022-02-22T15:21:25.000Z
#!/usr/bin/env python3 """ Build everything from source. Handles: 1. Install npm dependencies and build the UI client 2. Build source and binary distributions of the python package. """ import os import shutil import subprocess import sys WEB_CLIENT_DIR = os.path.join( os.path.dirname(__file__), "pytest_commander", "web_client" ) if __name__ == "__main__": main()
22.868421
73
0.674338
#!/usr/bin/env python3 """ Build everything from source. Handles: 1. Install npm dependencies and build the UI client 2. Build source and binary distributions of the python package. """ import os import shutil import subprocess import sys WEB_CLIENT_DIR = os.path.join( os.path.dirname(__file__), "pytest_commander", "web_client" ) def main(): npm_exe = shutil.which("npm") if not npm_exe: sys.exit( "Error, could not find npm installation. Ensure that npm is " "installed and on your PATH." ) print("Building UI...") subprocess.check_call([npm_exe, "install"], cwd=WEB_CLIENT_DIR) subprocess.check_call([npm_exe, "run", "build"], cwd=WEB_CLIENT_DIR) print("Done! Find source archive and wheel under dist/") print("Run tests with: $ python test.py") if __name__ == "__main__": main()
466
0
23
6011f11e84b0ca5ba3caa12f9e06568d9dbe7af6
474
py
Python
src/consensus/block_rewards.py
reghacker/chia-blockchain
51ab6260dcfbd95b4be9686f717f8d63133ef50f
[ "Apache-2.0" ]
null
null
null
src/consensus/block_rewards.py
reghacker/chia-blockchain
51ab6260dcfbd95b4be9686f717f8d63133ef50f
[ "Apache-2.0" ]
null
null
null
src/consensus/block_rewards.py
reghacker/chia-blockchain
51ab6260dcfbd95b4be9686f717f8d63133ef50f
[ "Apache-2.0" ]
null
null
null
from src.util.ints import uint32, uint64 def calculate_block_reward(height: uint32) -> uint64: """ Returns the coinbase reward at a certain block height. 1 Chia coin = 16,000,000,000,000 = 16 trillion mojo. """ return uint64(14000000000000) def calculate_base_fee(height: uint32) -> uint64: """ Returns the base fee reward at a certain block height. 1 base fee reward is 1/8 of total block reward """ return uint64(2000000000000)
26.333333
58
0.691983
from src.util.ints import uint32, uint64 def calculate_block_reward(height: uint32) -> uint64: """ Returns the coinbase reward at a certain block height. 1 Chia coin = 16,000,000,000,000 = 16 trillion mojo. """ return uint64(14000000000000) def calculate_base_fee(height: uint32) -> uint64: """ Returns the base fee reward at a certain block height. 1 base fee reward is 1/8 of total block reward """ return uint64(2000000000000)
0
0
0
b525ded27d8f2ca62cb466a65a21bbd19071ef22
3,061
py
Python
dp_tornado/helper/security/crypto/__init__.py
donghak-shin/dp-tornado
095bb293661af35cce5f917d8a2228d273489496
[ "MIT" ]
18
2015-04-07T14:28:39.000Z
2020-02-08T14:03:38.000Z
dp_tornado/helper/security/crypto/__init__.py
donghak-shin/dp-tornado
095bb293661af35cce5f917d8a2228d273489496
[ "MIT" ]
7
2016-10-05T05:14:06.000Z
2021-05-20T02:07:22.000Z
dp_tornado/helper/security/crypto/__init__.py
donghak-shin/dp-tornado
095bb293661af35cce5f917d8a2228d273489496
[ "MIT" ]
11
2015-12-15T09:49:39.000Z
2021-09-06T18:38:21.000Z
# -*- coding: utf-8 -*- from __future__ import absolute_import from dp_tornado.engine.helper import Helper as dpHelper from Crypto.Cipher import AES
34.011111
116
0.611565
# -*- coding: utf-8 -*- from __future__ import absolute_import from dp_tornado.engine.helper import Helper as dpHelper from Crypto.Cipher import AES class CryptoHelper(dpHelper): def encrypt(self, plain, randomized=False, expire_in=0, key=None, raw=False, encode=True, pad=True): if raw: return self._encrypt(plain, key=key, encode=encode, pad=pad) payload = { 'p': plain } if randomized and not expire_in: payload['r'] = self.helper.datetime.timestamp.now(ms=True) if expire_in and expire_in > 0: payload['e'] = self.helper.datetime.timestamp.now(ms=True) + (expire_in * 1000) payload = self.helper.string.serialization.serialize(payload, method='json') return self._encrypt(payload, key=key) def decrypt(self, encrypted, key=None, raw=False, encode=True, pad=True): try: decrypted = self._decrypt(encrypted, key=key, encode=encode, pad=pad) except Exception as e: self.logging.exception(e) return False if raw: return decrypted payload = self.helper.string.serialization.deserialize(decrypted, method='json') if not payload: return None if 'e' in payload and self.helper.datetime.timestamp.now(ms=True) > payload['e']: return False return payload['p'] def _pad(self, s): if self.helper.misc.system.py_version >= 3: return s + (16 - len(s) % 16) * chr(16 - len(s) % 16).encode('utf8') else: return s + (16 - len(s) % 16) * chr(16 - len(s) % 16) def _unpad(self, s): return s[:-ord(s[len(s)-1:])] def _key_and_iv(self, key): if isinstance(key, (tuple, list)): return key[0], key[1] if not key: key = getattr(self, '__crypto_default_key__', None) if not key: key = self.ini.crypto.key setattr(self, '__crypto_default_key__', key) r_key = key * max(1, int(self.helper.numeric.math.ceil(32.0 / (len(key) * 1.0)))) return r_key[0:16], r_key[16:32] def _encrypt(self, plain, key=None, encode=True, pad=True): key, iv = self._key_and_iv(key) if self.helper.misc.system.py_version >= 3: plain = plain.encode('utf8') encrypted = AES.new(key, AES.MODE_CBC, iv).encrypt(self._pad(plain) if pad else plain) 
return self.helper.security.crypto.encoding.base64_encode(encrypted, raw=True) if encode else encrypted def _decrypt(self, encrypted, key=None, encode=True, pad=True): key, iv = self._key_and_iv(key) encrypted = self.helper.security.crypto.encoding.base64_decode(encrypted, raw=True) if encode else encrypted decrypted = AES.new(key, AES.MODE_CBC, iv).decrypt(encrypted) decrypted = self._unpad(decrypted) if pad else decrypted if self.helper.misc.system.py_version >= 3: decrypted = decrypted.decode('utf8') return decrypted
2,690
8
211
b40678d6e6142c5ce139c5ab13bce7b7eed2edf6
10,091
py
Python
Figures.py
swtietz/BA_Analysis
197e4aad2b054dc9235808fcc5900659cec8cce6
[ "MIT" ]
1
2016-11-30T07:23:51.000Z
2016-11-30T07:23:51.000Z
Figures.py
swtietz/BA_Analysis
197e4aad2b054dc9235808fcc5900659cec8cce6
[ "MIT" ]
null
null
null
Figures.py
swtietz/BA_Analysis
197e4aad2b054dc9235808fcc5900659cec8cce6
[ "MIT" ]
1
2016-11-30T07:24:25.000Z
2016-11-30T07:24:25.000Z
import numpy as np import matplotlib import matplotlib.pyplot as plt from matplotlib.backends.backend_pdf import PdfPages import matplotlib.cm from scipy.signal.windows import gaussian import sklearn.metrics from DataSet import createDataSetFromFile from Utils import getProjectPath from Evaluation import getSpecificColorMap, plotMinErrors, plotAlongAxisErrors,\ plotMinErrorsSqueezed if __name__ == '__main__': matplotlib.rcParams.update({'font.size': 20}) createGroundTruthCreation()
36.168459
146
0.651373
import numpy as np import matplotlib import matplotlib.pyplot as plt from matplotlib.backends.backend_pdf import PdfPages import matplotlib.cm from scipy.signal.windows import gaussian import sklearn.metrics from DataSet import createDataSetFromFile from Utils import getProjectPath from Evaluation import getSpecificColorMap, plotMinErrors, plotAlongAxisErrors,\ plotMinErrorsSqueezed def createTargetShapeDelayFigure(): gestureLen = 20 gestureSig = np.concatenate([np.zeros((10,3)),np.random.normal(size=(gestureLen,3))*np.atleast_2d(gaussian(20, 3, 0)*2).T,np.zeros((10,3))],0) target = np.concatenate([np.zeros((10,1)),np.ones((gestureLen,1)),np.zeros((10,1))],0) target_gaus = np.concatenate([np.zeros((5,1)),np.atleast_2d(gaussian(gestureLen+10,5)).T,np.zeros((5,1))],0) target_delayed = np.concatenate([np.zeros((28,1)),np.ones((5,1)),np.zeros((7,1))],0) fig, ax = plt.subplots(1, 3, sharey=True, sharex=True, figsize=(20,5)) plt.ylim(-5,5) for axn in ax: axn.plot(gestureSig,label='input signal') axn.plot([0,40],[0,0],c='black',linewidth=1) ax[0].plot(target,label='target',c='red',linewidth=2) ax[0].fill_between(np.arange(0,40),0,target.squeeze(),facecolor='red',alpha=0.5) ax[0].set_title('(a)') ax[0].set_xlabel('timestep') ax[1].plot(target_gaus,label='target',c='red',linewidth=2) ax[1].fill_between(np.arange(0,40),0,target_gaus.squeeze(),facecolor='red',alpha=0.5) ax[1].set_title('(b)') ax[1].set_xlabel('timestep') ax[2].plot(target_delayed,label='target',c='red',linewidth=2) ax[2].fill_between(np.arange(0,40),0,target_delayed.squeeze(),facecolor='red',alpha=0.5) ax[2].set_title('(c)') ax[2].set_xlabel('timestep') #plt.legend(bbox_to_anchor=(1., 1.05), loc=1, borderaxespad=0.) 
plt.tight_layout() projectPath = 'C:\Users\Steve\Documents\Uni\BAThesis\\src\\targetShapeDelay2.pdf' pp = PdfPages(projectPath) pp.savefig() pp.close() def createEvaluationProblem(): gestureLen = 20 target = np.concatenate([np.ones((gestureLen+1,1)),np.zeros((9,1)),np.ones((gestureLen,1)),np.zeros((40,1))],0) target2 = np.concatenate([np.zeros((70,1)),np.ones((gestureLen,1))],0) pred1 = np.concatenate([np.ones((8,1)),np.zeros((5,1)),np.ones((8,1)),np.zeros((69,1))],0) pred2 = np.concatenate([np.zeros((7,1)),np.ones((7,1)),np.zeros((66,1)),np.ones((10,1))],0) zero = np.zeros((100,1)) plt.figure(figsize=(20,5)) #plt.plot(target, label='Target Gesture 1', color='red', linewidth=2, linestyle='--') #plt.plot(pred1, label='Pred. Gesture 1', color='red', linewidth=2, linestyle='-') #plt.plot(pred2, label='Pred. Gesture 2', color='blue', linewidth=2, linestyle='-') #plt.fill_between(np.arange(0,70), 0, 1, label='Target Gesture 1', facecolor='red', alpha=0.2, where=np.squeeze(target>0)) #plt.fill_between(np.arange(0,70), 0, np.squeeze(pred1), label='Pred. Gesture 1', facecolor='red', where=np.squeeze(pred1>=pred2)) #plt.fill_between(np.arange(0,70), 0, np.squeeze(pred2), label='Pred. 
Gesture 2', facecolor='blue', where=np.squeeze(pred2>=pred1)) plt.plot(np.ones((90,1))*0.5,color='black') plt.plot(np.ones((90,1))*1,color='black') plt.plot(np.ones((90,1))*-0.5,color='black') plt.plot(np.ones((90,1))*-1,color='black') plt.fill_between(np.arange(0,90), 0.5, 1, label='no gesture', facecolor='grey', alpha=0.4) plt.fill_between(np.arange(0,90), 0.5, 1, facecolor='red', alpha=0.8, where=np.squeeze(target>0)) plt.fill_between(np.arange(0,90), 0.5, 1, facecolor='blue', alpha=0.8, where=np.squeeze(target2>0)) plt.fill_between(np.arange(0,90), -0.5, -1, facecolor='grey', alpha=0.4) plt.fill_between(np.arange(0,90), -0.5, -1, label='Gesture 1', facecolor='red', where=np.squeeze(pred1==1)) plt.fill_between(np.arange(0,90), -0.50, -1, label='Gesture 2', facecolor='blue', where=np.squeeze(pred2==1)) plt.fill_between(np.arange(0,90), -0.2, 0.2, facecolor='yellow', alpha=0.2) plt.annotate('TP',xy=(3.5,-0.1)) plt.plot([3,10],[-0.75,0.75],linewidth=3, color='black') plt.annotate('WG',xy=(8,-0.1)) plt.plot([10,10],[-0.75,0.75],linewidth=3, color='black') plt.annotate('FP',xy=(14,-0.1)) plt.plot([17,10],[-0.75,0.75],linewidth=3, color='black') plt.annotate('TP',xy=(34,-0.1)) plt.plot([50,25],[-0.75,0.75],linewidth=3, color='black') plt.annotate('FN',xy=(46,-0.1)) plt.plot([50,40],[-0.75,0.75],linewidth=3, color='black') plt.annotate('TP',xy=(55.5,-0.1)) plt.plot([50,60],[-0.75,0.75],linewidth=3, color='black') plt.annotate('TP',xy=(83.5,-0.1)) plt.plot([85,80],[-0.75,0.75],linewidth=3, color='black') ax = plt.gca() ax.text( 2.5, -1.3,str(1),bbox=dict(facecolor='none', edgecolor='black', boxstyle='circle,pad=0.5')) ax.text( 9.5, -1.3,str(2),bbox=dict(facecolor='none', edgecolor='black', boxstyle='circle,pad=0.5')) ax.text(15 , -1.3,str(3),bbox=dict(facecolor='none', edgecolor='black', boxstyle='circle,pad=0.5')) ax.text(50 , -1.3,str(4),bbox=dict(facecolor='none', edgecolor='black', boxstyle='circle,pad=0.5')) ax.text(84.5, -1.3,str(5),bbox=dict(facecolor='none', 
edgecolor='black', boxstyle='circle,pad=0.5')) ax.text(39.5, 1.2,str(6),bbox=dict(facecolor='none', edgecolor='black', boxstyle='circle,pad=0.5')) ax.text(59.5, 1.2,str(7),bbox=dict(facecolor='none', edgecolor='black', boxstyle='circle,pad=0.5')) plt.xlabel('time step') plt.yticks([-0.75,0,0.75]) plt.setp(plt.gca(), 'yticklabels', ['Prediction','Mapping','Target']) plt.ylim(-1.5,1.5) plt.xlim(0,120) plt.legend() plt.tight_layout() projectPath = 'C:\Users\Steve\Documents\Uni\BAThesis\\src\\classificationProb.pdf' pp = PdfPages(projectPath) pp.savefig() pp.close() true = [1,1,1,2,3,3,3] pred = [1,2,3,2,1,3,3] print sklearn.metrics.f1_score(true,pred,average=None) print np.mean(sklearn.metrics.f1_score(true,pred,average=None)) def createInputSignalFigure(): errors = [0.272813277233,0.233033147087,0.217966453407,0.139282580674,0.0953774246893,0.0898370698925,0.0551168200035] labels = ['F','G','A','FG','FA','GA','FGA'] ax = plt.subplot() #ax.bar(np.arange(0,7), errors, alpha=0.5) cmap = matplotlib.cm.brg_r for i, error in enumerate(errors): ax.bar([i], errors[i], facecolor=cmap(error/0.5), alpha=1) ax.set_xticks(np.arange(0.5,7.5,1)) ax.set_xticklabels(labels) plt.ylabel('Validation Error') plt.xlabel('Input signal') plt.xlim(-0.5,7.5) plt.ylim(0,0.5) projectPath = 'C:\Users\Steve\Documents\Uni\BAThesis\\src\\errorByInput.pdf' pp = PdfPages(projectPath) pp.savefig() pp.close() return ax def createGroundTruthCreation(): ds = createDataSetFromFile('julian_0_fullSet.npz') def bla(): vals = np.array([0.8867924528301887, 0.85238095238095235, 0.89047619047619042, 0.8418604651162791, 0.89622641509433965, 0.875, 0.86301369863013699, 0.82027649769585254, 0.83783783783783783, 0.90094339622641506, 0.75, 0.74568965517241381, 0.76855895196506552, 0.78240740740740744, 0.76923076923076927, 0.85308056872037918, 0.85915492957746475, 0.87019230769230771, 0.86976744186046506, 0.82938388625592419, 0.90047393364928907, 0.83257918552036203, 0.80888888888888888, 0.89671361502347413, 
0.86915887850467288, 0.78026905829596416, 0.76211453744493396, 0.76956521739130435, 0.73931623931623935, 0.75107296137339052, 0.90476190476190477, 0.84931506849315064, 0.89099526066350709, 0.83486238532110091, 0.84722222222222221, 0.86098654708520184, 0.87441860465116283, 0.8545454545454545, 0.85849056603773588, 0.88732394366197187, 0.74889867841409696, 0.79824561403508776, 0.82949308755760365, 0.77253218884120167, 0.77876106194690264]) np.set_printoptions(precision=3) for i in range(9): print i print str( "{0:.3f}".format(np.mean(vals[i*5:i*5+5]) )) + " (" + str("{0:.2f}".format(np.std(vals[i*5:i*5+5]))) + ")" print def evaluateNPZ(npzFile): pp = PdfPages(getProjectPath()+"error_space_"+npzFile+".pdf") a = np.load(getProjectPath()+npzFile) plotMinErrors(a['errors'], a['params'], a['paraRanges'], pp, getSpecificColorMap()) i = 0 inputSignalAxis = -1 inputScalingAxis = -1 normAxis = -1 for node, param in a['params']: if param == 'spectral_radius': inputSignalAxis = i elif param == 'output_dim': inputScalingAxis = i elif param == 'ridge_param': normAxis = i i =i+1 plotAlongAxisErrors(a['errors'], a['params'], a['paraRanges'], normAxis, inputSignalAxis, inputScalingAxis, pp, getSpecificColorMap()) pp.close() #plt.close('all') def plotErrorResSize(): matplotlib.rcParams.update({'font.size': 25}) npzFile = '2016-04-28-09-57_bigRunOnlySnap.npz' npz2 = '2016-04-28-15-18_bigRunOnlySnap.npz' projectPath = 'C:\Users\Steve\Documents\Uni\BAThesis\\src\\errorResSize.pdf' pp = PdfPages(projectPath) a = np.load(getProjectPath()+npzFile) errors = a['errors'] errors = np.mean(errors,2).squeeze() b = np.load(getProjectPath()+npz2) errors2 = b['errors'] errors2 = np.mean(errors2,2).squeeze() plt.figure(figsize=(10,7.5)) plt.plot(errors, 'o', linestyle='-', linewidth=3, label='ridge para = 0.01') #plt.plot(errors2, 'o', linestyle='-', linewidth=3, label='ridge para = 0.1') plt.grid() plt.minorticks_on() plt.grid(which='minor', axis='y') plt.xlabel('Reservoir size') ticks = 
np.arange(0, 8) labels = [25,50,100,200,400,800,1600,3200] plt.xticks(ticks, labels) plt.ylabel('Validation error') plt.ylim(0,1) plt.tight_layout() pp.savefig() pp.close() #plt.close('all') if __name__ == '__main__': matplotlib.rcParams.update({'font.size': 20}) createGroundTruthCreation()
9,384
0
170
7ca55fae0b20b4213cd5dea9d90664287f5cdc5c
655
py
Python
Ch9/datadefault_start.py
RiddhiDamani/Python
06cba66aeafd9dc0fa849ec2112c0786a3e8f001
[ "MIT" ]
null
null
null
Ch9/datadefault_start.py
RiddhiDamani/Python
06cba66aeafd9dc0fa849ec2112c0786a3e8f001
[ "MIT" ]
null
null
null
Ch9/datadefault_start.py
RiddhiDamani/Python
06cba66aeafd9dc0fa849ec2112c0786a3e8f001
[ "MIT" ]
null
null
null
# Python Object Oriented Programming by Joe Marini course example # implementing default values in data classes # Default Values always have to come first - i.e. before non-default values. from dataclasses import dataclass, field import random @dataclass b1 = Book("War and Peace", "Leo Tolstoy", 1225) b2 = Book("The Catcher in the Rye", "JD Salinger", 234) print(b1) print(b2)
27.291667
77
0.720611
# Python Object Oriented Programming by Joe Marini course example # implementing default values in data classes # Default Values always have to come first - i.e. before non-default values. from dataclasses import dataclass, field import random def price_func(): return float(random.randrange(20, 40)) @dataclass class Book: # you can define default values when attributes are declared title: str = "No Title" author: str = "No Author" pages: int = 0 price: float = field(default_factory = price_func) b1 = Book("War and Peace", "Leo Tolstoy", 1225) b2 = Book("The Catcher in the Rye", "JD Salinger", 234) print(b1) print(b2)
39
187
45
7dc6f35c51c58e0ce7864e4def04a2076bf3889b
2,954
py
Python
modules/compressor.py
zeroone2numeral2/nmjpeg-bot
8c77b0b90933cf2efb221fdad097e2087deae1f1
[ "MIT" ]
6
2018-03-25T02:04:18.000Z
2020-07-03T04:22:30.000Z
modules/compressor.py
zeroone2numeral2/nmjpeg-bot
8c77b0b90933cf2efb221fdad097e2087deae1f1
[ "MIT" ]
null
null
null
modules/compressor.py
zeroone2numeral2/nmjpeg-bot
8c77b0b90933cf2efb221fdad097e2087deae1f1
[ "MIT" ]
1
2019-08-12T16:25:12.000Z
2019-08-12T16:25:12.000Z
import os import sys import logging from telegram.ext import MessageHandler from telegram.ext import CommandHandler from telegram.ext import Filters from telegram.ext import BaseFilter from telegram.ext.dispatcher import run_async from picture import Picture logger = logging.getLogger(__name__) BASE_FILE_PATH = os.path.abspath(os.path.dirname(sys.argv[0])) + '/tmp/{}_{}.jpg' private_chat = FilterPrivateChat() photo_reply = FilterReplyToPhoto() @run_async @run_async
28.403846
89
0.651997
import os import sys import logging from telegram.ext import MessageHandler from telegram.ext import CommandHandler from telegram.ext import Filters from telegram.ext import BaseFilter from telegram.ext.dispatcher import run_async from picture import Picture logger = logging.getLogger(__name__) BASE_FILE_PATH = os.path.abspath(os.path.dirname(sys.argv[0])) + '/tmp/{}_{}.jpg' class FilterReplyToPhoto(BaseFilter): def filter(self, message): if message.reply_to_message and message.reply_to_message.photo: return True class FilterPrivateChat(BaseFilter): def filter(self, message): return message.chat_id > 0 private_chat = FilterPrivateChat() photo_reply = FilterReplyToPhoto() class PictureExtended(Picture): def __init__(self, message, quality=20): if message.reply_to_message: self.PhotoSize_object = message.reply_to_message.photo[-1] self.reply_to_message_id = message.reply_to_message.message_id else: self.PhotoSize_object = message.photo[-1] self.reply_to_message_id = None self.chat_id = message.chat_id self.message_id = message.message_id self.file_path = BASE_FILE_PATH.format(self.chat_id, self.message_id) self.message = message Picture.__init__(self, self.file_path, quality=quality) def download(self): new_file = self.PhotoSize_object.bot.get_file(self.PhotoSize_object.file_id) new_file.download(self.file_path) def send(self): with open(self.compressed_file_path, 'rb') as file: self.message.reply_photo(file, reply_to_message_id=self.reply_to_message_id) def one_shot(self): self.download() self.load() self.compress() self.send() self.remove() @run_async def on_photo_private(bot, update): logger.info("private chat photo") PictureExtended(update.message).one_shot() def get_quality_level(args): if len(args) > 0: try: quality_level = int(args[0]) if quality_level > 99: quality_level = 99 elif quality_level < 1: quality_level = 1 except ValueError: quality_level = 20 else: quality_level = 20 return quality_level @run_async def on_command(_, update, args): 
logger.info("jpeg command") quality_level = get_quality_level(args) logger.info("quality level: %d", quality_level) PictureExtended(update.message, quality=quality_level).one_shot() class module: name = "compressor" handlers = ( MessageHandler(Filters.photo & private_chat, on_photo_private), CommandHandler(["morejpg", "morejpg", "nmjpg", "nmjpeg", "jpg", "jpeg"], on_command, filters=photo_reply, pass_args=True) )
1,789
311
339
c06ed1ad8feac712313aca6df1e744a56c3c1611
2,687
py
Python
python/observesim/observations.py
jsobeck/observesim
8019c3031ad24cd08d9eb0e68b3fbe534a2d30ad
[ "BSD-3-Clause" ]
null
null
null
python/observesim/observations.py
jsobeck/observesim
8019c3031ad24cd08d9eb0e68b3fbe534a2d30ad
[ "BSD-3-Clause" ]
null
null
null
python/observesim/observations.py
jsobeck/observesim
8019c3031ad24cd08d9eb0e68b3fbe534a2d30ad
[ "BSD-3-Clause" ]
null
null
null
import numpy as np """Observations module class. Dependencies: numpy scipy """ class Observations(object): """Observations class. Parameters: ---------- observatory : string Observatory for tiles ('apo' or 'lco'; default 'apo') Attributes: ---------- nobservations : np.int32 number of observations tileid : ndarray of np.int32 id of each tile for observations mjd : ndarray of np.float64 MJD of observation (days) duration : ndarray of np.float64 duration of observation (days) sn2 : ndarray of np.float64 duration of observation (days) Methods: ------- add() : add an observation of a tile toarray() : Return ndarray of tile properties """ def toarray(self, indx=None): """Return observations as a record array Parameters: ---------- indx : ndarray of np.int32 indices of observations to return (default to all) Returns: ------- observations : record array observation information """ obs0 = [('tileid', np.int32), ('mjd', np.float64), ('duration', np.float64), ('sn2', np.float64)] if(indx is None): indx = np.arange(self.nobservations) nobs = len(indx) obs = np.zeros(nobs, dtype=obs0) if(nobs > 0): obs['tileid'] = self.tileid[indx] obs['mjd'] = self.mjd[indx] obs['duration'] = self.duration[indx] obs['sn2'] = self.sn2[indx] return(obs)
26.343137
67
0.54038
import numpy as np """Observations module class. Dependencies: numpy scipy """ class Observations(object): """Observations class. Parameters: ---------- observatory : string Observatory for tiles ('apo' or 'lco'; default 'apo') Attributes: ---------- nobservations : np.int32 number of observations tileid : ndarray of np.int32 id of each tile for observations mjd : ndarray of np.float64 MJD of observation (days) duration : ndarray of np.float64 duration of observation (days) sn2 : ndarray of np.float64 duration of observation (days) Methods: ------- add() : add an observation of a tile toarray() : Return ndarray of tile properties """ def __init__(self, observatory='apo'): self.nobservations = np.int32(0) self.observatory = observatory self.tileid = np.zeros(0, dtype=np.int32) self.mjd = np.zeros(0, dtype=np.float64) self.duration = np.zeros(0, dtype=np.float64) self.sn2 = np.zeros(0, dtype=np.float64) return def add(self, tileid=None, mjd=None, duration=None, sn2=None): self.tileid = np.append(self.tileid, np.array([np.float64(tileid)])) self.mjd = np.append(self.mjd, np.array([np.float64(mjd)])) self.duration = np.append(self.duration, np.array([np.float64(duration)])) self.sn2 = np.append(self.sn2, np.array([np.float64(sn2)])) self.nobservations = len(self.tileid) return def fortile(self, mjd=None, tileid=None): indx = np.where((self.mjd <= mjd) & (self.tileid == tileid))[0] return(self.toarray(indx=indx)) def toarray(self, indx=None): """Return observations as a record array Parameters: ---------- indx : ndarray of np.int32 indices of observations to return (default to all) Returns: ------- observations : record array observation information """ obs0 = [('tileid', np.int32), ('mjd', np.float64), ('duration', np.float64), ('sn2', np.float64)] if(indx is None): indx = np.arange(self.nobservations) nobs = len(indx) obs = np.zeros(nobs, dtype=obs0) if(nobs > 0): obs['tileid'] = self.tileid[indx] obs['mjd'] = self.mjd[indx] obs['duration'] = self.duration[indx] 
obs['sn2'] = self.sn2[indx] return(obs)
992
0
80
eba7469e17fc1598df3072271972a8ee3c7065f4
13,859
py
Python
main.py
aziznal/luigi-sacco-and-route-encryption
7e9ebba5572ef58e9f47bca9d443b93d579624d5
[ "MIT" ]
1
2022-02-24T18:34:36.000Z
2022-02-24T18:34:36.000Z
main.py
aziznal/luigi-sacco-and-route-encryption
7e9ebba5572ef58e9f47bca9d443b93d579624d5
[ "MIT" ]
null
null
null
main.py
aziznal/luigi-sacco-and-route-encryption
7e9ebba5572ef58e9f47bca9d443b93d579624d5
[ "MIT" ]
null
null
null
from typing import Callable, Literal, Tuple from PyQt5.QtWidgets import QApplication, QComboBox, QWidget from PyQt5.QtGui import QPixmap from gui import Gui from logic.luigi_sacco import luigi_sacco_encrypt, luigi_sacco_decrypt, confirm_text_in_correct_lang, format_key_and_input_text from logic.route_encryption import create_empty_matrix, route_encrypt, route_decrypt, get_potential_table_sizes, apply_e4, apply_reverse_b3 import utils ENCRYPT, DECRYPT = (True, False), (False, True) def center_window(window: Gui) -> None: """ Center the given window on the screen """ centered_x = screen_geometry.center().x() - window.width()//2 centered_y = screen_geometry.center().y() - window.height()//2 window.move(centered_x, centered_y) def create_main_window() -> Gui: """ Creates app main window from which user can go to luigi sacco or route encryption windows """ widget_ids = "assets/main-ids.json" gui_file_path = "assets/main-menu.ui" kto_image_path = utils.get_path_in_bundle_dir("assets/kto_logo.png") gui = Gui(widget_ids, gui_file_path) center_window(gui) kto_image = QPixmap(kto_image_path) gui.get_widget("mainLabel").setPixmap(kto_image) gui.add_event_listener("exitButton", lambda: app.quit()) return gui def create_error_message_window() -> Gui: """ Creates a small error message whose contents can be tuned to inform the user of an error """ widget_ids = "assets/error-message.json" gui_file_path = "assets/error-message.ui" gui = Gui(widget_ids, gui_file_path) # Hidden by default gui.hide() center_window(gui) # Set the okay button to hide the window when clicked gui.add_event_listener('okayButton', lambda: gui.hide()) return gui def display_error_message( error_message_window: Gui, title: str, content: str, solution: str) -> None: """ Shows error dialog. 
""" error_message_window.get_widget('errorNameLabel').setText(title) error_message_window.get_widget('errorMessageLabel').setText(content) error_message_window.get_widget('errorSolutionLabel').setText(solution) center_window(error_message_window) error_message_window.show() error_message_window.activateWindow() def goto_window(source: Gui, destination: Gui) -> None: """ Hides source gui and centers then shows the destination gui """ source.hide() center_window(destination) destination.activateWindow() destination.show() def create_luigi_sacco_window(main_window: Gui, show_error: Callable[[str, str, str], None]) -> Gui: """ Creates a submenu where user can use luigi sacco encryption / decrypytion """ widget_ids = "assets/first-method-ids.json" gui_file_path = "assets/first-method.ui" gui = Gui(widget_ids, gui_file_path, show_error) gui.hide() gui.add_event_listener("backButton", lambda: goto_window(gui, main_window)) return gui def create_luigi_sacco_info_window(luigi_sacco_window: Gui) -> Gui: """ Creates the info window for the Luigi Sacco encryption method """ widget_ids = "assets/luigi-sacco-info-ids.json" gui_file_path = "assets/luigi-sacco-info.ui" gui = Gui(widget_ids, gui_file_path) gui.hide() gui.add_event_listener("backButton", lambda: goto_window(gui, luigi_sacco_window)) return gui def create_route_encryption_window(main_window: Gui, show_error: Callable[[str, str, str], None]) -> Gui: """ Creates window where user can use route encryption / decryption according to E4 & B3 Routes """ widget_ids = "assets/second-method-ids.json" gui_file_path = "assets/second-method.ui" gui = Gui(widget_ids, gui_file_path, show_error) # Make this window hidden by default gui.hide() # Adding images for Route Visualization routes_image_path = utils.get_path_in_bundle_dir("assets/routes.png") routes_image = QPixmap(routes_image_path) gui.get_widget("routesLabel").setPixmap(routes_image) gui.add_event_listener("backButton", lambda: goto_window(gui, main_window)) return gui 
def get_luigi_sacco_language(get: Callable[[str], QWidget]) -> Literal["EN", "TR"]: """ Extracts and returns chosen language from Luigi Sacco Gui """ return "EN" if get('englishRadioButton').isChecked() else "TR" def get_luigi_sacco_input(get: Callable[[str], QWidget]) -> Tuple[str, str]: """ Extracts and returns key and text which user has given in Gui. Raises ValueError if either key or text are empty """ key = get('keyTextEdit').toPlainText() plain_text = get('inputTextEdit').toPlainText() if key == "" or plain_text == "": raise ValueError("Key or Plain Text not given") return key, plain_text def get_selected_action(get: Callable[[str], QWidget]) -> Tuple[bool, bool]: """ Returns whether given gui is in encrypt or decrypt state. """ encrypt = get('encryptRadioButton').isChecked() decrypt = get('decryptRadioButton').isChecked() return encrypt, decrypt def run_luigi_sacco(window: Gui) -> None: """ Function to run when Run Button is clicked in Luigi Sacco window. Runs luigi sacco encryption / decryption on the given key and input text and takes given language into account. 
Sets the output of the algorithm as the content of the output box """ # Shortcut for ops in this function get = lambda x: window.get_widget(x) language = get_luigi_sacco_language(get) try: key, plain_text = get_luigi_sacco_input(get) except ValueError: window.show_error( title="Empty Key or Input Text", content="Cannot run program without both Key and Input Text present.", solution="Please fill in both of these fields and try again" ) return action = get_selected_action(get) output = "" formatted_key, formatted_plain_text = format_key_and_input_text(key, plain_text) # Confirm Language has been correctly chosen try: confirm_text_in_correct_lang(formatted_key, language) except ValueError: window.show_error( title="Key has invalid characters", content="Your key includes characters that do not belong in your chosen language", solution="Remove any characters than don't belong to your chosen language and try again" ) return try: confirm_text_in_correct_lang(formatted_plain_text, language) except ValueError: window.show_error( title="Plain Text has invalid characters", content="Your Plain Text includes characters that do not belong in your chosen language", solution="Remove any characters than don't belong to your chosen language and try again" ) return if action == ENCRYPT: output = luigi_sacco_encrypt(key, plain_text, language) elif action == DECRYPT: output = luigi_sacco_decrypt(key, plain_text, language) get('outputTextEdit').setPlainText(output) def reset_luigi_sacco(window: Gui) -> None: """ Resets gui in luigi sacco to blank state """ window.get_widget('keyTextEdit').clear() window.get_widget('inputTextEdit').clear() window.get_widget('outputTextEdit').clear() def get_chosen_table_size(input_text: str, get: Callable[[str], QWidget]) -> Tuple[int, int]: """ Returns chosen table size from gui according to given input text """ if len(input_text) <= 0: return (0, 0) sizes, _ = get_potential_table_sizes(len(input_text)) return 
sizes[get('arraySizeComboBox').currentIndex()] def get_route_encryption_input(get: Callable[[str], QWidget]) -> Tuple[str, Tuple[int, int]]: """ Extracts and returns input text and table size from Route Encryption Gui """ input_text = get('inputTextEdit').toPlainText() table_size = get_chosen_table_size(input_text, get) return input_text, table_size def run_route_encryption(window: Gui) -> None: """ Function to run when Run Button is clicked in Route Encryption window. Runs route encryption / decryption on the given key and input text and takes given language into account. Sets the output of the algorithm as the content of the output box """ # Shortcut for ops in this function input_text, table_size = get_route_encryption_input(get) if len(input_text) == 0: window.show_error( title="Cannot Encrypt / Decrypt Empty Message", content="You attempted to start the program with no input text", solution="Enter at least one character in input field and try again" ) return elif len(input_text) > 50: window.show_error( title="Your input is too long", content="Maximum allowed is 50 characters", solution=f"You have entered {len(input_text)} characters. Please make sure your input is less than 50 characters." ) return if utils.is_prime(len(input_text)): window.show_error( title="Warning! Text Length is Prime", content=f"Your input text has a prime length of {len(input_text)} characters", solution="To get better performance using this encryption method, add another letter to your message." ) # Encrypt vs. 
Decrypt action = get_selected_action(get) # Final output message which goes to output box output = "" matrix_output = [] if action == ENCRYPT: output = route_encrypt(input_text, table_size) matrix_output = apply_e4(input_text, create_empty_matrix(table_size)) elif action == DECRYPT: output = route_decrypt(input_text, table_size) matrix_output = apply_reverse_b3(input_text, table_size) # Convert output matrix into a string formatted_matrix_output = "\n".join(', '.join(row) for row in matrix_output) get('outputTextEdit').setPlainText(output) get('matrixOutputTextEdit').setPlainText(formatted_matrix_output) def reset_route_encryption(window: Gui) -> None: """ Resets route encryption gui to blank state """ get('inputTextEdit').clear() get('outputTextEdit').clear() get('matrixOutputTextEdit').clear() get('arraySizeComboBox').clear() def populate_combobox(combobox: QComboBox, get_message: Callable[[], str]) -> None: """ Populates given combobox with potential tables sizes for the given message. 
""" combobox.clear() message = get_message() if len(message) <= 0: return # Populate combobox with list of sizes sizes, optimal_size = get_potential_table_sizes(len(message)) for size in sizes: if size == optimal_size or size == optimal_size[::-1]: combobox.addItem(f"{size[0]} x {size[1]} (Recommended)") else: combobox.addItem(f"{size[0]} x {size[1]}") def add_route_encryption_hooks(window: Gui) -> None: """ Hooks Route Encryption Gui with its Logic """ # Set Encrypt as default option get('encryptRadioButton').setChecked(True) # Set drop-select to have no elements at the start get('arraySizeComboBox').clear() input_text = get('inputTextEdit').toPlainText() combobox = get('arraySizeComboBox') # As text gets typed, the table size combo-box gets filled with new values get('inputTextEdit').textChanged.connect( lambda: populate_combobox(combobox, lambda: get('inputTextEdit').toPlainText()) ) # Run and Reset Button Listeners window.add_event_listener( "runButton", lambda: run_route_encryption(window) ) window.add_event_listener( "resetButton", lambda: reset_route_encryption(window) ) def add_luigi_sacco_hooks(luigi_sacco_window: Gui, info_window: Gui) -> None: """ Hooks Luigi Sacco Gui with its Logic """ # Shortcuts for ops in this function get = lambda x: luigi_sacco_window.get_widget(x) # Check English by default get('englishRadioButton').setChecked(True) # Check Encrypt by default get('encryptRadioButton').setChecked(True) # Set listeners for RUN and RESET buttons luigi_sacco_window.add_event_listener('runButton', lambda: run_luigi_sacco(luigi_sacco_window)) luigi_sacco_window.add_event_listener('resetButton', lambda: reset_luigi_sacco(luigi_sacco_window)) luigi_sacco_window.add_event_listener('informationButton', lambda: goto_window(luigi_sacco_window, info_window)) if __name__ == '__main__': app = QApplication([]) screen_geometry = QApplication.desktop().screenGeometry() SCREEN_WIDTH = screen_geometry.width() SCREEN_HEIGHT = screen_geometry.height() main_window = 
create_main_window() error_dialog = create_error_message_window() show_error = lambda title, content, solution: display_error_message(error_dialog, title, content, solution) luigi_sacco_window = create_luigi_sacco_window(main_window, show_error) luigi_sacco_info_window = create_luigi_sacco_info_window(luigi_sacco_window) route_encryption_window = create_route_encryption_window(main_window, show_error) main_window.add_event_listener( "firstMethodButton", lambda: goto_window(main_window, luigi_sacco_window) ) main_window.add_event_listener( "secondMethodButton", lambda: goto_window(main_window, route_encryption_window) ) add_luigi_sacco_hooks(luigi_sacco_window, luigi_sacco_info_window) add_route_encryption_hooks(route_encryption_window) app.exec_()
29.054507
139
0.696082
from typing import Callable, Literal, Tuple

from PyQt5.QtWidgets import QApplication, QComboBox, QWidget
from PyQt5.QtGui import QPixmap

from gui import Gui
from logic.luigi_sacco import luigi_sacco_encrypt, luigi_sacco_decrypt, confirm_text_in_correct_lang, format_key_and_input_text
from logic.route_encryption import create_empty_matrix, route_encrypt, route_decrypt, get_potential_table_sizes, apply_e4, apply_reverse_b3
import utils

# (encrypt-checked, decrypt-checked) radio-button state pairs; compared
# against the tuple returned by get_selected_action().
ENCRYPT, DECRYPT = (True, False), (False, True)


def center_window(window: Gui) -> None:
    """ Center the given window on the screen """
    # screen_geometry is a module-level global assigned in the __main__ block.
    centered_x = screen_geometry.center().x() - window.width()//2
    centered_y = screen_geometry.center().y() - window.height()//2
    window.move(centered_x, centered_y)


def create_main_window() -> Gui:
    """ Creates app main window from which user can go to luigi sacco or route encryption windows """
    widget_ids = "assets/main-ids.json"
    gui_file_path = "assets/main-menu.ui"
    kto_image_path = utils.get_path_in_bundle_dir("assets/kto_logo.png")
    gui = Gui(widget_ids, gui_file_path)
    center_window(gui)
    kto_image = QPixmap(kto_image_path)
    gui.get_widget("mainLabel").setPixmap(kto_image)
    # `app` is the QApplication created in the __main__ block.
    gui.add_event_listener("exitButton", lambda: app.quit())
    return gui


def create_error_message_window() -> Gui:
    """ Creates a small error message whose contents can be tuned to inform the user of an error """
    widget_ids = "assets/error-message.json"
    gui_file_path = "assets/error-message.ui"
    gui = Gui(widget_ids, gui_file_path)
    # Hidden by default
    gui.hide()
    center_window(gui)
    # Set the okay button to hide the window when clicked
    gui.add_event_listener('okayButton', lambda: gui.hide())
    return gui


def display_error_message(
        error_message_window: Gui,
        title: str,
        content: str,
        solution: str) -> None:
    """
    Shows the error dialog, filling in its title, description and
    suggested-solution labels before raising it to the foreground.
    """
    error_message_window.get_widget('errorNameLabel').setText(title)
    error_message_window.get_widget('errorMessageLabel').setText(content)
    error_message_window.get_widget('errorSolutionLabel').setText(solution)
    center_window(error_message_window)
    error_message_window.show()
    error_message_window.activateWindow()


def goto_window(source: Gui, destination: Gui) -> None:
    """ Hides source gui and centers then shows the destination gui """
    source.hide()
    center_window(destination)
    destination.activateWindow()
    destination.show()


def create_luigi_sacco_window(main_window: Gui, show_error: Callable[[str, str, str], None]) -> Gui:
    """ Creates a submenu where user can use luigi sacco encryption / decryption """
    widget_ids = "assets/first-method-ids.json"
    gui_file_path = "assets/first-method.ui"
    gui = Gui(widget_ids, gui_file_path, show_error)
    gui.hide()
    gui.add_event_listener("backButton", lambda: goto_window(gui, main_window))
    return gui


def create_luigi_sacco_info_window(luigi_sacco_window: Gui) -> Gui:
    """ Creates the info window for the Luigi Sacco encryption method """
    widget_ids = "assets/luigi-sacco-info-ids.json"
    gui_file_path = "assets/luigi-sacco-info.ui"
    gui = Gui(widget_ids, gui_file_path)
    gui.hide()
    gui.add_event_listener("backButton", lambda: goto_window(gui, luigi_sacco_window))
    return gui


def create_route_encryption_window(main_window: Gui, show_error: Callable[[str, str, str], None]) -> Gui:
    """ Creates window where user can use route encryption / decryption according to E4 & B3 Routes """
    widget_ids = "assets/second-method-ids.json"
    gui_file_path = "assets/second-method.ui"
    gui = Gui(widget_ids, gui_file_path, show_error)
    # Make this window hidden by default
    gui.hide()
    # Adding images for Route Visualization
    routes_image_path = utils.get_path_in_bundle_dir("assets/routes.png")
    routes_image = QPixmap(routes_image_path)
    gui.get_widget("routesLabel").setPixmap(routes_image)
    gui.add_event_listener("backButton", lambda: goto_window(gui, main_window))
    return gui


def get_luigi_sacco_language(get: Callable[[str], QWidget]) -> Literal["EN", "TR"]:
    """ Extracts and returns chosen language from Luigi Sacco Gui """
    return "EN" if get('englishRadioButton').isChecked() else "TR"


def get_luigi_sacco_input(get: Callable[[str], QWidget]) -> Tuple[str, str]:
    """
    Extracts and returns key and text which user has given in Gui.
    Raises ValueError if either key or text are empty.
    """
    key = get('keyTextEdit').toPlainText()
    plain_text = get('inputTextEdit').toPlainText()
    if key == "" or plain_text == "":
        raise ValueError("Key or Plain Text not given")
    return key, plain_text


def get_selected_action(get: Callable[[str], QWidget]) -> Tuple[bool, bool]:
    """ Returns whether given gui is in encrypt or decrypt state. """
    encrypt = get('encryptRadioButton').isChecked()
    decrypt = get('decryptRadioButton').isChecked()
    return encrypt, decrypt


def run_luigi_sacco(window: Gui) -> None:
    """
    Function to run when Run Button is clicked in Luigi Sacco window.

    Runs luigi sacco encryption / decryption on the given key and input
    text and takes given language into account.
    Sets the output of the algorithm as the content of the output box.
    """
    # Shortcut for ops in this function
    get = lambda x: window.get_widget(x)
    language = get_luigi_sacco_language(get)
    try:
        key, plain_text = get_luigi_sacco_input(get)
    except ValueError:
        window.show_error(
            title="Empty Key or Input Text",
            content="Cannot run program without both Key and Input Text present.",
            solution="Please fill in both of these fields and try again"
        )
        return
    action = get_selected_action(get)
    output = ""
    formatted_key, formatted_plain_text = format_key_and_input_text(key, plain_text)
    # Confirm Language has been correctly chosen
    try:
        confirm_text_in_correct_lang(formatted_key, language)
    except ValueError:
        window.show_error(
            title="Key has invalid characters",
            content="Your key includes characters that do not belong in your chosen language",
            solution="Remove any characters than don't belong to your chosen language and try again"
        )
        return
    try:
        confirm_text_in_correct_lang(formatted_plain_text, language)
    except ValueError:
        window.show_error(
            title="Plain Text has invalid characters",
            content="Your Plain Text includes characters that do not belong in your chosen language",
            solution="Remove any characters than don't belong to your chosen language and try again"
        )
        return
    if action == ENCRYPT:
        output = luigi_sacco_encrypt(key, plain_text, language)
    elif action == DECRYPT:
        output = luigi_sacco_decrypt(key, plain_text, language)
    get('outputTextEdit').setPlainText(output)


def reset_luigi_sacco(window: Gui) -> None:
    """ Resets gui in luigi sacco to blank state """
    window.get_widget('keyTextEdit').clear()
    window.get_widget('inputTextEdit').clear()
    window.get_widget('outputTextEdit').clear()


def get_chosen_table_size(input_text: str, get: Callable[[str], QWidget]) -> Tuple[int, int]:
    """ Returns chosen table size from gui according to given input text """
    if len(input_text) <= 0:
        return (0, 0)
    # Combobox entries are generated from this same sizes list (see
    # populate_combobox), so the current index maps straight into it.
    sizes, _ = get_potential_table_sizes(len(input_text))
    return sizes[get('arraySizeComboBox').currentIndex()]


def get_route_encryption_input(get: Callable[[str], QWidget]) -> Tuple[str, Tuple[int, int]]:
    """ Extracts and returns input text and table size from Route Encryption Gui """
    input_text = get('inputTextEdit').toPlainText()
    table_size = get_chosen_table_size(input_text, get)
    return input_text, table_size


def run_route_encryption(window: Gui) -> None:
    """
    Function to run when Run Button is clicked in Route Encryption window.

    Runs route encryption / decryption on the given key and input text and
    takes given language into account.
    Sets the output of the algorithm as the content of the output box.
    """
    # Shortcut for ops in this function
    def get(x): return window.get_widget(x)
    input_text, table_size = get_route_encryption_input(get)
    if len(input_text) == 0:
        window.show_error(
            title="Cannot Encrypt / Decrypt Empty Message",
            content="You attempted to start the program with no input text",
            solution="Enter at least one character in input field and try again"
        )
        return
    elif len(input_text) > 50:
        window.show_error(
            title="Your input is too long",
            content="Maximum allowed is 50 characters",
            solution=f"You have entered {len(input_text)} characters. Please make sure your input is less than 50 characters."
        )
        return
    # Prime-length warning only — execution continues on purpose.
    if utils.is_prime(len(input_text)):
        window.show_error(
            title="Warning! Text Length is Prime",
            content=f"Your input text has a prime length of {len(input_text)} characters",
            solution="To get better performance using this encryption method, add another letter to your message."
        )
    # Encrypt vs. Decrypt
    action = get_selected_action(get)
    # Final output message which goes to output box
    output = ""
    matrix_output = []
    if action == ENCRYPT:
        output = route_encrypt(input_text, table_size)
        matrix_output = apply_e4(input_text, create_empty_matrix(table_size))
    elif action == DECRYPT:
        output = route_decrypt(input_text, table_size)
        matrix_output = apply_reverse_b3(input_text, table_size)
    # Convert output matrix into a string
    formatted_matrix_output = "\n".join(', '.join(row) for row in matrix_output)
    get('outputTextEdit').setPlainText(output)
    get('matrixOutputTextEdit').setPlainText(formatted_matrix_output)


def reset_route_encryption(window: Gui) -> None:
    """ Resets route encryption gui to blank state """
    def get(x): return window.get_widget(x)
    get('inputTextEdit').clear()
    get('outputTextEdit').clear()
    get('matrixOutputTextEdit').clear()
    get('arraySizeComboBox').clear()


def populate_combobox(combobox: QComboBox, get_message: Callable[[], str]) -> None:
    """ Populates given combobox with potential table sizes for the given message. """
    combobox.clear()
    message = get_message()
    if len(message) <= 0:
        return
    # Populate combobox with list of sizes
    sizes, optimal_size = get_potential_table_sizes(len(message))
    for size in sizes:
        # The optimal size and its transpose are both flagged as recommended.
        if size == optimal_size or size == optimal_size[::-1]:
            combobox.addItem(f"{size[0]} x {size[1]} (Recommended)")
        else:
            combobox.addItem(f"{size[0]} x {size[1]}")


def add_route_encryption_hooks(window: Gui) -> None:
    """ Hooks Route Encryption Gui with its Logic """
    def get(x): return window.get_widget(x)
    # Set Encrypt as default option
    get('encryptRadioButton').setChecked(True)
    # Set drop-select to have no elements at the start
    get('arraySizeComboBox').clear()
    input_text = get('inputTextEdit').toPlainText()
    combobox = get('arraySizeComboBox')
    # As text gets typed, the table size combo-box gets filled with new values
    get('inputTextEdit').textChanged.connect(
        lambda: populate_combobox(combobox, lambda: get('inputTextEdit').toPlainText())
    )
    # Run and Reset Button Listeners
    window.add_event_listener(
        "runButton",
        lambda: run_route_encryption(window)
    )
    window.add_event_listener(
        "resetButton",
        lambda: reset_route_encryption(window)
    )


def add_luigi_sacco_hooks(luigi_sacco_window: Gui, info_window: Gui) -> None:
    """ Hooks Luigi Sacco Gui with its Logic """
    # Shortcuts for ops in this function
    get = lambda x: luigi_sacco_window.get_widget(x)
    # Check English by default
    get('englishRadioButton').setChecked(True)
    # Check Encrypt by default
    get('encryptRadioButton').setChecked(True)
    # Set listeners for RUN and RESET buttons
    luigi_sacco_window.add_event_listener('runButton', lambda: run_luigi_sacco(luigi_sacco_window))
    luigi_sacco_window.add_event_listener('resetButton', lambda: reset_luigi_sacco(luigi_sacco_window))
    luigi_sacco_window.add_event_listener('informationButton', lambda: goto_window(luigi_sacco_window, info_window))


if __name__ == '__main__':
    app = QApplication([])
    screen_geometry = QApplication.desktop().screenGeometry()
    SCREEN_WIDTH = screen_geometry.width()
    SCREEN_HEIGHT = screen_geometry.height()
    main_window = create_main_window()
    error_dialog = create_error_message_window()
    show_error = lambda title, content, solution: display_error_message(error_dialog, title, content, solution)
    luigi_sacco_window = create_luigi_sacco_window(main_window, show_error)
    luigi_sacco_info_window = create_luigi_sacco_info_window(luigi_sacco_window)
    route_encryption_window = create_route_encryption_window(main_window, show_error)
    main_window.add_event_listener(
        "firstMethodButton",
        lambda: goto_window(main_window, luigi_sacco_window)
    )
    main_window.add_event_listener(
        "secondMethodButton",
        lambda: goto_window(main_window, route_encryption_window)
    )
    add_luigi_sacco_hooks(luigi_sacco_window, luigi_sacco_info_window)
    add_route_encryption_hooks(route_encryption_window)
    app.exec_()
54
0
79
0d783e56e550a44e98454d237e710237dfde05ff
5,589
py
Python
Random-Programs/dev/battle.py
naumoff0/Archive
d4ad2da89abb1576dd5a7c72ded6bf9b45c3f610
[ "MIT" ]
null
null
null
Random-Programs/dev/battle.py
naumoff0/Archive
d4ad2da89abb1576dd5a7c72ded6bf9b45c3f610
[ "MIT" ]
null
null
null
Random-Programs/dev/battle.py
naumoff0/Archive
d4ad2da89abb1576dd5a7c72ded6bf9b45c3f610
[ "MIT" ]
null
null
null
""" this program kills humans """ import sys import time import random import builtins import termcolor PRINT_SPEED = 0.005 DEBUG_MODE = False class human: """ a human exists... to die """ def checkWound(self): """ checks if a human is wounded """ dmg = random.randint(1, 4) print("{} is hit".format(self.name), end="") time.sleep(.2) if random.randint(0, 100) > self.advantage: print(" [not wounded]", None, "green") else: if self.hp != 0: print(" [wounded][hp = {}]".format(self.hp - dmg), None, "red") elif self.hp <= 0: print(" [dead]") self.hp -= dmg def remove_lines(amount): """ deletes lines printed previously """ cursor_up = "\x1b[1A" erase = "\x1b[2K" for _ in range(amount): sys.stdout.write(cursor_up) sys.stdout.write(erase) def delayed_print(text, end=None, color=None): """ prints characters with delay provided between each character. color for prompt is optional """ text = str(text) if end != "": text += "\n" for char in text: if color: sys.stdout.write(termcolor.colored(char, color)) else: sys.stdout.write(char) sys.stdout.flush() time.sleep(PRINT_SPEED) time.sleep(PRINT_SPEED * 5) def contest(other): """contests human ability""" return random.randint(1, 20) > round(other.advantage / 5) def getAdvantage(): """shut is advantage""" return random.randint(0, 100) def hpBar(hp): """ returns an hp bar """ ber = "" for _ in range(hp): ber += "#" for _ in range(10 - hp): ber += " " return "|" + ber + "|" def getColor(hp): """ allows for specifically colored hp bars """ if hp < 4: color = "red" if hp >= 4 and hp <= 7: color = "yellow" if hp > 7: color = "green" return color def combat(playerOne, playerTwo): """full combat""" rounds = 0 if playerOne.name == playerTwo.name: print("{} commits suicide".format(playerOne.name)) return 1 while playerOne.hp != 0 and playerTwo.hp != 0: combatRound(playerOne, playerTwo) rounds += 1 if rounds % 10 == 0 and rounds != 0: print("We are taking a break!") print("{} hp:{}".format(playerOne.name, playerOne.hp)) print("{} 
hp:{}".format(playerTwo.name, playerTwo.hp)) time.sleep(3) remove_lines(3) if playerOne.hp <= 0: print("{} is dead, {} is is the champion".format(playerOne.name, playerTwo.name)) print("combat took {} rounds".format(rounds)) return 1 else: print("{} is dead, {} is is the champion".format(playerTwo.name, playerOne.name)) print("combat took {} rounds".format(rounds)) return 2 def combatRound(playerOne, playerTwo): """single rounds of human to human combat""" hpOne = hpBar(playerOne.hp) hpTwo = hpBar(playerTwo.hp) colorOne = getColor(playerOne.hp) colorTwo = getColor(playerTwo.hp) print(playerOne.name) print("{}".format(hpOne), None, colorOne) print(playerTwo.name) print("{}".format(hpTwo), None, colorTwo) if random.randint(0, 1) == 1: success = contest(playerTwo) print("{} attacks {}".format(playerOne.name, playerTwo.name), end="") if success is True: print(" [success]") playerTwo.checkWound() else: print(" [failed]") successTwo = contest(playerOne) print("{} attacks {}".format(playerTwo.name, playerOne.name), end="") if successTwo is True: print(" [success]") playerOne.checkWound() else: print(" [failed]") else: success = contest(playerOne) print("{} attacks {}".format(playerTwo.name, playerOne.name), end="") if success is True: print(" [success]") playerOne.checkWound() else: print(" [failed]") successTwo = contest(playerTwo) print("{} attacks {}".format(playerOne.name, playerTwo.name), end="") if successTwo is True: print(" [success]") playerTwo.checkWound() else: print(" [failed]") print("[round over]") time.sleep(.5) if DEBUG_MODE is not True: remove_lines(10) def spawn(name): """allows for human corpse overwriting""" bloo = human(name) return bloo def main(): """ main arena """ builtins.print = delayed_print print("welcome to fight simulator") print("enter names of combatants") combatantOne = input("combatant one\n>> ") combatantTwo = input("combatant two\n>> ") remove_lines(6) combatantOne = spawn(combatantOne) combatantTwo = spawn(combatantTwo) while True: 
combatantOne.advantage = random.randint(25, 100) combatantTwo.advantage = random.randint(25, 100) result = combat(combatantOne, combatantTwo) if result == 1: combatantOne = spawn(input("new combatant\n>> ")) combatantTwo.hp = 10 elif result == 2: combatantTwo = spawn(input("new combatant\n>> ")) combatantOne.hp = 10 if __name__ == "__main__": main()
25.75576
102
0.55985
""" this program kills humans """ import sys import time import random import builtins import termcolor PRINT_SPEED = 0.005 DEBUG_MODE = False class human: """ a human exists... to die """ def __init__(self, name): self.hp = 10 self.incombat = False self.name = name self.advantage = getAdvantage() def checkWound(self): """ checks if a human is wounded """ dmg = random.randint(1, 4) print("{} is hit".format(self.name), end="") time.sleep(.2) if random.randint(0, 100) > self.advantage: print(" [not wounded]", None, "green") else: if self.hp != 0: print(" [wounded][hp = {}]".format(self.hp - dmg), None, "red") elif self.hp <= 0: print(" [dead]") self.hp -= dmg def remove_lines(amount): """ deletes lines printed previously """ cursor_up = "\x1b[1A" erase = "\x1b[2K" for _ in range(amount): sys.stdout.write(cursor_up) sys.stdout.write(erase) def delayed_print(text, end=None, color=None): """ prints characters with delay provided between each character. color for prompt is optional """ text = str(text) if end != "": text += "\n" for char in text: if color: sys.stdout.write(termcolor.colored(char, color)) else: sys.stdout.write(char) sys.stdout.flush() time.sleep(PRINT_SPEED) time.sleep(PRINT_SPEED * 5) def contest(other): """contests human ability""" return random.randint(1, 20) > round(other.advantage / 5) def getAdvantage(): """shut is advantage""" return random.randint(0, 100) def hpBar(hp): """ returns an hp bar """ ber = "" for _ in range(hp): ber += "#" for _ in range(10 - hp): ber += " " return "|" + ber + "|" def getColor(hp): """ allows for specifically colored hp bars """ if hp < 4: color = "red" if hp >= 4 and hp <= 7: color = "yellow" if hp > 7: color = "green" return color def combat(playerOne, playerTwo): """full combat""" rounds = 0 if playerOne.name == playerTwo.name: print("{} commits suicide".format(playerOne.name)) return 1 while playerOne.hp != 0 and playerTwo.hp != 0: combatRound(playerOne, playerTwo) rounds += 1 if rounds % 10 == 0 and rounds != 0: 
print("We are taking a break!") print("{} hp:{}".format(playerOne.name, playerOne.hp)) print("{} hp:{}".format(playerTwo.name, playerTwo.hp)) time.sleep(3) remove_lines(3) if playerOne.hp <= 0: print("{} is dead, {} is is the champion".format(playerOne.name, playerTwo.name)) print("combat took {} rounds".format(rounds)) return 1 else: print("{} is dead, {} is is the champion".format(playerTwo.name, playerOne.name)) print("combat took {} rounds".format(rounds)) return 2 def combatRound(playerOne, playerTwo): """single rounds of human to human combat""" hpOne = hpBar(playerOne.hp) hpTwo = hpBar(playerTwo.hp) colorOne = getColor(playerOne.hp) colorTwo = getColor(playerTwo.hp) print(playerOne.name) print("{}".format(hpOne), None, colorOne) print(playerTwo.name) print("{}".format(hpTwo), None, colorTwo) if random.randint(0, 1) == 1: success = contest(playerTwo) print("{} attacks {}".format(playerOne.name, playerTwo.name), end="") if success is True: print(" [success]") playerTwo.checkWound() else: print(" [failed]") successTwo = contest(playerOne) print("{} attacks {}".format(playerTwo.name, playerOne.name), end="") if successTwo is True: print(" [success]") playerOne.checkWound() else: print(" [failed]") else: success = contest(playerOne) print("{} attacks {}".format(playerTwo.name, playerOne.name), end="") if success is True: print(" [success]") playerOne.checkWound() else: print(" [failed]") successTwo = contest(playerTwo) print("{} attacks {}".format(playerOne.name, playerTwo.name), end="") if successTwo is True: print(" [success]") playerTwo.checkWound() else: print(" [failed]") print("[round over]") time.sleep(.5) if DEBUG_MODE is not True: remove_lines(10) def spawn(name): """allows for human corpse overwriting""" bloo = human(name) return bloo def main(): """ main arena """ builtins.print = delayed_print print("welcome to fight simulator") print("enter names of combatants") combatantOne = input("combatant one\n>> ") combatantTwo = input("combatant two\n>> ") 
remove_lines(6) combatantOne = spawn(combatantOne) combatantTwo = spawn(combatantTwo) while True: combatantOne.advantage = random.randint(25, 100) combatantTwo.advantage = random.randint(25, 100) result = combat(combatantOne, combatantTwo) if result == 1: combatantOne = spawn(input("new combatant\n>> ")) combatantTwo.hp = 10 elif result == 2: combatantTwo = spawn(input("new combatant\n>> ")) combatantOne.hp = 10 if __name__ == "__main__": main()
120
0
26
1efe8318e95b52ae257743852a598257ff3b467e
1,451
py
Python
toolkit/src/NTIVirtualKeyBoard.py
FoxyRabbit67/enigma2-plugins
f6b94012726931fdf28e80a26226aec612b350de
[ "Linux-OpenIB" ]
41
2016-01-21T17:54:44.000Z
2021-06-26T05:54:41.000Z
toolkit/src/NTIVirtualKeyBoard.py
FoxyRabbit67/enigma2-plugins
f6b94012726931fdf28e80a26226aec612b350de
[ "Linux-OpenIB" ]
22
2016-11-16T11:25:26.000Z
2021-12-13T09:13:06.000Z
toolkit/src/NTIVirtualKeyBoard.py
FoxyRabbit67/enigma2-plugins
f6b94012726931fdf28e80a26226aec612b350de
[ "Linux-OpenIB" ]
62
2016-02-05T22:55:48.000Z
2022-03-12T21:48:22.000Z
from Tools.NumericalTextInput import NumericalTextInput from Screens.VirtualKeyBoard import VirtualKeyBoard from Components.ActionMap import NumberActionMap
27.377358
69
0.72295
from Tools.NumericalTextInput import NumericalTextInput
from Screens.VirtualKeyBoard import VirtualKeyBoard
from Components.ActionMap import NumberActionMap


class NTIVirtualKeyBoard(VirtualKeyBoard, NumericalTextInput):
    """
    VirtualKeyBoard variant that additionally accepts numeric multi-tap
    input (cell-phone style) via NumericalTextInput: digit keys cycle
    through characters and nextFunc commits the pending character.
    """

    def __init__(self, session, **kwargs):
        VirtualKeyBoard.__init__(self, session, **kwargs)
        # nextFunc is called by NumericalTextInput when the multi-tap
        # timeout commits the currently composed character.
        NumericalTextInput.__init__(self, nextFunc = self.nextFunc)
        # Reuse the stock VirtualKeyBoard skin for this subclass.
        self.skinName = "VirtualKeyBoard"
        # Route every remote-control digit to the multi-tap handler.
        self["NumberActions"] = NumberActionMap(["NumberActions"],
        {
            "1": self.keyNumberGlobal,
            "2": self.keyNumberGlobal,
            "3": self.keyNumberGlobal,
            "4": self.keyNumberGlobal,
            "5": self.keyNumberGlobal,
            "6": self.keyNumberGlobal,
            "7": self.keyNumberGlobal,
            "8": self.keyNumberGlobal,
            "9": self.keyNumberGlobal,
            "0": self.keyNumberGlobal
        })
        # True while a multi-tap character is still being composed.
        self.editing = False

    def backClicked(self):
        # Commit any pending multi-tap character before deleting.
        self.nextKey()
        self.editing = False
        self["text"].setMarkedPos(-1)
        VirtualKeyBoard.backClicked(self)

    def okClicked(self):
        # Commit any pending multi-tap character before accepting.
        self.nextKey()
        self.editing = False
        self["text"].setMarkedPos(-1)
        VirtualKeyBoard.okClicked(self)

    def keyNumberGlobal(self, number):
        # Character currently selected by this digit press.
        unichar = self.getKey(number)
        if not self.editing:
            # Start composing: snapshot the committed text and mark the
            # position being edited.
            self.text = self["text"].getText()
            self.editing = True
            self["text"].setMarkedPos(len(self.text))
        self["text"].setText(self.text + unichar.encode('utf-8', 'ignore'))

    def nextFunc(self):
        # Multi-tap timeout fired: accept the composed character.
        self.text = self["text"].getText()
        self.editing = False
        self["text"].setMarkedPos(-1)
41
142
ae1e992f116581312c1a1a97506a08f6cff69711
6,426
py
Python
validation/GCMC/TIP3_DIOX/build/create_solvent_boxes.py
GregorySchwing/wolfCalibration
9ff7ca7f0d144da407c14f0f4e9a202c4691de2d
[ "MIT" ]
null
null
null
validation/GCMC/TIP3_DIOX/build/create_solvent_boxes.py
GregorySchwing/wolfCalibration
9ff7ca7f0d144da407c14f0f4e9a202c4691de2d
[ "MIT" ]
null
null
null
validation/GCMC/TIP3_DIOX/build/create_solvent_boxes.py
GregorySchwing/wolfCalibration
9ff7ca7f0d144da407c14f0f4e9a202c4691de2d
[ "MIT" ]
null
null
null
from Bio.PDB.PDBParser import PDBParser
from Bio.PDB.PDBIO import PDBIO
import numpy as np
import shutil

# Running log of every derived quantity; closed at the end of the script.
f = open("log.txt", "a")

parser = PDBParser(PERMISSIVE=1)
structure_id = "3rgk"
filename = "../1-1-build/MYO_HEME_MUT.pdb"
structure = parser.get_structure(structure_id, filename)

# Collect every atom coordinate of the protein into one (N, 3) array.
atoms = structure.get_atoms()
listOfCoords = []
for atom in atoms:
    coords = atom.get_coord()
    listOfCoords.append(coords)
coorNP = np.asarray(listOfCoords)

# Geometric (unweighted) center of the protein.
geoCenter = coorNP.mean(axis=0)
log = "Geometric Center: {}\n".format(geoCenter)
f.write(log)

# calculating Euclidean distance
# using linalg.norm()
maxDistMan = 0
maxDistL2 = 0
for atom in coorNP:
    #manDist = np.sqrt((atom[0] - geoCenter[0])**2 + (atom[1] - geoCenter[1])**2 + (atom[2] - geoCenter[2])**2)
    dist = np.linalg.norm(geoCenter - atom)
    #print ("manDist {} dist {}".format(manDist,dist))
    #print(dist)
    if (dist > maxDistL2):
        maxDistL2 = dist
    #if (manDist > maxDistMan):
    #    maxDistMan = manDist
log = "maxDistL2 {}\n".format(maxDistL2)
f.write(log)

# 2 times the Max internal distance of protein atoms + 2 angstroms on each side
# The padding is in case the radius of gyration of the protein increases.
# Currently the maximum allowed increase in radius of gyration is 2 angstroms.
# This is likely a highly liberal amount for a globular protein at 310 K in minimal Na/Cl.
maxDistL2_padded = maxDistL2+20
log = "maxDistL2_padded {}\n".format(maxDistL2_padded)
f.write(log)

shutil.copyfile("../1-1-build/MYO_HEME.psf", "MYO_HEME_SHIFTED.psf")

import mbuild as mb
from foyer import Forcefield
import mbuild.formats.charmm_writer as mf_charmm
import mbuild.formats.gomc_conf_writer as gomc_control

FF_file_O2 = './FFs/charmmD_molecular_oxygen.xml'
O2 = mb.load('./FFs/DIOX.mol2')
O2.name = 'DIOX'
#O2.energy_minimize(forcefield=FF_file_O2, steps=10**5)

FF_file_water = './FFs/charmm_tip3p.xml'
water = mb.load('O', smiles=True)
water.name = 'TIP3'
#water.energy_minimize(forcefield=FF_file_water, steps=10**5)

FF_dict = {water.name: FF_file_water, O2.name: FF_file_O2}
residues_list = [water.name, O2.name]
fix_bonds_angles_residues = [water.name, O2.name]
bead_to_atom_name_dict = { '_ON':'ON', '_OP':'OP'}

# Build the main simulation liquid box (box 0) and the vapor (box 1) for the simulation [1, 2, 13-17]
water_O2_box_liq = mb.fill_box(compound=[water, O2],
                               density=950,
                               compound_ratio=[0.98, 0.02],
                               box=[2*maxDistL2_padded/10, 2*maxDistL2_padded/10, 2*maxDistL2_padded/10])

geoCenterBox = water_O2_box_liq.center
log = "BOX CENTER : {}\n".format(geoCenterBox*10)
f.write(log)
# FIX: trueCenter must be a numpy array. As a plain list, `trueCenter*10`
# below repeated the list 10 times instead of scaling nm -> angstrom.
trueCenter = np.array([maxDistL2_padded/10, maxDistL2_padded/10, maxDistL2_padded/10])
log = "DESIRED BOX CENTER : {}\n".format(trueCenter*10)
f.write(log)
translationVectorBox = trueCenter-geoCenterBox
log = "BOX TRANSLATION VECTOR : {}\n".format(translationVectorBox*10)
f.write(log)
water_O2_box_liq.translate(translationVectorBox)
geoCenterBoxPostTranslate = water_O2_box_liq.center
log = "BOX CENTER POST TRANSLATE : {}\n".format(geoCenterBoxPostTranslate*10)
f.write(log)

# Small reservoir box for the GCMC particle exchanges.
# FIX: the compound_ratio list was missing the comma ([0.80 0.20]),
# which is a SyntaxError.
water_O2_box_res = mb.fill_box(compound=[water, O2],
                               density=950,
                               compound_ratio=[0.80, 0.20],
                               box=[9, 9, 9])

charmmNAMD = mf_charmm.Charmm(water_O2_box_liq,
                              'GCMC_water_O2_liq_NAMD',
                              structure_box_1=water_O2_box_res,
                              filename_box_1='GCMC_water_O2_res_NAMD',
                              ff_filename="GCMC_water_O2_FF_NAMD",
                              forcefield_selection=FF_dict,
                              residues=residues_list,
                              bead_to_atom_name_dict=bead_to_atom_name_dict,
                              fix_residue=None,
                              gomc_fix_bonds_angles=None,
                              reorder_res_in_pdb_psf=True
                              )

charmm = mf_charmm.Charmm(water_O2_box_liq,
                          'GCMC_water_O2_liq',
                          structure_box_1=water_O2_box_res,
                          filename_box_1='GCMC_water_O2_res',
                          ff_filename="GCMC_water_O2_FF",
                          forcefield_selection=FF_dict,
                          residues=residues_list,
                          bead_to_atom_name_dict=bead_to_atom_name_dict,
                          fix_residue=None,
                          gomc_fix_bonds_angles=fix_bonds_angles_residues,
                          reorder_res_in_pdb_psf=True
                          )

charmm.write_inp()
charmm.write_psf()
charmm.write_pdb()
charmmNAMD.write_inp()

gomc_control.write_gomc_control_file(charmm, 'in_GCMC_NVT.conf', 'GCMC', 100, 310,
                                     input_variables_dict={"VDWGeometricSigma": True,
                                                           "Rcut": 12,
                                                           "DisFreq": 0.00,
                                                           "RotFreq": 0.00,
                                                           "IntraSwapFreq": 0.00,
                                                           "SwapFreq": 1.00,
                                                           "RegrowthFreq": 0.00,
                                                           "CrankShaftFreq": 0.00,
                                                           "VolFreq": 0.00,
                                                           "MultiParticleFreq": 0.00,
                                                           "ChemPot" : {"TIP3" : -4166, "DIOX" : -8000}
                                                           }
                                     )

f.write('Completed: GOMC FF file, and the psf and pdb files')

log = "PROTEIN GEOMETRIC CENTER: {}\n".format(geoCenter)
f.write(log)
log = "BOX GEOMETRIC CENTER: {}\n".format(geoCenterBoxPostTranslate*10)
f.write(log)
translationArrayProt = np.abs(geoCenterBoxPostTranslate*10 - geoCenter)
log = "PROTEIN TRANSLATION VECTOR : {}\n".format(translationArrayProt)
f.write(log)

# Shift every protein atom so the protein sits at the solvent-box center.
atoms = structure.get_atoms()
for atom in atoms:
    newCoords = atom.get_coord()+translationArrayProt
    atom.set_coord(newCoords)

io = PDBIO()
io.set_structure(structure)
io.save("MYO_HEME_MUT_SHIFTED.pdb")

# Close the log now that the run is complete (was previously left open).
f.close()
36.931034
110
0.583411
from Bio.PDB.PDBParser import PDBParser from Bio.PDB.PDBIO import PDBIO import numpy as np import shutil f = open("log.txt", "a") parser = PDBParser(PERMISSIVE=1) structure_id = "3rgk" filename = "../1-1-build/MYO_HEME_MUT.pdb" structure = parser.get_structure(structure_id, filename) atoms = structure.get_atoms() listOfCoords = [] for atom in atoms: coords = atom.get_coord() listOfCoords.append(coords) coorNP = np.asarray(listOfCoords) geoCenter = coorNP.mean(axis=0) log = "Geometric Center: {}\n".format(geoCenter) f.write(log) # calculating Euclidean distance # using linalg.norm() maxDistMan = 0 maxDistL2 = 0 for atom in coorNP: #manDist = np.sqrt((atom[0] - geoCenter[0])**2 + (atom[1] - geoCenter[1])**2 + (atom[2] - geoCenter[2])**2) dist = np.linalg.norm(geoCenter - atom) #print ("manDist {} dist {}".format(manDist,dist)) #print(dist) if (dist > maxDistL2): maxDistL2 = dist #if (manDist > maxDistMan): # maxDistMan = manDist log = "maxDistL2 {}\n".format(maxDistL2) f.write(log) # 2 times the Max internal distance of protein atoms + 2 angstroms on each side # The padding is in case the radius of gyration of the protein increases. # Currently the maximum allowed increase in radius of gyration is 2 angstroms. # This is likely a highly liberal amount for a globular protein at 310 K in minimal Na/Cl. 
maxDistL2_padded = maxDistL2+20 log = "maxDistL2_padded {}\n".format(maxDistL2_padded) f.write(log) shutil.copyfile("../1-1-build/MYO_HEME.psf", "MYO_HEME_SHIFTED.psf") import mbuild as mb import numpy as np from foyer import Forcefield import mbuild.formats.charmm_writer as mf_charmm import mbuild.formats.gomc_conf_writer as gomc_control FF_file_O2 = './FFs/charmmD_molecular_oxygen.xml' O2 = mb.load('./FFs/DIOX.mol2') O2.name = 'DIOX' #O2.energy_minimize(forcefield=FF_file_O2, steps=10**5) FF_file_water = './FFs/charmm_tip3p.xml' water = mb.load('O', smiles=True) water.name = 'TIP3' #water.energy_minimize(forcefield=FF_file_water, steps=10**5) FF_dict = {water.name: FF_file_water, O2.name: FF_file_O2} residues_list = [water.name, O2.name] fix_bonds_angles_residues = [water.name, O2.name] bead_to_atom_name_dict = { '_ON':'ON', '_OP':'OP'} # Build the main simulation liquid box (box 0) and the vapor (box 1) for the simulation [1, 2, 13-17] water_O2_box_liq = mb.fill_box(compound=[water,O2], density= 950, compound_ratio=[0.98, 0.02] , box=[2*maxDistL2_padded/10, 2*maxDistL2_padded/10, 2*maxDistL2_padded/10]) geoCenterBox = water_O2_box_liq.center log = "BOX CENTER : {}\n".format(geoCenterBox*10) f.write(log) trueCenter = [maxDistL2_padded/10, maxDistL2_padded/10, maxDistL2_padded/10] log = "DESIRED BOX CENTER : {}\n".format(trueCenter*10) f.write(log) translationVectorBox = trueCenter-geoCenterBox log = "BOX TRANSLATION VECTOR : {}\n".format(translationVectorBox*10) f.write(log) water_O2_box_liq.translate(translationVectorBox) geoCenterBoxPostTranslate = water_O2_box_liq.center log = "BOX CENTER POST TRANSLATE : {}\n".format(geoCenterBoxPostTranslate*10) f.write(log) water_O2_box_res = mb.fill_box(compound=[water,O2], density= 950, compound_ratio=[0.80 0.20] , box=[9, 9, 9]) charmmNAMD = mf_charmm.Charmm(water_O2_box_liq, 'GCMC_water_O2_liq_NAMD', structure_box_1=water_O2_box_res, filename_box_1='GCMC_water_O2_res_NAMD', ff_filename="GCMC_water_O2_FF_NAMD", 
forcefield_selection=FF_dict, residues=residues_list, bead_to_atom_name_dict=bead_to_atom_name_dict, fix_residue=None, gomc_fix_bonds_angles=None, reorder_res_in_pdb_psf=True ) charmm = mf_charmm.Charmm(water_O2_box_liq, 'GCMC_water_O2_liq', structure_box_1=water_O2_box_res, filename_box_1='GCMC_water_O2_res', ff_filename="GCMC_water_O2_FF", forcefield_selection=FF_dict, residues=residues_list, bead_to_atom_name_dict=bead_to_atom_name_dict, fix_residue=None, gomc_fix_bonds_angles=fix_bonds_angles_residues, reorder_res_in_pdb_psf=True ) charmm.write_inp() charmm.write_psf() charmm.write_pdb() charmmNAMD.write_inp() gomc_control.write_gomc_control_file(charmm, 'in_GCMC_NVT.conf', 'GCMC', 100, 310, input_variables_dict={"VDWGeometricSigma": True, "Rcut": 12, "DisFreq": 0.00, "RotFreq": 0.00, "IntraSwapFreq": 0.00, "SwapFreq": 1.00, "RegrowthFreq": 0.00, "CrankShaftFreq": 0.00, "VolFreq": 0.00, "MultiParticleFreq": 0.00, "ChemPot" : {"TIP3" : -4166, "DIOX" : -8000} } ) f.write('Completed: GOMC FF file, and the psf and pdb files') log = "PROTEIN GEOMETRIC CENTER: {}\n".format(geoCenter) f.write(log) log = "BOX GEOMETRIC CENTER: {}\n".format(geoCenterBoxPostTranslate*10) f.write(log) translationArrayProt = np.abs(geoCenterBoxPostTranslate*10 - geoCenter) log = "PROTEIN TRANSLATION VECTOR : {}\n".format(translationArrayProt) f.write(log) atoms = structure.get_atoms() for atom in atoms: newCoords = atom.get_coord()+translationArrayProt atom.set_coord(newCoords) io = PDBIO() io.set_structure(structure) io.save("MYO_HEME_MUT_SHIFTED.pdb")
0
0
0
0276c7a19b751d8212996f1a3edd5965b18f8e95
9,764
py
Python
src/SALib/util/__init__.py
QianWanghhu/SALib
95a3371e503f9253cb917b8f0101c0202b969c2b
[ "MIT" ]
1
2019-12-20T00:32:45.000Z
2019-12-20T00:32:45.000Z
src/SALib/util/__init__.py
QianWanghhu/SALib
95a3371e503f9253cb917b8f0101c0202b969c2b
[ "MIT" ]
null
null
null
src/SALib/util/__init__.py
QianWanghhu/SALib
95a3371e503f9253cb917b8f0101c0202b969c2b
[ "MIT" ]
null
null
null
"""A set of utility functions """ from collections import OrderedDict import pkgutil from typing import Dict, Tuple import numpy as np # type: ignore import scipy as sp # type: ignore from scipy import stats from typing import List from .util_funcs import (avail_approaches, read_param_file, _check_bounds, _check_groups) from .problem import ProblemSpec from .results import ResultDict __all__ = ["scale_samples", "read_param_file", "ResultDict", "avail_approaches"] def _scale_samples(params: np.ndarray, bounds: List): """Rescale samples in 0-to-1 range to arbitrary bounds Parameters ---------- params : numpy.ndarray numpy array of dimensions `num_params`-by-:math:`N`, where :math:`N` is the number of samples bounds : list list of lists of dimensions `num_params`-by-2 """ # Check bounds are legal (upper bound is greater than lower bound) lower_bounds, upper_bounds = _check_bounds(bounds) # This scales the samples in-place, by using the optional output # argument for the numpy ufunctions # The calculation is equivalent to: # sample * (upper_bound - lower_bound) + lower_bound np.add(np.multiply(params, (upper_bounds - lower_bounds), out=params), lower_bounds, out=params) def scale_samples(params: np.ndarray, problem: Dict): """Scale samples based on specified distribution (defaulting to uniform). Adds an entry to the problem specification to indicate samples have been scaled to maintain backwards compatibility (`sample_scaled`). 
Parameters ---------- params : np.ndarray, numpy array of dimensions `num_params`-by-:math:`N`, where :math:`N` is the number of samples problem : dictionary, SALib problem specification Returns ---------- np.ndarray, scaled samples """ bounds = problem['bounds'] dists = problem.get('dists') if dists is None: _scale_samples(params, bounds) else: if params.shape[1] != len(dists): msg = "Mismatch in number of parameters and distributions.\n" msg += "Num parameters: {}".format(params.shape[1]) msg += "Num distributions: {}".format(len(dists)) raise ValueError(msg) params = _nonuniform_scale_samples( params, bounds, dists) problem['sample_scaled'] = True return params # limited_params = limit_samples(params, upper_bound, lower_bound, dists) def _unscale_samples(params, bounds): """Rescale samples from arbitrary bounds back to [0,1] range Parameters ---------- bounds : list list of lists of dimensions num_params-by-2 params : numpy.ndarray numpy array of dimensions num_params-by-N, where N is the number of samples """ # Check bounds are legal (upper bound is greater than lower bound) b = np.array(bounds) lower_bounds = b[:, 0] upper_bounds = b[:, 1] if np.any(lower_bounds >= upper_bounds): raise ValueError("Bounds are not legal") # This scales the samples in-place, by using the optional output # argument for the numpy ufunctions # The calculation is equivalent to: # (sample - lower_bound) / (upper_bound - lower_bound) np.divide(np.subtract(params, lower_bounds, out=params), np.subtract(upper_bounds, lower_bounds), out=params) def _nonuniform_scale_samples(params, bounds, dists): """Rescale samples in 0-to-1 range to other distributions Parameters ---------- problem : dict problem definition including bounds params : numpy.ndarray numpy array of dimensions num_params-by-N, where N is the number of samples dists : list list of distributions, one for each parameter unif: uniform with lower and upper bounds triang: triangular with width (scale) and location of peak location 
of peak is in percentage of width lower bound assumed to be zero norm: normal distribution with mean and standard deviation truncnorm: truncated normal distribution with upper and lower bounds, mean and standard deviation lognorm: lognormal with ln-space mean and standard deviation """ b = np.array(bounds) # initializing matrix for converted values conv_params = np.empty_like(params) # loop over the parameters for i in range(conv_params.shape[1]): # setting first and second arguments for distributions b1 = b[i][0] b2 = b[i][1] if dists[i] == 'triang': # checking for correct parameters if b1 <= 0 or b2 <= 0 or b2 >= 1: raise ValueError("""Triangular distribution: Scale must be greater than zero; peak on interval [0,1]""") else: conv_params[:, i] = sp.stats.triang.ppf( params[:, i], c=b2, scale=b1, loc=0) elif dists[i] == 'unif': if b1 >= b2: raise ValueError("""Uniform distribution: lower bound must be less than upper bound""") else: conv_params[:, i] = params[:, i] * (b2 - b1) + b1 elif dists[i] == 'norm': if b2 <= 0: raise ValueError("""Normal distribution: stdev must be > 0""") else: conv_params[:, i] = sp.stats.norm.ppf( params[:, i], loc=b1, scale=b2) # Truncated normal distribution # parameters are lower bound and upper bound, mean and stdev elif dists[i] == 'truncnorm': b3 = b[i][2] b4 = b[i][3] if b4 <= 0: raise ValueError( """Truncated normal distribution: stdev must be > 0""" ) if b1 >= b2: raise ValueError( """Truncated normal distribution: lower bound must be less than upper bound""" ) else: conv_params[:, i] = sp.stats.truncnorm.ppf( params[:, i], (b1 - b3) / b4, (b2 - b3) / b4, loc=b3, scale=b4 ) # lognormal distribution (ln-space, not base-10) # paramters are ln-space mean and standard deviation elif dists[i] == 'lognorm': # checking for valid parameters if b2 <= 0: raise ValueError( """Lognormal distribution: stdev must be > 0""") else: conv_params[:, i] = np.exp( sp.stats.norm.ppf(params[:, i], loc=b1, scale=b2)) else: valid_dists = ['unif', 
'triang', 'norm', 'truncnorm', 'lognorm'] raise ValueError('Distributions: choose one of %s' % ", ".join(valid_dists)) return conv_params def extract_group_names(groups: List) -> Tuple: """Get a unique set of the group names. Reverts to parameter names (and number of parameters) if groups not defined. Parameters ---------- groups : List Returns ------- tuple : names, number of groups """ names = list(OrderedDict.fromkeys(groups)) number = len(names) return names, number def compute_groups_matrix(groups: List): """Generate matrix which notes factor membership of groups Computes a k-by-g matrix which notes factor membership of groups where: k is the number of variables (factors) g is the number of groups Also returns a g-length list of unique group_names whose positions correspond to the order of groups in the k-by-g matrix Parameters ---------- groups : List Group names corresponding to each variable Returns ------- tuple containing group matrix assigning parameters to groups and a list of unique group names """ num_vars = len(groups) unique_group_names, number_of_groups = extract_group_names(groups) indices = dict([(x, i) for (i, x) in enumerate(unique_group_names)]) output = np.zeros((num_vars, number_of_groups), dtype=np.int) for parameter_row, group_membership in enumerate(groups): group_index = indices[group_membership] output[parameter_row, group_index] = 1 return output, unique_group_names def _define_problem_with_groups(problem: Dict) -> Dict: """ Checks if the user defined the 'groups' key in the problem dictionary. If not, makes the 'groups' key equal to the variables names. In other words, the number of groups will be equal to the number of variables, which is equivalent to no groups. 
Parameters ---------- problem : dict The problem definition Returns ------- problem : dict The problem definition with the 'groups' key, even if the user doesn't define it """ # Checks if there isn't a key 'groups' or if it exists and is set to 'None' if 'groups' not in problem or not problem['groups']: problem['groups'] = problem['names'] elif len(problem['groups']) != problem['num_vars']: raise ValueError("Number of entries in \'groups\' should be the same " "as in \'names\'") return problem def _compute_delta(num_levels: int) -> float: """Computes the delta value from number of levels Parameters --------- num_levels : int The number of levels Returns ------- float """ return num_levels / (2.0 * (num_levels - 1))
32.013115
89
0.600676
"""A set of utility functions """ from collections import OrderedDict import pkgutil from typing import Dict, Tuple import numpy as np # type: ignore import scipy as sp # type: ignore from scipy import stats from typing import List from .util_funcs import (avail_approaches, read_param_file, _check_bounds, _check_groups) from .problem import ProblemSpec from .results import ResultDict __all__ = ["scale_samples", "read_param_file", "ResultDict", "avail_approaches"] def _scale_samples(params: np.ndarray, bounds: List): """Rescale samples in 0-to-1 range to arbitrary bounds Parameters ---------- params : numpy.ndarray numpy array of dimensions `num_params`-by-:math:`N`, where :math:`N` is the number of samples bounds : list list of lists of dimensions `num_params`-by-2 """ # Check bounds are legal (upper bound is greater than lower bound) lower_bounds, upper_bounds = _check_bounds(bounds) # This scales the samples in-place, by using the optional output # argument for the numpy ufunctions # The calculation is equivalent to: # sample * (upper_bound - lower_bound) + lower_bound np.add(np.multiply(params, (upper_bounds - lower_bounds), out=params), lower_bounds, out=params) def scale_samples(params: np.ndarray, problem: Dict): """Scale samples based on specified distribution (defaulting to uniform). Adds an entry to the problem specification to indicate samples have been scaled to maintain backwards compatibility (`sample_scaled`). 
Parameters ---------- params : np.ndarray, numpy array of dimensions `num_params`-by-:math:`N`, where :math:`N` is the number of samples problem : dictionary, SALib problem specification Returns ---------- np.ndarray, scaled samples """ bounds = problem['bounds'] dists = problem.get('dists') if dists is None: _scale_samples(params, bounds) else: if params.shape[1] != len(dists): msg = "Mismatch in number of parameters and distributions.\n" msg += "Num parameters: {}".format(params.shape[1]) msg += "Num distributions: {}".format(len(dists)) raise ValueError(msg) params = _nonuniform_scale_samples( params, bounds, dists) problem['sample_scaled'] = True return params # limited_params = limit_samples(params, upper_bound, lower_bound, dists) def _unscale_samples(params, bounds): """Rescale samples from arbitrary bounds back to [0,1] range Parameters ---------- bounds : list list of lists of dimensions num_params-by-2 params : numpy.ndarray numpy array of dimensions num_params-by-N, where N is the number of samples """ # Check bounds are legal (upper bound is greater than lower bound) b = np.array(bounds) lower_bounds = b[:, 0] upper_bounds = b[:, 1] if np.any(lower_bounds >= upper_bounds): raise ValueError("Bounds are not legal") # This scales the samples in-place, by using the optional output # argument for the numpy ufunctions # The calculation is equivalent to: # (sample - lower_bound) / (upper_bound - lower_bound) np.divide(np.subtract(params, lower_bounds, out=params), np.subtract(upper_bounds, lower_bounds), out=params) def _nonuniform_scale_samples(params, bounds, dists): """Rescale samples in 0-to-1 range to other distributions Parameters ---------- problem : dict problem definition including bounds params : numpy.ndarray numpy array of dimensions num_params-by-N, where N is the number of samples dists : list list of distributions, one for each parameter unif: uniform with lower and upper bounds triang: triangular with width (scale) and location of peak location 
of peak is in percentage of width lower bound assumed to be zero norm: normal distribution with mean and standard deviation truncnorm: truncated normal distribution with upper and lower bounds, mean and standard deviation lognorm: lognormal with ln-space mean and standard deviation """ b = np.array(bounds) # initializing matrix for converted values conv_params = np.empty_like(params) # loop over the parameters for i in range(conv_params.shape[1]): # setting first and second arguments for distributions b1 = b[i][0] b2 = b[i][1] if dists[i] == 'triang': # checking for correct parameters if b1 <= 0 or b2 <= 0 or b2 >= 1: raise ValueError("""Triangular distribution: Scale must be greater than zero; peak on interval [0,1]""") else: conv_params[:, i] = sp.stats.triang.ppf( params[:, i], c=b2, scale=b1, loc=0) elif dists[i] == 'unif': if b1 >= b2: raise ValueError("""Uniform distribution: lower bound must be less than upper bound""") else: conv_params[:, i] = params[:, i] * (b2 - b1) + b1 elif dists[i] == 'norm': if b2 <= 0: raise ValueError("""Normal distribution: stdev must be > 0""") else: conv_params[:, i] = sp.stats.norm.ppf( params[:, i], loc=b1, scale=b2) # Truncated normal distribution # parameters are lower bound and upper bound, mean and stdev elif dists[i] == 'truncnorm': b3 = b[i][2] b4 = b[i][3] if b4 <= 0: raise ValueError( """Truncated normal distribution: stdev must be > 0""" ) if b1 >= b2: raise ValueError( """Truncated normal distribution: lower bound must be less than upper bound""" ) else: conv_params[:, i] = sp.stats.truncnorm.ppf( params[:, i], (b1 - b3) / b4, (b2 - b3) / b4, loc=b3, scale=b4 ) # lognormal distribution (ln-space, not base-10) # paramters are ln-space mean and standard deviation elif dists[i] == 'lognorm': # checking for valid parameters if b2 <= 0: raise ValueError( """Lognormal distribution: stdev must be > 0""") else: conv_params[:, i] = np.exp( sp.stats.norm.ppf(params[:, i], loc=b1, scale=b2)) else: valid_dists = ['unif', 
'triang', 'norm', 'truncnorm', 'lognorm'] raise ValueError('Distributions: choose one of %s' % ", ".join(valid_dists)) return conv_params def extract_group_names(groups: List) -> Tuple: """Get a unique set of the group names. Reverts to parameter names (and number of parameters) if groups not defined. Parameters ---------- groups : List Returns ------- tuple : names, number of groups """ names = list(OrderedDict.fromkeys(groups)) number = len(names) return names, number def compute_groups_matrix(groups: List): """Generate matrix which notes factor membership of groups Computes a k-by-g matrix which notes factor membership of groups where: k is the number of variables (factors) g is the number of groups Also returns a g-length list of unique group_names whose positions correspond to the order of groups in the k-by-g matrix Parameters ---------- groups : List Group names corresponding to each variable Returns ------- tuple containing group matrix assigning parameters to groups and a list of unique group names """ num_vars = len(groups) unique_group_names, number_of_groups = extract_group_names(groups) indices = dict([(x, i) for (i, x) in enumerate(unique_group_names)]) output = np.zeros((num_vars, number_of_groups), dtype=np.int) for parameter_row, group_membership in enumerate(groups): group_index = indices[group_membership] output[parameter_row, group_index] = 1 return output, unique_group_names def _define_problem_with_groups(problem: Dict) -> Dict: """ Checks if the user defined the 'groups' key in the problem dictionary. If not, makes the 'groups' key equal to the variables names. In other words, the number of groups will be equal to the number of variables, which is equivalent to no groups. 
Parameters ---------- problem : dict The problem definition Returns ------- problem : dict The problem definition with the 'groups' key, even if the user doesn't define it """ # Checks if there isn't a key 'groups' or if it exists and is set to 'None' if 'groups' not in problem or not problem['groups']: problem['groups'] = problem['names'] elif len(problem['groups']) != problem['num_vars']: raise ValueError("Number of entries in \'groups\' should be the same " "as in \'names\'") return problem def _compute_delta(num_levels: int) -> float: """Computes the delta value from number of levels Parameters --------- num_levels : int The number of levels Returns ------- float """ return num_levels / (2.0 * (num_levels - 1))
0
0
0
e9c9dd925406982a79befda5862e8bff57412e0d
2,585
py
Python
search.py
mizuff/gbf-rapid-search
3d6ab8cdba5b4f3a4e9fe956056fe3f7d0bc0a7d
[ "0BSD" ]
5
2018-03-06T02:23:21.000Z
2019-04-25T06:38:05.000Z
search.py
mizuff/gbf-rapid-search
3d6ab8cdba5b4f3a4e9fe956056fe3f7d0bc0a7d
[ "0BSD" ]
1
2021-01-29T04:18:14.000Z
2021-01-29T04:18:14.000Z
search.py
bookii/gbf-rapid-search
3d6ab8cdba5b4f3a4e9fe956056fe3f7d0bc0a7d
[ "0BSD" ]
4
2019-09-27T08:17:20.000Z
2021-03-14T03:43:56.000Z
# -*- coding: utf-8 -*- import json import re import subprocess import sys import time from requests_oauthlib import OAuth1Session # 取得したConsumer Key等と置き換えてください CK = 'consumer_key' CS = 'consumer_secret' AT = 'access_token' AS = 'access_token_secret' FILTER_URL = 'https://stream.twitter.com/1.1/statuses/filter.json' # 文字列から参戦IDを抽出 # stringをクリップボードにコピー if __name__ == "__main__": main()
29.712644
114
0.597292
# -*- coding: utf-8 -*- import json import re import subprocess import sys import time from requests_oauthlib import OAuth1Session # 取得したConsumer Key等と置き換えてください CK = 'consumer_key' CS = 'consumer_secret' AT = 'access_token' AS = 'access_token_secret' FILTER_URL = 'https://stream.twitter.com/1.1/statuses/filter.json' def usage(): print('Usage: python %s level name' % sys.argv[0]) print('Example: python %s 75 シュヴァリエ・マグナ' % sys.argv[0]) sys.exit() def unsupported_os(): print("Don't understand this operating system.") print("Try on Windows or Mac.") sys.exit() # 文字列から参戦IDを抽出 def parse(string): pattern = r'[0-9A-F]{8}\s:参戦ID' matchOB = re.findall(pattern, string) # 一致する文字列を全て取得 if matchOB: return matchOB[-1][0:8] # 一致する文字列のうち最後のものをreturnすることによってダミーのIDを回避 else: return None # stringをクリップボードにコピー def set_clipboard(string, os_name): if os_name == 'win32': process = subprocess.Popen('clip', stdin = subprocess.PIPE, shell=True) elif os_name == 'darwin': process = subprocess.Popen('pbcopy', stdin = subprocess.PIPE, shell=False) else: unsupported_os() process.communicate(string.encode("utf-8")) # str型をbyte型に変換 def print_tweet(tweet): tm = time.localtime() name = tweet.get('user').get('name') screen_name = tweet.get('user').get('screen_name') print('[%02d:%02d:%02d] %s @%s' % (tm.tm_hour, tm.tm_min, tm.tm_sec, name, screen_name)) print(tweet.get('text') + '\n') def main(): try: args = sys.argv if len(args) != 3: usage() os_name = sys.platform if os_name != 'win32' and os_name != 'darwin': # Windows / Mac unsupported_os() # OAuth oauth_session = OAuth1Session(CK, CS, AT, AS) params = {'track': 'Lv%s %s' % (args[1], args[2])} req = oauth_session.post(FILTER_URL, params=params, stream=True) for line in req.iter_lines(): line_decode = line.decode('utf-8') if line_decode != '': # if not empty tweet = json.loads(line_decode) # pass tweets via the game page if tweet.get('source') == '<a href="http://granbluefantasy.jp/" rel="nofollow">グランブルー ファンタジー</a>': raid_id = 
parse(tweet.get('text')) if raid_id: set_clipboard(raid_id, os_name) print_tweet(tweet) except KeyboardInterrupt: print() sys.exit() if __name__ == "__main__": main()
2,193
0
140
91ea2b2a100b4f84af908e54d6c94e5a481595ad
8,090
py
Python
pysnmp/ASCEND-MIBTRANSACTION-MIB.py
agustinhenze/mibs.snmplabs.com
1fc5c07860542b89212f4c8ab807057d9a9206c7
[ "Apache-2.0" ]
11
2021-02-02T16:27:16.000Z
2021-08-31T06:22:49.000Z
pysnmp/ASCEND-MIBTRANSACTION-MIB.py
agustinhenze/mibs.snmplabs.com
1fc5c07860542b89212f4c8ab807057d9a9206c7
[ "Apache-2.0" ]
75
2021-02-24T17:30:31.000Z
2021-12-08T00:01:18.000Z
pysnmp/ASCEND-MIBTRANSACTION-MIB.py
agustinhenze/mibs.snmplabs.com
1fc5c07860542b89212f4c8ab807057d9a9206c7
[ "Apache-2.0" ]
10
2019-04-30T05:51:36.000Z
2022-02-16T03:33:41.000Z
# # PySNMP MIB module ASCEND-MIBTRANSACTION-MIB (http://snmplabs.com/pysmi) # ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/ASCEND-MIBTRANSACTION-MIB # Produced by pysmi-0.3.4 at Mon Apr 29 17:12:43 2019 # On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4 # Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15) # configuration, = mibBuilder.importSymbols("ASCEND-MIB", "configuration") Integer, ObjectIdentifier, OctetString = mibBuilder.importSymbols("ASN1", "Integer", "ObjectIdentifier", "OctetString") NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues") SingleValueConstraint, ValueRangeConstraint, ValueSizeConstraint, ConstraintsIntersection, ConstraintsUnion = mibBuilder.importSymbols("ASN1-REFINEMENT", "SingleValueConstraint", "ValueRangeConstraint", "ValueSizeConstraint", "ConstraintsIntersection", "ConstraintsUnion") NotificationGroup, ModuleCompliance = mibBuilder.importSymbols("SNMPv2-CONF", "NotificationGroup", "ModuleCompliance") Counter32, MibIdentifier, MibScalar, MibTable, MibTableRow, MibTableColumn, Counter64, IpAddress, Gauge32, ModuleIdentity, TimeTicks, Integer32, NotificationType, Bits, iso, ObjectIdentity, Unsigned32 = mibBuilder.importSymbols("SNMPv2-SMI", "Counter32", "MibIdentifier", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "Counter64", "IpAddress", "Gauge32", "ModuleIdentity", "TimeTicks", "Integer32", "NotificationType", "Bits", "iso", "ObjectIdentity", "Unsigned32") DisplayString, TextualConvention = mibBuilder.importSymbols("SNMPv2-TC", "DisplayString", "TextualConvention") mibtransactionProfile = MibIdentifier((1, 3, 6, 1, 4, 1, 529, 23, 131)) mibtransactionProfileTable = MibTable((1, 3, 6, 1, 4, 1, 529, 23, 131, 1), ) if mibBuilder.loadTexts: mibtransactionProfileTable.setStatus('mandatory') mibtransactionProfileEntry = MibTableRow((1, 3, 6, 1, 4, 1, 529, 23, 131, 1, 1), ).setIndexNames((0, "ASCEND-MIBTRANSACTION-MIB", "transactionProfile-Index-o")) if 
mibBuilder.loadTexts: mibtransactionProfileEntry.setStatus('mandatory') transactionProfile_Index_o = MibScalar((1, 3, 6, 1, 4, 1, 529, 23, 131, 1, 1, 1), Integer32()).setLabel("transactionProfile-Index-o").setMaxAccess("readonly") if mibBuilder.loadTexts: transactionProfile_Index_o.setStatus('mandatory') transactionProfile_SelectionTimeout = MibScalar((1, 3, 6, 1, 4, 1, 529, 23, 131, 1, 1, 2), Integer32()).setLabel("transactionProfile-SelectionTimeout").setMaxAccess("readwrite") if mibBuilder.loadTexts: transactionProfile_SelectionTimeout.setStatus('mandatory') transactionProfile_DataAckTimeout = MibScalar((1, 3, 6, 1, 4, 1, 529, 23, 131, 1, 1, 3), Integer32()).setLabel("transactionProfile-DataAckTimeout").setMaxAccess("readwrite") if mibBuilder.loadTexts: transactionProfile_DataAckTimeout.setStatus('mandatory') transactionProfile_KeepAliveTimeout = MibScalar((1, 3, 6, 1, 4, 1, 529, 23, 131, 1, 1, 4), Integer32()).setLabel("transactionProfile-KeepAliveTimeout").setMaxAccess("readwrite") if mibBuilder.loadTexts: transactionProfile_KeepAliveTimeout.setStatus('mandatory') transactionProfile_QtpPort = MibScalar((1, 3, 6, 1, 4, 1, 529, 23, 131, 1, 1, 5), Integer32()).setLabel("transactionProfile-QtpPort").setMaxAccess("readwrite") if mibBuilder.loadTexts: transactionProfile_QtpPort.setStatus('mandatory') transactionProfile_MetricMax = MibScalar((1, 3, 6, 1, 4, 1, 529, 23, 131, 1, 1, 6), Integer32()).setLabel("transactionProfile-MetricMax").setMaxAccess("readwrite") if mibBuilder.loadTexts: transactionProfile_MetricMax.setStatus('mandatory') transactionProfile_NoConnAckIncrement = MibScalar((1, 3, 6, 1, 4, 1, 529, 23, 131, 1, 1, 7), Integer32()).setLabel("transactionProfile-NoConnAckIncrement").setMaxAccess("readwrite") if mibBuilder.loadTexts: transactionProfile_NoConnAckIncrement.setStatus('mandatory') transactionProfile_CallRejectIncrement = MibScalar((1, 3, 6, 1, 4, 1, 529, 23, 131, 1, 1, 8), 
Integer32()).setLabel("transactionProfile-CallRejectIncrement").setMaxAccess("readwrite") if mibBuilder.loadTexts: transactionProfile_CallRejectIncrement.setStatus('mandatory') transactionProfile_CallAckDecrement = MibScalar((1, 3, 6, 1, 4, 1, 529, 23, 131, 1, 1, 9), Integer32()).setLabel("transactionProfile-CallAckDecrement").setMaxAccess("readwrite") if mibBuilder.loadTexts: transactionProfile_CallAckDecrement.setStatus('mandatory') transactionProfile_AvailableMetric = MibScalar((1, 3, 6, 1, 4, 1, 529, 23, 131, 1, 1, 10), Integer32()).setLabel("transactionProfile-AvailableMetric").setMaxAccess("readwrite") if mibBuilder.loadTexts: transactionProfile_AvailableMetric.setStatus('mandatory') transactionProfile_PartlyCongestedMetric = MibScalar((1, 3, 6, 1, 4, 1, 529, 23, 131, 1, 1, 11), Integer32()).setLabel("transactionProfile-PartlyCongestedMetric").setMaxAccess("readwrite") if mibBuilder.loadTexts: transactionProfile_PartlyCongestedMetric.setStatus('mandatory') transactionProfile_CongestedMetric = MibScalar((1, 3, 6, 1, 4, 1, 529, 23, 131, 1, 1, 12), Integer32()).setLabel("transactionProfile-CongestedMetric").setMaxAccess("readwrite") if mibBuilder.loadTexts: transactionProfile_CongestedMetric.setStatus('mandatory') transactionProfile_ShutdownMetric = MibScalar((1, 3, 6, 1, 4, 1, 529, 23, 131, 1, 1, 13), Integer32()).setLabel("transactionProfile-ShutdownMetric").setMaxAccess("readwrite") if mibBuilder.loadTexts: transactionProfile_ShutdownMetric.setStatus('mandatory') transactionProfile_NoFirstStatusMetric = MibScalar((1, 3, 6, 1, 4, 1, 529, 23, 131, 1, 1, 14), Integer32()).setLabel("transactionProfile-NoFirstStatusMetric").setMaxAccess("readwrite") if mibBuilder.loadTexts: transactionProfile_NoFirstStatusMetric.setStatus('mandatory') transactionProfile_NoSecondStatusMetric = MibScalar((1, 3, 6, 1, 4, 1, 529, 23, 131, 1, 1, 15), Integer32()).setLabel("transactionProfile-NoSecondStatusMetric").setMaxAccess("readwrite") if mibBuilder.loadTexts: 
transactionProfile_NoSecondStatusMetric.setStatus('mandatory') transactionProfile_MaxQtpPduSize = MibScalar((1, 3, 6, 1, 4, 1, 529, 23, 131, 1, 1, 16), Integer32()).setLabel("transactionProfile-MaxQtpPduSize").setMaxAccess("readwrite") if mibBuilder.loadTexts: transactionProfile_MaxQtpPduSize.setStatus('mandatory') transactionProfile_Action_o = MibScalar((1, 3, 6, 1, 4, 1, 529, 23, 131, 1, 1, 17), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("noAction", 1), ("createProfile", 2), ("deleteProfile", 3)))).setLabel("transactionProfile-Action-o").setMaxAccess("readwrite") if mibBuilder.loadTexts: transactionProfile_Action_o.setStatus('mandatory') mibBuilder.exportSymbols("ASCEND-MIBTRANSACTION-MIB", transactionProfile_Index_o=transactionProfile_Index_o, transactionProfile_DataAckTimeout=transactionProfile_DataAckTimeout, mibtransactionProfileEntry=mibtransactionProfileEntry, transactionProfile_CallRejectIncrement=transactionProfile_CallRejectIncrement, transactionProfile_CallAckDecrement=transactionProfile_CallAckDecrement, transactionProfile_NoSecondStatusMetric=transactionProfile_NoSecondStatusMetric, transactionProfile_NoConnAckIncrement=transactionProfile_NoConnAckIncrement, transactionProfile_ShutdownMetric=transactionProfile_ShutdownMetric, transactionProfile_Action_o=transactionProfile_Action_o, mibtransactionProfileTable=mibtransactionProfileTable, transactionProfile_NoFirstStatusMetric=transactionProfile_NoFirstStatusMetric, transactionProfile_MaxQtpPduSize=transactionProfile_MaxQtpPduSize, transactionProfile_SelectionTimeout=transactionProfile_SelectionTimeout, transactionProfile_PartlyCongestedMetric=transactionProfile_PartlyCongestedMetric, transactionProfile_AvailableMetric=transactionProfile_AvailableMetric, transactionProfile_CongestedMetric=transactionProfile_CongestedMetric, mibtransactionProfile=mibtransactionProfile, DisplayString=DisplayString, 
transactionProfile_QtpPort=transactionProfile_QtpPort, transactionProfile_MetricMax=transactionProfile_MetricMax, transactionProfile_KeepAliveTimeout=transactionProfile_KeepAliveTimeout)
139.482759
1,428
0.813103
# # PySNMP MIB module ASCEND-MIBTRANSACTION-MIB (http://snmplabs.com/pysmi) # ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/ASCEND-MIBTRANSACTION-MIB # Produced by pysmi-0.3.4 at Mon Apr 29 17:12:43 2019 # On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4 # Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15) # configuration, = mibBuilder.importSymbols("ASCEND-MIB", "configuration") Integer, ObjectIdentifier, OctetString = mibBuilder.importSymbols("ASN1", "Integer", "ObjectIdentifier", "OctetString") NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues") SingleValueConstraint, ValueRangeConstraint, ValueSizeConstraint, ConstraintsIntersection, ConstraintsUnion = mibBuilder.importSymbols("ASN1-REFINEMENT", "SingleValueConstraint", "ValueRangeConstraint", "ValueSizeConstraint", "ConstraintsIntersection", "ConstraintsUnion") NotificationGroup, ModuleCompliance = mibBuilder.importSymbols("SNMPv2-CONF", "NotificationGroup", "ModuleCompliance") Counter32, MibIdentifier, MibScalar, MibTable, MibTableRow, MibTableColumn, Counter64, IpAddress, Gauge32, ModuleIdentity, TimeTicks, Integer32, NotificationType, Bits, iso, ObjectIdentity, Unsigned32 = mibBuilder.importSymbols("SNMPv2-SMI", "Counter32", "MibIdentifier", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "Counter64", "IpAddress", "Gauge32", "ModuleIdentity", "TimeTicks", "Integer32", "NotificationType", "Bits", "iso", "ObjectIdentity", "Unsigned32") DisplayString, TextualConvention = mibBuilder.importSymbols("SNMPv2-TC", "DisplayString", "TextualConvention") class DisplayString(OctetString): pass mibtransactionProfile = MibIdentifier((1, 3, 6, 1, 4, 1, 529, 23, 131)) mibtransactionProfileTable = MibTable((1, 3, 6, 1, 4, 1, 529, 23, 131, 1), ) if mibBuilder.loadTexts: mibtransactionProfileTable.setStatus('mandatory') mibtransactionProfileEntry = MibTableRow((1, 3, 6, 1, 4, 1, 529, 23, 131, 1, 1), ).setIndexNames((0, 
"ASCEND-MIBTRANSACTION-MIB", "transactionProfile-Index-o")) if mibBuilder.loadTexts: mibtransactionProfileEntry.setStatus('mandatory') transactionProfile_Index_o = MibScalar((1, 3, 6, 1, 4, 1, 529, 23, 131, 1, 1, 1), Integer32()).setLabel("transactionProfile-Index-o").setMaxAccess("readonly") if mibBuilder.loadTexts: transactionProfile_Index_o.setStatus('mandatory') transactionProfile_SelectionTimeout = MibScalar((1, 3, 6, 1, 4, 1, 529, 23, 131, 1, 1, 2), Integer32()).setLabel("transactionProfile-SelectionTimeout").setMaxAccess("readwrite") if mibBuilder.loadTexts: transactionProfile_SelectionTimeout.setStatus('mandatory') transactionProfile_DataAckTimeout = MibScalar((1, 3, 6, 1, 4, 1, 529, 23, 131, 1, 1, 3), Integer32()).setLabel("transactionProfile-DataAckTimeout").setMaxAccess("readwrite") if mibBuilder.loadTexts: transactionProfile_DataAckTimeout.setStatus('mandatory') transactionProfile_KeepAliveTimeout = MibScalar((1, 3, 6, 1, 4, 1, 529, 23, 131, 1, 1, 4), Integer32()).setLabel("transactionProfile-KeepAliveTimeout").setMaxAccess("readwrite") if mibBuilder.loadTexts: transactionProfile_KeepAliveTimeout.setStatus('mandatory') transactionProfile_QtpPort = MibScalar((1, 3, 6, 1, 4, 1, 529, 23, 131, 1, 1, 5), Integer32()).setLabel("transactionProfile-QtpPort").setMaxAccess("readwrite") if mibBuilder.loadTexts: transactionProfile_QtpPort.setStatus('mandatory') transactionProfile_MetricMax = MibScalar((1, 3, 6, 1, 4, 1, 529, 23, 131, 1, 1, 6), Integer32()).setLabel("transactionProfile-MetricMax").setMaxAccess("readwrite") if mibBuilder.loadTexts: transactionProfile_MetricMax.setStatus('mandatory') transactionProfile_NoConnAckIncrement = MibScalar((1, 3, 6, 1, 4, 1, 529, 23, 131, 1, 1, 7), Integer32()).setLabel("transactionProfile-NoConnAckIncrement").setMaxAccess("readwrite") if mibBuilder.loadTexts: transactionProfile_NoConnAckIncrement.setStatus('mandatory') transactionProfile_CallRejectIncrement = MibScalar((1, 3, 6, 1, 4, 1, 529, 23, 131, 1, 1, 8), 
Integer32()).setLabel("transactionProfile-CallRejectIncrement").setMaxAccess("readwrite") if mibBuilder.loadTexts: transactionProfile_CallRejectIncrement.setStatus('mandatory') transactionProfile_CallAckDecrement = MibScalar((1, 3, 6, 1, 4, 1, 529, 23, 131, 1, 1, 9), Integer32()).setLabel("transactionProfile-CallAckDecrement").setMaxAccess("readwrite") if mibBuilder.loadTexts: transactionProfile_CallAckDecrement.setStatus('mandatory') transactionProfile_AvailableMetric = MibScalar((1, 3, 6, 1, 4, 1, 529, 23, 131, 1, 1, 10), Integer32()).setLabel("transactionProfile-AvailableMetric").setMaxAccess("readwrite") if mibBuilder.loadTexts: transactionProfile_AvailableMetric.setStatus('mandatory') transactionProfile_PartlyCongestedMetric = MibScalar((1, 3, 6, 1, 4, 1, 529, 23, 131, 1, 1, 11), Integer32()).setLabel("transactionProfile-PartlyCongestedMetric").setMaxAccess("readwrite") if mibBuilder.loadTexts: transactionProfile_PartlyCongestedMetric.setStatus('mandatory') transactionProfile_CongestedMetric = MibScalar((1, 3, 6, 1, 4, 1, 529, 23, 131, 1, 1, 12), Integer32()).setLabel("transactionProfile-CongestedMetric").setMaxAccess("readwrite") if mibBuilder.loadTexts: transactionProfile_CongestedMetric.setStatus('mandatory') transactionProfile_ShutdownMetric = MibScalar((1, 3, 6, 1, 4, 1, 529, 23, 131, 1, 1, 13), Integer32()).setLabel("transactionProfile-ShutdownMetric").setMaxAccess("readwrite") if mibBuilder.loadTexts: transactionProfile_ShutdownMetric.setStatus('mandatory') transactionProfile_NoFirstStatusMetric = MibScalar((1, 3, 6, 1, 4, 1, 529, 23, 131, 1, 1, 14), Integer32()).setLabel("transactionProfile-NoFirstStatusMetric").setMaxAccess("readwrite") if mibBuilder.loadTexts: transactionProfile_NoFirstStatusMetric.setStatus('mandatory') transactionProfile_NoSecondStatusMetric = MibScalar((1, 3, 6, 1, 4, 1, 529, 23, 131, 1, 1, 15), Integer32()).setLabel("transactionProfile-NoSecondStatusMetric").setMaxAccess("readwrite") if mibBuilder.loadTexts: 
transactionProfile_NoSecondStatusMetric.setStatus('mandatory') transactionProfile_MaxQtpPduSize = MibScalar((1, 3, 6, 1, 4, 1, 529, 23, 131, 1, 1, 16), Integer32()).setLabel("transactionProfile-MaxQtpPduSize").setMaxAccess("readwrite") if mibBuilder.loadTexts: transactionProfile_MaxQtpPduSize.setStatus('mandatory') transactionProfile_Action_o = MibScalar((1, 3, 6, 1, 4, 1, 529, 23, 131, 1, 1, 17), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("noAction", 1), ("createProfile", 2), ("deleteProfile", 3)))).setLabel("transactionProfile-Action-o").setMaxAccess("readwrite") if mibBuilder.loadTexts: transactionProfile_Action_o.setStatus('mandatory') mibBuilder.exportSymbols("ASCEND-MIBTRANSACTION-MIB", transactionProfile_Index_o=transactionProfile_Index_o, transactionProfile_DataAckTimeout=transactionProfile_DataAckTimeout, mibtransactionProfileEntry=mibtransactionProfileEntry, transactionProfile_CallRejectIncrement=transactionProfile_CallRejectIncrement, transactionProfile_CallAckDecrement=transactionProfile_CallAckDecrement, transactionProfile_NoSecondStatusMetric=transactionProfile_NoSecondStatusMetric, transactionProfile_NoConnAckIncrement=transactionProfile_NoConnAckIncrement, transactionProfile_ShutdownMetric=transactionProfile_ShutdownMetric, transactionProfile_Action_o=transactionProfile_Action_o, mibtransactionProfileTable=mibtransactionProfileTable, transactionProfile_NoFirstStatusMetric=transactionProfile_NoFirstStatusMetric, transactionProfile_MaxQtpPduSize=transactionProfile_MaxQtpPduSize, transactionProfile_SelectionTimeout=transactionProfile_SelectionTimeout, transactionProfile_PartlyCongestedMetric=transactionProfile_PartlyCongestedMetric, transactionProfile_AvailableMetric=transactionProfile_AvailableMetric, transactionProfile_CongestedMetric=transactionProfile_CongestedMetric, mibtransactionProfile=mibtransactionProfile, DisplayString=DisplayString, 
transactionProfile_QtpPort=transactionProfile_QtpPort, transactionProfile_MetricMax=transactionProfile_MetricMax, transactionProfile_KeepAliveTimeout=transactionProfile_KeepAliveTimeout)
0
21
22
b04416d3afeafec6d7544afdc6277f1a157d0148
5,608
py
Python
Applications/SIR/plotting.py
lcbendall/numerical_computing
565cde92525ea44c55abe933c6419c1543f9800b
[ "CC-BY-3.0" ]
null
null
null
Applications/SIR/plotting.py
lcbendall/numerical_computing
565cde92525ea44c55abe933c6419c1543f9800b
[ "CC-BY-3.0" ]
null
null
null
Applications/SIR/plotting.py
lcbendall/numerical_computing
565cde92525ea44c55abe933c6419c1543f9800b
[ "CC-BY-3.0" ]
null
null
null
#! /usr/bin/env python from __future__ import division from scipy.integrate import ode import numpy as np import matplotlib.pyplot as plt from solution import SIR #, SIRS, SIS from scikits import bvp_solver # Example() # Exercise1() # Exercise2() # Exercise2a() # Exercise2b() Exercise3() # Exercise4()
27.223301
120
0.599857
#! /usr/bin/env python from __future__ import division from scipy.integrate import ode import numpy as np import matplotlib.pyplot as plt from solution import SIR #, SIRS, SIS from scikits import bvp_solver def Example(): a, ya, b = 0., 2., 1.6 def ode_f(t,y): return np.array([-1.*y+6.+2.*t]) ode_object = ode(ode_f).set_integrator('dopri5',atol=1e-5) ode_object.set_initial_value(ya,a) dim, t = 1, np.linspace(a,b,51) Y = np.zeros((len(t),dim)) Y[0,:] = ya for j in range(1,len(t)): Y[j,:] = ode_object.integrate(t[j]) plt.plot(t,Y[:,0],'-k',linewidth=2) plt.axis([a,b,ya,8],fontsize=16.) plt.xlabel('$x$',fontsize=18.) plt.ylabel('$y$',fontsize=18.) plt.savefig('Example1.pdf') # plt.show() plt.clf() return t, Y.T[0] def Exercise1(): a, b, ya = 0., 16.,np.array([0,1,-2]) def ode_f(t,y): return np.array([y[1],y[2], -.2*(y[1] + 2.*y[0])]) example = ode(ode_f).set_integrator('dopri5',atol=1e-8) example.set_initial_value(ya,a) dim, t = 3, np.linspace(a,b,201) Y = np.zeros((len(t),dim)) Y[0,:] = ya for j in range(1,len(t)): Y[j,:] = example.integrate(t[j]) plt.plot(t,Y[:,0],'-k',linewidth=2.0) plt.axis([a-1.,b+1,-200,400],fontsize=16) plt.xlabel('x',fontsize=18) plt.ylabel('y',fontsize=18) # plt.savefig("exercise1.pdf") plt.show() plt.clf() return def Exercise2(): # SIR beta, gamma = 0.5, 0.25 # Exercise 2 a, b, ya = 0., 100., np.array([1.-(6.25e-7), 6.25e-7,0.]) t,Y = SIR(a,b,beta, gamma,ya) print "The Maximum fraction of the population that will be infected simultaneously is", max(Y[:,1]) plt.plot(t,Y[:,0],'-k',label='Susceptible') plt.plot(t,Y[:,2],'-b',label='Recovered') plt.plot(t,Y[:,1],'-r',label='Infected') plt.axis([a,b,-.1,1.1],fontsize=16) plt.legend(loc=1) plt.xlabel('T (days)',fontsize=18) plt.ylabel('Proportion of Population',fontsize=18) plt.savefig("SIR1.pdf") # plt.show() plt.clf() return def Exercise2a(): # SIR beta, gamma = 1., 1./3. 
# Exercise 3a a, b, ya = 0., 50., np.array([1.-(1.667e-6), 1.667e-6,0.]) t,Y = SIR(a,b,beta, gamma,ya) print "The Maximum fraction of the population that will be infected simultaneously is", max(Y[:,1]) plt.plot(t,Y[:,0],'-k',label='Susceptible') plt.plot(t,Y[:,2],'-b',label='Recovered') plt.plot(t,Y[:,1],'-r',label='Infected') plt.axis([a,b,-.1,1.1],fontsize=16) plt.legend(loc=1) plt.xlabel('T (days)',fontsize=18) plt.ylabel('Proportion of Population',fontsize=18) # plt.savefig("SIR.pdf") plt.show() plt.clf() return def Exercise3b(): # SIR beta, gamma = 1., 1./7. # Exercise 3b a, b, ya = 0., 50., np.array([1.-(1.667e-6), 1.667e-6,0.]) t,Y = SIR(a,b,beta, gamma,ya) print "The Maximum fraction of the population that will be infected simultaneously is", max(Y[:,1]) plt.plot(t,Y[:,0],'-k',label='Susceptible') plt.plot(t,Y[:,2],'-b',label='Recovered') plt.plot(t,Y[:,1],'-r',label='Infected') plt.axis([a,b,-.1,1.1],fontsize=16) plt.legend(loc=1) plt.xlabel('T (days)',fontsize=18) plt.ylabel('Proportion of Population',fontsize=18) # plt.savefig("SIR.pdf") plt.show() plt.clf() return def Exercise3(): # SIR beta, gamma = 3./10., 1./4. # Exercise 4 a, b, ya = 0., 500., np.array([1.-(1.667e-6), 1.667e-6,0.]) t,Y = SIR(a,b,beta, gamma,ya) print "The Maximum fraction of the population that will be infected simultaneously is", max(Y[:,1]) plt.plot(t,Y[:,0],'-k',label='Susceptible') plt.plot(t,Y[:,2],'-b',label='Recovered') plt.plot(t,Y[:,1],'-r',label='Infected') plt.axis([a,b,0.,1.],fontsize=16) plt.legend(loc=1) plt.xlabel('T (days)',fontsize=18) plt.ylabel('Proportion of Population',fontsize=18) # plt.savefig("SIR.pdf") plt.show() plt.clf() return def Exercise4(): # measles from math import pi, cos a, b = 0., 1. 
# Interval of the BVP n, N = 3, 80 # Dimension of the system/ Number of subintervals TOL,Max_IT = 10.**(-12), 40 # Tolerance/ Maximum number of Newton steps init_mesh = np.linspace(a,b,N+1) # Initial Mesh lmbda, mu, eta = .0279, .02, .01 def beta1(x): return 1575.*(1. + np.cos(2.*np.pi*x)) def Guess(x): S = .1 + .05*np.cos(2.*np.pi*x) return np.array([S, 05*(1.-S), 05*(1.-S), .05, .05, .05]) def ODE(x,y): return np.array([mu-beta1(x)*y[0]*y[2], beta1(x)*y[0]*y[2]-y[1]/lmbda, y[1]/lmbda - y[2]/eta, 0,0,0 ]) def g(Ya,Yb): BCa = Ya[0:3] - Ya[3:] BCb = Yb[0:3] - Yb[3:] return BCa, BCb problem = bvp_solver.ProblemDefinition(num_ODE = 6, num_parameters = 0, num_left_boundary_conditions = 3, boundary_points = (a, b), function = ODE, boundary_conditions = g) solution = bvp_solver.solve(problem, solution_guess = Guess, trace = 0, max_subintervals=1000, tolerance=1e-9) Num_Sol = solution(np.linspace(a,b,N+1)) # Guess_array = np.zeros((6,N+1)) # for index, x in zip(range(N+1),np.linspace(a,b,N+1)): # Guess_array[:,index] = Guess(x) # plt.plot(np.linspace(a,b,N+1), Guess_array[0,:] ,'-g') plt.plot(np.linspace(a,b,N+1), Num_Sol[0,:], '-k',label='Susceptible',linewidth=2.0) plt.plot(np.linspace(a,b,N+1), Num_Sol[1,:], '-g',label='Exposed',linewidth=2.0) plt.plot(np.linspace(a,b,N+1), Num_Sol[2,:], '-r',label='Infectious',linewidth=2.0) plt.legend(loc=5)# middle right placement plt.axis([0.,1.,-.01,.1]) plt.show() plt.clf() return # Example() # Exercise1() # Exercise2() # Exercise2a() # Exercise2b() Exercise3() # Exercise4()
5,126
0
166
abed2523cd069a7a9682de47eb8de08c27d3fef1
61
py
Python
sheetsync/version.py
guykisel/SheetSync
b6fa15f14320c2ae4fec88747f1f311dcabbac7b
[ "MIT" ]
1
2017-03-30T17:38:06.000Z
2017-03-30T17:38:06.000Z
sheetsync/version.py
guykisel/SheetSync
b6fa15f14320c2ae4fec88747f1f311dcabbac7b
[ "MIT" ]
null
null
null
sheetsync/version.py
guykisel/SheetSync
b6fa15f14320c2ae4fec88747f1f311dcabbac7b
[ "MIT" ]
null
null
null
# Single place version should be set. __version__ = '0.2.2'
20.333333
38
0.704918
# Single place version should be set. __version__ = '0.2.2'
0
0
0
a72987c371cafa206db482ba7d75d97327c280aa
1,273
py
Python
app/nn_inference/common/base_wrapper.py
rahowa/workzone
b6fd3241fdbc9463e0e7eb863f82f9524be50830
[ "MIT" ]
1
2020-04-25T07:49:11.000Z
2020-04-25T07:49:11.000Z
app/nn_inference/common/base_wrapper.py
rahowa/workzone
b6fd3241fdbc9463e0e7eb863f82f9524be50830
[ "MIT" ]
null
null
null
app/nn_inference/common/base_wrapper.py
rahowa/workzone
b6fd3241fdbc9463e0e7eb863f82f9524be50830
[ "MIT" ]
1
2020-04-23T10:24:56.000Z
2020-04-23T10:24:56.000Z
import json from typing import Dict, Any, Sequence from abc import ABC, abstractmethod from typing import List from app.base_types import Image from app.result_types import BaseResult class BaseWrapper(ABC): """ Base class for creating custom wrappers for models based on neural networks """ @abstractmethod def predict(self, image: Image) -> List[BaseResult]: """ Abstract method for predict result based on input image """ raise NotImplementedError @abstractmethod def preprocess(self, image: Image) -> Any: """ Abstract method for image preprocessing for certain model/framework """ raise NotImplementedError def load_config(self, path_to_config: str) -> Dict[str, Any]: """ Generic method for loading json config Parameters ---------- path_to_config: str Path to config file Returns ------- config: Dict[str, Any] Model config in dictionary """ with open(path_to_config, 'r') as conf_file: config = json.load(conf_file) return config
23.145455
65
0.593087
import json from typing import Dict, Any, Sequence from abc import ABC, abstractmethod from typing import List from app.base_types import Image from app.result_types import BaseResult class BaseWrapper(ABC): """ Base class for creating custom wrappers for models based on neural networks """ @abstractmethod def predict(self, image: Image) -> List[BaseResult]: """ Abstract method for predict result based on input image """ raise NotImplementedError @abstractmethod def preprocess(self, image: Image) -> Any: """ Abstract method for image preprocessing for certain model/framework """ raise NotImplementedError def load_config(self, path_to_config: str) -> Dict[str, Any]: """ Generic method for loading json config Parameters ---------- path_to_config: str Path to config file Returns ------- config: Dict[str, Any] Model config in dictionary """ with open(path_to_config, 'r') as conf_file: config = json.load(conf_file) return config def load(self): pass def unload(self): pass
16
0
54
4e67bf9d3581151e96fe9476c73689ab77c5975d
8,084
py
Python
deepracer_env_config/config_server.py
aws-deepracer/deepracer-env-config
9df38766ac3f35f9138ed8a48f96e9cf367d44c0
[ "Apache-2.0" ]
null
null
null
deepracer_env_config/config_server.py
aws-deepracer/deepracer-env-config
9df38766ac3f35f9138ed8a48f96e9cf367d44c0
[ "Apache-2.0" ]
null
null
null
deepracer_env_config/config_server.py
aws-deepracer/deepracer-env-config
9df38766ac3f35f9138ed8a48f96e9cf367d44c0
[ "Apache-2.0" ]
null
null
null
################################################################################# # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. # # # # Licensed under the Apache License, Version 2.0 (the "License"). # # You may not use this file except in compliance with the License. # # You may obtain a copy of the License at # # # # http://www.apache.org/licenses/LICENSE-2.0 # # # # Unless required by applicable law or agreed to in writing, software # # distributed under the License is distributed on an "AS IS" BASIS, # # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # # See the License for the specific language governing permissions and # # limitations under the License. # ################################################################################# """A class for configuration server.""" import json import logging from threading import RLock from typing import Iterable, List, Optional, Union from deepracer_env_config.configs.config_interface import ConfigInterface from deepracer_env_config.configs.area import Area from deepracer_env_config.configs.agent import Agent from deepracer_env_config.configs.track import Track from deepracer_env_config.constants import ActionType, TargetType from ude import ( SideChannelObserverInterface, AbstractSideChannel, SideChannelData ) class ConfigServer(SideChannelObserverInterface): """ Config Server """ KEY_PREFIX = "deepracer_config" KEY_SPLITTER = "::" def __init__(self, side_channel: AbstractSideChannel, area: Optional[Area] = None, track: Optional[Track] = None, agents: Optional[Iterable[Agent]] = None) -> None: """ Initialize Config Server Args: side_channel (AbstractSideChannel): side channel to communicate with client. 
area (Optional[Area]): the area config track (Optional[Track]): the track config agents (Optional[Iterable[Agent]]): list of agent configs """ self._lock = RLock() self._area = area or Area() self._track = track or Track() agents = list(agents) if agents else [Agent()] self._agent_map = {agent.name: agent for agent in agents} self._side_channel = side_channel self._is_started = False self._server_lock = RLock() self.start() @property def is_started(self): """ Returns the flag whether server is started or not. Returns: bool: the flag whether server is started or not. """ return self._is_started def start(self) -> None: """ Start the server. """ with self._server_lock: if not self._is_started: self._side_channel.register(observer=self) self._is_started = True def stop(self) -> None: """ Stop the server. """ with self._server_lock: if self._is_started: self._side_channel.unregister(observer=self) self._is_started = False def get_area(self, *args, **kwargs) -> Area: """ Returns the area config. Returns: Area: area config. """ return self._area.copy() def get_agents(self, *args, **kwargs) -> List[Agent]: """ Returns the list of agent configs. Returns: List[Agent]: the list of agent configs. """ agents = list(self._agent_map.values()) return [agent.copy() for agent in agents] def get_agent(self, name: str, *args, **kwargs) -> Agent: """ Return the agent with given name. Args: name (str): the name of the agent. Returns: Agent: the agent with given name. """ agent = self._agent_map.get(name) return agent.copy() if agent else None def get_track(self, *args, **kwargs) -> Track: """ Returns the track config. Returns: Track: the track config. """ return self._track.copy() def apply_area(self, area: Union[Area, dict]) -> None: """ Applies the new area config given. Args: area (Union[Area, dict]): the new area config. 
""" self._area = area if isinstance(area, Area) else Area.from_json(area) def apply_agent(self, agent: Union[Agent, dict]) -> None: """ Applies the new agent config given. Args: agent (Union[Agent, dict]): the new agent config. """ agent = agent if isinstance(agent, Agent) else Agent.from_json(agent) if agent.name in self._agent_map: self._agent_map[agent.name] = agent def apply_track(self, track: Union[Track, dict]) -> None: """ Applies the track config given. Args: track (Union[Track, dict]): the new track config. """ self._track = track if isinstance(track, Track) else Track.from_json(track) def spawn_agent(self, agent: Union[Agent, dict]) -> None: """ Spawns new agent with given agent config. Args: agent (Union[Agent, dict]): new agent config in str format. """ agent = agent if isinstance(agent, Agent) else Agent.from_json(agent) self._agent_map[agent.name] = agent def delete_agent(self, agent: Union[Agent, dict]) -> None: """ Deletes the agent with given agent config. Args: agent (Union[Agent, dict]): the agent config to delete. """ if len(self._agent_map) > 1: agent = agent if isinstance(agent, Agent) else Agent.from_json(agent) self._agent_map.pop(agent.name, None) def on_received(self, side_channel: AbstractSideChannel, key: str, value: SideChannelData) -> None: """ Callback when side channel instance receives new message. Args: side_channel (AbstractSideChannel): side channel instance key (str): The string identifier of message value (SideChannelData): The data of the message. 
""" if key.startswith(ConfigServer.KEY_PREFIX): with self._lock: try: prefix, action, target = key.split(self.KEY_SPLITTER) if prefix != ConfigServer.KEY_PREFIX: logging.info("[Server] Invalid prefix received.") return action = ActionType(action) target = TargetType(target) except Exception as ex: logging.info("[Server] Invalid key received.", exc_info=ex) return method_name = "{}_{}".format(action.value, target.value) method = getattr(self, method_name) try: config = method(value) except Exception as ex: logging.info("[Server] method {} threw Exception.".format(method_name), exc_info=ex) return if action == ActionType.GET: if isinstance(config, ConfigInterface): side_channel.send(key, json.dumps(config.to_json())) elif isinstance(config, list): json_list = [item.to_json() for item in config] side_channel.send(key, json.dumps(json_list))
35.769912
103
0.537853
################################################################################# # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. # # # # Licensed under the Apache License, Version 2.0 (the "License"). # # You may not use this file except in compliance with the License. # # You may obtain a copy of the License at # # # # http://www.apache.org/licenses/LICENSE-2.0 # # # # Unless required by applicable law or agreed to in writing, software # # distributed under the License is distributed on an "AS IS" BASIS, # # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # # See the License for the specific language governing permissions and # # limitations under the License. # ################################################################################# """A class for configuration server.""" import json import logging from threading import RLock from typing import Iterable, List, Optional, Union from deepracer_env_config.configs.config_interface import ConfigInterface from deepracer_env_config.configs.area import Area from deepracer_env_config.configs.agent import Agent from deepracer_env_config.configs.track import Track from deepracer_env_config.constants import ActionType, TargetType from ude import ( SideChannelObserverInterface, AbstractSideChannel, SideChannelData ) class ConfigServer(SideChannelObserverInterface): """ Config Server """ KEY_PREFIX = "deepracer_config" KEY_SPLITTER = "::" def __init__(self, side_channel: AbstractSideChannel, area: Optional[Area] = None, track: Optional[Track] = None, agents: Optional[Iterable[Agent]] = None) -> None: """ Initialize Config Server Args: side_channel (AbstractSideChannel): side channel to communicate with client. 
area (Optional[Area]): the area config track (Optional[Track]): the track config agents (Optional[Iterable[Agent]]): list of agent configs """ self._lock = RLock() self._area = area or Area() self._track = track or Track() agents = list(agents) if agents else [Agent()] self._agent_map = {agent.name: agent for agent in agents} self._side_channel = side_channel self._is_started = False self._server_lock = RLock() self.start() @property def is_started(self): """ Returns the flag whether server is started or not. Returns: bool: the flag whether server is started or not. """ return self._is_started def start(self) -> None: """ Start the server. """ with self._server_lock: if not self._is_started: self._side_channel.register(observer=self) self._is_started = True def stop(self) -> None: """ Stop the server. """ with self._server_lock: if self._is_started: self._side_channel.unregister(observer=self) self._is_started = False def get_area(self, *args, **kwargs) -> Area: """ Returns the area config. Returns: Area: area config. """ return self._area.copy() def get_agents(self, *args, **kwargs) -> List[Agent]: """ Returns the list of agent configs. Returns: List[Agent]: the list of agent configs. """ agents = list(self._agent_map.values()) return [agent.copy() for agent in agents] def get_agent(self, name: str, *args, **kwargs) -> Agent: """ Return the agent with given name. Args: name (str): the name of the agent. Returns: Agent: the agent with given name. """ agent = self._agent_map.get(name) return agent.copy() if agent else None def get_track(self, *args, **kwargs) -> Track: """ Returns the track config. Returns: Track: the track config. """ return self._track.copy() def apply_area(self, area: Union[Area, dict]) -> None: """ Applies the new area config given. Args: area (Union[Area, dict]): the new area config. 
""" self._area = area if isinstance(area, Area) else Area.from_json(area) def apply_agent(self, agent: Union[Agent, dict]) -> None: """ Applies the new agent config given. Args: agent (Union[Agent, dict]): the new agent config. """ agent = agent if isinstance(agent, Agent) else Agent.from_json(agent) if agent.name in self._agent_map: self._agent_map[agent.name] = agent def apply_track(self, track: Union[Track, dict]) -> None: """ Applies the track config given. Args: track (Union[Track, dict]): the new track config. """ self._track = track if isinstance(track, Track) else Track.from_json(track) def spawn_agent(self, agent: Union[Agent, dict]) -> None: """ Spawns new agent with given agent config. Args: agent (Union[Agent, dict]): new agent config in str format. """ agent = agent if isinstance(agent, Agent) else Agent.from_json(agent) self._agent_map[agent.name] = agent def delete_agent(self, agent: Union[Agent, dict]) -> None: """ Deletes the agent with given agent config. Args: agent (Union[Agent, dict]): the agent config to delete. """ if len(self._agent_map) > 1: agent = agent if isinstance(agent, Agent) else Agent.from_json(agent) self._agent_map.pop(agent.name, None) def on_received(self, side_channel: AbstractSideChannel, key: str, value: SideChannelData) -> None: """ Callback when side channel instance receives new message. Args: side_channel (AbstractSideChannel): side channel instance key (str): The string identifier of message value (SideChannelData): The data of the message. 
""" if key.startswith(ConfigServer.KEY_PREFIX): with self._lock: try: prefix, action, target = key.split(self.KEY_SPLITTER) if prefix != ConfigServer.KEY_PREFIX: logging.info("[Server] Invalid prefix received.") return action = ActionType(action) target = TargetType(target) except Exception as ex: logging.info("[Server] Invalid key received.", exc_info=ex) return method_name = "{}_{}".format(action.value, target.value) method = getattr(self, method_name) try: config = method(value) except Exception as ex: logging.info("[Server] method {} threw Exception.".format(method_name), exc_info=ex) return if action == ActionType.GET: if isinstance(config, ConfigInterface): side_channel.send(key, json.dumps(config.to_json())) elif isinstance(config, list): json_list = [item.to_json() for item in config] side_channel.send(key, json.dumps(json_list))
0
0
0
62b633476715b0655f5140e6ad29476e0c09be91
2,734
py
Python
tests/app/main/test_form_validators.py
alphagov-mirror/digitalmarketplace-admin-frontend
a46f2a1625a8fccdf5296c561b1dc07e63ad6970
[ "MIT" ]
7
2015-05-09T02:29:15.000Z
2021-05-06T22:37:11.000Z
tests/app/main/test_form_validators.py
alphagov-mirror/digitalmarketplace-admin-frontend
a46f2a1625a8fccdf5296c561b1dc07e63ad6970
[ "MIT" ]
349
2015-02-24T11:24:05.000Z
2021-07-27T15:23:50.000Z
tests/app/main/test_form_validators.py
alphagov-mirror/digitalmarketplace-admin-frontend
a46f2a1625a8fccdf5296c561b1dc07e63ad6970
[ "MIT" ]
16
2015-03-23T14:18:38.000Z
2021-04-10T18:05:11.000Z
import mock import pytest from flask_wtf import Form from wtforms.fields.core import Field from wtforms.validators import StopValidation, ValidationError from app.main.forms import AdminEmailAddressValidator, NotInDomainSuffixBlacklistValidator from ..helpers import BaseApplicationTest @mock.patch('app.main.forms.data_api_client')
36.453333
120
0.726774
"""Unit tests for the admin-frontend custom form validators."""
import mock
import pytest
from flask_wtf import Form
from wtforms.fields.core import Field
from wtforms.validators import StopValidation, ValidationError

from app.main.forms import AdminEmailAddressValidator, NotInDomainSuffixBlacklistValidator

from ..helpers import BaseApplicationTest


# Class-level patch: injects a fresh ``data_api_client`` mock into every test
# method as an extra positional argument.
@mock.patch('app.main.forms.data_api_client')
class TestAdminEmailAddressValidator(object):
    """AdminEmailAddressValidator delegates the validity check to the data API."""

    def setup_method(self):
        # WTForms validators are invoked as ``validator(form, field)``; only
        # ``field.data`` is read, so plain MagicMocks suffice.
        self.form_mock = mock.MagicMock(Form)
        self.field_mock = mock.MagicMock(Field, data='the_email_address')
        self.validator = AdminEmailAddressValidator(message='The message passed to validator')

    def test_admin_email_address_validator_calls_api(self, data_api_client):
        """The validator queries the API exactly once with the field's value."""
        self.validator(self.form_mock, self.field_mock)
        data_api_client.email_is_valid_for_admin_user.assert_called_once_with('the_email_address')

    def test_admin_email_address_validator_raises_with_invalid_response(self, data_api_client):
        """A negative API answer surfaces as StopValidation with the configured message."""
        data_api_client.email_is_valid_for_admin_user.return_value = False
        with pytest.raises(StopValidation, match='The message passed to validator'):
            self.validator(self.form_mock, self.field_mock)

    def test_admin_email_address_validator_passes_with_valid_response(self, data_api_client):
        """A positive API answer lets validation pass (validator returns None)."""
        data_api_client.email_is_valid_for_admin_user.return_value = True
        assert self.validator(self.form_mock, self.field_mock) is None


class TestNotInDomainSuffixBlacklistValidator(BaseApplicationTest):
    """NotInDomainSuffixBlacklistValidator rejects blacklisted domain suffixes."""

    def setup_method(self, method):
        super().setup_method(method)
        # The validator presumably reads the blacklist from app config — hence
        # the pushed application context (TODO confirm against app.main.forms).
        self.app_context = self.app.app_context()
        self.app_context.push()
        self.form_mock = mock.MagicMock(Form)
        self.field_mock = mock.MagicMock(Field)
        # The failure tests below match on the message, so ``%(matched_suffix)s``
        # is presumably interpolated with the (lowercased) suffix that matched.
        self.validator = NotInDomainSuffixBlacklistValidator("Foo %(matched_suffix)s bar")

    def teardown_method(self, method):
        super().teardown_method(method)
        self.app_context.pop()

    @pytest.mark.parametrize("new_buyer_domain", (
        "kev.uk",
        "zz",
        "keyes.ORG.uk",
        ".GOV",
        "om",  # a suffix of "com", but not if the validator's assuming an implicit preceding separator, which it should
    ))
    def test_success(self, new_buyer_domain):
        """Domains whose suffix is not blacklisted validate cleanly (None)."""
        self.field_mock.data = new_buyer_domain
        assert self.validator(self.form_mock, self.field_mock) is None

    @pytest.mark.parametrize("new_buyer_domain", (
        "br.com",
        "org.uk",
        "ORG.UK",
        "uk",
        ".me",
    ))
    def test_failure(self, new_buyer_domain):
        """Blacklisted suffixes raise ValidationError mentioning the lowercased domain."""
        self.field_mock.data = new_buyer_domain
        with pytest.raises(ValidationError, match=new_buyer_domain.lower()):
            self.validator(self.form_mock, self.field_mock)
1,674
569
153
4b332e150db5ee5e6294b41aa93700fbb5552631
756
py
Python
sendinblue/jinja.py
apihackers/wagtail-sendinblue
c52a3ab22c1cb6a919326b9b20bb8bb69e95f7bc
[ "MIT" ]
1
2018-02-01T16:39:08.000Z
2018-02-01T16:39:08.000Z
sendinblue/jinja.py
apihackers/wagtail-sendinblue
c52a3ab22c1cb6a919326b9b20bb8bb69e95f7bc
[ "MIT" ]
null
null
null
sendinblue/jinja.py
apihackers/wagtail-sendinblue
c52a3ab22c1cb6a919326b9b20bb8bb69e95f7bc
[ "MIT" ]
null
null
null
import jinja2 from jinja2.ext import Extension from django.template.loader import render_to_string from django.utils.safestring import mark_safe from .models import SendinBlueSettings @jinja2.contextfunction settings = SendinBlueExtension
26.068966
64
0.752646
"""Jinja2 integration for the wagtail-sendinblue package.

Exposes a ``sendinblue()`` global that renders the tracking snippet for the
current site, wired into templates via :class:`SendinBlueExtension`.
"""
import jinja2
from jinja2.ext import Extension

from django.template.loader import render_to_string
from django.utils.safestring import mark_safe

from .models import SendinBlueSettings


@jinja2.contextfunction
def sendinblue(context):
    """Render ``sendinblue/template_tag.html`` for the request's site."""
    request = context['request']
    site_settings = SendinBlueSettings.for_site(request.site)
    # Hand the template the full caller context plus the site settings.
    template_context = context.get_all()
    template_context['sendinblue_settings'] = site_settings
    rendered = render_to_string('sendinblue/template_tag.html', template_context)
    return mark_safe(rendered)


class SendinBlueExtension(Extension):
    """Jinja2 extension registering the ``sendinblue`` global function."""

    def __init__(self, environment):
        super(SendinBlueExtension, self).__init__(environment)
        self.environment.globals['sendinblue'] = sendinblue


# Alias expected by the template-engine configuration.
settings = SendinBlueExtension
423
16
71
f2d36c2c036a4f007ec818bf94e1b480398a8fa1
533
py
Python
inserthelper.py
gabalese/py-BookBundler
22420a2f865b3f5612962130412c2e93a2ae89ab
[ "MIT" ]
2
2021-05-25T10:34:58.000Z
2022-02-18T06:40:11.000Z
inserthelper.py
bigdig/SkinAI
50c7e025d4c3ad6a7c9a2d5a227caf8314f352be
[ "MIT" ]
null
null
null
inserthelper.py
bigdig/SkinAI
50c7e025d4c3ad6a7c9a2d5a227caf8314f352be
[ "MIT" ]
1
2019-09-29T15:18:29.000Z
2019-09-29T15:18:29.000Z
import database import os DIRECTORY = "pages/" # helper script to add target contents to db # could be refactored to accept a list of files via shell expansion if __name__ == "__main__": # test script db = database.Database() for path, dirs, files in os.walk(os.path.abspath(DIRECTORY)): for singular in files: if singular.endswith("txt"): filepath = os.path.abspath(os.path.join(path, singular)) print db.inserttxt(filepath) else: print "DONE."
28.052632
72
0.630394
# Python 2 helper script: walk DIRECTORY and insert every *.txt file found
# into the database via ``database.Database.inserttxt``.
import database
import os

DIRECTORY = "pages/"

# helper script to add target contents to db
# could be refactored to accept a list of files via shell expansion

if __name__ == "__main__":
    # test script
    db = database.Database()
    for path, dirs, files in os.walk(os.path.abspath(DIRECTORY)):
        for singular in files:
            if singular.endswith("txt"):
                filepath = os.path.abspath(os.path.join(path, singular))
                # Python 2 print statement; echoes whatever inserttxt returns.
                print db.inserttxt(filepath)
    # NOTE(review): layout reconstructed from a whitespace-collapsed source;
    # this ``else`` is taken to belong to the outer ``for`` (for/else runs
    # once after the walk completes without ``break``).  Confirm against the
    # original file — it could equally have been nested one level deeper.
    else:
        print "DONE."
0
0
0
f067864d9da92540424c0e9ce762420e165a9ec6
296
py
Python
Python/py-check-strict-superset.py
DacioRomero/HackerRank
d1d43bd23e319bf8015a741eca70ba471a13c045
[ "MIT" ]
1
2020-07-28T00:08:38.000Z
2020-07-28T00:08:38.000Z
Python/py-check-strict-superset.py
DacioRomero/HackerRank
d1d43bd23e319bf8015a741eca70ba471a13c045
[ "MIT" ]
null
null
null
Python/py-check-strict-superset.py
DacioRomero/HackerRank
d1d43bd23e319bf8015a741eca70ba471a13c045
[ "MIT" ]
null
null
null
if __name__ == '__main__': main()
21.142857
48
0.5
def main():
    """Check whether set A is a strict (proper) superset of N other sets.

    Reads from stdin:
      line 1: the elements of A (space-separated ints)
      line 2: N, the number of candidate sets
      next N lines: the elements of each candidate set

    Prints 'False' and stops at the first candidate A is not a strict
    superset of; prints 'True' when every candidate passes.
    """
    A = set(map(int, input().split()))

    for _ in range(int(input())):
        subset = set(map(int, input().split()))

        # ``A > subset`` is True only for a *proper* superset (A contains
        # every element of subset AND A != subset).  The previous check,
        # ``A.intersection(subset) != subset``, tested plain superset-ness
        # and wrongly accepted a candidate equal to A.
        if not A > subset:
            print('False')
            break
    else:
        # for/else: no break means every candidate was a strict subset of A.
        print('True')


if __name__ == '__main__':
    main()
235
0
22
380ca43fe4a19d64f64e568c8c5f1f118a342818
2,404
py
Python
ansibleflow/run.py
jmvrbanac/ansible-flow
666dc3664ed8001640d50c0e00d749f44f16826f
[ "Apache-2.0" ]
13
2015-12-03T10:41:13.000Z
2020-01-05T16:22:42.000Z
ansibleflow/run.py
jmvrbanac/ansible-flow
666dc3664ed8001640d50c0e00d749f44f16826f
[ "Apache-2.0" ]
2
2015-12-17T22:16:53.000Z
2016-05-30T01:20:59.000Z
ansibleflow/run.py
jmvrbanac/ansible-flow
666dc3664ed8001640d50c0e00d749f44f16826f
[ "Apache-2.0" ]
6
2015-12-09T21:38:55.000Z
2020-02-24T21:15:06.000Z
import os import glob import sys from ansibleflow import log from ansibleflow.config import get_config from ansibleflow.venv import execute_under_env, env_exists
28.282353
77
0.663062
"""ansible-flow 'run' command: build and execute ansible-playbook commands."""
import os
import glob
import sys

from ansibleflow import log
from ansibleflow.config import get_config
from ansibleflow.venv import execute_under_env, env_exists


def get_full_var_file_path(filename, environment):
    """Return ``filename``, prefixed with the environment's directory if set."""
    file_path = filename
    if environment.directory:
        file_path = os.path.join(environment.directory, filename)

    return file_path


def convert_var_filename_to_arg(filename):
    """Turn a var-file path into an ansible-playbook ``-e @<abspath>`` argument."""
    return ' -e @{0}'.format(os.path.abspath(filename))


def build_ansible_command(playbook, target, environment):
    """Assemble the full ``ansible-playbook`` command line for one playbook.

    Options, inventory and tags come from ``target``; var files and the
    vault key file come from ``environment``.  Returns a single shell string.
    """
    command = 'ansible-playbook'

    if target.options:
        command += ' {0}'.format(target.options)

    if target.inventory:
        command += ' -i {0}'.format(os.path.abspath(target.inventory))

    if environment.custom_var_files:
        # Each configured entry may be a glob; add one -e argument per match.
        for path in environment.custom_var_files:
            full_path = get_full_var_file_path(path, environment)
            for filename in glob.glob(full_path):
                command += convert_var_filename_to_arg(filename)

    if environment.vault_key:
        command += ' --vault-password-file {0}'.format(environment.vault_key)

    command += ' {0}'.format(playbook)

    if target.tags:
        command += ' --tags "{0}"'.format(target.tags)

    return command


def run(target_name, env_name, arguments, dry_run=False):
    """Run every playbook of ``target_name`` against environment ``env_name``.

    Exits the process (status 1) when the virtualenv is missing or when the
    target or environment name is unknown.  With ``dry_run`` the commands
    are only logged, never executed.
    """
    target = get_config().targets.get(target_name, None)
    environment = get_config().environments.get(env_name, None)

    if not env_exists():
        log('Virtual environment does not exist.. '
            'Please run: ansible-flow venv create')
        sys.exit(1)

    if not target:
        log('Could not find target: {0}'.format(target_name))
        sys.exit(1)

    if not environment:
        log('Could not find environment: {0}'.format(env_name))
        sys.exit(1)

    for playbook in target.playbooks:
        command = build_ansible_command(playbook, target, environment)
        log(command)

        if not dry_run:
            # Extra shell variables for the subprocess; pass None when empty
            # so the executor falls back to its default environment.
            os_env = {}
            if environment.shell_vars:
                os_env.update(environment.shell_vars)

            if environment.ansible_config:
                os_env.update({'ANSIBLE_CONFIG': environment.ansible_config})

            execute_under_env(command, os_env or None)


def argument_handler(value, all_args):
    """CLI hook; ``value`` appears to be the parsed target list (True when absent)."""
    if value is True:
        log('Please specify a target to run...')
        sys.exit(1)

    run(value[0], all_args.env, all_args)
2,121
0
115
918279db550254174256b45efa403096b528d2d6
20,754
py
Python
SciDataTool/Methods/DataND/plot_2D_Data.py
EOMYS-Public/SciDataTool
dd74504fbed8e4071582981ff8736c37f1c25bdc
[ "Apache-2.0" ]
null
null
null
SciDataTool/Methods/DataND/plot_2D_Data.py
EOMYS-Public/SciDataTool
dd74504fbed8e4071582981ff8736c37f1c25bdc
[ "Apache-2.0" ]
null
null
null
SciDataTool/Methods/DataND/plot_2D_Data.py
EOMYS-Public/SciDataTool
dd74504fbed8e4071582981ff8736c37f1c25bdc
[ "Apache-2.0" ]
null
null
null
from SciDataTool.Functions.Plot.plot_2D import plot_2D from SciDataTool.Functions.Plot import ( unit_dict, norm_dict, axes_dict, COLORS, ) from SciDataTool.Functions.Load.import_class import import_class from SciDataTool.Classes.Norm_indices import Norm_indices from numpy import ( squeeze, split, array, where, unique, nanmax as np_max, array2string, insert, nanmin as np_min, linspace, log10, nan, ) def plot_2D_Data( self, *arg_list, axis_data=None, is_norm=False, unit="SI", overall_axes=[], data_list=[], legend_list=[], color_list=None, curve_colors=None, phase_colors=None, linestyles=None, linewidth_list=[2], save_path=None, x_min=None, x_max=None, y_min=None, y_max=None, is_logscale_x=False, is_logscale_y=False, is_disp_title=True, is_grid=True, is_auto_ticks=True, is_auto_range=True, xlabel=None, ylabel=None, title=None, fig=None, ax=None, barwidth=100, type_plot=None, fund_harm_dict=None, is_show_fig=None, win_title=None, thresh=None, font_name="arial", font_size_title=12, font_size_label=10, font_size_legend=8, is_show_legend=True, is_outside_legend=False, is_frame_legend=True, ): """Plots a field as a function of time Parameters ---------- data : Data a Data object *arg_list : list of str arguments to specify which axes to plot is_norm : bool boolean indicating if the field must be normalized unit : str unit in which to plot the field data_list : list list of Data objects to compare legend_list : list list of legends to use for each Data object (including reference one) instead of data.name color_list : list list of colors to use for each Data object save_path : str full path including folder, name and extension of the file to save if save_path is not None x_min : float minimum value for the x-axis x_max : float maximum value for the x-axis y_min : float minimum value for the y-axis y_max : float maximum value for the y-axis is_logscale_x : bool boolean indicating if the x-axis must be set in logarithmic scale is_logscale_y : bool boolean indicating if the 
y-axis must be set in logarithmic scale is_disp_title : bool boolean indicating if the title must be displayed is_grid : bool boolean indicating if the grid must be displayed is_auto_ticks : bool in fft, adjust ticks to freqs (deactivate if too close) is_auto_range : bool in fft, display up to 1% of max fig : Matplotlib.figure.Figure existing figure to use if None create a new one ax : Matplotlib.axes.Axes object ax on which to plot the data barwidth : float barwidth scaling factor, only if type_plot = "bargraph" type_plot : str type of 2D graph : "curve", "bargraph", "barchart" or "quiver" fund_harm_dict : dict Dict containing axis name as key and frequency/order/wavenumber of fundamental harmonic as value to display fundamental harmonic in red in the fft is_show_fig : bool True to show figure after plot win_title : str Title of the plot window thresh : float threshold for automatic fft ticks is_outside_legend : bool True to display legend outside the graph is_frame_legend : bool True to display legend in a frame """ # Dynamic import to avoid import loop DataPattern = import_class("SciDataTool.Classes", "DataPattern") # Extract arg_list it the function called from another script with *arg_list if len(arg_list) == 1 and type(arg_list[0]) == tuple: arg_list = arg_list[0] # In case of 1D fft, keep only positive wavenumbers for i, arg in enumerate(arg_list): if "wavenumber" in arg and "=" not in arg and "[" not in arg: liste = list(arg_list) liste[i] = arg.replace("wavenumber", "wavenumber>0") arg_list = tuple(liste) if color_list == [] or color_list is None: color_list = COLORS new_color_list = color_list.copy() # Set unit if unit == "SI": unit = self.unit # Detect if is fft, build ylabel is_fft = False if ( any("wavenumber" in s for s in arg_list) or any("freqs" in s for s in arg_list) ) and type_plot != "curve": is_fft = True if "dB" in unit: unit_str = ( "[" + unit + " re. 
" + str(self.normalizations["ref"].ref) + " $" + self.unit + "$]" ) else: unit_str = r"$[" + unit + "]$" if self.symbol == "Magnitude": if ylabel is None: ylabel = "Magnitude " + unit_str else: if ylabel is None: ylabel = r"$|\widehat{" + self.symbol + "}|$ " + unit_str else: if is_norm: if ylabel is None: ylabel = ( r"$\frac{" + self.symbol + "}{" + self.symbol + "_0}\, [" + unit + "]$" ) else: if self.symbol == "Magnitude": if ylabel is None: ylabel = "Magnitude " + r"$[" + unit + "]$" else: if ylabel is None: ylabel = r"$" + self.symbol + "\, [" + unit + "]$" # Extract field and axes Xdatas = [] Ydatas = [] data_list2 = [self] + data_list for i, d in enumerate(data_list2): if is_fft or "dB" in unit: result = d.get_magnitude_along( arg_list, axis_data=axis_data, unit=unit, is_norm=is_norm ) if i == 0: axes_list = result.pop("axes_list") axes_dict_other = result.pop("axes_dict_other") result_0 = result else: result = d.get_along( arg_list, axis_data=axis_data, unit=unit, is_norm=is_norm ) if i == 0: axes_list = result.pop("axes_list") axes_dict_other = result.pop("axes_dict_other") result_0 = result Ydatas.append(result.pop(d.symbol)) # in string case not overlay, Xdatas is a linspace if axes_list[0].is_components and axes_list[0].extension != "list": xdata = linspace( 0, len(result[list(result)[0]]) - 1, len(result[list(result)[0]]) ) else: xdata = result[list(result)[0]] Xdatas.append(xdata) # Build xlabel and title title1 = self.name.capitalize() + " " title2 = "for " for axis in axes_list: if axis.unit in norm_dict: name = norm_dict[axis.unit].split(" [")[0] elif axis.name in axes_dict: name = axes_dict[axis.name] else: name = axis.name if ( axis.extension in [ "whole", "interval", "oneperiod", "antiperiod", "smallestperiod", "axis_data", ] and len(axis.values) > 1 or (len(axis.values) == 1 and len(axes_list) == 1) ): if axis.unit == "SI": if axis.name in unit_dict: axis_unit = unit_dict[axis.name] else: axis_unit = axis.unit if xlabel is None: xlabel = 
name.capitalize() + " [" + axis_unit + "]" main_axis_name = name elif axis.unit in norm_dict: if xlabel is None: xlabel = norm_dict[axis.unit] if axis.unit == "Hz": main_axis_name = "frequency" else: main_axis_name = axis.unit else: axis_unit = axis.unit if xlabel is None: xlabel = name.capitalize() + " [" + axis_unit + "]" main_axis_name = name if ( axis.name == "angle" and axis.unit == "°" and round(np_max(axis.values) / 6) % 5 == 0 ): xticks = [i * round(np_max(axis.values) / 6) for i in range(7)] else: xticks = None if axes_list[0].is_components and axes_list[0].extension != "list": xticklabels = result[list(result)[0]] xticks = Xdatas[0] else: xticklabels = None else: is_display = True if axis.is_pattern and len(axis.values) == 1: is_display = False if is_display: if axis.unit == "SI": if axis.name in unit_dict: axis_unit = unit_dict[axis.name] else: axis_unit = axis.unit elif axis.unit in norm_dict: axis_unit = norm_dict[axis.unit] else: axis_unit = axis.unit if isinstance(result_0[axis.name], str): title2 += name + "=" + result_0[axis.name] else: axis_str = array2string( result_0[axis.name], formatter={"float_kind": "{:.3g}".format} ).replace(" ", ", ") if len(result_0[axis.name]) == 1: axis_str = axis_str.strip("[]") title2 += ( name + "=" + axis_str.rstrip(", ") + " [" + axis_unit + "], " ) # Title part 3 containing axes that are here but not involved in requested axes title3 = "" for axis_name in axes_dict_other: is_display = True for axis in self.axes: if axis.name == axis_name: if isinstance(axis, DataPattern) and len(axis.unique_indices) == 1: is_display = False if is_display: title3 += ( axis_name + "=" + array2string( axes_dict_other[axis_name][0], formatter={"float_kind": "{:.3g}".format}, ).replace(" ", ", ") + " [" + axes_dict_other[axis_name][1] + "], " ) if title2 == "for " and title3 == "": title2 = "" # Detect discontinuous axis (Norm_indices) to use bargraph for axis in axes_list: if axis.unit in self.axes[axis.index].normalizations: if 
isinstance( self.axes[axis.index].normalizations[axis.unit], Norm_indices ): type_plot = "bargraph" # Detect how many curves are overlaid, build legend and color lists if legend_list == [] and data_list != []: legend_list = [d.name for d in data_list2] elif legend_list == []: legend_list = ["" for d in data_list2] legends = [] # Prepare colors linestyle_list = linestyles for i, d in enumerate(data_list2): is_overlay = False for axis in axes_list: if axis.extension == "list": is_overlay = True if linestyles is None: linestyles = ["dashed"] n_curves = len(axis.values) if axis.unit == "SI": if axis.name in unit_dict: axis_unit = unit_dict[axis.name] else: axis_unit = axis.unit elif axis.unit in norm_dict: axis_unit = norm_dict[axis.unit] else: axis_unit = axis.unit if len(d.axes[axis.index].get_values()) > 1: legends += [ legend_list[i] + axis.name + "=" + axis.values.tolist()[j] + " " + axis_unit if isinstance(axis.values.tolist()[j], str) else legend_list[i] + axis.name + "=" + "%.3g" % axis.values.tolist()[j] + " " + axis_unit for j in range(n_curves) ] else: legends += [legend_list[i]] if not is_overlay: legends += [legend_list[i]] # Adjust colors in non overlay case with overlay axis if len(data_list2) > 1: for axis in self.get_axes(): if axis.is_overlay and len(color_list) > len(axis.values): new_color_list[1:] = color_list[len(axis.values) :] # Split Ydatas if the plot overlays several curves if is_overlay: Ydata = [] for d in Ydatas: if d.ndim != 1: axis_index = where(array(d.shape) == n_curves)[0] if axis_index.size > 1: print("WARNING, several axes with same dimensions") Ydata += split(d, n_curves, axis=axis_index[0]) else: Ydata += [d] Ydatas = [squeeze(d) for d in Ydata] Xdata = [] for i in range(len(data_list2)): Xdata += [Xdatas[i] for x in range(n_curves)] Xdatas = Xdata # Finish title if title is None: # Concatenate all title parts if is_overlay: title = title1 + title3 else: title = title1 + title2 + title3 # Remove last coma due to title2 or title3 
title = title.rstrip(", ") # Remove dimless and quotes title = title.replace("[]", "") title = title.replace("'", "") # Overall computation if overall_axes != []: if self.unit == "W": op = "=sum" else: op = "=rss" arg_list_ovl = [0 for i in range(len(arg_list))] # Add sum to overall_axes for axis in overall_axes: is_match = False for i, arg in enumerate(arg_list): if axis in arg: is_match = True arg_list_ovl[i] = axis + op if not is_match: arg_list_ovl.append(axis + op) # Add other requested axes for i, arg in enumerate(arg_list): if arg_list_ovl[i] == 0: arg_list_ovl[i] = arg if is_fft or "dB" in unit: result = self.get_magnitude_along(*arg_list_ovl, unit=unit) else: result = self.get_along(*arg_list_ovl, unit=unit) Y_overall = result[self.symbol] # in string case not overlay, Xdatas is a linspace if axes_list[0].is_components and axes_list[0].extension != "list": xdata = linspace( 0, len(result[list(result)[0]]) - 1, len(result[list(result)[0]]) ) else: xdata = result[list(result)[0]] Ydatas.insert(0, Y_overall) Xdatas.insert(0, xdata) color_list = color_list.copy() color_list.insert(0, "#000000") legends.insert(0, "Overall") if "dB" in unit: # Replace <=0 by nans for ydata in Ydatas: ydata[ydata <= 0] = nan # Call generic plot function if is_fft: if thresh is None: if self.normalizations is not None and "ref" in self.normalizations: thresh = self.normalizations["ref"].ref else: thresh = 0.02 freqs = Xdatas[0] if "dB" in unit: indices = [ ind for ind, y in enumerate(Ydatas[0]) if abs(y) > max(10 * log10(thresh) + abs(np_max(Ydatas[0])), 0) ] else: if Ydatas[0].size == 1: indices = [0] else: indices = [ ind for ind, y in enumerate(Ydatas[0]) if abs(y) > abs(thresh * np_max(Ydatas[0])) ] xticks = unique(insert(freqs[indices], 0, 0)) if is_auto_range: if len(xticks) > 1: if x_min is None: x_min = xticks[0] else: x_min = max(x_min, xticks[0]) if x_max is None: x_max = xticks[-1] else: x_max = min(x_max, xticks[-1]) else: if x_min is None: x_min = np_min(freqs) else: 
x_min = max(x_min, np_min(freqs)) if x_max is None: x_max = np_max(freqs) else: x_max = min(x_max, np_max(freqs)) else: if x_min is None: x_min = np_min(freqs) if x_max is None: x_max = np_max(freqs) x_min = x_min - x_max * 0.05 x_max = x_max * 1.05 if ( len(xticks) == 0 or (len(xticks) > 20 and not axes_list[0].is_components) or not is_auto_range ): xticks = None # Force bargraph for fft if type_graph not specified if type_plot is None: type_plot = "bargraph" # Option to draw fundamental harmonic in red if not fund_harm_dict: fund_harm = None else: # Activate the option only if main axis is in dict and only one Data is plotted if main_axis_name in fund_harm_dict and len(Ydatas) == 1: fund_harm = fund_harm_dict[main_axis_name] else: # Deactivate the option fund_harm = None plot_2D( Xdatas, Ydatas, legend_list=legends, color_list=new_color_list, linestyle_list=linestyle_list, linewidth_list=linewidth_list, fig=fig, ax=ax, title=title, xlabel=xlabel, ylabel=ylabel, type_plot=type_plot, x_min=x_min, x_max=x_max, y_min=y_min, y_max=y_max, is_logscale_x=is_logscale_x, is_logscale_y=is_logscale_y, is_disp_title=is_disp_title, is_grid=is_grid, xticks=xticks, xticklabels=xticklabels, save_path=save_path, barwidth=barwidth, fund_harm=fund_harm, is_show_fig=is_show_fig, win_title=win_title, font_name=font_name, font_size_title=font_size_title, font_size_label=font_size_label, font_size_legend=font_size_legend, is_show_legend=is_show_legend, is_outside_legend=is_outside_legend, is_frame_legend=is_frame_legend, ) else: # Force curve plot if type_plot not specified if type_plot is None: type_plot = "curve" plot_2D( Xdatas, Ydatas, legend_list=legends, color_list=new_color_list, fig=fig, ax=ax, title=title, xlabel=xlabel, ylabel=ylabel, type_plot=type_plot, x_min=x_min, x_max=x_max, y_min=y_min, y_max=y_max, is_logscale_x=is_logscale_x, is_logscale_y=is_logscale_y, is_disp_title=is_disp_title, is_grid=is_grid, xticks=xticks, xticklabels=xticklabels, barwidth=barwidth, 
linestyle_list=linestyle_list, linewidth_list=linewidth_list, save_path=save_path, is_show_fig=is_show_fig, win_title=win_title, font_name=font_name, font_size_title=font_size_title, font_size_label=font_size_label, font_size_legend=font_size_legend, is_show_legend=is_show_legend, is_outside_legend=is_outside_legend, is_frame_legend=is_frame_legend, )
32.995231
154
0.504963
from SciDataTool.Functions.Plot.plot_2D import plot_2D from SciDataTool.Functions.Plot import ( unit_dict, norm_dict, axes_dict, COLORS, ) from SciDataTool.Functions.Load.import_class import import_class from SciDataTool.Classes.Norm_indices import Norm_indices from numpy import ( squeeze, split, array, where, unique, nanmax as np_max, array2string, insert, nanmin as np_min, linspace, log10, nan, ) def plot_2D_Data( self, *arg_list, axis_data=None, is_norm=False, unit="SI", overall_axes=[], data_list=[], legend_list=[], color_list=None, curve_colors=None, phase_colors=None, linestyles=None, linewidth_list=[2], save_path=None, x_min=None, x_max=None, y_min=None, y_max=None, is_logscale_x=False, is_logscale_y=False, is_disp_title=True, is_grid=True, is_auto_ticks=True, is_auto_range=True, xlabel=None, ylabel=None, title=None, fig=None, ax=None, barwidth=100, type_plot=None, fund_harm_dict=None, is_show_fig=None, win_title=None, thresh=None, font_name="arial", font_size_title=12, font_size_label=10, font_size_legend=8, is_show_legend=True, is_outside_legend=False, is_frame_legend=True, ): """Plots a field as a function of time Parameters ---------- data : Data a Data object *arg_list : list of str arguments to specify which axes to plot is_norm : bool boolean indicating if the field must be normalized unit : str unit in which to plot the field data_list : list list of Data objects to compare legend_list : list list of legends to use for each Data object (including reference one) instead of data.name color_list : list list of colors to use for each Data object save_path : str full path including folder, name and extension of the file to save if save_path is not None x_min : float minimum value for the x-axis x_max : float maximum value for the x-axis y_min : float minimum value for the y-axis y_max : float maximum value for the y-axis is_logscale_x : bool boolean indicating if the x-axis must be set in logarithmic scale is_logscale_y : bool boolean indicating if the 
y-axis must be set in logarithmic scale is_disp_title : bool boolean indicating if the title must be displayed is_grid : bool boolean indicating if the grid must be displayed is_auto_ticks : bool in fft, adjust ticks to freqs (deactivate if too close) is_auto_range : bool in fft, display up to 1% of max fig : Matplotlib.figure.Figure existing figure to use if None create a new one ax : Matplotlib.axes.Axes object ax on which to plot the data barwidth : float barwidth scaling factor, only if type_plot = "bargraph" type_plot : str type of 2D graph : "curve", "bargraph", "barchart" or "quiver" fund_harm_dict : dict Dict containing axis name as key and frequency/order/wavenumber of fundamental harmonic as value to display fundamental harmonic in red in the fft is_show_fig : bool True to show figure after plot win_title : str Title of the plot window thresh : float threshold for automatic fft ticks is_outside_legend : bool True to display legend outside the graph is_frame_legend : bool True to display legend in a frame """ # Dynamic import to avoid import loop DataPattern = import_class("SciDataTool.Classes", "DataPattern") # Extract arg_list it the function called from another script with *arg_list if len(arg_list) == 1 and type(arg_list[0]) == tuple: arg_list = arg_list[0] # In case of 1D fft, keep only positive wavenumbers for i, arg in enumerate(arg_list): if "wavenumber" in arg and "=" not in arg and "[" not in arg: liste = list(arg_list) liste[i] = arg.replace("wavenumber", "wavenumber>0") arg_list = tuple(liste) if color_list == [] or color_list is None: color_list = COLORS new_color_list = color_list.copy() # Set unit if unit == "SI": unit = self.unit # Detect if is fft, build ylabel is_fft = False if ( any("wavenumber" in s for s in arg_list) or any("freqs" in s for s in arg_list) ) and type_plot != "curve": is_fft = True if "dB" in unit: unit_str = ( "[" + unit + " re. 
" + str(self.normalizations["ref"].ref) + " $" + self.unit + "$]" ) else: unit_str = r"$[" + unit + "]$" if self.symbol == "Magnitude": if ylabel is None: ylabel = "Magnitude " + unit_str else: if ylabel is None: ylabel = r"$|\widehat{" + self.symbol + "}|$ " + unit_str else: if is_norm: if ylabel is None: ylabel = ( r"$\frac{" + self.symbol + "}{" + self.symbol + "_0}\, [" + unit + "]$" ) else: if self.symbol == "Magnitude": if ylabel is None: ylabel = "Magnitude " + r"$[" + unit + "]$" else: if ylabel is None: ylabel = r"$" + self.symbol + "\, [" + unit + "]$" # Extract field and axes Xdatas = [] Ydatas = [] data_list2 = [self] + data_list for i, d in enumerate(data_list2): if is_fft or "dB" in unit: result = d.get_magnitude_along( arg_list, axis_data=axis_data, unit=unit, is_norm=is_norm ) if i == 0: axes_list = result.pop("axes_list") axes_dict_other = result.pop("axes_dict_other") result_0 = result else: result = d.get_along( arg_list, axis_data=axis_data, unit=unit, is_norm=is_norm ) if i == 0: axes_list = result.pop("axes_list") axes_dict_other = result.pop("axes_dict_other") result_0 = result Ydatas.append(result.pop(d.symbol)) # in string case not overlay, Xdatas is a linspace if axes_list[0].is_components and axes_list[0].extension != "list": xdata = linspace( 0, len(result[list(result)[0]]) - 1, len(result[list(result)[0]]) ) else: xdata = result[list(result)[0]] Xdatas.append(xdata) # Build xlabel and title title1 = self.name.capitalize() + " " title2 = "for " for axis in axes_list: if axis.unit in norm_dict: name = norm_dict[axis.unit].split(" [")[0] elif axis.name in axes_dict: name = axes_dict[axis.name] else: name = axis.name if ( axis.extension in [ "whole", "interval", "oneperiod", "antiperiod", "smallestperiod", "axis_data", ] and len(axis.values) > 1 or (len(axis.values) == 1 and len(axes_list) == 1) ): if axis.unit == "SI": if axis.name in unit_dict: axis_unit = unit_dict[axis.name] else: axis_unit = axis.unit if xlabel is None: xlabel = 
name.capitalize() + " [" + axis_unit + "]" main_axis_name = name elif axis.unit in norm_dict: if xlabel is None: xlabel = norm_dict[axis.unit] if axis.unit == "Hz": main_axis_name = "frequency" else: main_axis_name = axis.unit else: axis_unit = axis.unit if xlabel is None: xlabel = name.capitalize() + " [" + axis_unit + "]" main_axis_name = name if ( axis.name == "angle" and axis.unit == "°" and round(np_max(axis.values) / 6) % 5 == 0 ): xticks = [i * round(np_max(axis.values) / 6) for i in range(7)] else: xticks = None if axes_list[0].is_components and axes_list[0].extension != "list": xticklabels = result[list(result)[0]] xticks = Xdatas[0] else: xticklabels = None else: is_display = True if axis.is_pattern and len(axis.values) == 1: is_display = False if is_display: if axis.unit == "SI": if axis.name in unit_dict: axis_unit = unit_dict[axis.name] else: axis_unit = axis.unit elif axis.unit in norm_dict: axis_unit = norm_dict[axis.unit] else: axis_unit = axis.unit if isinstance(result_0[axis.name], str): title2 += name + "=" + result_0[axis.name] else: axis_str = array2string( result_0[axis.name], formatter={"float_kind": "{:.3g}".format} ).replace(" ", ", ") if len(result_0[axis.name]) == 1: axis_str = axis_str.strip("[]") title2 += ( name + "=" + axis_str.rstrip(", ") + " [" + axis_unit + "], " ) # Title part 3 containing axes that are here but not involved in requested axes title3 = "" for axis_name in axes_dict_other: is_display = True for axis in self.axes: if axis.name == axis_name: if isinstance(axis, DataPattern) and len(axis.unique_indices) == 1: is_display = False if is_display: title3 += ( axis_name + "=" + array2string( axes_dict_other[axis_name][0], formatter={"float_kind": "{:.3g}".format}, ).replace(" ", ", ") + " [" + axes_dict_other[axis_name][1] + "], " ) if title2 == "for " and title3 == "": title2 = "" # Detect discontinuous axis (Norm_indices) to use bargraph for axis in axes_list: if axis.unit in self.axes[axis.index].normalizations: if 
isinstance( self.axes[axis.index].normalizations[axis.unit], Norm_indices ): type_plot = "bargraph" # Detect how many curves are overlaid, build legend and color lists if legend_list == [] and data_list != []: legend_list = [d.name for d in data_list2] elif legend_list == []: legend_list = ["" for d in data_list2] legends = [] # Prepare colors linestyle_list = linestyles for i, d in enumerate(data_list2): is_overlay = False for axis in axes_list: if axis.extension == "list": is_overlay = True if linestyles is None: linestyles = ["dashed"] n_curves = len(axis.values) if axis.unit == "SI": if axis.name in unit_dict: axis_unit = unit_dict[axis.name] else: axis_unit = axis.unit elif axis.unit in norm_dict: axis_unit = norm_dict[axis.unit] else: axis_unit = axis.unit if len(d.axes[axis.index].get_values()) > 1: legends += [ legend_list[i] + axis.name + "=" + axis.values.tolist()[j] + " " + axis_unit if isinstance(axis.values.tolist()[j], str) else legend_list[i] + axis.name + "=" + "%.3g" % axis.values.tolist()[j] + " " + axis_unit for j in range(n_curves) ] else: legends += [legend_list[i]] if not is_overlay: legends += [legend_list[i]] # Adjust colors in non overlay case with overlay axis if len(data_list2) > 1: for axis in self.get_axes(): if axis.is_overlay and len(color_list) > len(axis.values): new_color_list[1:] = color_list[len(axis.values) :] # Split Ydatas if the plot overlays several curves if is_overlay: Ydata = [] for d in Ydatas: if d.ndim != 1: axis_index = where(array(d.shape) == n_curves)[0] if axis_index.size > 1: print("WARNING, several axes with same dimensions") Ydata += split(d, n_curves, axis=axis_index[0]) else: Ydata += [d] Ydatas = [squeeze(d) for d in Ydata] Xdata = [] for i in range(len(data_list2)): Xdata += [Xdatas[i] for x in range(n_curves)] Xdatas = Xdata # Finish title if title is None: # Concatenate all title parts if is_overlay: title = title1 + title3 else: title = title1 + title2 + title3 # Remove last coma due to title2 or title3 
title = title.rstrip(", ") # Remove dimless and quotes title = title.replace("[]", "") title = title.replace("'", "") # Overall computation if overall_axes != []: if self.unit == "W": op = "=sum" else: op = "=rss" arg_list_ovl = [0 for i in range(len(arg_list))] # Add sum to overall_axes for axis in overall_axes: is_match = False for i, arg in enumerate(arg_list): if axis in arg: is_match = True arg_list_ovl[i] = axis + op if not is_match: arg_list_ovl.append(axis + op) # Add other requested axes for i, arg in enumerate(arg_list): if arg_list_ovl[i] == 0: arg_list_ovl[i] = arg if is_fft or "dB" in unit: result = self.get_magnitude_along(*arg_list_ovl, unit=unit) else: result = self.get_along(*arg_list_ovl, unit=unit) Y_overall = result[self.symbol] # in string case not overlay, Xdatas is a linspace if axes_list[0].is_components and axes_list[0].extension != "list": xdata = linspace( 0, len(result[list(result)[0]]) - 1, len(result[list(result)[0]]) ) else: xdata = result[list(result)[0]] Ydatas.insert(0, Y_overall) Xdatas.insert(0, xdata) color_list = color_list.copy() color_list.insert(0, "#000000") legends.insert(0, "Overall") if "dB" in unit: # Replace <=0 by nans for ydata in Ydatas: ydata[ydata <= 0] = nan # Call generic plot function if is_fft: if thresh is None: if self.normalizations is not None and "ref" in self.normalizations: thresh = self.normalizations["ref"].ref else: thresh = 0.02 freqs = Xdatas[0] if "dB" in unit: indices = [ ind for ind, y in enumerate(Ydatas[0]) if abs(y) > max(10 * log10(thresh) + abs(np_max(Ydatas[0])), 0) ] else: if Ydatas[0].size == 1: indices = [0] else: indices = [ ind for ind, y in enumerate(Ydatas[0]) if abs(y) > abs(thresh * np_max(Ydatas[0])) ] xticks = unique(insert(freqs[indices], 0, 0)) if is_auto_range: if len(xticks) > 1: if x_min is None: x_min = xticks[0] else: x_min = max(x_min, xticks[0]) if x_max is None: x_max = xticks[-1] else: x_max = min(x_max, xticks[-1]) else: if x_min is None: x_min = np_min(freqs) else: 
x_min = max(x_min, np_min(freqs)) if x_max is None: x_max = np_max(freqs) else: x_max = min(x_max, np_max(freqs)) else: if x_min is None: x_min = np_min(freqs) if x_max is None: x_max = np_max(freqs) x_min = x_min - x_max * 0.05 x_max = x_max * 1.05 if ( len(xticks) == 0 or (len(xticks) > 20 and not axes_list[0].is_components) or not is_auto_range ): xticks = None # Force bargraph for fft if type_graph not specified if type_plot is None: type_plot = "bargraph" # Option to draw fundamental harmonic in red if not fund_harm_dict: fund_harm = None else: # Activate the option only if main axis is in dict and only one Data is plotted if main_axis_name in fund_harm_dict and len(Ydatas) == 1: fund_harm = fund_harm_dict[main_axis_name] else: # Deactivate the option fund_harm = None plot_2D( Xdatas, Ydatas, legend_list=legends, color_list=new_color_list, linestyle_list=linestyle_list, linewidth_list=linewidth_list, fig=fig, ax=ax, title=title, xlabel=xlabel, ylabel=ylabel, type_plot=type_plot, x_min=x_min, x_max=x_max, y_min=y_min, y_max=y_max, is_logscale_x=is_logscale_x, is_logscale_y=is_logscale_y, is_disp_title=is_disp_title, is_grid=is_grid, xticks=xticks, xticklabels=xticklabels, save_path=save_path, barwidth=barwidth, fund_harm=fund_harm, is_show_fig=is_show_fig, win_title=win_title, font_name=font_name, font_size_title=font_size_title, font_size_label=font_size_label, font_size_legend=font_size_legend, is_show_legend=is_show_legend, is_outside_legend=is_outside_legend, is_frame_legend=is_frame_legend, ) else: # Force curve plot if type_plot not specified if type_plot is None: type_plot = "curve" plot_2D( Xdatas, Ydatas, legend_list=legends, color_list=new_color_list, fig=fig, ax=ax, title=title, xlabel=xlabel, ylabel=ylabel, type_plot=type_plot, x_min=x_min, x_max=x_max, y_min=y_min, y_max=y_max, is_logscale_x=is_logscale_x, is_logscale_y=is_logscale_y, is_disp_title=is_disp_title, is_grid=is_grid, xticks=xticks, xticklabels=xticklabels, barwidth=barwidth, 
linestyle_list=linestyle_list, linewidth_list=linewidth_list, save_path=save_path, is_show_fig=is_show_fig, win_title=win_title, font_name=font_name, font_size_title=font_size_title, font_size_label=font_size_label, font_size_legend=font_size_legend, is_show_legend=is_show_legend, is_outside_legend=is_outside_legend, is_frame_legend=is_frame_legend, )
0
0
0
617b66d0fa72fce403e4b7e7d45099612d8d9b4e
817
py
Python
api/cities.py
csmets/travel-wish-list
83d49d93e6f50fd20839f2097f8936cd88f166fb
[ "MIT" ]
null
null
null
api/cities.py
csmets/travel-wish-list
83d49d93e6f50fd20839f2097f8936cd88f166fb
[ "MIT" ]
null
null
null
api/cities.py
csmets/travel-wish-list
83d49d93e6f50fd20839f2097f8936cd88f166fb
[ "MIT" ]
null
null
null
#!/usr/bin/python3 import json import falcon from database import Database
20.425
61
0.518972
#!/usr/bin/python3 import json import falcon from database import Database class City(object): def on_get(self, req, resp, country, city): db = Database() country_data = db.fetch('countries', 'name', country) country_code = country_data['code'] if city.lower() == 'all': city_details = db.fetchall_match_column( 'cities', 'code', country_code) resp.body = json.dumps(city_details) else: city_details = db.fetchand( 'cities', 'name', city, 'code', country_code) resp.body = json.dumps(city_details) resp.content_type = 'application/json' resp.status = falcon.HTTP_200
693
-2
50
1c85d6299243c4559cbeefd7d9134659dcbdcb5b
294
py
Python
src/python/nimbusml/utils/__init__.py
michaelgsharp/NimbusML
50031157265f49eec85d27fe67582d9ddaf01ef9
[ "MIT" ]
134
2018-11-01T22:15:24.000Z
2019-05-04T11:30:08.000Z
src/python/nimbusml/utils/__init__.py
michaelgsharp/NimbusML
50031157265f49eec85d27fe67582d9ddaf01ef9
[ "MIT" ]
226
2019-05-07T19:00:44.000Z
2021-01-06T07:59:48.000Z
src/python/nimbusml/utils/__init__.py
michaelgsharp/NimbusML
50031157265f49eec85d27fe67582d9ddaf01ef9
[ "MIT" ]
43
2019-05-15T20:19:42.000Z
2022-03-30T10:26:07.000Z
from .utils import get_X_y, evaluate_binary_classifier, load_img, ColumnSelector try: from inspect import signature except ImportError: from funcsigs import signature __all__ = [ 'get_X_y', 'evaluate_binary_classifier', 'load_img', 'ColumnSelector', 'signature' ]
19.6
80
0.727891
from .utils import get_X_y, evaluate_binary_classifier, load_img, ColumnSelector try: from inspect import signature except ImportError: from funcsigs import signature __all__ = [ 'get_X_y', 'evaluate_binary_classifier', 'load_img', 'ColumnSelector', 'signature' ]
0
0
0
60d00d09c6b6a5e3e2a4aaaca9cc01937f5f3d0b
4,554
py
Python
gcloud/tasktmpl3/sites/utils.py
gangh/bk-sops
29f4b4915be42650c2eeee637e0cf798e4066f09
[ "Apache-2.0" ]
1
2019-12-23T07:23:35.000Z
2019-12-23T07:23:35.000Z
gcloud/tasktmpl3/sites/utils.py
bk-sops/bk-sops
9f5950b13473bf7b5032528b20016b7a571bb3cd
[ "Apache-2.0" ]
9
2020-02-12T03:15:49.000Z
2021-06-10T22:04:51.000Z
gcloud/tasktmpl3/sites/utils.py
bk-sops/bk-sops
9f5950b13473bf7b5032528b20016b7a571bb3cd
[ "Apache-2.0" ]
1
2022-01-17T11:32:05.000Z
2022-01-17T11:32:05.000Z
# -*- coding: utf-8 -*- """ Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community Edition) available. Copyright (C) 2017-2019 THL A29 Limited, a Tencent company. All rights reserved. Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://opensource.org/licenses/MIT Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """
36.432
115
0.539306
# -*- coding: utf-8 -*- """ Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community Edition) available. Copyright (C) 2017-2019 THL A29 Limited, a Tencent company. All rights reserved. Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://opensource.org/licenses/MIT Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ def draw_pipeline_automatic(pipeline): line = [] for flow_id, flow in pipeline['flows'].iteritems(): line.append({ "source": { "id": flow['source'], "arrow": "Right" }, "id": flow_id, "target": { "id": flow['target'], "arrow": "Left" } }) # TODO: 兼容多层嵌套和子流程 acts = {} acts.update(pipeline['activities']) acts.update(pipeline['gateways']) acts.update({pipeline['end_event']['id']: pipeline['end_event']}) flows = pipeline['flows'] point_shift_y = 20 gateway_shift_y = 18 start_point_x = 60 shift_x = 180 shift_y = 140 last_node = pipeline['start_event'] last_node_x = 60 last_node_y = 280 current_flow = flows[last_node['outgoing']] location = [{ 'id': pipeline['start_event']['id'], 'type': 'startpoint', 'name': pipeline['start_event']['name'], 'status': '', 'x': start_point_x, 'y': last_node_y + point_shift_y, }] while current_flow['target'] != pipeline['end_event']['id']: current_node = acts[current_flow['target']] if current_node['type'] == 'ServiceActivity': location.append({ 'id': current_node['id'], 'type': 'tasknode', 'name': current_node['name'], 'stage_name': current_node.get('stage_name', ''), 'status': '', 'x': last_node_x + shift_x, 'y': last_node_y, }) last_node = current_node last_node_x = last_node_x + shift_x current_flow = 
flows[last_node['outgoing']] elif current_node['type'] == 'ParallelGateway': location.append({ 'id': current_node['id'], 'type': 'parallelgateway', 'name': current_node['name'], 'status': '', 'x': last_node_x + shift_x, 'y': last_node_y + gateway_shift_y, }) last_node_x = last_node_x + shift_x for index, flow_id in enumerate(current_node['outgoing']): parallel_flow = flows[flow_id] current_node = acts[parallel_flow['target']] location.append({ 'id': current_node['id'], 'type': 'tasknode', 'name': current_node['name'], 'stage_name': current_node.get('stage_name', ''), 'status': '', 'x': last_node_x + shift_x, # 第一个分支和网关对齐,剩余分支依次先下后上均匀分布两侧 'y': last_node_y + (-1) ** index * ((index + 1) / 2) * shift_y, }) last_node = current_node last_node_x = last_node_x + shift_x current_flow = flows[last_node['outgoing']] elif current_node['type'] == 'ConvergeGateway': location.append({ 'id': current_node['id'], 'type': 'convergegateway', 'name': current_node['name'], 'status': '', 'x': last_node_x + shift_x, 'y': last_node_y + gateway_shift_y, }) last_node = current_node last_node_x = last_node_x + shift_x current_flow = flows[last_node['outgoing']] current_node = acts[current_flow['target']] location.append({ 'id': current_node['id'], 'type': 'endpoint', 'name': current_node['name'], 'status': '', 'x': last_node_x + shift_x, 'y': last_node_y + point_shift_y, }) pipeline.update({ 'location': location, 'line': line, }) return pipeline
3,874
0
23
a3c9bf11fc548a2900def0c6b7c4d550e107b31f
231
py
Python
lab6/q1/l6q1b.py
nandiniproothi/itt-lab
fb0381c00abc94fb0653e8e723ca6c3f60c0f7b3
[ "MIT" ]
11
2021-02-21T14:44:28.000Z
2021-05-30T10:11:18.000Z
lab6/q1/l6q1b.py
nandiniproothi/itt-lab
fb0381c00abc94fb0653e8e723ca6c3f60c0f7b3
[ "MIT" ]
null
null
null
lab6/q1/l6q1b.py
nandiniproothi/itt-lab
fb0381c00abc94fb0653e8e723ca6c3f60c0f7b3
[ "MIT" ]
1
2021-05-05T05:39:56.000Z
2021-05-05T05:39:56.000Z
import re s = "is this a string?!" print("The original string is : " + s) res = re.sub(r'[^\w\s]', '', s) #remove any char that is not a word, space, or tab using regex print("The string after removing punctuation is: " + res)
23.1
94
0.645022
import re s = "is this a string?!" print("The original string is : " + s) res = re.sub(r'[^\w\s]', '', s) #remove any char that is not a word, space, or tab using regex print("The string after removing punctuation is: " + res)
0
0
0
2b6971d2880d9e652e3f26af8a4ea9df7c720f1e
1,301
py
Python
chapter_one/bin/car.py
wrzehu/open_the_gates
7c653067fe84d60988bf93feeb332645c989605a
[ "MIT" ]
null
null
null
chapter_one/bin/car.py
wrzehu/open_the_gates
7c653067fe84d60988bf93feeb332645c989605a
[ "MIT" ]
null
null
null
chapter_one/bin/car.py
wrzehu/open_the_gates
7c653067fe84d60988bf93feeb332645c989605a
[ "MIT" ]
null
null
null
class IllegalCarError(Exception): """Raised when the attributes of Car class are wrong""" pass
26.55102
82
0.661799
class IllegalCarError(Exception): """Raised when the attributes of Car class are wrong""" pass class Car: def __init__(self, pax_count, car_mass, gear_count, ): self.__pax_count = pax_count self.pax_count_validation() self.__car_mass = car_mass self.car_mass_validation() self.__gear_count = gear_count @property def total_mass(self): avg_weight_of_person = 70 return self.__car_mass + avg_weight_of_person * self.__pax_count def set_pax_count(self, pax_count): self.__pax_count = pax_count self.pax_count_validation() def set_car_mass(self, car_mass): self.__car_mass = car_mass self.car_mass_validation() def get_pax_count(self): return self.__pax_count def get_car_mass(self): return self.__car_mass def get_gear_count(self): return self.__gear_count def pax_count_validation(self): if self.__pax_count == 0: raise IllegalCarError('you need at least driver') elif self.__pax_count > 5: raise IllegalCarError('maximum 5 people in car') def car_mass_validation(self): if self.__car_mass > 2000: raise IllegalCarError('max weight of car can\'t be more than 2000 kg')
928
246
23
e2998563bd59901909ecc6b9bc2b036a4c157519
3,268
py
Python
BSTU App/BSTU App/Database/DatabaseManagement/NavigationDB/Premise/GUK/Stage_5.py
alextar04/BSTU-APP
ff9df53335be4a94a1e04d4d4072b3bba18ba08b
[ "MIT" ]
null
null
null
BSTU App/BSTU App/Database/DatabaseManagement/NavigationDB/Premise/GUK/Stage_5.py
alextar04/BSTU-APP
ff9df53335be4a94a1e04d4d4072b3bba18ba08b
[ "MIT" ]
null
null
null
BSTU App/BSTU App/Database/DatabaseManagement/NavigationDB/Premise/GUK/Stage_5.py
alextar04/BSTU-APP
ff9df53335be4a94a1e04d4d4072b3bba18ba08b
[ "MIT" ]
null
null
null
import os from peewee import * from NavigationDB.Premise.PremiseDB import Premise os.chdir('../../..') PremiseRemove() PremiseAdd()
61.660377
93
0.705936
import os from peewee import * from NavigationDB.Premise.PremiseDB import Premise def PremiseAdd(): Premise.create(id=137, idMap=5, idTypePremise=0, name="515а", description="ГУК 515а") Premise.create(id=138, idMap=5, idTypePremise=0, name="517", description="ГУК 517") Premise.create(id=139, idMap=5, idTypePremise=0, name="519", description="ГУК 519") Premise.create(id=140, idMap=5, idTypePremise=0, name="520", description="ГУК 520") Premise.create(id=141, idMap=5, idTypePremise=0, name="515", description="ГУК 515") Premise.create(id=142, idMap=5, idTypePremise=0, name="514", description="ГУК 514") Premise.create(id=143, idMap=5, idTypePremise=0, name="513", description="ГУК 513") Premise.create(id=144, idMap=5, idTypePremise=0, name="512б", description="ГУК 512б") Premise.create(id=145, idMap=5, idTypePremise=0, name="512а", description="ГУК 512а") Premise.create(id=146, idMap=5, idTypePremise=0, name="512", description="ГУК 512") Premise.create(id=147, idMap=5, idTypePremise=0, name="511", description="ГУК 511") Premise.create(id=148, idMap=5, idTypePremise=0, name="510", description="ГУК 510") Premise.create(id=149, idMap=5, idTypePremise=0, name="509", description="ГУК 509") Premise.create(id=150, idMap=5, idTypePremise=0, name="508", description="ГУК 508") Premise.create(id=151, idMap=5, idTypePremise=0, name="506", description="ГУК 506") Premise.create(id=152, idMap=5, idTypePremise=0, name="505а", description="ГУК 505а") Premise.create(id=153, idMap=5, idTypePremise=0, name="504а", description="ГУК 504а") Premise.create(id=154, idMap=5, idTypePremise=0, name="504", description="ГУК 504") Premise.create(id=155, idMap=5, idTypePremise=0, name="502", description="ГУК 502") Premise.create(id=156, idMap=5, idTypePremise=0, name="501", description="ГУК 501") Premise.create(id=157, idMap=5, idTypePremise=0, name="523", description="ГУК 523") Premise.create(id=158, idMap=5, idTypePremise=0, name="524", description="ГУК 524") Premise.create(id=159, idMap=5, 
idTypePremise=0, name="525", description="ГУК 525") Premise.create(id=160, idMap=5, idTypePremise=0, name="526", description="ГУК 526") Premise.create(id=161, idMap=5, idTypePremise=0, name="528", description="ГУК 528") Premise.create(id=162, idMap=5, idTypePremise=0, name="528а", description="ГУК 528а") Premise.create(id=163, idMap=5, idTypePremise=0, name="529", description="ГУК 529") Premise.create(id=164, idMap=5, idTypePremise=0, name="530", description="ГУК 530") Premise.create(id=165, idMap=5, idTypePremise=0, name="531", description="ГУК 531") Premise.create(id=166, idMap=5, idTypePremise=3, name="WC", description="Туалет мужской") Premise.create(id=167, idMap=5, idTypePremise=3, name="WC", description="Туалет женский") Premise.create(id=168, idMap=5, idTypePremise=5, name="Лифт", description="Лифт") Premise.create(id=169, idMap=5, idTypePremise=4, name="Вход", description="Вход 1") Premise.create(id=170, idMap=5, idTypePremise=4, name="Вход", description="Вход 2") def PremiseRemove(): query = Premise.delete().where(Premise.idMap == 5) query.execute() os.chdir('../../..') PremiseRemove() PremiseAdd()
3,237
0
46
b2df41919d3facb549d20991b7050ec9f9c68b19
1,131
py
Python
src/AgentAI/aiframework/AIModelParameter.py
Passer-D/GameAISDK
a089330a30b7bfe1f6442258a12d8c0086240606
[ "Apache-2.0" ]
1,210
2020-08-18T07:57:36.000Z
2022-03-31T15:06:05.000Z
src/AgentAI/aiframework/AIModelParameter.py
guokaiSama/GameAISDK
a089330a30b7bfe1f6442258a12d8c0086240606
[ "Apache-2.0" ]
37
2020-08-24T02:48:38.000Z
2022-01-30T06:41:52.000Z
src/AgentAI/aiframework/AIModelParameter.py
guokaiSama/GameAISDK
a089330a30b7bfe1f6442258a12d8c0086240606
[ "Apache-2.0" ]
275
2020-08-18T08:35:16.000Z
2022-03-31T15:06:07.000Z
# -*- coding: utf-8 -*- """ Tencent is pleased to support the open source community by making GameAISDK available. This source code file is licensed under the GNU General Public License Version 3. For full details, please refer to the file "LICENSE.txt" which is provided as part of this source code package. Copyright (C) 2020 THL A29 Limited, a Tencent company. All rights reserved. """ class AIModelParameter(object): """ Agent AI model parameter, including env, module, model package and class etc provider the data class manage the parameter """
41.888889
111
0.727675
# -*- coding: utf-8 -*- """ Tencent is pleased to support the open source community by making GameAISDK available. This source code file is licensed under the GNU General Public License Version 3. For full details, please refer to the file "LICENSE.txt" which is provided as part of this source code package. Copyright (C) 2020 THL A29 Limited, a Tencent company. All rights reserved. """ class AIModelParameter(object): """ Agent AI model parameter, including env, module, model package and class etc provider the data class manage the parameter """ def __init__(self, use_plugin_env, env_package, env_module, env_class, use_plugin_model, model_package ,model_module, model_class, use_default_run_func): self.use_plugin_env = use_plugin_env self.env_package = env_package self.env_module = env_module self.env_class = env_class self.use_plugin_model = use_plugin_model self.model_package = model_package self.model_module = model_module self.model_class = model_class self.use_default_run_func = use_default_run_func
534
0
26
1f9c259eab6beff9924a84b7996362aabcacc9cf
535
py
Python
day3/supermarket.py
dikshaa1702/ml
c35f279b8fa7544517ca713c2c1e55f08270d4c3
[ "Apache-2.0" ]
1
2019-06-13T13:52:09.000Z
2019-06-13T13:52:09.000Z
day3/supermarket.py
dikshaa1702/ml
c35f279b8fa7544517ca713c2c1e55f08270d4c3
[ "Apache-2.0" ]
null
null
null
day3/supermarket.py
dikshaa1702/ml
c35f279b8fa7544517ca713c2c1e55f08270d4c3
[ "Apache-2.0" ]
null
null
null
# -*- coding: utf-8 -*- """ Created on Thu May 9 10:29:45 2019 @author: DiPu """ from collections import OrderedDict od=OrderedDict() while True: user_inp = input("Enter Product: ") if user_inp == "": break user_inp = user_inp.split() key = " ".join(user_inp[:-1]) value = int(user_inp[-1]) od[key] = od.get(key,0)+value print(od) #for key,value in od.items(): # if "apple" in od.keys(): # od["apple"] = od["apple"]+20 # else: # od["apple"] = 20
17.833333
39
0.530841
# -*- coding: utf-8 -*- """ Created on Thu May 9 10:29:45 2019 @author: DiPu """ from collections import OrderedDict od=OrderedDict() while True: user_inp = input("Enter Product: ") if user_inp == "": break user_inp = user_inp.split() key = " ".join(user_inp[:-1]) value = int(user_inp[-1]) od[key] = od.get(key,0)+value print(od) #for key,value in od.items(): # if "apple" in od.keys(): # od["apple"] = od["apple"]+20 # else: # od["apple"] = 20
0
0
0
75835994dc81359fee88e3157fca38504d792923
1,500
py
Python
neutron/plugins/bigswitch/routerrule_db.py
sajuptpm/notification_neutron
45933f63c9eff0d2931a7209b040ff2dc69835c5
[ "Apache-2.0" ]
5
2015-10-20T07:56:53.000Z
2017-12-31T22:39:15.000Z
neutron/plugins/bigswitch/routerrule_db.py
sajuptpm/notification_neutron
45933f63c9eff0d2931a7209b040ff2dc69835c5
[ "Apache-2.0" ]
null
null
null
neutron/plugins/bigswitch/routerrule_db.py
sajuptpm/notification_neutron
45933f63c9eff0d2931a7209b040ff2dc69835c5
[ "Apache-2.0" ]
3
2015-05-08T22:36:28.000Z
2015-10-24T21:25:35.000Z
# Copyright 2013, Big Switch Networks # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import sqlalchemy as sa from sqlalchemy import orm from neutron.db import model_base
38.461538
78
0.656667
# Copyright 2013, Big Switch Networks # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import sqlalchemy as sa from sqlalchemy import orm from neutron.db import model_base class RouterRule(model_base.BASEV2): id = sa.Column(sa.Integer, primary_key=True) source = sa.Column(sa.String(64), nullable=False) destination = sa.Column(sa.String(64), nullable=False) nexthops = orm.relationship('NextHop', cascade='all,delete') action = sa.Column(sa.String(10), nullable=False) router_id = sa.Column(sa.String(36), sa.ForeignKey('routers.id', ondelete="CASCADE")) class NextHop(model_base.BASEV2): rule_id = sa.Column(sa.Integer, sa.ForeignKey('routerrules.id', ondelete="CASCADE"), primary_key=True) nexthop = sa.Column(sa.String(64), nullable=False, primary_key=True)
0
730
46
c054d5e14cb3c24004c03f3f34246371b7c0bd0c
315
py
Python
Mundo 2/Ex048.py
FelipeDreissig/Prog-em-Py---CursoEmVideo
59a85e228b4c7bc0738d1a213e71b0f7fb07d03a
[ "MIT" ]
null
null
null
Mundo 2/Ex048.py
FelipeDreissig/Prog-em-Py---CursoEmVideo
59a85e228b4c7bc0738d1a213e71b0f7fb07d03a
[ "MIT" ]
null
null
null
Mundo 2/Ex048.py
FelipeDreissig/Prog-em-Py---CursoEmVideo
59a85e228b4c7bc0738d1a213e71b0f7fb07d03a
[ "MIT" ]
null
null
null
### soma de números print('Iremos somar números ímpares que são múltiplos de 3') s = 0 p = 0 for c in range(1,501,2): print(c, end=' ') if c%3==0: p = p + 1 s = s + c print('\nA soma de todos os números ímpares que são múltiplos de 3 é {}.\n O total de números múltiplos é {}.'.format(s, p))
31.5
124
0.596825
### soma de números print('Iremos somar números ímpares que são múltiplos de 3') s = 0 p = 0 for c in range(1,501,2): print(c, end=' ') if c%3==0: p = p + 1 s = s + c print('\nA soma de todos os números ímpares que são múltiplos de 3 é {}.\n O total de números múltiplos é {}.'.format(s, p))
0
0
0
cf5a28dbd86e8414bfaddf8c6202802aaaded915
14,327
py
Python
testcases/OpenStack/vPing/vping_util.py
rski/functest-mirror
7a2538438eab7a406c821acd7c72352f4a6ba364
[ "Apache-2.0" ]
null
null
null
testcases/OpenStack/vPing/vping_util.py
rski/functest-mirror
7a2538438eab7a406c821acd7c72352f4a6ba364
[ "Apache-2.0" ]
null
null
null
testcases/OpenStack/vPing/vping_util.py
rski/functest-mirror
7a2538438eab7a406c821acd7c72352f4a6ba364
[ "Apache-2.0" ]
null
null
null
import os import pprint import re import sys import time import paramiko from scp import SCPClient import functest.utils.functest_utils as ft_utils import functest.utils.openstack_utils as os_utils FUNCTEST_REPO = ft_utils.FUNCTEST_REPO NAME_VM_1 = ft_utils.get_functest_config('vping.vm_name_1') NAME_VM_2 = ft_utils.get_functest_config('vping.vm_name_2') VM_BOOT_TIMEOUT = 180 VM_DELETE_TIMEOUT = 100 PING_TIMEOUT = ft_utils.get_functest_config('vping.ping_timeout') GLANCE_IMAGE_NAME = ft_utils.get_functest_config('vping.image_name') GLANCE_IMAGE_FILENAME = \ ft_utils.get_functest_config('general.openstack.image_file_name') GLANCE_IMAGE_FORMAT = \ ft_utils.get_functest_config('general.openstack.image_disk_format') GLANCE_IMAGE_PATH = \ ft_utils.get_functest_config('general.directories.dir_functest_data') + \ "/" + GLANCE_IMAGE_FILENAME FLAVOR = ft_utils.get_functest_config('vping.vm_flavor') # NEUTRON Private Network parameters PRIVATE_NET_NAME = \ ft_utils.get_functest_config('vping.vping_private_net_name') PRIVATE_SUBNET_NAME = \ ft_utils.get_functest_config('vping.vping_private_subnet_name') PRIVATE_SUBNET_CIDR = \ ft_utils.get_functest_config('vping.vping_private_subnet_cidr') ROUTER_NAME = ft_utils.get_functest_config('vping.vping_router_name') SECGROUP_NAME = ft_utils.get_functest_config('vping.vping_sg_name') SECGROUP_DESCR = ft_utils.get_functest_config('vping.vping_sg_descr') neutron_client = None glance_client = None nova_client = None logger = None pp = pprint.PrettyPrinter(indent=4) def pMsg(value): """pretty printing""" pp.pprint(value)
31.010823
79
0.580024
import os import pprint import re import sys import time import paramiko from scp import SCPClient import functest.utils.functest_utils as ft_utils import functest.utils.openstack_utils as os_utils FUNCTEST_REPO = ft_utils.FUNCTEST_REPO NAME_VM_1 = ft_utils.get_functest_config('vping.vm_name_1') NAME_VM_2 = ft_utils.get_functest_config('vping.vm_name_2') VM_BOOT_TIMEOUT = 180 VM_DELETE_TIMEOUT = 100 PING_TIMEOUT = ft_utils.get_functest_config('vping.ping_timeout') GLANCE_IMAGE_NAME = ft_utils.get_functest_config('vping.image_name') GLANCE_IMAGE_FILENAME = \ ft_utils.get_functest_config('general.openstack.image_file_name') GLANCE_IMAGE_FORMAT = \ ft_utils.get_functest_config('general.openstack.image_disk_format') GLANCE_IMAGE_PATH = \ ft_utils.get_functest_config('general.directories.dir_functest_data') + \ "/" + GLANCE_IMAGE_FILENAME FLAVOR = ft_utils.get_functest_config('vping.vm_flavor') # NEUTRON Private Network parameters PRIVATE_NET_NAME = \ ft_utils.get_functest_config('vping.vping_private_net_name') PRIVATE_SUBNET_NAME = \ ft_utils.get_functest_config('vping.vping_private_subnet_name') PRIVATE_SUBNET_CIDR = \ ft_utils.get_functest_config('vping.vping_private_subnet_cidr') ROUTER_NAME = ft_utils.get_functest_config('vping.vping_router_name') SECGROUP_NAME = ft_utils.get_functest_config('vping.vping_sg_name') SECGROUP_DESCR = ft_utils.get_functest_config('vping.vping_sg_descr') neutron_client = None glance_client = None nova_client = None logger = None pp = pprint.PrettyPrinter(indent=4) def pMsg(value): """pretty printing""" pp.pprint(value) def check_repo_exist(): if not os.path.exists(FUNCTEST_REPO): logger.error("Functest repository not found '%s'" % FUNCTEST_REPO) exit(-1) def get_vmname_1(): return NAME_VM_1 def get_vmname_2(): return NAME_VM_2 def init(vping_logger): global nova_client nova_client = os_utils.get_nova_client() global neutron_client neutron_client = os_utils.get_neutron_client() global glance_client glance_client = 
os_utils.get_glance_client() global logger logger = vping_logger def waitVmActive(nova, vm): # sleep and wait for VM status change sleep_time = 3 count = VM_BOOT_TIMEOUT / sleep_time while True: status = os_utils.get_instance_status(nova, vm) logger.debug("Status: %s" % status) if status == "ACTIVE": return True if status == "ERROR" or status == "error": return False if count == 0: logger.debug("Booting a VM timed out...") return False count -= 1 time.sleep(sleep_time) return False def create_security_group(): sg_id = os_utils.get_security_group_id(neutron_client, SECGROUP_NAME) if sg_id != '': logger.info("Using existing security group '%s'..." % SECGROUP_NAME) else: logger.info("Creating security group '%s'..." % SECGROUP_NAME) SECGROUP = os_utils.create_security_group(neutron_client, SECGROUP_NAME, SECGROUP_DESCR) if not SECGROUP: logger.error("Failed to create the security group...") return False sg_id = SECGROUP['id'] logger.debug("Security group '%s' with ID=%s created successfully." % (SECGROUP['name'], sg_id)) logger.debug("Adding ICMP rules in security group '%s'..." % SECGROUP_NAME) if not os_utils.create_secgroup_rule(neutron_client, sg_id, 'ingress', 'icmp'): logger.error("Failed to create the security group rule...") return False logger.debug("Adding SSH rules in security group '%s'..." 
% SECGROUP_NAME) if not os_utils.create_secgroup_rule(neutron_client, sg_id, 'ingress', 'tcp', '22', '22'): logger.error("Failed to create the security group rule...") return False if not os_utils.create_secgroup_rule( neutron_client, sg_id, 'egress', 'tcp', '22', '22'): logger.error("Failed to create the security group rule...") return False return sg_id def create_image(): _, image_id = os_utils.get_or_create_image(GLANCE_IMAGE_NAME, GLANCE_IMAGE_PATH, GLANCE_IMAGE_FORMAT) if not image_id: exit(-1) return image_id def get_flavor(): EXIT_CODE = -1 # Check if the given flavor exists try: flavor = nova_client.flavors.find(name=FLAVOR) logger.info("Using existing Flavor '%s'..." % FLAVOR) return flavor except: logger.error("Flavor '%s' not found." % FLAVOR) logger.info("Available flavors are: ") pMsg(nova_client.flavor.list()) exit(EXIT_CODE) def create_network_full(): EXIT_CODE = -1 network_dic = os_utils.create_network_full(neutron_client, PRIVATE_NET_NAME, PRIVATE_SUBNET_NAME, ROUTER_NAME, PRIVATE_SUBNET_CIDR) if not network_dic: logger.error( "There has been a problem when creating the neutron network") exit(EXIT_CODE) network_id = network_dic["net_id"] return network_id def delete_exist_vms(): servers = nova_client.servers.list() for server in servers: if server.name == NAME_VM_1 or server.name == NAME_VM_2: logger.info("Instance %s found. Deleting..." 
% server.name) server.delete() def is_userdata(case): return case == 'vping_userdata' def is_ssh(case): return case == 'vping_ssh' def boot_vm(case, name, image_id, flavor, network_id, test_ip, sg_id): EXIT_CODE = -1 config = dict() config['name'] = name config['flavor'] = flavor config['image'] = image_id config['nics'] = [{"net-id": network_id}] if is_userdata(case): config['config_drive'] = True if name == NAME_VM_2: u = ("#!/bin/sh\n\n" "while true; do\n" " ping -c 1 %s 2>&1 >/dev/null\n" " RES=$?\n" " if [ \"Z$RES\" = \"Z0\" ] ; then\n" " echo 'vPing OK'\n" " break\n" " else\n" " echo 'vPing KO'\n" " fi\n" " sleep 1\n" "done\n" % test_ip) config['userdata'] = u logger.info("Creating instance '%s'..." % name) logger.debug("Configuration: %s" % config) vm = nova_client.servers.create(**config) # wait until VM status is active if not waitVmActive(nova_client, vm): logger.error("Instance '%s' cannot be booted. Status is '%s'" % ( name, os_utils.get_instance_status(nova_client, vm))) exit(EXIT_CODE) else: logger.info("Instance '%s' is ACTIVE." % name) add_secgroup(name, vm.id, sg_id) return vm def get_test_ip(vm): test_ip = vm.networks.get(PRIVATE_NET_NAME)[0] logger.debug("Instance '%s' got %s" % (vm.name, test_ip)) return test_ip def add_secgroup(vmname, vm_id, sg_id): logger.info("Adding '%s' to security group '%s'..." % (vmname, SECGROUP_NAME)) os_utils.add_secgroup_to_instance(nova_client, vm_id, sg_id) def add_float_ip(vm): EXIT_CODE = -1 logger.info("Creating floating IP for VM '%s'..." 
% NAME_VM_2) floatip_dic = os_utils.create_floating_ip(neutron_client) floatip = floatip_dic['fip_addr'] if floatip is None: logger.error("Cannot create floating IP.") exit(EXIT_CODE) logger.info("Floating IP created: '%s'" % floatip) logger.info("Associating floating ip: '%s' to VM '%s' " % (floatip, NAME_VM_2)) if not os_utils.add_floating_ip(nova_client, vm.id, floatip): logger.error("Cannot associate floating IP to VM.") exit(EXIT_CODE) return floatip def establish_ssh(vm, floatip): EXIT_CODE = -1 logger.info("Trying to establish SSH connection to %s..." % floatip) username = 'cirros' password = 'cubswin:)' ssh = paramiko.SSHClient() ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy()) timeout = 50 nolease = False got_ip = False discover_count = 0 cidr_first_octet = PRIVATE_SUBNET_CIDR.split('.')[0] while timeout > 0: try: ssh.connect(floatip, username=username, password=password, timeout=2) logger.debug("SSH connection established to %s." % floatip) break except: logger.debug("Waiting for %s..." % floatip) time.sleep(6) timeout -= 1 console_log = vm.get_console_output() # print each "Sending discover" captured on the console log if (len(re.findall("Sending discover", console_log)) > discover_count and not got_ip): discover_count += 1 logger.debug("Console-log '%s': Sending discover..." % NAME_VM_2) # check if eth0 got an ip,the line looks like this: # "inet addr:192.168.".... # if the dhcp agent fails to assing ip, this line will not appear if "inet addr:" + cidr_first_octet in console_log and not got_ip: got_ip = True logger.debug("The instance '%s' succeeded to get the IP " "from the dhcp agent." % NAME_VM_2) # if dhcp doesnt work,it shows "No lease, failing".The test will fail if "No lease, failing" in console_log and not nolease and not got_ip: nolease = True logger.debug("Console-log '%s': No lease, failing..." % NAME_VM_2) logger.info("The instance failed to get an IP from the " "DHCP agent. 
The test will probably timeout...") if timeout == 0: # 300 sec timeout (5 min) logger.error("Cannot establish connection to IP '%s'. Aborting" % floatip) exit(EXIT_CODE) return ssh def transfer_ping_script(ssh, floatip): EXIT_CODE = -1 logger.info("Trying to transfer ping.sh to %s..." % floatip) scp = SCPClient(ssh.get_transport()) ping_script = FUNCTEST_REPO + "/testcases/OpenStack/vPing/ping.sh" try: scp.put(ping_script, "~/") except: logger.error("Cannot SCP the file '%s' to VM '%s'" % (ping_script, floatip)) exit(EXIT_CODE) cmd = 'chmod 755 ~/ping.sh' (stdin, stdout, stderr) = ssh.exec_command(cmd) for line in stdout.readlines(): print line def do_vping_ssh(ssh, test_ip): logger.info("Waiting for ping...") sec = 0 cmd = '~/ping.sh ' + test_ip flag = False while True: time.sleep(1) (stdin, stdout, stderr) = ssh.exec_command(cmd) output = stdout.readlines() for line in output: if "vPing OK" in line: logger.info("vPing detected!") EXIT_CODE = 0 flag = True break elif sec == PING_TIMEOUT: logger.info("Timeout reached.") flag = True break if flag: break logger.debug("Pinging %s. Waiting for response..." % test_ip) sec += 1 return EXIT_CODE, time.time() def do_vping_userdata(vm, test_ip): logger.info("Waiting for ping...") EXIT_CODE = -1 sec = 0 metadata_tries = 0 while True: time.sleep(1) console_log = vm.get_console_output() if "vPing OK" in console_log: logger.info("vPing detected!") EXIT_CODE = 0 break elif ("failed to read iid from metadata" in console_log or metadata_tries > 5): EXIT_CODE = -2 break elif sec == PING_TIMEOUT: logger.info("Timeout reached.") break elif sec % 10 == 0: if "request failed" in console_log: logger.debug("It seems userdata is not supported in " "nova boot. Waiting a bit...") metadata_tries += 1 else: logger.debug("Pinging %s. Waiting for response..." 
% test_ip) sec += 1 return EXIT_CODE, time.time() def do_vping(case, vm, test_ip): if is_userdata(case): return do_vping_userdata(vm, test_ip) else: floatip = add_float_ip(vm) ssh = establish_ssh(vm, floatip) transfer_ping_script(ssh, floatip) return do_vping_ssh(ssh, test_ip) def check_result(code, start_time, stop_time): test_status = "FAIL" if code == 0: logger.info("vPing OK") duration = round(stop_time - start_time, 1) logger.info("vPing duration:'%s'" % duration) test_status = "PASS" elif code == -2: duration = 0 logger.info("Userdata is not supported in nova boot. Aborting test...") else: duration = 0 logger.error("vPing FAILED") details = {'timestart': start_time, 'duration': duration, 'status': test_status} return details def push_result(report, case, start_time, stop_time, details): if report: try: logger.debug("Pushing vPing %s results into DB..." % case) ft_utils.push_results_to_db('functest', case, start_time, stop_time, details['status'], details=details) except: logger.error("Error pushing results into Database '%s'" % sys.exc_info()[0])
12,151
0
529
3628d9d479bbe8458d81ca54ccbdce2f1d00cf41
919
py
Python
src/analysis/duration/warping.py
EstevaoVieira/spikelearn
060206558cc37c31493f1c9f01412d90375403cb
[ "MIT" ]
null
null
null
src/analysis/duration/warping.py
EstevaoVieira/spikelearn
060206558cc37c31493f1c9f01412d90375403cb
[ "MIT" ]
null
null
null
src/analysis/duration/warping.py
EstevaoVieira/spikelearn
060206558cc37c31493f1c9f01412d90375403cb
[ "MIT" ]
null
null
null
from spikelearn.data import io, select, to_feature_array, SHORTCUTS from spikelearn.models.shuffle_decoding import shuffle_cross_predict from catboost import CatBoostClassifier from sklearn.linear_model import BayesianRidgeRegression import pickle allres = {} for rat, dset in product(SHORTCUTS['group']['eletro'], DSETS): data = select(io.load(rat, dset), _min_duration=.5, is_tired=False) tercils = [data.duration.quantile(q) for q in [1/3, 2/3]] t1 = to_feature_array(select(data, _max_duration=tercils[0]), subset='full') t3 = to_feature_array(select(data, _min_duration=tercils[1]), subset='full') res = shuffle_cross_predict(reg, [t1,t3], ['short', 'long'], n_splits=5, problem='regression', feature_scaling='robust') allres[(rat, dset)] = res pickle.dump(open('data/results/warping.pickle', 'wb')) # TODO calculate bias and mean bias direction
45.95
80
0.713819
from spikelearn.data import io, select, to_feature_array, SHORTCUTS from spikelearn.models.shuffle_decoding import shuffle_cross_predict from catboost import CatBoostClassifier from sklearn.linear_model import BayesianRidgeRegression import pickle allres = {} for rat, dset in product(SHORTCUTS['group']['eletro'], DSETS): data = select(io.load(rat, dset), _min_duration=.5, is_tired=False) tercils = [data.duration.quantile(q) for q in [1/3, 2/3]] t1 = to_feature_array(select(data, _max_duration=tercils[0]), subset='full') t3 = to_feature_array(select(data, _min_duration=tercils[1]), subset='full') res = shuffle_cross_predict(reg, [t1,t3], ['short', 'long'], n_splits=5, problem='regression', feature_scaling='robust') allres[(rat, dset)] = res pickle.dump(open('data/results/warping.pickle', 'wb')) # TODO calculate bias and mean bias direction
0
0
0
5ae2e5d8421127ef312b8bedb5149f9d2700c48b
798
py
Python
pytrade/__broker/__init__.py
sasano8/magnet-migrade
b5669b34a6a3b845df8df96dfedaf967df6b88e2
[ "MIT" ]
null
null
null
pytrade/__broker/__init__.py
sasano8/magnet-migrade
b5669b34a6a3b845df8df96dfedaf967df6b88e2
[ "MIT" ]
4
2021-03-24T23:38:22.000Z
2021-03-31T07:24:30.000Z
pytrade/__broker/__init__.py
sasano8/magnet-migrade
b5669b34a6a3b845df8df96dfedaf967df6b88e2
[ "MIT" ]
null
null
null
from decimal import Decimal from .. import logic from ..portfolio import VirtualAccount # class Calculator: # def __init__(self, allocated_margin: Decimal): # self.margin = allocated_margin # def calc_amount_and_round_by_unit(self, real_price: Decimal, min_unit: Decimal): # return logic.calc_unit_amount( # budget=self.margin, real_price=real_price, min_unit=min_unit # ) # def calc_amount_and_round_by_infered_min_unit(self, real_price: Decimal): # inferd = logic.infer_min_unit(real_price) # return logic.calc_unit_amount( # budget=self.margin, real_price=real_price, min_unit=inferd # ) # @staticmethod # def infer_min_unit(price: Decimal) -> Decimal: # return logic.infer_min_unit(price)
33.25
86
0.692982
from decimal import Decimal from .. import logic from ..portfolio import VirtualAccount # class Calculator: # def __init__(self, allocated_margin: Decimal): # self.margin = allocated_margin # def calc_amount_and_round_by_unit(self, real_price: Decimal, min_unit: Decimal): # return logic.calc_unit_amount( # budget=self.margin, real_price=real_price, min_unit=min_unit # ) # def calc_amount_and_round_by_infered_min_unit(self, real_price: Decimal): # inferd = logic.infer_min_unit(real_price) # return logic.calc_unit_amount( # budget=self.margin, real_price=real_price, min_unit=inferd # ) # @staticmethod # def infer_min_unit(price: Decimal) -> Decimal: # return logic.infer_min_unit(price)
0
0
0
077cd167f7e416bf1feacdd22f3ce9f5d2b54a8e
9,897
py
Python
codes/data_scripts/extract_subimages.py
Yangzhen0000/EDVR
388ae869a1b4e2e6399f5feeea1f3acc969a3c75
[ "Apache-2.0" ]
null
null
null
codes/data_scripts/extract_subimages.py
Yangzhen0000/EDVR
388ae869a1b4e2e6399f5feeea1f3acc969a3c75
[ "Apache-2.0" ]
null
null
null
codes/data_scripts/extract_subimages.py
Yangzhen0000/EDVR
388ae869a1b4e2e6399f5feeea1f3acc969a3c75
[ "Apache-2.0" ]
null
null
null
"""A multi-thread tool to crop large images to sub-images for faster IO.""" import os import os.path as osp import sys from multiprocessing import Pool import numpy as np import cv2 from PIL import Image sys.path.append(osp.dirname(osp.dirname(osp.abspath(__file__)))) from utils.util import ProgressBar # noqa: E402 import data.util as data_util # noqa: E402 if __name__ == '__main__': main()
40.896694
131
0.579469
"""A multi-thread tool to crop large images to sub-images for faster IO.""" import os import os.path as osp import sys from multiprocessing import Pool import numpy as np import cv2 from PIL import Image sys.path.append(osp.dirname(osp.dirname(osp.abspath(__file__)))) from utils.util import ProgressBar # noqa: E402 import data.util as data_util # noqa: E402 def main(): mode = 'pair' # single (one input folder) | pair (extract corresponding GT and LR pairs) opt = {} opt['n_thread'] = 1 opt['compression_level'] = 3 # 3 is the default value in cv2 # CV_IMWRITE_PNG_COMPRESSION from 0 to 9. A higher value means a smaller size and longer # compression time. If read raw images during training, use 0 for faster IO speed. if mode == 'single': opt['input_folder'] = '../../datasets/DIV2K/DIV2K_train_HR' opt['save_folder'] = '../../datasets/DIV2K/DIV2K800_sub' opt['crop_sz'] = 480 # the size of each sub-image opt['step'] = 240 # step of the sliding crop window opt['thres_sz'] = 48 # size threshold extract_single(opt) elif mode == 'pair': GT_folder = '../../datasets/SDR4k/train/SDR_10BIT' # '../../datasets/DIV2K/DIV2K_train_HR' LR_folder = '../../datasets/SDR4k/train/SDR_4BIT' # '../../datasets/DIV2K/DIV2K_train_LR_bicubic/X4' save_GT_folder = '../../datasets/SDR4k/train/SDR_10BIT_sub' # '../../datasets/DIV2K/DIV2K800_sub' save_LR_folder = '../../datasets/SDR4k/train/SDR_4BIT_sub' # '../../datasets/DIV2K/DIV2K800_sub_bicLRx4' scale_ratio = 1 # 4 crop_sz = 480 # the size of each sub-image (GT) step = 240 # step of the sliding crop window (GT) thres_sz = 48 # size threshold ######################################################################## # check that all the GT and LR images have correct scale ratio # img_GT_list = data_util._get_paths_from_images(GT_folder) # img_LR_list = data_util._get_paths_from_images(LR_folder) # assert len(img_GT_list) == len(img_LR_list), 'different length of GT_folder and LR_folder.' 
# for path_GT, path_LR in zip(img_GT_list, img_LR_list): # img_GT = Image.open(path_GT) # img_LR = Image.open(path_LR) # w_GT, h_GT = img_GT.size # w_LR, h_LR = img_LR.size # assert w_GT / w_LR == scale_ratio, 'GT width [{:d}] is not {:d}X as LR weight [{:d}] for {:s}.'.format( # noqa: E501 # w_GT, scale_ratio, w_LR, path_GT) # assert w_GT / w_LR == scale_ratio, 'GT width [{:d}] is not {:d}X as LR weight [{:d}] for {:s}.'.format( # noqa: E501 # w_GT, scale_ratio, w_LR, path_GT) # # check crop size, step and threshold size # assert crop_sz % scale_ratio == 0, 'crop size is not {:d}X multiplication.'.format( # scale_ratio) # assert step % scale_ratio == 0, 'step is not {:d}X multiplication.'.format(scale_ratio) # assert thres_sz % scale_ratio == 0, 'thres_sz is not {:d}X multiplication.'.format( # scale_ratio) print('process GT...') opt['input_folder'] = GT_folder opt['save_folder'] = save_GT_folder opt['crop_sz'] = crop_sz opt['step'] = step opt['thres_sz'] = thres_sz extract_single(opt) # all GT videos print('process LR...') opt['input_folder'] = LR_folder opt['save_folder'] = save_LR_folder opt['crop_sz'] = crop_sz // scale_ratio opt['step'] = step // scale_ratio opt['thres_sz'] = thres_sz // scale_ratio extract_single(opt) # all LQ videos assert len(data_util._get_paths_from_images(save_GT_folder)) == len( data_util._get_paths_from_images( save_LR_folder)), 'different length of save_GT_folder and save_LR_folder.' 
else: raise ValueError('Wrong mode.') def rm_border(img_file): image = cv2.imread(img_file) b = cv2.threshold(image, 15, 255, cv2.THRESH_BINARY) binary_image = b[1] binary_image = cv2.cvtColor(binary_image,cv2.COLOR_BGR2GRAY) # rint(binary_image.shape) x = binary_image.shape[0] y = binary_image.shape[1] edges_x = [] edges_y = [] for i in range(x): for j in range(y): if binary_image[i][j] == 255: edges_x.append(i) edges_y.append(j) left = min(edges_x) right = max(edges_x) bottom = min(edges_y) top = max(edges_y) # print(left, right, bottom, top) return left, right, bottom, top def extract_single(opt): input_folder = opt['input_folder'] save_folder = opt['save_folder'] if not osp.exists(save_folder): os.makedirs(save_folder) # print('mkdir [{:s}] ...'.format(save_folder)) else: print('Folder [{:s}] already exists. Continue...'.format(save_folder)) # sys.exit(1) # img_list = data_util._get_paths_from_images(input_folder) video_list, _ = data_util.get_video_paths(input_folder) # def update(arg): # pbar.update(arg) pbar = ProgressBar(len(video_list)) # pool = Pool(opt['n_thread']) # for path in img_list: # pool.apply_async(worker, args=(path, opt), callback=update) # pool.close() # pool.join() # print('All subprocesses done.') crop_sz = opt['crop_sz'] step = opt['step'] thres_sz = opt['thres_sz'] for video_path in video_list: pbar.update() video_name = osp.basename(video_path) save_video_folder = osp.join(opt['save_folder'], video_name) if not osp.exists(save_video_folder): os.makedirs(save_video_folder) # print('mkdir [{:s}] ...'.format(save_folder)) else: print('Folder [{:s}] already exists. 
Continue...'.format(save_video_folder)) continue print('Processing video {:s}...'.format(video_name)) left, right, bottom, top = rm_border(osp.join(video_path, "001.png")) # cut for all images in one video for img_name in sorted(os.listdir((video_path))): save_folder = osp.join(save_video_folder, osp.splitext(img_name)[0]) # print("save_folder", save_folder) if not osp.exists(save_folder): os.makedirs(save_folder) # print('mkdir [{:s}] ...'.format(save_folder)) else: print('Folder [{:s}] already exists. Continue...'.format(save_folder)) continue print('Processing frame {:s} ...'.format(img_name)) img_path = osp.join(video_path, img_name) img = cv2.imread(img_path, cv2.IMREAD_UNCHANGED) img = img[left:right, bottom:top, :] n_channels = len(img.shape) if n_channels == 2: h, w = img.shape elif n_channels == 3: h, w, c = img.shape else: raise ValueError('Wrong image shape - {}'.format(n_channels)) h_space = np.arange(0, h - crop_sz + 1, step) if h - (h_space[-1] + crop_sz) > thres_sz: h_space = np.append(h_space, h - crop_sz) w_space = np.arange(0, w - crop_sz + 1, step) if w - (w_space[-1] + crop_sz) > thres_sz: w_space = np.append(w_space, w - crop_sz) index = 0 for x in h_space: for y in w_space: index += 1 if n_channels == 2: crop_img = img[x:x + crop_sz, y:y + crop_sz] else: crop_img = img[x:x + crop_sz, y:y + crop_sz, :] crop_img = np.ascontiguousarray(crop_img) cv2.imwrite( osp.join(save_folder, 'p{:03d}.png'.format(index)), crop_img, [cv2.IMWRITE_PNG_COMPRESSION, opt['compression_level']]) # print('Write patch {:s}'.format(osp.join(save_folder, 'p{:03d}.png'.format(index)))) def worker(path, opt): crop_sz = opt['crop_sz'] step = opt['step'] thres_sz = opt['thres_sz'] img_name = osp.basename(path) video_name = osp.basename(osp.dirname(path)) img = cv2.imread(path, cv2.IMREAD_UNCHANGED) n_channels = len(img.shape) if n_channels == 2: h, w = img.shape elif n_channels == 3: h, w, c = img.shape else: raise ValueError('Wrong image shape - {}'.format(n_channels)) 
h_space = np.arange(0, h - crop_sz + 1, step) if h - (h_space[-1] + crop_sz) > thres_sz: h_space = np.append(h_space, h - crop_sz) w_space = np.arange(0, w - crop_sz + 1, step) if w - (w_space[-1] + crop_sz) > thres_sz: w_space = np.append(w_space, w - crop_sz) index = 0 save_folder = osp.join(opt['save_folder'], video_name, osp.splitext(img_name)[0]) print("save_folder", save_folder) if not osp.exists(save_folder): os.makedirs(save_folder) print('mkdir [{:s}] ...'.format(folder)) else: print('Folder [{:s}] already exists. Continue...'.format(save_folder)) # sys.exit(1) for x in h_space: for y in w_space: index += 1 # if n_channels == 2: # crop_img = img[x:x + crop_sz, y:y + crop_sz] # else: # crop_img = img[x:x + crop_sz, y:y + crop_sz, :] # crop_img = np.ascontiguousarray(crop_img) # cv2.imwrite( # osp.join(save_folder, 'p{:03d}.png'.format(index)), crop_img, # [cv2.IMWRITE_PNG_COMPRESSION, opt['compression_level']]) print('Write patch {:s}'.format(osp.join(save_folder, 'p{:03d}.png'.format(index)))) return 'Processing {:s} ...'.format(img_name) if __name__ == '__main__': main()
9,399
0
92
a6c5666bb6e7e97b7a610fc225c2db5329510d51
9,818
py
Python
synthetic/synthetic_utils.py
mlaugharn/EB_GFN
2d20b5d37edb9c50e0bc0fb7feedbc390ddfefd7
[ "MIT" ]
24
2022-02-04T01:48:01.000Z
2022-03-10T19:01:38.000Z
synthetic/synthetic_utils.py
mlaugharn/EB_GFN
2d20b5d37edb9c50e0bc0fb7feedbc390ddfefd7
[ "MIT" ]
null
null
null
synthetic/synthetic_utils.py
mlaugharn/EB_GFN
2d20b5d37edb9c50e0bc0fb7feedbc390ddfefd7
[ "MIT" ]
5
2022-03-06T10:44:41.000Z
2022-03-10T19:01:40.000Z
import torch as T import torch import torch.nn.functional as F import numpy as np import tqdm import random import sys, os from matplotlib import pyplot as plt from sympy.combinatorics.graycode import GrayCode import time import ipdb from torch.autograd import Variable, Function def pairwise_distances(x, y=None): ''' Input: x is a Nxd matrix y is an optional Mxd matirx Output: dist is a NxM matrix where dist[i,j] is the square norm between x[i,:] and y[j,:] if y is not given then use 'y=x'. i.e. dist[i,j] = ||x[i,:]-y[j,:]||^2 ''' x_norm = (x**2).sum(1).view(-1, 1) if y is not None: y_t = torch.transpose(y, 0, 1) y_norm = (y**2).sum(1).view(1, -1) else: y_t = torch.transpose(x, 0, 1) y_norm = x_norm.view(1, -1) dist = x_norm + y_norm - 2.0 * torch.mm(x, y_t) # Ensure diagonal is zero if x=y # if y is None: # dist = dist - torch.diag(dist.diag) return torch.clamp(dist, 0.0, np.inf) ############# Model Architecture
32.40264
116
0.574659
import torch as T import torch import torch.nn.functional as F import numpy as np import tqdm import random import sys, os from matplotlib import pyplot as plt from sympy.combinatorics.graycode import GrayCode import time import ipdb def get_true_samples(db, size, bm, int_salce, discrete_dim, seed=None): if seed is None: samples = float2bin(db.gen_batch(size), bm, int_salce, discrete_dim) else: samples = float2bin(db.gen_batch_with_seed(size, seed), bm, int_salce, discrete_dim) return torch.from_numpy(samples).float() def get_ebm_samples(score_func, size, inv_bm, int_scale, discrete_dim, device, gibbs_sampler=None, gibbs_steps=20): unif_dist = torch.distributions.Bernoulli(probs=0.5) ebm_samples = unif_dist.sample((size, discrete_dim)).to(device) ebm_samp_float = [] for ind in range(gibbs_steps * discrete_dim): # takes about 1s ebm_samples = gibbs_sampler.step(ebm_samples, score_func) ebm_samp_float.append(bin2float(ebm_samples.data.cpu().numpy().astype(int), inv_bm, int_scale, discrete_dim)) ebm_samp_float = np.concatenate(ebm_samp_float, axis=0) return ebm_samples, ebm_samp_float def estimate_ll(score_func, samples, n_partition=None, rand_samples=None): with torch.no_grad(): if rand_samples is None: rand_samples = torch.randint(2, (n_partition, samples.shape[1])).float().to(samples.device) n_partition = rand_samples.shape[0] f_z_list = [] for i in range(0, n_partition, samples.shape[0]): # 从0数到n_partition,每一份是samples.shape[0]大小 f_z = score_func(rand_samples[i:i+samples.shape[0]]).view(-1, 1) f_z_list.append(f_z) f_z = torch.cat(f_z_list, dim=0) f_z = f_z - samples.shape[1] * np.log(0.5) - np.log(n_partition) # log(1/2)是unif的概率,importance sample的时候在分母 # log_part = logsumexp(f_z) log_part = f_z.logsumexp(0) f_sample = score_func(samples) ll = f_sample - log_part return torch.mean(ll).item() def exp_hamming_sim(x, y, bd): x = x.unsqueeze(1) y = y.unsqueeze(0) d = T.sum(T.abs(x - y), dim=-1) return T.exp(-bd * d) def exp_hamming_mmd(x, y, bandwidth=0.1): x = 
x.float() y = y.float() with T.no_grad(): kxx = exp_hamming_sim(x, x, bd=bandwidth) idx = T.arange(0, x.shape[0], out=T.LongTensor()) kxx[idx, idx] = 0.0 kxx = T.sum(kxx) / x.shape[0] / (x.shape[0] - 1) kyy = exp_hamming_sim(y, y, bd=bandwidth) idx = T.arange(0, y.shape[0], out=T.LongTensor()) kyy[idx, idx] = 0.0 kyy = T.sum(kyy) / y.shape[0] / (y.shape[0] - 1) kxy = T.sum(exp_hamming_sim(x, y, bd=bandwidth)) / x.shape[0] / y.shape[0] mmd = kxx + kyy - 2 * kxy return mmd def hamming_sim(x, y): x = x.unsqueeze(1) y = y.unsqueeze(0) d = torch.sum(torch.abs(x - y), dim=-1) return x.shape[-1] - d def hamming_mmd(x, y): x = x.float() y = y.float() with torch.no_grad(): kxx = hamming_sim(x, x) idx = torch.arange(0, x.shape[0], out=torch.LongTensor()) kxx[idx, idx] = 0.0 kxx = torch.sum(kxx) / x.shape[0] / (x.shape[0] - 1) kyy = hamming_sim(y, y) idx = torch.arange(0, y.shape[0], out=torch.LongTensor()) kyy[idx, idx] = 0.0 kyy = torch.sum(kyy) / y.shape[0] / (y.shape[0] - 1) kxy = torch.sum(hamming_sim(x, y)) / x.shape[0] / y.shape[0] mmd = kxx + kyy - 2 * kxy return mmd def linear_mmd(x, y): x = x.float() y = y.float() with torch.no_grad(): kxx = torch.mm(x, x.transpose(0, 1)) idx = torch.arange(0, x.shape[0], out=torch.LongTensor()) kxx = kxx * (1 - torch.eye(x.shape[0]).to(x.device)) kxx = torch.sum(kxx) / x.shape[0] / (x.shape[0] - 1) kyy = torch.mm(y, y.transpose(0, 1)) idx = torch.arange(0, y.shape[0], out=torch.LongTensor()) kyy[idx, idx] = 0.0 kyy = torch.sum(kyy) / y.shape[0] / (y.shape[0] - 1) kxy = torch.sum(torch.mm(y, x.transpose(0, 1))) / x.shape[0] / y.shape[0] mmd = kxx + kyy - 2 * kxy return mmd from torch.autograd import Variable, Function def get_gamma(X, bandwidth): with torch.no_grad(): x_norm = torch.sum(X ** 2, dim=1, keepdim=True) x_t = torch.transpose(X, 0, 1) x_norm_t = x_norm.view(1, -1) t = x_norm + x_norm_t - 2.0 * torch.matmul(X, x_t) dist2 = F.relu(Variable(t)).detach().data d = dist2.cpu().numpy() d = d[np.isfinite(d)] d = d[d > 0] 
median_dist2 = float(np.median(d)) gamma = 0.5 / median_dist2 / bandwidth return gamma def pairwise_distances(x, y=None): ''' Input: x is a Nxd matrix y is an optional Mxd matirx Output: dist is a NxM matrix where dist[i,j] is the square norm between x[i,:] and y[j,:] if y is not given then use 'y=x'. i.e. dist[i,j] = ||x[i,:]-y[j,:]||^2 ''' x_norm = (x**2).sum(1).view(-1, 1) if y is not None: y_t = torch.transpose(y, 0, 1) y_norm = (y**2).sum(1).view(1, -1) else: y_t = torch.transpose(x, 0, 1) y_norm = x_norm.view(1, -1) dist = x_norm + y_norm - 2.0 * torch.mm(x, y_t) # Ensure diagonal is zero if x=y # if y is None: # dist = dist - torch.diag(dist.diag) return torch.clamp(dist, 0.0, np.inf) def get_kernel_mat(x, landmarks, gamma): d = pairwise_distances(x, landmarks) k = torch.exp(d * -gamma) k = k.view(x.shape[0], -1) return k def MMD(x, y, bandwidth=1.0): y = y.detach() gamma = get_gamma(x.detach(), bandwidth) kxx = get_kernel_mat(x, x, gamma) idx = torch.arange(0, x.shape[0], out=torch.LongTensor()) kxx = kxx * (1 - torch.eye(x.shape[0]).to(x.device)) kxx = torch.sum(kxx) / x.shape[0] / (x.shape[0] - 1) kyy = get_kernel_mat(y, y, gamma) idx = torch.arange(0, y.shape[0], out=torch.LongTensor()) kyy[idx, idx] = 0.0 kyy = torch.sum(kyy) / y.shape[0] / (y.shape[0] - 1) kxy = torch.sum(get_kernel_mat(y, x, gamma)) / x.shape[0] / y.shape[0] mmd = kxx + kyy - 2 * kxy return mmd def get_binmap(discrete_dim, binmode): b = discrete_dim // 2 - 1 all_bins = [] for i in range(1 << b): bx = np.binary_repr(i, width=discrete_dim // 2 - 1) all_bins.append('0' + bx) all_bins.append('1' + bx) vals = all_bins[:] if binmode == 'rand': print('remapping binary repr with random permute') random.shuffle(vals) elif binmode == 'gray': print('remapping binary repr with gray code') a = GrayCode(b) vals = [] for x in a.generate_gray(): vals.append('0' + x) vals.append('1' + x) else: assert binmode == 'normal' bm = {} inv_bm = {} for i, key in enumerate(all_bins): bm[key] = vals[i] 
inv_bm[vals[i]] = key return bm, inv_bm def compress(x, discrete_dim): bx = np.binary_repr(int(abs(x)), width=discrete_dim // 2 - 1) bx = '0' + bx if x >= 0 else '1' + bx return bx def recover(bx): x = int(bx[1:], 2) return x if bx[0] == '0' else -x def float2bin(samples, bm, int_scale, discrete_dim): bin_list = [] for i in range(samples.shape[0]): x, y = samples[i] * int_scale bx, by = compress(x, discrete_dim), compress(y, discrete_dim) bx, by = bm[bx], bm[by] bin_list.append(np.array(list(bx + by), dtype=int)) return np.array(bin_list) def bin2float(samples, inv_bm, int_scale, discrete_dim): floats = [] for i in range(samples.shape[0]): s = '' for j in range(samples.shape[1]): s += str(samples[i, j]) x, y = s[:discrete_dim // 2], s[discrete_dim // 2:] x, y = inv_bm[x], inv_bm[y] x, y = recover(x), recover(y) x /= int_scale y /= int_scale floats.append((x, y)) return np.array(floats) def plot_heat(score_func, bm, size, device, int_scale, discrete_dim, out_file=None): w = 100 x = np.linspace(-size, size, w) y = np.linspace(-size, size, w) xx, yy = np.meshgrid(x, y) xx = np.reshape(xx, [-1, 1]) yy = np.reshape(yy, [-1, 1]) heat_samples = float2bin(np.concatenate((xx, yy), axis=-1), bm, int_scale, discrete_dim) heat_samples = torch.from_numpy(heat_samples).to(device).float() heat_score = F.softmax(score_func(heat_samples).view(1, -1), dim=-1) a = heat_score.view(w, w).data.cpu().numpy() a = np.flip(a, axis=0) print("energy max and min:", a.max(), a.min()) plt.imshow(a) plt.axis('equal') plt.axis('off') # if out_file is None: # out_file = os.path.join(save_dir, 'heat.pdf') plt.savefig(out_file, bbox_inches='tight') plt.close() def plot_samples(samples, out_name, lim=None, axis=True): plt.scatter(samples[:, 0], samples[:, 1], marker='.') plt.axis('equal') if lim is not None: plt.xlim(-lim, lim) plt.ylim(-lim, lim) if not axis: plt.axis('off') plt.savefig(out_name, bbox_inches='tight') plt.close() ############# Model Architecture class EnergyModel(T.nn.Module): def 
__init__(self, s, mid_size): super(EnergyModel, self).__init__() self.m = T.nn.Sequential(T.nn.Linear(s, mid_size), T.nn.ELU(), T.nn.Linear(mid_size, mid_size), T.nn.ELU(), T.nn.Linear(mid_size, mid_size), T.nn.ELU(), T.nn.Linear(mid_size, 1)) def forward(self, x): x = x.view((x.shape[0], -1)) x = self.m(x) return x[:, -1]
8,301
10
490