hexsha stringlengths 40 40 | size int64 5 2.06M | ext stringclasses 10 values | lang stringclasses 1 value | max_stars_repo_path stringlengths 3 248 | max_stars_repo_name stringlengths 5 125 | max_stars_repo_head_hexsha stringlengths 40 78 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 3 248 | max_issues_repo_name stringlengths 5 125 | max_issues_repo_head_hexsha stringlengths 40 78 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 67k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 3 248 | max_forks_repo_name stringlengths 5 125 | max_forks_repo_head_hexsha stringlengths 40 78 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 5 2.06M | avg_line_length float64 1 1.02M | max_line_length int64 3 1.03M | alphanum_fraction float64 0 1 | count_classes int64 0 1.6M | score_classes float64 0 1 | count_generators int64 0 651k | score_generators float64 0 1 | count_decorators int64 0 990k | score_decorators float64 0 1 | count_async_functions int64 0 235k | score_async_functions float64 0 1 | count_documentation int64 0 1.04M | score_documentation float64 0 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
447db70fcaa354f16105d556a4943d598b33148f | 816 | py | Python | scifiweb/urls.py | project-scifi/scifiweb | cc51d9ea6e7f302c503174e92029188a7e252753 | [
"Apache-2.0"
] | 1 | 2018-04-18T04:37:43.000Z | 2018-04-18T04:37:43.000Z | scifiweb/urls.py | project-scifi/scifiweb | cc51d9ea6e7f302c503174e92029188a7e252753 | [
"Apache-2.0"
] | 12 | 2017-06-26T05:20:28.000Z | 2022-01-13T00:48:58.000Z | scifiweb/urls.py | project-scifi/scifiweb | cc51d9ea6e7f302c503174e92029188a7e252753 | [
"Apache-2.0"
] | null | null | null | from django.conf.urls import include
from django.conf.urls import url
from django.shortcuts import redirect
from django.shortcuts import reverse
import scifiweb.about.urls
import scifiweb.news.urls
from scifiweb.home import home
from scifiweb.robots import robots_dot_txt
# Root URL configuration: maps request paths to view callables.
urlpatterns = [
    # Landing page.
    url(r'^$', home, name='home'),
    # Dynamically generated robots.txt (served by a view, not a static file).
    url(r'^robots\.txt$', robots_dot_txt, name='robots.txt'),
    # Sub-apps mounted under their own prefixes.
    url(r'^about/', include(scifiweb.about.urls.urlpatterns)),
    url(r'^news/', include(scifiweb.news.urls.urlpatterns)),
    # Legacy redirects to /about/
    # Permanent (301) redirects from the old /info/about/* paths; the lambda
    # ignores the request argument and resolves the new URL by view name.
    url(r'^info/about/$', lambda _: redirect(reverse('about'), permanent=True)),
    url(r'^info/about/contact$', lambda _: redirect(reverse('about/contact'), permanent=True)),
    url(r'^info/about/team$', lambda _: redirect(reverse('about/team'), permanent=True)),
]
| 35.478261 | 95 | 0.720588 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 180 | 0.220588 |
447dd172df06dd5c29acac5e5539b2c7e9813f1e | 5,234 | py | Python | heltour/tournament/tests/test_login.py | zbidwell/heltour | 3895142695096a81cc65c3fefb7d4501ed796f46 | [
"MIT"
] | 41 | 2016-08-17T19:58:42.000Z | 2021-11-08T10:52:07.000Z | heltour/tournament/tests/test_login.py | zbidwell/heltour | 3895142695096a81cc65c3fefb7d4501ed796f46 | [
"MIT"
] | 257 | 2016-08-17T22:29:05.000Z | 2022-01-13T00:42:05.000Z | heltour/tournament/tests/test_login.py | zbidwell/heltour | 3895142695096a81cc65c3fefb7d4501ed796f46 | [
"MIT"
] | 31 | 2016-09-23T23:36:14.000Z | 2022-01-14T17:05:08.000Z | import datetime
import responses
from django.test import TestCase
from unittest.mock import patch
from heltour.tournament import oauth
from .testutils import *
class LoginTestCase(TestCase):
    """Tests for the lichess OAuth login entry point (state round-trip and redirect)."""

    def setUp(self):
        # Seed the test database with the leagues/seasons shared by all tests.
        createCommonLeagueData()

    def test_encode_decode_state(self, *args):
        # Just verify that encode/decode are symmetrical
        original_state = {'league': 'teamleague', 'token': None}
        encoded = oauth._encode_state(original_state)
        new_state = oauth._decode_state(encoded)
        self.assertEqual(original_state, new_state)

    @patch('heltour.tournament.oauth._encode_state', return_value='encodedstate')
    def test_oauth_redirect(self, *args):
        # Hitting the league login URL should redirect to lichess' OAuth
        # authorize endpoint with the expected client/scope/state parameters.
        response = self.client.get(league_url('team', 'login'))
        expected_oauth_url = 'https://oauth.lichess.org/oauth/authorize' + \
            '?response_type=code' + \
            '&client_id=clientid' + \
            '&redirect_uri=http://testserver/auth/lichess/' + \
            '&scope=email:read%20challenge:read%20challenge:write' + \
            '&state=encodedstate'
        # fetch_redirect_response=False because the target is an external URL
        # that the Django test client cannot follow.
        self.assertRedirects(response, expected_oauth_url, fetch_redirect_response=False)
        # The state passed to lichess must carry the league and (absent) token.
        oauth._encode_state.assert_called_with({'league': 'teamleague', 'token': None})
# Class-level patches apply to every test method (each receives the mocks via
# *args): user metadata fetch, OAuth state decoding, and a frozen clock so the
# token expiry timestamp is deterministic. @responses.activate intercepts all
# outbound HTTP made during the tests.
@patch('heltour.tournament.lichessapi.get_user_meta',
       return_value={'perfs': {'classical': {'rating': 2121, 'games': 10}}})
@patch('heltour.tournament.oauth._decode_state',
       return_value={'league': league_tag('team'), 'token': None})
@patch('django.utils.timezone.now',
       return_value=datetime.datetime(2019, 1, 1, 10, 30, 0, tzinfo=timezone.utc))
@responses.activate
class LoginWithCodeTestCase(TestCase):
    """Tests for the OAuth callback (authorization-code exchange) flow."""

    def setUp(self):
        createCommonLeagueData()
        # Stub lichess' token-exchange and account endpoints so the callback
        # view can complete without network access.
        responses.add(responses.POST, 'https://oauth.lichess.org/oauth',
                      json={'access_token': '1234',
                            'refresh_token': '4567',
                            'expires_in': 3600,
                            'token_type': 'bearer'})
        responses.add(responses.GET, 'https://lichess.org/api/account',
                      json={'username': 'testuser'})
        responses.add(responses.GET, 'https://lichess.org/api/email',
                      json={'email': 'testuser@example.com'})
        # Query parameters lichess appends when redirecting back to us.
        self.auth_params = {'code': 'abc', 'state': 'encodedstate'}

    def test_new_user(self, *args):
        # A first-time login creates a Player with profile data and a token.
        response = self.client.get(reverse('lichess_auth'), self.auth_params, follow=True)
        self.assertRedirects(response, league_url('team', 'user_dashboard'))
        self.assertContains(response, '<h3>testuser</h3>', status_code=200)
        oauth._decode_state.assert_called_with('encodedstate')
        player = Player.objects.get(lichess_username='testuser')
        # Rating/games come from the patched get_user_meta payload above.
        self.assertEqual(2121, player.rating_for(get_league('team')))
        self.assertEqual(10, player.games_played_for(get_league('team')))
        self.assertEqual('1234', player.oauth_token.access_token)
        self.assertEqual('4567', player.oauth_token.refresh_token)
        # expires = frozen now (10:30) + expires_in (3600s) = 11:30 UTC.
        self.assertEqual(datetime.datetime(2019, 1, 1, 11, 30, 0, tzinfo=timezone.utc),
                         player.oauth_token.expires)
        self.assertEqual('bearer', player.oauth_token.token_type)
        self.assertEqual('email:read challenge:read challenge:write', player.oauth_token.scope)
        self.assertEqual('testuser', player.oauth_token.account_username)
        self.assertEqual('testuser@example.com', player.oauth_token.account_email)

    def test_existing_user(self, *args):
        # Pre-create the player with a (non-None) profile before logging in.
        created_player = Player.objects.create(lichess_username='testuser')
        created_player.profile = {}
        created_player.save()
        User.objects.create(username='testuser')
        response = self.client.get(reverse('lichess_auth'), self.auth_params, follow=True)
        self.assertRedirects(response, league_url('team', 'user_dashboard'))
        self.assertContains(response, '<h3>testuser</h3>', status_code=200)
        player = Player.objects.get(lichess_username='testuser')
        # The existing player already has a profile field, so it shouldn't have been re-fetched
        self.assertIsNone(player.rating_for(get_league('team')))
        self.assertIsNone(player.games_played_for(get_league('team')))

    # Method-level patch overrides the class-level _decode_state so the state
    # carries a slack-link token this time.
    @patch('heltour.tournament.oauth._decode_state',
           return_value={'league': league_tag('team'), 'token': '999'})
    def test_slack_link(self, *args):
        LoginToken.objects.create(secret_token='999', slack_user_id='U1234')
        response = self.client.get(reverse('lichess_auth'), self.auth_params, follow=True)
        self.assertRedirects(response, league_url('team', 'user_dashboard'))
        self.assertContains(response, '<h3>testuser</h3>', status_code=200)
        player = Player.objects.get(lichess_username='testuser')
        # The login token links the lichess account to the slack user id.
        self.assertEqual('U1234', player.slack_user_id)

    def test_session_redirect(self, *args):
        # NOTE(review): assigning into self.client.session directly may not
        # persist in newer Django versions (session must be saved explicitly)
        # — confirm against the Django version in use.
        self.client.session['redirect_url'] = league_url('team', 'about')
        response = self.client.get(reverse('lichess_auth'), self.auth_params)
        self.assertRedirects(response, league_url('team', 'about'))
| 47.581818 | 95 | 0.661827 | 4,682 | 0.894536 | 0 | 0 | 4,653 | 0.888995 | 0 | 0 | 1,367 | 0.261177 |
447e060bfaf57ff9d2bd90b071b3844072392c17 | 3,912 | py | Python | tests/test_file_structure.py | ChrisWellsWood/carpyt | 6bdabd4924d0af5a83863dc25bec8961f321daaa | [
"MIT"
] | null | null | null | tests/test_file_structure.py | ChrisWellsWood/carpyt | 6bdabd4924d0af5a83863dc25bec8961f321daaa | [
"MIT"
] | null | null | null | tests/test_file_structure.py | ChrisWellsWood/carpyt | 6bdabd4924d0af5a83863dc25bec8961f321daaa | [
"MIT"
] | null | null | null | """Tests for creating file structure."""
import os
from pathlib import Path
import tempfile
from unittest import TestCase
import carpyt
TEST_TEMPLATES = Path(os.path.abspath(__file__)).parent / 'test_templates'
class TestTemplateParsing(TestCase):
    """Tests that templates are parsed correctly.

    Uses ``assertEqual``/``assertIsNone``/``assertIn`` rather than
    ``assertTrue(a == b)`` so that a failing assertion reports the actual and
    expected values instead of just "False is not true".
    """

    def test_simple_template(self):
        """Tests creation of nested parse tree."""
        template_path = TEST_TEMPLATES / 'nested_simple.yml'
        file_tree = carpyt.run_template_parser(template_path)
        self.assertEqual(file_tree.name, 'nested_simple')
        self.assertEqual(file_tree[0].name, '{module}')
        self.assertEqual(file_tree[0][0].name, 'tests')
        self.assertEqual(file_tree[0][0][0].name, 'test.py')
        self.assertIsNone(file_tree[0][0][0].content)

    def test_branched_template(self):
        """Tests creation of branched parse tree."""
        template_path = TEST_TEMPLATES / 'nested_branched.yml'
        file_tree = carpyt.run_template_parser(template_path)
        self.assertEqual(file_tree.name, 'nested_branched')
        self.assertEqual(file_tree[0].name, 'docs')
        self.assertEqual(file_tree[1].name, 'tests')
        self.assertEqual(file_tree[1][0].name, 'test_files')
        self.assertIsNone(file_tree[1][0].content)
        self.assertEqual(file_tree[2].name, '{module}')
        self.assertEqual(file_tree[2][0].name, '__init__.py')
        self.assertIn('content', file_tree[2][0].content)

    def test_linked_template(self):
        """Tests creation of parse tree with linked templates."""
        template_path = TEST_TEMPLATES / 'parent.yml'
        file_tree = carpyt.run_template_parser(template_path)
        self.assertEqual(file_tree.name, 'parent')
        self.assertEqual(file_tree[0].name, '{module}')
        self.assertEqual(file_tree[0][0].name, 'child')
        self.assertEqual(file_tree[0][0][0].name, 'test_files')
        self.assertEqual(file_tree[0][0][0][0].name, 'tests.py')
        self.assertIsNone(file_tree[0][0][0][0].content)
        self.assertEqual(file_tree[1].name, 'setup.py')
        self.assertIsNone(file_tree[1].content)

    def test_recursive_template(self):
        """Tests error handling in recursive linked templates."""
        template_path = TEST_TEMPLATES / 'recursive.yml'
        # A template that links to itself must be rejected, not looped on.
        with self.assertRaises(RecursionError):
            carpyt.run_template_parser(template_path)

    def test_reuse_template(self):
        """Tests reuse of linked templates."""
        template_path = TEST_TEMPLATES / 'reuse.yml'
        file_tree = carpyt.run_template_parser(template_path)
        # Re-using a linked template must not duplicate its subtree.
        self.assertEqual(len(file_tree.content), 1)
class TestFileCreation(TestCase):
    """Tests that all required files are generated correctly."""

    def test_simple_project(self):
        """Tests the file structure of the standard python template."""
        with tempfile.TemporaryDirectory() as tempdir:
            root = Path(tempdir)
            template_path = carpyt.TEMPLATES / 'python_module.yml'
            file_tree = carpyt.run_template_parser(template_path)
            file_tree.make(root)
            top_dir = root / 'python_module'
            # Every path the standard template is expected to create.
            expected_paths = (
                top_dir,
                top_dir / '{module}',
                top_dir / '{module}' / '__init__.py',
                top_dir / '{module}' / 'lib.py',
                top_dir / 'tests',
                top_dir / 'tests' / 'test_lib.py',
                top_dir / 'docs',
                top_dir / 'README.md',
                top_dir / 'MANIFEST.in',
                top_dir / 'setup.py',
            )
            for path in expected_paths:
                self.assertTrue(path.exists())
447ff5764e0b94f97e6c66b9deb62f15c5007620 | 535 | py | Python | solutions/Array-02.py | mrocklin/dask-tutorial | efb87e83eefa816ef23083fc1329af1d7da452a8 | [
"BSD-3-Clause"
] | 2 | 2018-03-17T16:41:28.000Z | 2019-06-19T06:38:06.000Z | solutions/Array-02.py | mrocklin/dask-tutorial | efb87e83eefa816ef23083fc1329af1d7da452a8 | [
"BSD-3-Clause"
] | null | null | null | solutions/Array-02.py | mrocklin/dask-tutorial | efb87e83eefa816ef23083fc1329af1d7da452a8 | [
"BSD-3-Clause"
] | 3 | 2018-07-13T15:33:55.000Z | 2020-11-29T14:27:45.000Z | import h5py
from glob import glob
import os
# Collect every HDF5 chunk of the weather dataset, sorted so the stack axis
# is in a stable (presumably chronological) order — TODO confirm ordering.
filenames = sorted(glob(os.path.join('data', 'weather-big', '*.hdf5')))
# '/t2m' looks like the 2-metre temperature variable — verify against the data.
dsets = [h5py.File(filename)['/t2m'] for filename in filenames]
import dask.array as da
# Wrap each on-disk dataset lazily; 500x500 chunks keep memory bounded.
arrays = [da.from_array(dset, chunks=(500, 500)) for dset in dsets]
# Stack all files along a new leading axis (one slice per file).
x = da.stack(arrays, axis=0)
# Mean field across all files (computed lazily until imshow materializes it).
result = x.mean(axis=0)
from matplotlib import pyplot as plt
fig = plt.figure(figsize=(16, 8))
plt.imshow(result, cmap='RdBu_r')
# Anomaly of the first file relative to the overall mean.
result = x[0] - x.mean(axis=0)
fig = plt.figure(figsize=(16, 8))
plt.imshow(result, cmap='RdBu_r')
| 24.318182 | 71 | 0.699065 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 49 | 0.091589 |
448053e30b8251f7c76cad61ff98d10c9caa5bc1 | 2,944 | py | Python | auto-test/pytest_main.py | asterfusion/Tapplet | 917020fce2aaa2678c36a91fb91f60b36142ad9e | [
"Apache-2.0"
] | 1 | 2019-12-30T11:49:35.000Z | 2019-12-30T11:49:35.000Z | auto-test/pytest_main.py | asterfusion/Tapplet | 917020fce2aaa2678c36a91fb91f60b36142ad9e | [
"Apache-2.0"
] | null | null | null | auto-test/pytest_main.py | asterfusion/Tapplet | 917020fce2aaa2678c36a91fb91f60b36142ad9e | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python3
import pytest
import argparse
import configparser
from tools.rest_tools import *
from tools.rest_helper import *
# ---- command-line interface ----
parser = argparse.ArgumentParser(description="Single test")
parser.add_argument('-m', '--mod_name', help="exec only one directory under tapplet/")
parser.add_argument('-f', '--test_func', help="exec only one test")
parser.add_argument('-V', '--verbose', action="store_true", help="show more info, default is false")
parser.add_argument('-v', '--pytest_verbose', action="store_true", help="show more pytest info, default is false")
parser.add_argument('-s', '--sw', action="store_true", help="exit on test fail, continue from last failing test next time")
parser.add_argument('-l', '--list', action="store_true", help="list all test case, not execute")
parser.add_argument('-n', '--count', default="3", help="test counts/loops, default is 3")
parser.add_argument('-L', '--long_time', action="store_true", help="run long time tests")
# parser.add_argument( '-f', '--function')
args = parser.parse_args()
# Module-level flags consumed by the __main__ block (and possibly by the
# collected tests themselves — TODO confirm).
mod_name = args.mod_name
test_func = args.test_func
global_verbose = args.verbose
pytest_verbose = args.pytest_verbose
global_sw = args.sw
global_list = args.list
global_count = args.count
long_time_test = args.long_time
# ---- test-bench configuration (global.cfg, [auto_test] section) ----
global_config = configparser.ConfigParser()
global_config.read("global.cfg", encoding="utf-8")
host_config = global_config.get("auto_test", "host")
eth_config = global_config.get("auto_test", "eth")
dump_eth_config = global_config.get("auto_test", "dump_eth")
port1_config = global_config.get("auto_test", "port1")
port2_config = global_config.get("auto_test", "port2")
device_config = global_config.get("auto_test", "device")
# Device types the harness currently supports.
device_config_list = ["VM"]
###### initial login ######
# Authenticate against the device's REST API once, at import time; the
# matching logout happens at the bottom of this script.
sf_helper = Sf_rest(host_config)
try_rest_login(sf_helper)
def check_device_config():
    """Abort the run (exit code -1) when the configured device type is unsupported."""
    if device_config in device_config_list:
        return
    print("device_config not right!")
    print("supported device:")
    print(device_config_list)
    exit(-1)
if __name__ == "__main__":
    check_device_config()
    # Translate CLI flags into pytest arguments.
    addopt = []
    if global_verbose is True:
        addopt.append("-v")
    if pytest_verbose is True and global_verbose is False:
        addopt.append("-v")
    if global_sw is True:
        addopt.append("--sw")
    if global_list is True:
        addopt.append("--collect-only")
    if mod_name != None:
        # Run a single module directory under tapplet/.
        mod_name = "tapplet/" + mod_name
        addopt.append(mod_name)
        pytest.main(addopt)
    elif test_func != None:
        # Run a single test by node id / keyword.
        addopt.append(test_func)
        pytest.main(addopt)
    else:
        # Full run, repeated global_count times; stop on first failing loop.
        for i in range(int(global_count)):
            print("############ {0} ############".format(i + 1))
            ret = pytest.main(addopt)
            if ret != 0:
                break
            if global_list is True:
                # Collect-only mode: one pass is enough.
                break
###### log out ######
# NOTE(review): placed at module level to mirror the module-level login above
# — confirm it was not intended to sit inside the __main__ guard.
try_rest_logout(sf_helper)
#pytest.main(["gtpcv1"])
| 33.078652 | 125 | 0.66712 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 881 | 0.299253 |
44816770ada7ebc5eb5982830625b9550b145802 | 408 | py | Python | Flask/server/config.py | VincentParsons/DigSig | 195c97e34237dab78de40efd2d3a1e0269a82b70 | [
"MIT"
] | null | null | null | Flask/server/config.py | VincentParsons/DigSig | 195c97e34237dab78de40efd2d3a1e0269a82b70 | [
"MIT"
] | 1 | 2022-02-13T01:23:59.000Z | 2022-02-13T01:23:59.000Z | Flask/server/config.py | VincentParsons/DigSig | 195c97e34237dab78de40efd2d3a1e0269a82b70 | [
"MIT"
] | null | null | null | from dotenv import load_dotenv
import os
load_dotenv()
class ApplicationConfig:
    """Flask application settings.

    The module calls ``load_dotenv()`` but previously never read the
    environment, leaving secrets hard-coded. Each sensitive value is now
    sourced from the environment (populated from a ``.env`` file), with the
    former literals kept as defaults for backward compatibility.
    """

    # SECURITY: set SECRET_KEY in the environment/.env for real deployments.
    SECRET_KEY = os.environ.get("SECRET_KEY", "asdadsd")
    SQLALCHEMY_TRACK_MODIFICATIONS = False
    # Echo SQL statements (development aid).
    SQLALCHEMY_ECHO = True
    SQLALCHEMY_DATABASE_URI = os.environ.get("DATABASE_URI", r"sqlite:///./db.sqlite")
    # Server-side sessions stored on disk.
    SESSION_TYPE = "filesystem"
    SESSION_PERMANENT = False
    SESSION_USE_SIGNER = True
    # SECURITY: mail credentials should come from the environment, not source.
    EMAIL_ADDRESS = os.environ.get("EMAIL_ADDRESS", "digsigsupp@gmail.com")
    EMAIL_PASSWORD = os.environ.get("EMAIL_PASSWORD", "DigSigRocks")
| 20.4 | 54 | 0.732843 | 349 | 0.855392 | 0 | 0 | 0 | 0 | 0 | 0 | 80 | 0.196078 |
44817e771a2edf24c0ddc2da2aab4dd39a0fd550 | 3,280 | py | Python | covigator/precomputations/load_top_occurrences.py | TRON-Bioinformatics/covigator | 59cd5012217cb043d97c77ce5273d8930e74390d | [
"MIT"
] | 7 | 2021-07-23T14:09:51.000Z | 2022-01-26T20:26:27.000Z | covigator/precomputations/load_top_occurrences.py | TRON-Bioinformatics/covigator | 59cd5012217cb043d97c77ce5273d8930e74390d | [
"MIT"
] | 2 | 2021-07-27T08:30:22.000Z | 2022-02-22T20:06:05.000Z | covigator/precomputations/load_top_occurrences.py | TRON-Bioinformatics/covigator | 59cd5012217cb043d97c77ce5273d8930e74390d | [
"MIT"
] | null | null | null | from typing import List
import pandas as pd
from logzero import logger
from sqlalchemy.orm import Session
from covigator import SYNONYMOUS_VARIANT, MISSENSE_VARIANT
from covigator.database.model import DataSource, PrecomputedSynonymousNonSynonymousCounts, RegionType, \
VARIANT_OBSERVATION_TABLE_NAME, SAMPLE_ENA_TABLE_NAME, SAMPLE_GISAID_TABLE_NAME, PrecomputedOccurrence
from covigator.database.queries import Queries
NUMBER_TOP_OCCURRENCES = 1000
class TopOccurrencesLoader:
    """Precomputes the top occurring variants (per data source and overall)
    and stores them in the PrecomputedOccurrence table."""

    def __init__(self, session: Session):
        self.session = session
        self.queries = Queries(session=self.session)

    def _fetch_top_occurring(self, source, error_message):
        """Return the top-N occurring variants for *source* (None = all
        sources), or None when the query raises ValueError (no data).

        Previously this try/except was triplicated in load() with
        inconsistent logging; failures are now always logged the same way.
        """
        try:
            if source is None:
                return self.queries.get_top_occurring_variants(top=NUMBER_TOP_OCCURRENCES)
            return self.queries.get_top_occurring_variants(
                top=NUMBER_TOP_OCCURRENCES, source=source)
        except ValueError as e:
            logger.exception(e)
            logger.error(error_message)
            return None

    def load(self):
        # gets the top occurrent variants for each source and overall
        top_ena = self._fetch_top_occurring(
            DataSource.ENA.name, "No top occurrences for ENA data")
        top_gisaid = self._fetch_top_occurring(
            DataSource.GISAID.name, "No top occurrences for GISAID data")
        top_overall = self._fetch_top_occurring(None, "No top occurrences")

        # delete all rows before starting
        self.session.query(PrecomputedOccurrence).delete()
        self.session.commit()

        # stores the precomputed data, tagging per-source rows with the source
        database_rows = []
        for dataframe, source in ((top_ena, DataSource.ENA),
                                  (top_gisaid, DataSource.GISAID),
                                  (top_overall, None)):
            if dataframe is not None:
                for index, row in dataframe.iterrows():
                    database_rows.append(self._row_to_top_occurrence(row, source=source))

        if len(database_rows) > 0:
            self.session.add_all(database_rows)
            self.session.commit()
            logger.info("Added {} entries to {}".format(
                len(database_rows), PrecomputedOccurrence.__tablename__))

    def _row_to_top_occurrence(self, row, source=None):
        """Map one dataframe row to a PrecomputedOccurrence ORM object."""
        return PrecomputedOccurrence(
            total=row["total"],
            frequency=row["frequency"],
            variant_id=row["variant_id"],
            hgvs_p=row["hgvs_p"],
            gene_name=row["gene_name"],
            domain=row["pfam_name"],
            annotation=row["annotation_highest_impact"],
            source=source,
            month=row["month"],
            count=row["count"],
            frequency_by_month=row["frequency_by_month"],
        )
| 38.588235 | 109 | 0.670427 | 2,820 | 0.859756 | 0 | 0 | 0 | 0 | 0 | 0 | 423 | 0.128963 |
448311e46f0ca162a8f3845e469f641ffd0d79ff | 3,029 | py | Python | petrarch2/readBBN.py | Sayeedsalam/political-actor-recommender | 20dbc37ac419e4ecd5436d4e5b9685846639b2bc | [
"MIT"
] | 1 | 2018-03-15T09:48:28.000Z | 2018-03-15T09:48:28.000Z | petrarch2/readBBN.py | Sayeedsalam/political-actor-recommender | 20dbc37ac419e4ecd5436d4e5b9685846639b2bc | [
"MIT"
] | null | null | null | petrarch2/readBBN.py | Sayeedsalam/political-actor-recommender | 20dbc37ac419e4ecd5436d4e5b9685846639b2bc | [
"MIT"
] | null | null | null | __author__ = 'root'
import json
from cameoxml import cameoxml
from StringIO import StringIO
discard_words_set = set(['THE', 'A', 'AN', 'OF', 'IN', 'AT', 'OUT', '', ' '])
def read_actors_and_role(cameo_doc):
    """Extract actor and agent-role lookup tables from a CAMEO document.

    Returns:
        actor_dict: maps actor_id -> actor_name (first non-empty name wins).
        role_dict: maps agent_name -> 1 (used as a presence/count seed by
            callers that later aggregate the values).
    """
    actor_dict = {}
    role_dict = {}
    for event in cameo_doc.events:
        for participant in event.participants:
            if participant.actor_id not in actor_dict and participant.actor_name != '':
                actor_dict[participant.actor_id] = participant.actor_name
            # Bug fix: membership was previously tested on agent_id although
            # the dict is keyed by agent_name (the value is always 1, so the
            # observable result is unchanged, but the intent is now explicit).
            if participant.agent_name != '' and participant.agent_name not in role_dict:
                role_dict[participant.agent_name] = 1
    return actor_dict, role_dict
def make_actor_list_for_each_sentence(text):
    """Send one sentence to the remote CAMEO-XML service and return
    (upper-cased actor names, role dict) extracted from its response.

    NOTE(review): hostname/port/document_date are hard-coded — presumably a
    fixed test-bench endpoint; confirm before reuse.
    """
    response = cameoxml.send_document(text, hostname='10.176.148.60', port=8001, document_date='2016-01-21')
    cameo_doc = cameoxml.build_document_from_response(response)
    actor_list = list()
    actor_dict, role_dict = read_actors_and_role(cameo_doc)
    # Only the actor names (upper-cased) are kept; ids are dropped.
    for item in actor_dict:
        actor_list.append(actor_dict[item].upper())
    return actor_list, role_dict
def make_new_actor_list(line):
    """Find NER PERSON mentions that the CAMEO service did NOT resolve.

    *line* is a JSON document with 'doc_id' and 'sentences' (each with
    'sentence' text and an 'ner' string). Returns (new_actor_list, doc_id)
    where new_actor_list holds PERSON tokens absent from CAMEO's actors.

    NOTE(review): json.load(..., encoding=...) is a Python-2-era signature;
    the encoding kwarg was removed from json.load in Python 3.9 — confirm the
    target interpreter.
    """
    new_actor_list = list()
    sentences = json.load(StringIO(line), encoding='utf-8')
    doc_id = sentences['doc_id']
    count = 0
    ner = set()
    role_dict = dict()
    for s in sentences['sentences']:
        # Example of the raw 'ner' field format:
        # "(MONEY,$|48|million),(ORGANIZATION,United|Nations),(DATE,30|August|2016|today),(NUMBER,1.3|million),(LOCATION,Central|Africa|West|Central|Africa),(PERSON,WFP/Daouda|Guirou)"
        ner_text_list = ''
        actor_list, role_dict_line = make_actor_list_for_each_sentence(s['sentence'])
        new_actor_list = new_actor_list + actor_list
        # Merge per-sentence role counts into the document-level tally.
        for role_item in role_dict_line:
            if role_item not in role_dict:
                role_dict[role_item] = role_dict_line[role_item]
            else:
                role_dict[role_item] = role_dict[role_item] + role_dict_line[role_item]
        if len(s['ner']) > 0:
            # Split "(TYPE,tok|tok),(TYPE,...)" into individual "TYPE,tokens"
            # entries; malformed entries (not exactly TYPE,text) are skipped.
            for ner_item in s['ner'].replace('),(', ':').split(':'):
                ner_item_list = ner_item.replace('(', '').replace(')', '').split(',')
                if len(ner_item_list) != 2:
                    continue
                if ner_item_list[0] == 'PERSON':  # or ner_item_list[0] == 'MISC' or ner_item_list[0] == 'ORGANIZATION':
                    # Tokens inside an entity are '|'-separated.
                    ner_text_list = ner_item_list[1]
                    ner = ner | set([x.strip().upper() for x in ner_text_list.split('|')])
    # Drop stop-words, then keep only persons CAMEO did not already resolve.
    ner = ner - discard_words_set
    new_actor_list = list(ner - set(new_actor_list))
    return new_actor_list, doc_id  # , role_dict
# response = cameoxml.send_document('The American president Barack Obama met with Putin.', hostname='10.176.148.60', port=8001, document_date='2016-01-21')
#
# print response
#
# print make_actor_list_for_each_sentence('The American president Barack Obama met Putin')
| 31.226804 | 183 | 0.638495 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 674 | 0.222516 |
448425d1503d367721976154b4af5535896d4e6c | 4,178 | py | Python | utilis/evaluation.py | jianghan2013/NMR_clustering | 9cf2285101dd0fc9a1274e102c8ea094053c4a17 | [
"MIT"
] | null | null | null | utilis/evaluation.py | jianghan2013/NMR_clustering | 9cf2285101dd0fc9a1274e102c8ea094053c4a17 | [
"MIT"
] | null | null | null | utilis/evaluation.py | jianghan2013/NMR_clustering | 9cf2285101dd0fc9a1274e102c8ea094053c4a17 | [
"MIT"
] | null | null | null | ### evaluation
import numpy as np
from sklearn.linear_model import LinearRegression
class Evaluate(object):
    """Evaluate clustering results per model: T1/T2 ratio spreads plus the
    slope/intercept of fitted inter-cluster boundary lines."""

    def __init__(self, model_names, X_train, y_preds, config, verbose=0):
        self.distance_min = config['distance_min']
        self.point_min = config['point_min']  # e.g. 0.05 / 50
        self.model_names = model_names
        self.X_train = X_train
        self.y_preds = y_preds
        self.verbose = verbose
        self.metrics = {'ratios': {}, 'slopes': {}, 'inters': {}, 'slopes_raw': {}}
        self.boundary_points = {}

    def fit(self):
        """Compute all metrics for every model; returns self (fluent style)."""
        for name in self.model_names:
            labels = self.y_preds[name]
            ratios = get_ratio_range(self.X_train, labels)
            slopes, inters, slopes_raw, boundaries = get_boundary_and_slope(
                self.X_train, labels, self.distance_min, self.point_min)
            self.metrics['ratios'][name] = ratios
            self.metrics['slopes'][name] = slopes
            self.metrics['slopes_raw'][name] = slopes_raw
            self.metrics['inters'][name] = inters
            self.boundary_points[name] = boundaries
            if self.verbose:
                print('model_name {}, metrics ratios {}, slopes {}, inters{}'.format(
                    name, ratios, slopes, inters))
        return self
def get_ratio_range(X_train, y_pred):
    """Compute the T1/T2 ratio spread (max/min) for every cluster.

    Columns of X_train are log10(T2) and log10(T1); labels in y_pred are
    assumed to be 0..max(y_pred) with no gaps.
    """
    n_clusters = max(y_pred) + 1
    spreads = []
    for label in range(n_clusters):
        members = X_train[y_pred == label]
        # T1/T2 per point, back-transformed from log10 space.
        ratio = (10 ** members[:, 1]) / (10 ** members[:, 0])
        spreads.append(np.max(ratio) / np.min(ratio))
    return spreads
def get_boundary_from_two_clusters_(cluster_a, cluster_b, distance_min=0.05):
    """Collect the points of two clusters that lie near their mutual boundary.

    For each point of cluster_a whose nearest cluster_b point is closer than
    distance_min, both that point and its nearest neighbour are kept.

    Args:
        cluster_a, cluster_b: arrays of shape (n, 2).
        distance_min: Euclidean distance threshold.

    Returns:
        Stacked boundary points (cluster_a rows first, each side sorted by
        row index), or an empty list when no pair is close enough.
    """
    id_a = set()
    id_b = set()
    for i in range(cluster_a.shape[0]):
        point = cluster_a[i, :]
        distances = np.sqrt((point[0] - cluster_b[:, 0]) ** 2
                            + (point[1] - cluster_b[:, 1]) ** 2)
        if np.amin(distances) < distance_min:
            id_a.add(i)
            id_b.add(int(np.argmin(distances)))
    # Bug fix: the original tested len(id_a) twice instead of both sets.
    if not id_a and not id_b:
        return []
    rows_a = sorted(id_a)
    rows_b = sorted(id_b)
    return np.vstack((cluster_a[rows_a, :], cluster_b[rows_b, :]))
def get_boundary_and_slope(X_train, y_pred, distance_min=0.05, point_min=50):
    """Find inter-cluster decision boundaries and fit a line to each.

    For every pair of clusters, the near-boundary points are collected; when
    more than point_min points are found, a degree-1 least-squares line is
    fitted (np.polyfit replaces the per-pair sklearn LinearRegression — same
    least-squares fit, no estimator object churn).

    Args:
        X_train: (n, 2) array of points.
        y_pred: cluster labels 0..max(y_pred).
        distance_min: distance threshold passed to the boundary finder.
        point_min: minimum number of boundary points required to fit a line.

    Returns:
        (angle_diff_list, inter_list, slope_raw_list, boundary_list), aligned
        per accepted cluster pair.
    """
    boundary_list = []        # all accepted boundary point sets
    slope_raw_list = []       # fitted slope, scaled by 180/pi (kept as-is)
    angle_diff_list = []      # |slope - 45| "normalized" slope
    inter_list = []           # fitted intercepts
    n_components = max(y_pred) + 1
    clusters = [X_train[y_pred == i] for i in range(n_components)]
    for i in range(n_components - 1):
        for j in range(i + 1, n_components):
            boundary_points = get_boundary_from_two_clusters_(
                clusters[i], clusters[j], distance_min=distance_min)
            if len(boundary_points) > point_min:
                boundary_list.append(boundary_points)
                # Least-squares line y = m*x + c over the boundary points.
                m, c = np.polyfit(boundary_points[:, 0], boundary_points[:, 1], 1)
                # NOTE(review): slope/pi*180 treats the raw slope as radians,
                # not atan(slope) — preserved from the original intent.
                slope = m / np.pi * 180
                slope_raw_list.append(slope)
                inter_list.append(c)
                angle_diff_list.append(abs(slope - 45))  # normalize slope
    return angle_diff_list, inter_list, slope_raw_list, boundary_list
| 41.366337 | 153 | 0.608186 | 1,357 | 0.324797 | 0 | 0 | 0 | 0 | 0 | 0 | 611 | 0.146242 |
44858cf0de73a5d338b1c6ecbc4904c2ec0268d9 | 1,464 | py | Python | userManagment/userManager.py | uzairAK/serverom-panel | 3dcde05ad618e6bef280db7d3180f926fe2ab1db | [
"MIT"
] | null | null | null | userManagment/userManager.py | uzairAK/serverom-panel | 3dcde05ad618e6bef280db7d3180f926fe2ab1db | [
"MIT"
] | null | null | null | userManagment/userManager.py | uzairAK/serverom-panel | 3dcde05ad618e6bef280db7d3180f926fe2ab1db | [
"MIT"
] | null | null | null | #!/usr/local/CyberCP/bin/python
import os, sys
sys.path.append('/usr/local/CyberCP')
import django
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "CyberCP.settings")
django.setup()
import threading as multi
from plogical.acl import ACLManager
from plogical.CyberCPLogFileWriter import CyberCPLogFileWriter as logging
class UserManager(multi.Thread):
    """Background worker that suspends or un-suspends every website owned by
    a user, depending on extraArgs['state']."""

    def __init__(self, function, extraArgs):
        multi.Thread.__init__(self)
        # Name of the operation run() should dispatch to.
        self.function = function
        # Expected keys: 'currentACL', 'user' (with .pk), 'state'.
        self.extraArgs = extraArgs

    def run(self):
        try:
            if self.function == 'controlUserState':
                self.controlUserState()
        except BaseException as msg:
            # Previously a bare "except: pass" silently swallowed every
            # error; log it like controlUserState does.
            logging.writeToFile(str(msg) + ' [Error:UserManager.run]')

    def controlUserState(self):
        """Apply the requested state to all sites the user may manage."""
        try:
            websites = ACLManager.findAllSites(self.extraArgs['currentACL'], self.extraArgs['user'].pk)
            from websiteFunctions.website import WebsiteManager
            wm = WebsiteManager()
            # Map the request flag onto the status value the API expects;
            # the two original branches differed only in this string.
            state = 'Suspend' if self.extraArgs['state'] == 'SUSPEND' else 'UN-Suspend'
            for items in websites:
                data = {'websiteName': items, 'state': state}
                wm.submitWebsiteStatus(self.extraArgs['user'].pk, data)
        except BaseException as msg:
            logging.writeToFile(str(msg) + '[Error:UserManager:32]')
4485c2de2544e40db08213560e7dfbba9933c235 | 245 | py | Python | examples/python/corepy/userandom.py | airgiser/ucb | d03e62a17f35a9183ed36662352f603f0f673194 | [
"MIT"
] | 1 | 2022-01-08T14:59:44.000Z | 2022-01-08T14:59:44.000Z | examples/python/corepy/userandom.py | airgiser/just-for-fun | d03e62a17f35a9183ed36662352f603f0f673194 | [
"MIT"
] | null | null | null | examples/python/corepy/userandom.py | airgiser/just-for-fun | d03e62a17f35a9183ed36662352f603f0f673194 | [
"MIT"
] | null | null | null | #!/usr/bin/python
from random import Random
# Demo of the random.Random API: print five blocks of sample values,
# separated by a dashed line.
onelist = [1, 2, 3, 4, 5, 6, 7]
rd = Random()
for i in range(5):
    print(rd.randint(0, 100))    # integer in [0, 100]
    print(rd.uniform(0, 100))    # float in [0, 100]
    print(rd.random())           # float in [0, 1)
    print(rd.choice(onelist))    # random element of onelist
    print('-' * 30)
| 17.5 | 31 | 0.587755 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 20 | 0.081633 |
44867576f7b712888203781d9257b7fe360df819 | 2,763 | py | Python | src/third_party/beaengine/tests/0f388c.py | CrackerCat/rp | 5fe693c26d76b514efaedb4084f6e37d820db023 | [
"MIT"
] | 1 | 2022-01-17T17:40:29.000Z | 2022-01-17T17:40:29.000Z | src/third_party/beaengine/tests/0f388c.py | CrackerCat/rp | 5fe693c26d76b514efaedb4084f6e37d820db023 | [
"MIT"
] | null | null | null | src/third_party/beaengine/tests/0f388c.py | CrackerCat/rp | 5fe693c26d76b514efaedb4084f6e37d820db023 | [
"MIT"
] | null | null | null | #!/usr/bin/python
# -*- coding: utf-8 -*-
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>
#
# @author : beaengine@gmail.com
from headers.BeaEnginePython import *
from nose.tools import *
class TestSuite:
    """Decoder tests for opcode 0F 38 8C (VPMASKMOVD / VPMASKMOVQ)."""

    def test(self):
        """Disassemble 0F 38 8C /r under each VEX length/width combination.

        The four original copy-pasted cases are parameterized: each entry is
        (VEX prefix spec, expected mnemonic, expected textual disassembly);
        all share opcode 0x8C, ModRM byte 0x10 and vvvv = 0b1111.
        """
        cases = [
            # VEX.128.66.0F38.W0 8C /r -> VPMASKMOVD xmm1, xmm2, m128
            ('VEX.128.66.0F38.W0', b'vpmaskmovd', 'vpmaskmovd xmm10, xmm0, xmmword ptr [r8]'),
            # VEX.256.66.0F38.W0 8C /r -> VPMASKMOVD ymm1, ymm2, m256
            ('VEX.256.66.0F38.W0', b'vpmaskmovd', 'vpmaskmovd ymm10, ymm0, ymmword ptr [r8]'),
            # VEX.128.66.0F38.W1 8C /r -> VPMASKMOVQ xmm1, xmm2, m128
            ('VEX.128.66.0F38.W1', b'vpmaskmovq', 'vpmaskmovq xmm10, xmm0, xmmword ptr [r8]'),
            # VEX.256.66.0F38.W1 8C /r -> VPMASKMOVQ ymm1, ymm2, m256
            ('VEX.256.66.0F38.W1', b'vpmaskmovq', 'vpmaskmovq ymm10, ymm0, ymmword ptr [r8]'),
        ]
        for spec, mnemonic, expected_repr in cases:
            myVEX = VEX(spec)
            myVEX.vvvv = 0b1111
            Buffer = bytes.fromhex('{}8c10'.format(myVEX.c4()))
            myDisasm = Disasm(Buffer)
            myDisasm.read()
            assert_equal(myDisasm.infos.Instruction.Opcode, 0x8c)
            assert_equal(myDisasm.infos.Instruction.Mnemonic, mnemonic)
            assert_equal(myDisasm.repr(), expected_repr)
| 37.849315 | 81 | 0.652552 | 1,958 | 0.70865 | 0 | 0 | 0 | 0 | 0 | 0 | 1,275 | 0.461455 |
44877d856034bf8fe04f668f3d82b679a04011e9 | 516 | py | Python | productdb/testing/unittests/products.py | tspycher/python-productdb | 17970a681b32eb249b78fab7dbeaee9d63ca7c05 | [
"MIT"
] | null | null | null | productdb/testing/unittests/products.py | tspycher/python-productdb | 17970a681b32eb249b78fab7dbeaee9d63ca7c05 | [
"MIT"
] | null | null | null | productdb/testing/unittests/products.py | tspycher/python-productdb | 17970a681b32eb249b78fab7dbeaee9d63ca7c05 | [
"MIT"
] | null | null | null | from . import BasicTestCase
class ProductsTestCase(BasicTestCase):
    """Smoke tests for the product-related HTTP endpoints."""

    def test_basic(self):
        """The root endpoint answers 200 and exposes a _links section."""
        response = self.client.get('/')
        assert response.status_code == 200
        payload = self.parseJsonResponse(response)
        assert '_links' in payload

    def test_get_all_products(self):
        """Listing /product answers 200 with an _items collection."""
        response = self.client.get('/product')
        assert response.status_code == 200
        payload = self.parseJsonResponse(response)
        assert '_items' in payload
        # add further data tests here, load basic data by fixtures
| 21.5 | 66 | 0.625969 | 484 | 0.937984 | 0 | 0 | 0 | 0 | 0 | 0 | 87 | 0.168605 |
448799a4fc1952a7fcd243848401d2c0a08268d0 | 196 | py | Python | src/main.py | Quin-Darcy/Crawler | 3e0d9f0b5dce43206606b19e5fb4dd84f4614fb1 | [
"MIT"
] | null | null | null | src/main.py | Quin-Darcy/Crawler | 3e0d9f0b5dce43206606b19e5fb4dd84f4614fb1 | [
"MIT"
] | null | null | null | src/main.py | Quin-Darcy/Crawler | 3e0d9f0b5dce43206606b19e5fb4dd84f4614fb1 | [
"MIT"
] | null | null | null | import crawler
import os
def main():
    """Run one crawl: seed the crawler, follow links, then show results."""
    spider = crawler.Crawler()
    spider.set_start()
    spider.burrow()
    spider.show()
    # Browser sessions opened during the crawl can linger; kill them explicitly.
    os.system('killall firefox')
if __name__ == '__main__':
    main()
| 13.066667 | 32 | 0.617347 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 27 | 0.137755 |
448833e3b59142ea7c65646061303d7a33f3c57f | 538 | py | Python | modulo 2/Exercicios/Ex058.1 - Palpite.py | GabrielBrotas/Python | 9441b6b86ff3cb7fa5921b508c484075adac08b3 | [
"MIT"
] | null | null | null | modulo 2/Exercicios/Ex058.1 - Palpite.py | GabrielBrotas/Python | 9441b6b86ff3cb7fa5921b508c484075adac08b3 | [
"MIT"
] | null | null | null | modulo 2/Exercicios/Ex058.1 - Palpite.py | GabrielBrotas/Python | 9441b6b86ff3cb7fa5921b508c484075adac08b3 | [
"MIT"
] | null | null | null | from random import randint
# Number-guessing game: the program picks 0..10, the player guesses with hints.
computador = randint(0, 10)
print('Sou seu computador... Acabei de pensar em um numero entre 0 e 10')
print('Tente adivinhar qual foi')
palpite = 0  # number of attempts so far
while True:
    jogador = int(input('Digite um numero entre 0 e 10: '))
    palpite += 1
    if jogador == computador:
        break
    # Hint: guess was too high -> 'Menos...', too low -> 'Mais'.
    print('Menos...' if jogador > computador else 'Mais')
print('Acertou com {} tentativas. Parabens'.format(palpite))
| 25.619048 | 73 | 0.637546 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 178 | 0.330855 |
448a636d024dfa5f84fbc271a201ee5abe77a7d5 | 2,118 | py | Python | jerk_agent_for_understanding/scripts/map-paths.py | tristansokol/Bobcats | 71461b5c969b24e5379a63d2bc22bf173ae76f9b | [
"MIT"
] | 2 | 2018-06-20T13:36:54.000Z | 2018-10-28T17:06:31.000Z | jerk_agent_for_understanding/scripts/map-paths.py | tristansokol/Bobcats | 71461b5c969b24e5379a63d2bc22bf173ae76f9b | [
"MIT"
] | null | null | null | jerk_agent_for_understanding/scripts/map-paths.py | tristansokol/Bobcats | 71461b5c969b24e5379a63d2bc22bf173ae76f9b | [
"MIT"
] | 1 | 2019-04-30T06:24:03.000Z | 2019-04-30T06:24:03.000Z | #!/usr/bin/python
import sys
import retro
import numpy as np
from os import listdir
from os.path import isfile, join, isdir, dirname, realpath
from PIL import Image
# find level maps here: http://info.sonicretro.org/Sonic_the_Hedgehog_(16-bit)_level_maps
# Load the level-map image that sits next to this script and keep it in memory
# as a mutable RGB uint8 array; render() below brightens tiles in-place.
mp = Image.open(dirname(realpath(__file__))+"/01.PNG")
mp.load()
level_map =np.array(mp.convert(mode='RGB'), dtype="uint8" )
hf = 10 # highlight factor: amount added to each RGB channel per visit
def render(file):
    """Replay a recorded .bk2 movie and brighten, on the global ``level_map``,
    the 8x8 tile at the player's (x, y) position for every frame.

    Mutates ``level_map`` in place; saturates channels at 255.
    """
    movie = retro.Movie(file)
    movie.step()
    env = retro.make(game=movie.get_game(), state=retro.STATE_NONE, use_restricted_actions=retro.ACTIONS_ALL)
    env.initial_state = movie.get_state()
    env.reset()
    while movie.step():
        # Re-play the recorded button presses frame by frame.
        keys = [movie.get_key(i) for i in range(env.NUM_BUTTONS)]
        _obs, _rew, _done, _info = env.step(keys)
        y = _info['y']
        x = _info['x']
        # Brighten the 8x8 tile at (x, y). Replaces the previous hand-unrolled
        # per-pixel comprehension with a vectorised numpy expression; this also
        # fixes the IndexError the old code raised on partial tiles at the map
        # edges (numpy slicing clamps, the comprehension assumed 8 columns).
        region = level_map[y:y + 8, x:x + 8]
        level_map[y:y + 8, x:x + 8] = np.minimum(
            region.astype(np.int16) + hf, 255
        ).astype(np.uint8)
    env.close()
# Driver: sys.argv[1] is either a directory of .bk2 recordings or one recording.
if isdir(sys.argv[1]):
    # Replay every .bk2 file in the directory, in sorted order.
    onlyfiles = [f for f in listdir(sys.argv[1]) if isfile(join(sys.argv[1], f))]
    onlyfiles.sort()
    c = 0
    for file in onlyfiles:
        if ".bk2" in file :
            print('playing', file)
            render(sys.argv[1]+file)
            # Checkpoint the accumulated heat-map every 5th recording.
            if c % 5==0:
                lm = Image.fromarray(level_map)
                lm.save('levelmapv9.jpeg')
            c+=1
    # NOTE(review): `lm` is unbound (NameError) if the directory held no .bk2
    # files — presumably never the case in practice; confirm.
    lm.show()
else:
    # Single-recording mode: replay it and display the result without saving.
    print('playing', sys.argv[1])
    render(sys.argv[1])
    lm = Image.fromarray(level_map)
    # lm.save('levelmap.jpeg')
    lm.show()
| 33.09375 | 109 | 0.566572 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 218 | 0.102927 |
4491484c500017a7a5d3c28b0aff70d389c8e7ae | 3,643 | py | Python | src/TensorFlow/venv/Lab/Tutorials/Models/SaveRestore.py | KarateJB/Python.Practice | a5f00f669dc4b815601c093ce0753a0a82b4328a | [
"MIT"
] | 1 | 2020-08-14T07:21:05.000Z | 2020-08-14T07:21:05.000Z | src/TensorFlow/venv/Lab/Tutorials/Models/SaveRestore.py | KarateJB/Python.Practice | a5f00f669dc4b815601c093ce0753a0a82b4328a | [
"MIT"
] | null | null | null | src/TensorFlow/venv/Lab/Tutorials/Models/SaveRestore.py | KarateJB/Python.Practice | a5f00f669dc4b815601c093ce0753a0a82b4328a | [
"MIT"
] | 3 | 2018-04-08T13:35:20.000Z | 2019-09-01T04:59:03.000Z | """Save and Load model sample
See https://github.com/aymericdamien/TensorFlow-Examples/blob/master/examples/4_Utils/save_restore_model.py
"""
import tensorflow as tf
import numpy as np
import matplotlib.pyplot as plt
from tensorflow.examples.tutorials.mnist import input_data
import os
# Initialize random weights
def init_weights(shape):
    """Create a trainable variable of *shape*, drawn from N(0, 0.01)."""
    initial = tf.random_normal(shape, stddev=0.01)
    return tf.Variable(initial)
# Define model
def model(X, w_h, w_h2, w_o, p_keep_input, p_keep_hidden):
    """Two-hidden-layer ReLU MLP with dropout; returns the output logits.

    Dropout is applied to the input (p_keep_input) and after each hidden
    layer (p_keep_hidden); keep probabilities are fed at run time.
    """
    dropped_input = tf.nn.dropout(X, p_keep_input)
    hidden1 = tf.nn.relu(tf.matmul(dropped_input, w_h))
    hidden1 = tf.nn.dropout(hidden1, p_keep_hidden)
    hidden2 = tf.nn.relu(tf.matmul(hidden1, w_h2))
    hidden2 = tf.nn.dropout(hidden2, p_keep_hidden)
    return tf.matmul(hidden2, w_o)
# Load MNIST (downloads on first run) and split into train/test arrays.
mnist = input_data.read_data_sets("MNIST_data/", one_hot=True)
trX, trY, teX, teY = mnist.train.images, mnist.train.labels, mnist.test.images, mnist.test.labels
# Placeholders: 784 = flattened 28x28 image, 10 = one-hot digit classes.
X = tf.placeholder("float", [None, 784])
Y = tf.placeholder("float", [None, 10])
# Initialize weights
w_h = init_weights([784, 625]) # weights for 1st hidden layer (784 -> 625)
w_h2 = init_weights([625, 625]) # weights for 2nd hidden layer (625 -> 625)
w_o = init_weights([625, 10])  # output layer weights (625 -> 10 classes)
# Keep probabilities for dropout, fed per run (train vs. eval).
p_keep_input = tf.placeholder("float")
p_keep_hidden = tf.placeholder("float")
# Create neural network model and get log probabilities
py_x = model(X, w_h, w_h2, w_o, p_keep_input, p_keep_hidden)
# Calculate the loss (softmax cross-entropy against the one-hot labels)
loss = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits_v2(logits=py_x, labels=Y))
# Train
train_op = tf.train.RMSPropOptimizer(0.001, 0.9).minimize(loss) # See https://www.tensorflow.org/api_docs/python/tf/train/RMSPropOptimizer
predict_op = tf.argmax(py_x, 1) # Returns the index with the largest value
# Define the store-model directory
ckpt_dir = "./ckpt_dir"
if not os.path.exists(ckpt_dir):
    os.makedirs(ckpt_dir)
# Epoch counter persisted in the checkpoint; trainable=False keeps the
# optimizer from touching it.
global_step = tf.Variable(0, name='global_step', trainable=False)
# Call this after declaring all tf.Variables: Saver snapshots only the
# variables that exist at construction time.
saver = tf.train.Saver()
# This variable won't be stored, since it is declared after tf.train.Saver()
non_storable_variable = tf.Variable(777)
# Launch the graph in a session
with tf.Session() as sess:
    # you need to initialize all variables
    tf.global_variables_initializer().run()
    # Load last train state so training resumes instead of restarting.
    ckpt = tf.train.get_checkpoint_state(ckpt_dir) # Returns CheckpointState proto from the "checkpoint" file
    if ckpt and ckpt.model_checkpoint_path:
        print(ckpt.model_checkpoint_path)
        saver.restore(sess, ckpt.model_checkpoint_path) # restore all variables
    start = global_step.eval() # get last global_step (resume epoch)
    print("Start from:", start)
    for i in range(start, 100):
        # Mini-batches of 128. NOTE(review): the inner loop reuses the name
        # `start`; harmless here because range(start, 100) was already built,
        # but worth renaming.
        for start, end in zip(range(0, len(trX), 128), range(128, len(trX)+1, 128)):
            sess.run(train_op, feed_dict={X: trX[start:end], Y: trY[start:end],
                                          p_keep_input: 0.8, p_keep_hidden: 0.5})
        global_step.assign(i+1).eval() # set and update(eval) global_step with index, i
        saver.save(sess, ckpt_dir + "/model.ckpt", global_step=global_step)
        # Report test accuracy for this epoch (dropout disabled: keep prob 1.0).
        print(i, np.mean(np.argmax(teY, axis=1) ==
                         sess.run(predict_op, feed_dict={X: teX,
                                                         p_keep_input: 1.0,
                                                         p_keep_hidden: 1.0})))
# If you want to only save the graph in to binary file
tf.train.write_graph(sess.graph_def, '/tmp/tfmodel','train.pbtxt')
| 36.43 | 138 | 0.682954 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,203 | 0.330222 |
449284bb2c40e87e204639779832368eaadcd46c | 2,113 | py | Python | complexity/utils.py | remiomosowon/complexity | 710dc771b11c65fb65820cfb2a519dd749c65419 | [
"BSD-3-Clause"
] | 59 | 2015-02-08T19:38:03.000Z | 2020-05-24T19:34:43.000Z | complexity/utils.py | remiomosowon/complexity | 710dc771b11c65fb65820cfb2a519dd749c65419 | [
"BSD-3-Clause"
] | 11 | 2015-01-25T12:15:02.000Z | 2020-01-01T18:53:31.000Z | complexity/utils.py | remiomosowon/complexity | 710dc771b11c65fb65820cfb2a519dd749c65419 | [
"BSD-3-Clause"
] | 29 | 2015-01-13T16:53:27.000Z | 2020-08-01T12:37:52.000Z | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
complexity.utils
----------------
Helper functions used throughout Complexity.
"""
import errno
import os
import sys
# Python 2/3 compatibility shim: on Python 2, use codecs.open for UTF-8 I/O
# and alias input() to raw_input() so the rest of the module is version-agnostic.
PY3 = sys.version > '3'
if PY3:
    pass
else:
    import codecs
    input = raw_input
def make_sure_path_exists(path):
    """
    Ensures that a directory exists, creating intermediate directories
    as needed.

    :param path: A directory path.
    :return: True if the directory exists (or already existed), False on
        any other OS error.
    """
    try:
        os.makedirs(path)
    except OSError as err:
        # An already-existing directory is fine; anything else is a failure.
        return err.errno == errno.EEXIST
    return True
def unicode_open(filename, *args, **kwargs):
    """
    Open *filename* as usual on Python 3; force UTF-8 via codecs on Python 2.

    :param filename: Name of file to open.
    """
    if not PY3:
        # Python 2's builtin open() has no encoding parameter.
        kwargs['encoding'] = "utf-8"
        return codecs.open(filename, *args, **kwargs)
    return open(filename, *args, **kwargs)
def query_yes_no(question, default="yes"):
    """
    Ask a yes/no question via `raw_input()` and return their answer.

    :param question: A string that is presented to the user.
    :param default: The presumed answer if the user just hits <Enter>.
        It must be "yes" (the default), "no" or None (meaning an answer
        is required of the user).

    The "answer" return value is one of "yes" or "no".

    Adapted from
    http://stackoverflow.com/questions/3041986/python-command-line-yes-no-input
    http://code.activestate.com/recipes/577058/
    """
    valid = {"yes": True, "y": True, "ye": True, "no": False, "n": False}
    # Map each allowed default to the prompt that marks it as pre-selected.
    prompts = {None: " [y/n] ", "yes": " [Y/n] ", "no": " [y/N] "}
    if default not in prompts:
        raise ValueError("invalid default answer: '%s'" % default)
    prompt = prompts[default]
    while True:
        sys.stdout.write(question + prompt)
        choice = input().lower()
        if default is not None and choice == '':
            # Bare <Enter> accepts the default, when one exists.
            return valid[default]
        if choice in valid:
            return valid[choice]
        sys.stdout.write("Please respond with 'yes' or 'no' "
                         "(or 'y' or 'n').\n")
| 24.569767 | 79 | 0.587317 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,036 | 0.490298 |
4492b8f104d0fabe71c89becba245f5e4644669e | 228 | py | Python | src/Brain/Functions/Analyzer.py | SidisLiveYT/Discord-AI | b321856266740304b70ab63bbfcdb5285854aa7d | [
"MIT"
] | null | null | null | src/Brain/Functions/Analyzer.py | SidisLiveYT/Discord-AI | b321856266740304b70ab63bbfcdb5285854aa7d | [
"MIT"
] | null | null | null | src/Brain/Functions/Analyzer.py | SidisLiveYT/Discord-AI | b321856266740304b70ab63bbfcdb5285854aa7d | [
"MIT"
] | null | null | null | from Resources.StorageGround import Greetings
print(Greetings);
def Analyzer_Text_Department(RawString):
    """Split *RawString* on whitespace and return the individual words.

    BUG FIX: the original computed the split and silently discarded it
    (and left a commented-out loop); the token list is now returned so
    callers can actually analyze the words.

    :param RawString: the raw input text to tokenize.
    :return: list of whitespace-separated words.
    """
    # Taking List of Words to Analyze Individually
    words = RawString.split()
    return words
| 25.333333 | 48 | 0.75 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 68 | 0.298246 |
4494d26537d59e35ecf0654ea3fabf3cb5c426aa | 2,742 | py | Python | prep_scripts/labels.py | Open-Speech-EkStep/common_scripts | 916f01444e028f9111d5499217abf4443bd24017 | [
"MIT"
] | 4 | 2021-07-22T15:32:13.000Z | 2022-01-25T08:13:45.000Z | prep_scripts/labels.py | Open-Speech-EkStep/common_scripts | 916f01444e028f9111d5499217abf4443bd24017 | [
"MIT"
] | 2 | 2021-03-31T10:58:54.000Z | 2021-05-07T09:50:52.000Z | prep_scripts/labels.py | Open-Speech-EkStep/common_scripts | 916f01444e028f9111d5499217abf4443bd24017 | [
"MIT"
] | 3 | 2021-04-12T05:04:55.000Z | 2021-08-25T06:55:42.000Z |
# Usage: python labels.py --jobs 64 --tsv <path to train.tsv>train.tsv --output-dir <destination dir> --output-name test --txt-dir
import argparse
import os
import re
from tqdm import tqdm
from joblib import Parallel, delayed
# def get_cleaned_text(original_text):
# pattern = '[^ ँ-ःअ-ऋए-ऑओ-नप-रलव-हा-ृे-ॉो-्0-9क़-य़ॅ]+'
# # pattern = '[^ ँ-ःअ-ऋए-ऑओ-नप-रलव-ह़ा-ृे-ॉो-्0-9क़-य़ॅ]+'
# return (re.sub(pattern, '', original_text)).strip()
# def get_replacement(value):
# dicti={2325:2392,2326:2393,
# 2327:2394,2332:2395,
# 2337:2396,2338:2397,
# 2347:2398,2351:2399}
# return dicti[value]
# def replace_nuktas(text):
# while chr(2364) in text:
# index_of_nukta=text.index(chr(2364))
# to_replace=ord(text[index_of_nukta-1])
# replace_value=get_replacement(to_replace)
# text=text.replace(chr(2364),"").replace(chr(to_replace),chr(replace_value))
# return text
def get_text(line, root):
    """Return the first transcript line for one TSV entry.

    The TSV entry's first tab-separated field is a .wav path relative to
    *root*; the transcript lives next to it with the same basename and a
    .txt extension.

    BUG FIX: the original used ``.replace("wav", "txt")``, which mangles
    any path containing "wav" elsewhere (e.g. "wave.wav" -> "txte.txt");
    only the extension is swapped now. The file is also opened with a
    context manager so the handle is no longer leaked.

    :param line: one data row of the TSV manifest.
    :param root: directory the manifest paths are relative to.
    :return: the transcript's first line, stripped.
    """
    wav_rel = line.split("\t")[0].strip()
    txt_rel = os.path.splitext(wav_rel)[0] + ".txt"
    txt_path = os.path.join(root, txt_rel)
    with open(txt_path, mode="r", encoding="utf-8") as file_local:
        return file_local.readline().strip()
def main():
    """Build wav2vec-style label files (.wrd words, .ltr letters) from a TSV
    manifest whose first line is the audio root and whose rows reference
    .wav files with sibling .txt transcripts."""
    parser = argparse.ArgumentParser()
    parser.add_argument("--tsv", type = str, help = "TSV file for which labels need to be generated")
    parser.add_argument("--output-dir", required=True)
    parser.add_argument("--output-name", required=True)
    parser.add_argument("--txt-dir")
    parser.add_argument("--jobs", default=-1, type=int, help="Number of jobs to run in parallel")
    args = parser.parse_args()
    os.makedirs(args.output_dir, exist_ok=True)
    tsv_file=args.tsv
    output_dir=args.output_dir
    output_name=args.output_name
    with open(tsv_file) as tsv, open(
        os.path.join(output_dir, output_name + ".ltr"), "w",encoding="utf-8"
    ) as ltr_out, open(
        os.path.join(output_dir, output_name + ".wrd"), "w",encoding="utf-8"
    ) as wrd_out:
        # First manifest line is the audio root directory.
        root = next(tsv).strip()
        if not args.txt_dir:
            args.txt_dir = root
        local_arr = []
        # Read every transcript in parallel; joblib preserves input order.
        local_arr.extend(Parallel(n_jobs = args.jobs)( delayed(get_text)(line , args.txt_dir) for line in tqdm(tsv)))
        formatted_text = []
        # Letter-level form: space-separated characters, '|' as word separator,
        # trailing ' |' closes the final word (wav2vec convention).
        for text in local_arr:
            local_list = list( text.replace(" ", "|") )
            final_text = " ".join(local_list) + ' |'
            formatted_text.append(final_text)
        wrd_out.writelines("\n".join(local_arr))
        ltr_out.writelines("\n".join(formatted_text))
if __name__ == "__main__":
    main()
| 31.883721 | 130 | 0.619985 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,216 | 0.429379 |
4495bc5ea5d78a2ae432884f81122c1c3f6cbbb8 | 8,235 | py | Python | df.py | denizumutdereli/dialogflow_nlp_ai_powered_chat_bot | e624927527a32f21763d23cd585c868fe9190211 | [
"Apache-2.0"
] | 1 | 2022-01-13T00:35:20.000Z | 2022-01-13T00:35:20.000Z | df.py | denizumutdereli/dialogflow_nlp_ai_powered_chat_bot | e624927527a32f21763d23cd585c868fe9190211 | [
"Apache-2.0"
] | null | null | null | df.py | denizumutdereli/dialogflow_nlp_ai_powered_chat_bot | e624927527a32f21763d23cd585c868fe9190211 | [
"Apache-2.0"
] | null | null | null | import os
import sys
import settings
#import google.cloud.dialogflow_v2 as dialogflow_v2
from google.cloud import dialogflow as dialogflow_v2
from google.protobuf import field_mask_pb2
from rich import print
import dffunc as dff #special!
def get_intent_id(display_name):
    """Look up the Dialogflow intent id for *display_name*.

    Returns the id string (last path segment of the intent resource name),
    or False when no intent matches or the API call fails.
    """
    try:
        client = dialogflow_v2.IntentsClient()
        parent = dialogflow_v2.AgentsClient.agent_path(settings.project_id)
        for intent in client.list_intents(request={"parent": parent}):
            if intent.display_name == display_name:
                # Resource names look like .../agent/intents/<id>.
                return intent.name.split("/")[-1]
        return False
    except Exception:
        return False
def getIntent(intent_id):
    """Fetch the full Intent object for *intent_id*, or False on API error."""
    try:
        client = dialogflow_v2.IntentsClient()
        name = client.intent_path(settings.project_id, intent_id)
        return client.get_intent(request={"name": name})
    except Exception as e:
        print('Error on line {}'.format(sys.exc_info()[-1].tb_lineno), type(e).__name__, e)
        return False
def create_intent(display_name, training_phrases_parts, message_texts):
    """Create a webhook-enabled intent with the given training phrases and
    text responses.

    :param display_name: display name for the new intent.
    :param training_phrases_parts: iterable of training-phrase strings.
    :param message_texts: list of response texts for the intent.
    :return: the created Intent API response, or False on error.
    """
    try:
        intents_client = dialogflow_v2.IntentsClient()
        parent = dialogflow_v2.AgentsClient.agent_path(settings.project_id)
        training_phrases = []
        for training_phrases_part in training_phrases_parts:
            part = dialogflow_v2.Intent.TrainingPhrase.Part(text=training_phrases_part)
            # Here we create a new training phrase for each provided part.
            training_phrase = dialogflow_v2.Intent.TrainingPhrase(parts=[part])
            training_phrases.append(training_phrase)
        text = dialogflow_v2.Intent.Message.Text(text=message_texts)
        message = dialogflow_v2.Intent.Message(text=text)
        intent = dialogflow_v2.Intent(
            display_name=display_name, training_phrases=training_phrases,
            messages=[message], webhook_state='WEBHOOK_STATE_ENABLED'
        )
        response = intents_client.create_intent(
            request={"parent": parent, "intent": intent}
        )
        # BUG FIX: the response was previously computed and dropped, so the
        # success path returned None and callers could not tell success from
        # failure; return it like the sibling helpers do.
        return response
    except Exception:
        return False
def changeIntentName(intent_id, newName):
    """Rename an intent's display name; returns the API response or False."""
    try:
        client = dialogflow_v2.IntentsClient()
        name = client.intent_path(settings.project_id, intent_id)
        intent = client.get_intent(request={"name": name})
        intent.display_name = newName
        # Only the display_name field is sent in the update.
        mask = field_mask_pb2.FieldMask(paths=["display_name"])
        return client.update_intent(intent=intent, update_mask=mask,
                                    language_code=settings.language_code)
    except Exception as e:
        print('Error on line {}'.format(sys.exc_info()[-1].tb_lineno), type(e).__name__, e)
        return False
def update_intent(intent_id, training_phrases_parts,message_texts):
    """Append training phrases and one response text to an existing intent.

    Note: this extends (does not replace) the intent's training phrases, and
    appends to the first message's text list — it assumes the intent already
    has at least one text message (TODO confirm for freshly created intents).

    :param intent_id: id of the intent to update.
    :param training_phrases_parts: iterable of training-phrase strings to add.
    :param message_texts: a single response text to append.
    :return: the update API response, or False on error.
    """
    try:
        client = dialogflow_v2.IntentsClient()
        intent_name = client.intent_path(settings.project_id, intent_id)
        intent = client.get_intent(request={"name": intent_name})
        training_phrases = []
        for training_phrases_part in training_phrases_parts:
            part = dialogflow_v2.Intent.TrainingPhrase.Part(
                text=training_phrases_part)
            training_phrase = dialogflow_v2.Intent.TrainingPhrase(parts=[part])
            training_phrases.append(training_phrase)
        # Mutate the fetched proto in place, then push only the listed fields.
        intent.training_phrases.extend(training_phrases)
        intent.messages[0].text.text.append(message_texts)
        update_mask = field_mask_pb2.FieldMask(paths=["display_name","training_phrases","messages"])
        response = client.update_intent(intent=intent, update_mask=update_mask, language_code=settings.language_code)
        return response
    except Exception as e:
        print('Error on line {}'.format(sys.exc_info()[-1].tb_lineno), type(e).__name__, e)
        return False
def delete_intent_logic(intent_id):
    """Resolve an intent display name to its id and delete that intent.

    :param intent_id: the intent's display name (resolved via get_intent_id).
    :return: True if found and deleted, False otherwise.
    """
    try:
        # The argument is actually a display name; resolve it to an id first.
        resolved = get_intent_id(intent_id)
        if resolved is False:
            return False
        # delete_intent_function already yields a plain True/False.
        return delete_intent_function(resolved)
    except Exception:
        return False
def delete_intent_function(intent_id):
    """Delete the intent with the given id.

    :return: True when the API call completes (non-empty response),
        False on an empty response or any error.
    """
    try:
        client = dialogflow_v2.IntentsClient()
        intent_path = client.intent_path(settings.project_id, intent_id)
        response = client.delete_intent(request={"name": intent_path})
        # An empty-string response is treated as failure, anything else as success.
        return response != ''
    except Exception:
        return False
def detect_intent_texts(session_id, text, ai = False):
    """Send *text* to Dialogflow for *session_id* and return the response.

    When *ai* is truthy the user text is replaced with the 'aiHello' trigger
    phrase. After detection, dff.parameterUpdate records the response's
    parameters; if it reports this is not the first person (returns falsy),
    the call recurses once with settings.defaultIntentFallback instead.
    Returns the detect_intent response, or False on error.
    """
    try:
        if ai != False:
            text = 'aiHello' #bypass: force the AI greeting intent
        #ApiConnection
        session_client = dialogflow_v2.SessionsClient()
        session = session_client.session_path(settings.project_id, session_id)
        text_input = dialogflow_v2.TextInput(text=text, language_code=settings.language_code)
        query_input = dialogflow_v2.QueryInput(text=text_input)
        response = session_client.detect_intent(session=session, query_input=query_input)
        if response == '':
            return False
        else:
            outcall_parameters = response.query_result.parameters
            # Side effect: persist the detected parameters for this session.
            firstPerson = dff.parameterUpdate(settings.sessionid,outcall_parameters,text) #update Parameters
            if firstPerson:
                return response
            else:
                #print(0)
                # Not the session owner: answer with the default fallback intent.
                return detect_intent_texts(session_id, settings.defaultIntentFallback)
    except Exception as e:
        print('Error on line {}'.format(sys.exc_info()[-1].tb_lineno), type(e).__name__, e)
        return False
def flowup_input(session_id, inputName):
    """Trigger the Dialogflow event *inputName* for *session_id*.

    Returns the detect_intent response, or False on error. The many locals
    below unpack the response but are never used; they presumably served
    debugging. NOTE(review): the intent-id split indexes [1] and would raise
    (and thus return False) if the resource name lacks the expected prefix.
    """
    try:
        #ApiConnection
        session_client = dialogflow_v2.SessionsClient()
        session = session_client.session_path(settings.project_id, session_id)
        event_input = dialogflow_v2.EventInput(name=inputName, language_code=settings.language_code)
        query_input = dialogflow_v2.QueryInput(event=event_input)
        response = session_client.detect_intent(session=session, query_input=query_input)
        #QueryInfo
        outcall_query = response.query_result.query_text
        outcall_parameters = response.query_result.parameters
        #BotRepyInfo
        incoming_reply_id = response.response_id
        incoming_repy = response.query_result.fulfillment_text
        incoming_replies = response.query_result.fulfillment_messages
        #ContextInfo
        incoming_contexts = []
        for context in response.query_result.output_contexts:
            incoming_contexts.append(context)
        #IntentInfo
        incoming_intent = response.query_result.intent.display_name
        incoming_intent_link = response.query_result.intent.name
        incoming_intent_id = incoming_intent_link.split('projects/'+settings.project_id+'/agent/intents/')
        incoming_intent_id = incoming_intent_id[1]
        incoming_intent_confidence = response.query_result.intent_detection_confidence
        incoming_sentiment = response.query_result.sentiment_analysis_result
        # Score between -1.0 (negative sentiment) and 1.0 (positive sentiment).
        return response
    except Exception as e:
        print('Error on line {}'.format(sys.exc_info()[-1].tb_lineno), type(e).__name__, e)
        return False
def train_agent():
    """Start agent training and report whether the operation finished.

    :return: the operation's done() status, or False on API error.
    """
    try:
        client = dialogflow_v2.AgentsClient()
        parent = dialogflow_v2.AgentsClient.common_project_path(settings.project_id)
        operation = client.train_agent(request={"parent": parent})
        return operation.done()
    except Exception:
        return False
#text = 'what is your name?'
#detect_intent_texts('testbot-mldq', 'abc123', text, 'de')
#flowup_input('testbot-mldq', 'abc123', 'goygoy', 'de')
#getIntent('testbot-mldq','633debaa-6680-4108-b94b-499761d3300f')
#changeIntentName('testbot-mldq','633debaa-6680-4108-b94b-499761d3300f','ladung')
#update_intent('testbot-mldq','633debaa-6680-4108-b94b-499761d3300f', ['birinci cümle','ikinci cümle', 'üçüncü cümle'], 'yok bir sey!')
#train_agent('testbot-mldq')
#delete_intent_function('testbot-mldq','6a68b930-664e-4790-9c37-61ac2fc82e14')
#create_intent('testbot-mldq', 'deniz4@deniz', ['Başka bir cümle daha ekleyelim', 'İkinci ve farklı bir cümle daha ekleyelim'], ['yeni bir cevar versin mi?'])
#delete_intent_logic('testbot-mldq', 'deneme');
#train_agent('testbot-mldq') | 37.262443 | 158 | 0.769156 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,780 | 0.215836 |
4497c0bd8ba875e530e105c67eb98c37b9208182 | 2,151 | py | Python | tests/test_configManager.py | Helene/ibm-spectrum-scale-bridge-for-grafana | 9ec7953071d9fbe90332cb670a3f6c00ebf3291b | [
"Apache-2.0"
] | 28 | 2019-11-21T21:11:55.000Z | 2022-03-14T16:53:12.000Z | tests/test_configManager.py | Helene/ibm-spectrum-scale-bridge-for-grafana | 9ec7953071d9fbe90332cb670a3f6c00ebf3291b | [
"Apache-2.0"
] | 23 | 2019-12-17T08:44:09.000Z | 2022-01-07T11:02:01.000Z | tests/test_configManager.py | Helene/ibm-spectrum-scale-bridge-for-grafana | 9ec7953071d9fbe90332cb670a3f6c00ebf3291b | [
"Apache-2.0"
] | 7 | 2019-12-04T09:41:17.000Z | 2021-12-10T15:01:08.000Z | from source.confParser import ConfigManager
from source.__version__ import __version__ as version
def test_case01():
cm = ConfigManager()
result = cm.readConfigFile('config.ini')
assert isinstance(result, dict)
def test_case02():
cm = ConfigManager()
result = cm.readConfigFile('config.ini')
assert len(result.keys()) > 0
def test_case03():
cm = ConfigManager()
result = cm.readConfigFile('config.ini')
assert 'tls' in result.keys()
def test_case04():
cm = ConfigManager()
result = cm.readConfigFile('config.ini')
connection = result['connection']
assert len(connection) > 0
assert isinstance(connection, dict)
assert len(connection) > 0
assert 'port' in connection.keys()
def test_case05():
cm = ConfigManager()
result = cm.parse_defaults()
assert isinstance(result, dict)
def test_case06():
cm = ConfigManager()
result = cm.parse_defaults()
assert len(result.keys()) > 0
def test_case07():
cm = ConfigManager()
result = cm.parse_defaults()
result1 = cm.defaults
assert len(result) == len(result1)
def test_case08():
cm = ConfigManager()
result = cm.defaults
elements = list(result.keys())
mandatoryItems = ['port', 'serverPort']
assert all(item in elements for item in mandatoryItems)
def test_case09():
cm = ConfigManager()
result = cm.defaults
value = int(result['port'])
assert value == 4242
def test_case10():
cm = ConfigManager()
result = cm.defaults
if version < "7.0":
assert int(result['port']) == 4242 and int(result['serverPort']) == 9084
else:
assert int(result['port']) == 4242 and int(result['serverPort']) == 9980
def test_case11():
cm = ConfigManager()
result = cm.defaults
assert 'includeDiskData' in result.keys()
assert result['includeDiskData'] == 'no'
def test_case12():
cm = ConfigManager()
result = cm.defaults
assert 'apiKeyValue' not in result.keys()
def test_case13():
cm = ConfigManager()
result = cm.defaults
assert 'protocol' in result.keys()
assert result['protocol'] == 'http'
| 22.882979 | 80 | 0.656439 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 213 | 0.099024 |
4498797169e379e4f71ed514fe503e98825e7c72 | 4,897 | py | Python | languageModel.py | Zgjszjggjt/DeepLearning | 51889e57f8c7a0a861c356a0e3454c3c3625b0a7 | [
"MIT"
] | null | null | null | languageModel.py | Zgjszjggjt/DeepLearning | 51889e57f8c7a0a861c356a0e3454c3c3625b0a7 | [
"MIT"
] | null | null | null | languageModel.py | Zgjszjggjt/DeepLearning | 51889e57f8c7a0a861c356a0e3454c3c3625b0a7 | [
"MIT"
] | null | null | null | #!/usr/bin/evn python
#-*- coding: utf-8 -*-
# ===================================
# Filename : languageModel.py
# Author : GT
# Create date : 17-09-20 18:33:43
# Description:
# ===================================
# Script starts from here
# this is for chinese characters
# import sys
# reload(sys)
# sys.setdefaultencoding('utf-8')
import numpy as np
import nltk
import csv
import itertools
def softmax(x):
xt = np.exp(x - np.max(x))
return xt / np.sum(xt)
class getData(object):
def __init__(self):
self.vocabulary_size = 8000
self.unknown_token = 'UNKNOWN_TOKEN'
self.sentence_start_token = 'SENTENCE_START'
self.sentence_end_token = "SENTENCE_END"
def encode(self, path):
print 'Reading csv file %s' % path
with open(path, 'rb') as f:
reader = csv.reader(f, skipinitialspace = True)
reader.next()
self.sentences = itertools.chain(*[nltk.sent_tokenize(x[0].decode('utf-8').lower()) for x in reader])
self.sentences = ['%s %s %s' % (self.sentence_start_token, x, self.sentence_end_token) for x in self.sentences]
print 'Parsed %d sentences' % len(self.sentences)
self.tokenize_sentences = [nltk.word_tokenize(x) for x in self.sentences]
self.word_freq = nltk.FreqDist(itertools.chain(*self.tokenize_sentences))
print 'Found %d unique words tokens.' % len(self.word_freq)
vocab = self.word_freq.most_common(self.vocabulary_size - 1)
self.index_to_word = [x[0] for x in vocab]
self.index_to_word.append(self.unknown_token)
self.word_to_index = dict([(w, i) for i, w in enumerate(self.index_to_word)])
print 'Using vocabulary size %d .' % self.vocabulary_size
print 'The least frequent word in our vocabulary is %s and appear %d times.' % (vocab[-1][0], vocab[-1][1])
for i, sent in enumerate(self.tokenize_sentences):
self.tokenize_sentences[i] = [w if w in self.word_to_index else self.unknown_token for w in sent]
print '\nExample sentence: %s' % self.sentences[0]
print '\nExample sentence after encoding: %s' % self.tokenize_sentences[0]
self.x_train = np.asarray([[self.word_to_index[w] for w in sent[:-1]] for sent in self.tokenize_sentences])
self.y_train = np.asarray([[self.word_to_index[w] for w in sent[1:]] for sent in self.tokenize_sentences])
return self.x_train, self.y_train
class RNN(object):
def __init__(self, vocabulary_size, hidden_size, bptt_turns):
self.vocabulary_size = vocabulary_size
self.hidden_size = hidden_size
self.bptt_turns = bptt_turns
self.U = np.random.uniform(-np.sqrt(1. / vocabulary_size), np.sqrt(1. / vocabulary_size), (hidden_size, vocabulary_size))
self.V = np.random.uniform(-np.sqrt(1. / hidden_size), np.sqrt(1. / hidden_size), (vocabulary_size, hidden_size))
self.W = np.random.uniform(-np.sqrt(1. / hidden_size), np.sqrt(1. / hidden_size), (hidden_size, hidden_size))
def forward(self, x):
C_size = len(x)
s = np.zeros((C_size + 1, self.hidden_size))
s[-1] = np.zeros(self.hidden_size)
o = np.zeros((C_size, self.vocabulary_size))
for i in np.arange(C_size):
s[i] = np.tanh(self.U[:, x[i]] + self.W.dot(s[i - 1]))
o[i] = softmax(self.V.dot(s[i]))
return s, o
def predict(self):
maxIndex = np.argmax(self.o, axis = 1)
print self.maxIndex
def get_s(self):
return self.s
def get_o(self):
return self.o
def caculate_total_loss(self, x, y):
loss = 0.
for i in range(len(y)):
s, o = self.forward(x[i])
correct_word_predictions = o[np.arange(len(y[i])), y[i]]
print correct_word_predictions.shape
print correct_word_predictions
loss += -1 * np.sum(np.log(correct_word_predictions))
return loss
def caculate_loss(self, x, y):
N = np.sum((len(y_i) for y_i in y))
print self.caculate_total_loss(x, y)/N
def bptt(self, x, y):
length = len(y);
s, o = self.forward(x)
dLdU = np.zeros_like(self.U)
dLdV = np.zeros_like(self.V)
dLdW = np.zeros_like(self.W)
delta_o = o
delta_o[np.arange(len(y)), y] -= 1.
for t in np.arange(length)[::-1]:
dLdV += np.outer(delta_o[t]. s[t].T)
delta_t = self.V.T.dot(delta_o[t]) * (1 - (s[t] ** 2))
if __name__ == '__main__':
getData = getData()
x, y = getData.encode('./DateSet/reddit-comments-2015-08.csv')
# print x.shape
# print y.shape
# print x[0]
# print y[0]
rnn = RNN(8000, 100, 4)
# rnn.forward(x[:1000])
# rnn,predict()
# print rnn.get_o().shape
# print rnn.get_o()
rnn.caculate_loss(x[:1000], y[:1000])
| 37.381679 | 129 | 0.604656 | 4,048 | 0.826629 | 0 | 0 | 0 | 0 | 0 | 0 | 806 | 0.164591 |
44994ba8d3a7a8b4c205743af6ca6a46fdb3191a | 5,660 | py | Python | wordle.py | andytholmes/wordle_solver | 44062a60b45eea17ad5eda3a9cc37540b3c18866 | [
"MIT"
] | null | null | null | wordle.py | andytholmes/wordle_solver | 44062a60b45eea17ad5eda3a9cc37540b3c18866 | [
"MIT"
] | null | null | null | wordle.py | andytholmes/wordle_solver | 44062a60b45eea17ad5eda3a9cc37540b3c18866 | [
"MIT"
] | null | null | null | from typing import List
import numpy as np
import wordfreq
class Word:
def __init__(self):
self.word = ''
self.frequency = 0
self.distinct_vowels = []
self.distinct_letters = []
def __repr__(self):
return f"Word({self.word},{self.get_score()})"
def setproperties(self, word: str, frequency: float, distinct_vowels: int,
distinct_letters: int):
self.word = word
self.frequency = frequency
self.distinct_vowels = distinct_vowels
self.distinct_letters = distinct_letters
def get_score(self):
score = 0
if self.frequency != 0:
score = self.frequency + \
self.distinct_vowels + \
self.distinct_letters
return score
class Guess:
def __init__(self):
self.letters_in_word = []
self.final_word = "_____"
self.excluded_letters_ordinal = [[], [], [], [], []]
self.excluded_letters = []
self.used_words = []
self.wordles = []
def __repr__(self):
return f"Guess({self.final_word})"
def is_word_good_guess(self, word: str):
for w in self.used_words:
if w == word:
return False
for i, letter in enumerate(word):
if self.final_word[i] == word[i]:
continue
if i < len(self.excluded_letters_ordinal) and \
letter in self.excluded_letters_ordinal[i]:
return False
if letter.upper() == letter:
return False
if letter in self.excluded_letters:
return False
for letter in self.letters_in_word:
if letter not in word:
return False
for i, letter in enumerate(self.final_word):
if letter == '_':
continue
if letter != word[i]:
return False
return True
def add_wordle(self, wordle: str, word: str):
for i, letter in enumerate(wordle):
if letter == 'G':
self.final_word = self.final_word[:i] + \
word[i] + \
self.final_word[i+1:]
if letter == 'Y':
self.excluded_letters_ordinal[i].append(word[i])
self.letters_in_word.append(word[i])
if letter == '_':
self.excluded_letters.append(word[i])
self.used_words.append(word)
self.wordles.append(wordle)
def get_wordle_from_secret(guess, secret):
if len(secret) != 5:
raise Exception('Secret word needs to be 5 letters')
if len(guess) != 5:
raise Exception('guess word needs to be 5 letters')
wordle = ''
g_check = ''
s_check = ''
for i, letter in enumerate(guess):
if letter == secret[i]:
wordle = wordle + 'G'
g_check = g_check + 'X'
s_check = s_check + 'X'
else:
wordle = wordle + '_'
g_check = g_check + '_'
s_check = s_check + '_'
for g, g_letter in enumerate(guess):
if g_check[g] != '_':
continue
for s, s_letter in enumerate(secret):
if s_check[s] != '_':
continue
if s_letter == g_letter:
wordle = wordle[:g] + 'Y' + wordle[g+1:]
g_check = g_check[:g] + 'X' + g_check[g+1:]
s_check = s_check[:s] + 'X' + s_check[s+1:]
continue
return wordle
def is_guess_a_valid_word(guess):
word_list = ['valid', 'tough']
ret_val = False
if guess in word_list:
ret_val = True
return ret_val
def get_list_of_words(filepath):
text_file = open(filepath, "r")
file_str = text_file.read().strip()
words = file_str.split('\n')
text_file.close()
return words
def save_list_of_words(words, filepath):
a_file = open(filepath, "w")
np.savetxt(a_file, words, fmt='%s')
a_file.close()
return
# prepare_word_file('/usr/share/dict/words','words.txt')
def prepare_word_file(from_filepath, to_filepath):
word_array = get_list_of_words(from_filepath)
good_words = []
for word in word_array:
if len(word) == 5:
good_words.append(word)
save_list_of_words(good_words, to_filepath)
def count_vowels_in_word(word: str) -> int:
letters = set(word)
cnt = 0
for letter in letters:
if letter in ('a', 'e', 'i', 'o', 'u'):
cnt = cnt + 1
return cnt
def get_word_attributes(words: List[str]) -> List[Word]:
n = []
for word in words:
freq = wordfreq.word_frequency(word, 'en', wordlist='small')
distinct_vowels = count_vowels_in_word(word)
distinct_letters = len(set(word))
w = Word()
w.setproperties(word, freq, distinct_vowels, distinct_letters)
n.append(w)
n.sort(key=lambda x: x.get_score(), reverse=True)
return n
def make_a_guess(word_array: Word, guess: Guess):
for word in word_array:
if guess.is_word_good_guess(word.word) is True:
return word.word
return ''
def guess_from_secret(secret: str, guess: Guess):
word_string_array = get_list_of_words("words.txt")
word_array = get_word_attributes(word_string_array)
i = 0
while '_' in guess.final_word and i < 10:
word = make_a_guess(word_array, guess)
wordle = get_wordle_from_secret(word.word, secret)
parse_wordle(guess, wordle, word.word)
i = i+1
if '_' in guess.final_word:
return False
return True
| 28.159204 | 78 | 0.563781 | 2,493 | 0.440459 | 0 | 0 | 0 | 0 | 0 | 0 | 324 | 0.057244 |
92209edfa3b4b39c5cb3b3ecc44a58d42206bc22 | 3,264 | py | Python | webapp/app/routes.py | vladan-stojnic/NI4OS-RSSC | 02542fc6ed59acd52cd06a3c1ab191a7da2713d8 | [
"MIT"
] | null | null | null | webapp/app/routes.py | vladan-stojnic/NI4OS-RSSC | 02542fc6ed59acd52cd06a3c1ab191a7da2713d8 | [
"MIT"
] | null | null | null | webapp/app/routes.py | vladan-stojnic/NI4OS-RSSC | 02542fc6ed59acd52cd06a3c1ab191a7da2713d8 | [
"MIT"
] | null | null | null | from flask import render_template, redirect, url_for, escape, request
from app import app
from app.forms import URLForm, FilesForm
from app.utils import perform_url_request, perform_upload_request
import requests
import base64
import json
@app.route('/url-api', methods=['POST'])
def url_api():
urls = request.form['urls'].split(',')
headers = {'content-type': 'application/json'}
req = {'signature_name': 'serving_default',
'instances': []}
for url in urls:
image_bytes = base64.b64encode(requests.get(url).content).decode('utf-8')
req['instances'].append({'b64': image_bytes})
json_response = requests.post('http://rssc:8501/v1/models/rssc/versions/1:predict',
headers=headers,
data=json.dumps(req))
return json_response.json()
@app.route('/multilabel-url-api', methods=['POST'])
def multilabel_url_api():
urls = request.form['urls'].split(',')
headers = {'content-type': 'application/json'}
req = {'signature_name': 'serving_default',
'instances': []}
for url in urls:
image_bytes = base64.b64encode(requests.get(url).content).decode('utf-8')
req['instances'].append({'b64': image_bytes})
json_response = requests.post('http://rssc:8501/v1/models/multilabel-rssc/versions/1:predict',
headers=headers,
data=json.dumps(req))
return json_response.json()
@app.route('/upload-api', methods=['POST'])
def classify_images():
json_response = requests.post('http://rssc:8501/v1/models/rssc/versions/1:predict',
headers=request.headers,
data=request.data)
return json_response.json()
@app.route('/multilabel-upload-api', methods=['POST'])
def classify_images_multilabel():
json_response = requests.post('http://rssc:8501/v1/models/multilabel-rssc/versions/1:predict',
headers=request.headers,
data=request.data)
return json_response.json()
@app.route('/', methods=['GET', 'POST'])
@app.route('/index', methods=['GET', 'POST'])
def index():
return redirect(url_for('url'))
@app.route('/url', methods=['GET', 'POST'])
def url():
form = URLForm()
if form.validate_on_submit():
result = perform_url_request(form.url.data, form.task.data)
return render_template('result.html', title='Results', res=result, task=form.task.data.lower())
return render_template('url.html', title='URL', form=form)
@app.route('/upload', methods=['GET', 'POST'])
def upload():
form = FilesForm()
if form.validate_on_submit():
result = perform_upload_request(form.files.data, form.task.data)
#print(form.files.data.mimetype)
return render_template('result.html', title='Results',
res=result, task=form.task.data.lower())
#print(form.files.data.read())
return render_template('files.html', title='Upload', form=form)
@app.errorhandler(404)
def page_not_found(e):
return render_template('404.html', title='404 - Not Found'), 404
@app.errorhandler(500)
def internal_error(e):
return render_template('500.html', title='500 - Server Error'), 500
| 34 | 103 | 0.6394 | 0 | 0 | 0 | 0 | 3,002 | 0.91973 | 0 | 0 | 811 | 0.248468 |
9223388ce50bc2c66a5df942c1f011a31ba0d6df | 3,145 | py | Python | globus_automate_client/graphviz_rendering.py | globus/globus-automate-client | ec5f21a0b1de1c2d90c3e4b4a117177e52ff74e7 | [
"Apache-2.0"
] | 2 | 2020-08-26T19:43:39.000Z | 2020-09-03T20:50:07.000Z | globus_automate_client/graphviz_rendering.py | globus/globus-automate-client | ec5f21a0b1de1c2d90c3e4b4a117177e52ff74e7 | [
"Apache-2.0"
] | 49 | 2020-08-24T15:15:05.000Z | 2022-03-07T18:39:44.000Z | globus_automate_client/graphviz_rendering.py | globus/globus-automate-client | ec5f21a0b1de1c2d90c3e4b4a117177e52ff74e7 | [
"Apache-2.0"
] | 3 | 2020-08-23T21:53:00.000Z | 2021-09-30T23:34:01.000Z | import json
from typing import Any, Dict, List, Mapping, Optional
from graphviz import Digraph
_SHAPE_TYPES = {
"Choice": {"shape": "diamond"},
"Action": {"shape": "box"},
"Succeed": {"shape": "box", "style": "rounded"},
}
_COLOR_PRECEDENCE = ["", "yellow", "orange", "green", "red"]
def json_to_label_text(json_dict: Mapping[str, Any]) -> str:
label_text = json.dumps(json_dict, indent=1)
label_text = label_text.replace("\n", '<br ALIGN="LEFT"/>')
return label_text
def state_colors_for_log(flow_action_log_entries: List[Mapping]) -> Dict[str, str]:
color_dict: Dict[str, str] = {}
for log_entry in flow_action_log_entries:
state_name = log_entry.get("details", {}).get("state_name")
if state_name is not None:
code = log_entry.get("code", "")
cur_state_color_precedence = _COLOR_PRECEDENCE.index(
color_dict.get(state_name, "")
)
color = ""
if code.endswith("Completed"):
color = "green"
elif code.endswith("Started"):
color = "yellow"
elif code == "ActionPolled":
color = "orange"
if _COLOR_PRECEDENCE.index(color) > cur_state_color_precedence:
color_dict[state_name] = color
return color_dict
def graphviz_format(
flow: Dict[str, Any], state_colors: Optional[Dict[str, str]] = None
) -> Digraph:
states = flow.get("States")
graph = Digraph()
if state_colors is None:
state_colors = {}
if isinstance(states, dict):
for state_name, state_def in states.items():
state_type = state_def.get("Type")
# At least on Choice, Default also exists as a next state name
next_state = state_def.get("Next", state_def.get("Default"))
node_params = _SHAPE_TYPES.get(state_type, {"shape": "ellipse"})
node_params["label"] = state_name
parameters = state_def.get("Parameters")
if parameters:
parameter_text = json_to_label_text(parameters)
node_params["label"] = node_params["label"] + "<br/>" + parameter_text
else:
input_path = state_def.get("InputPath")
if input_path:
node_params["label"] = (
node_params["label"] + "<br/>" + f"InputPath: {input_path}"
)
if state_name in state_colors:
node_params["fillcolor"] = state_colors[state_name]
node_params["style"] = "filled"
node_params["label"] = "<" + node_params["label"] + '<br ALIGN="LEFT"/>>'
graph.node(state_name, **node_params)
if next_state:
graph.edge(state_name, next_state)
choices = state_def.get("Choices", [])
for choice in choices:
choice_next = choice.pop("Next")
choice_text = "<" + json_to_label_text(choice) + '<br ALIGN="LEFT"/>>'
graph.edge(state_name, choice_next, label=choice_text, style="dotted")
return graph
| 37.891566 | 86 | 0.573927 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 541 | 0.172019 |
922347cbcb84554379a27400c108d56de27b542e | 9,731 | py | Python | ahio/drivers/generic_tcp_io.py | acristoffers/Loki | 9d209017593fabe4bc6b39b4156101bd32a824cb | [
"MIT"
] | 1 | 2018-05-06T18:43:40.000Z | 2018-05-06T18:43:40.000Z | ahio/drivers/generic_tcp_io.py | acristoffers/Loki | 9d209017593fabe4bc6b39b4156101bd32a824cb | [
"MIT"
] | null | null | null | ahio/drivers/generic_tcp_io.py | acristoffers/Loki | 9d209017593fabe4bc6b39b4156101bd32a824cb | [
"MIT"
] | null | null | null | # -*- coding: utf-8; -*-
#
# Copyright (c) 2016 Álan Crístoffer
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import ahio.abstract_driver
import json
import socket
from enum import Enum
class ahioDriverInfo(ahio.abstract_driver.AbstractahioDriverInfo):
NAME = 'GenericTCPIO'
AVAILABLE = True
class Driver(ahio.abstract_driver.AbstractDriver):
_socket = None
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, traceback):
if self._socket:
self._socket.send(b'QUIT\n')
self._socket.close()
def setup(self, address, port):
"""Connects to server at `address`:`port`.
Connects to a TCP server listening at `address`:`port` that implements
the protocol described in the file "Generic TCP I:O Protocol.md"
@arg address IP or address to connect to.
@arg port port to connect to.
@throw RuntimeError if connection was successiful but protocol isn't
supported.
@throw any exception thrown by `socket.socket`'s methods.
"""
address = str(address)
port = int(port)
self._socket = socket.socket()
self._socket.connect((address, port))
self._socket.send(b'HELLO 1.0\n')
with self._socket.makefile() as f:
if f.readline().strip() != 'OK':
raise RuntimeError('Protocol not supported')
def __clamp(self, value, min, max):
return sorted((min, value, max))[1]
def available_pins(self):
self._socket.send(b'LISTPORTS\n')
with self._socket.makefile() as f:
answer = f.readline()
if answer.startswith('OK'):
return json.loads(answer[3:])
elif answer.startswith('ERROR'):
raise RuntimeError(answer[6:])
else:
raise RuntimeError('Unknown response')
def _set_pin_direction(self, pin, direction):
direction = 'INPUT' if direction == ahio.Direction.Input else 'OUTPUT'
command = ('SETDIRECTION %s %s\n' % (pin, direction)).encode('utf8')
self._socket.send(command)
with self._socket.makefile() as f:
answer = f.readline()
if answer.startswith('OK'):
return None
elif answer.startswith('ERROR'):
raise RuntimeError(answer[6:])
else:
raise RuntimeError('Unknown response')
def _pin_direction(self, pin):
command = ('DIRECTION %s\n' % pin).encode('utf8')
self._socket.send(command)
with self._socket.makefile() as f:
answer = f.readline()
if answer.startswith('OK'):
direction = answer[3:].strip()
d = ahio.Direction
return d.Input if direction == 'INPUT' else d.Output
elif answer.startswith('ERROR'):
raise RuntimeError(answer[6:])
else:
raise RuntimeError('Unknown response')
def _set_pin_type(self, pin, ptype):
ptype = 'DIGITAL' if ptype == ahio.PortType.Digital else 'ANALOG'
command = ('SETTYPE %s %s\n' % (pin, ptype)).encode('utf8')
self._socket.send(command)
with self._socket.makefile() as f:
answer = f.readline()
if answer.startswith('OK'):
return None
elif answer.startswith('ERROR'):
raise RuntimeError(answer[6:])
else:
raise RuntimeError('Unknown response')
def _pin_type(self, pin):
command = ('TYPE %s\n' % pin).encode('utf8')
self._socket.send(command)
with self._socket.makefile() as f:
answer = f.readline()
if answer.startswith('OK'):
ptype = answer[3:].strip()
pt = ahio.PortType
return pt.Digital if ptype == 'DIGITAL' else pt.Analog
elif answer.startswith('ERROR'):
raise RuntimeError(answer[6:])
else:
raise RuntimeError('Unknown response')
def _find_port_info(self, pin):
ps = [p for p in self.available_pins() if p['id'] == pin]
if ps:
return ps[0]
else:
return None
def _write(self, pin, value, pwm):
if self._pin_direction(pin) == ahio.Direction.Input:
return None
pin_info = self._find_port_info(pin)
if self._pin_type(pin) == ahio.PortType.Digital:
if not pin_info['digital']['output']:
raise RuntimeError('Pin does not support digital output')
if pwm:
if not pin_info['digital']['pwm']:
raise RuntimeError('Pin does not support PWM')
value = self.__clamp(value, 0, 1)
command = ('WRITEPWM %s %s\n' % (pin, value)).encode('utf8')
else:
value = 'HIGH' if value == ahio.LogicValue.High else 'LOW'
command = ('WRITEDIGITAL %s %s\n' %
(pin, value)).encode('utf8')
else:
if not pin_info['analog']['output']:
raise RuntimeError('Pin does not support analog output')
l = pin_info['analog']['write_range']
value = self.__clamp(value, l[0], l[1])
command = ('WRITEANALOG %s %s\n' % (pin, value)).encode('utf8')
self._socket.send(command)
with self._socket.makefile() as f:
answer = f.readline()
if answer.startswith('OK'):
return None
elif answer.startswith('ERROR'):
raise RuntimeError(answer[6:])
else:
raise RuntimeError('Unknown response')
def _read(self, pin):
pin_info = self._find_port_info(pin)
pin_type = self._pin_type(pin)
if pin_info['digital']['input'] and pin_type == ahio.PortType.Digital:
da = ahio.PortType.Digital
command = ('READDIGITAL %s\n' % pin).encode('utf8')
elif pin_info['analog']['input'] and pin_type == ahio.PortType.Analog:
da = ahio.PortType.Analog
command = ('READANALOG %s\n' % pin).encode('utf8')
else:
raise RuntimeError('Pin does not support input or is not set up')
self._socket.send(command)
with self._socket.makefile() as f:
answer = f.readline()
if answer.startswith('OK'):
value = answer[3:].strip()
if da == ahio.PortType.Digital:
lv = ahio.LogicValue
return lv.High if value == 'HIGH' else lv.Low
else:
return int(value)
elif answer.startswith('ERROR'):
raise RuntimeError(answer[6:])
else:
raise RuntimeError('Unknown response')
def analog_references(self):
self._socket.send(b'ANALOGREFERENCES\n')
with self._socket.makefile() as f:
answer = f.readline()
if answer.startswith('OK'):
__, *opts = answer.strip().split(' ')
return opts
elif answer.startswith('ERROR'):
raise RuntimeError(answer[6:])
else:
raise RuntimeError('Unknown response')
def _set_analog_reference(self, reference, pin):
if pin:
command = ('SETANALOGREFERENCE %s %s\n' % (reference, pin))
else:
command = ('SETANALOGREFERENCE %s\n' % reference)
self._socket.send(command.encode('utf8'))
with self._socket.makefile() as f:
answer = f.readline()
if answer.startswith('OK'):
return
elif answer.startswith('ERROR'):
raise RuntimeError(answer[6:])
else:
raise RuntimeError('Unknown response')
def _analog_reference(self, pin):
if pin:
command = 'ANALOGREFERENCE %s\n' % pin
else:
command = 'ANALOGREFERENCE\n'
self._socket.send(command.encode('utf8'))
with self._socket.makefile() as f:
answer = f.readline()
if answer.startswith('OK'):
return answer.strip().split(' ')[1]
elif answer.startswith('ERROR'):
raise RuntimeError(answer[6:])
else:
raise RuntimeError('Unknown response')
def _set_pwm_frequency(self, frequency, pin):
if pin:
command = 'SETPWMFREQUENCY %s %s\n' % (frequency, pin)
else:
command = 'SETPWMFREQUENCY %s\n' % frequency
self._socket.send(command.encode('utf8'))
with self._socket.makefile() as f:
answer = f.readline()
if answer.startswith('OK'):
return
elif answer.startswith('ERROR'):
raise RuntimeError(answer[6:])
else:
raise RuntimeError('Unknown response')
| 37.863813 | 79 | 0.589867 | 8,526 | 0.875989 | 0 | 0 | 0 | 0 | 0 | 0 | 2,680 | 0.275352 |
92254e7c44b4d02b62556b08a59fb86995acbb5b | 2,463 | py | Python | openstack/tests/unit/network/v2/test_firewall_v1_rule.py | morganseznec/openstacksdk | 7b245c16556a04497ce701d959a889eca6f26a83 | [
"Apache-2.0"
] | null | null | null | openstack/tests/unit/network/v2/test_firewall_v1_rule.py | morganseznec/openstacksdk | 7b245c16556a04497ce701d959a889eca6f26a83 | [
"Apache-2.0"
] | null | null | null | openstack/tests/unit/network/v2/test_firewall_v1_rule.py | morganseznec/openstacksdk | 7b245c16556a04497ce701d959a889eca6f26a83 | [
"Apache-2.0"
] | null | null | null | # Copyright (c) 2019 Morgan Seznec <morgan.s134@gmail.com>
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import testtools
from openstack.network.v2 import firewall_v1_rule
EXAMPLE = {
'action': 'allow',
'description': '1',
'destination_ip_address': '10.0.0.2/24',
'destination_port': '2',
'name': '3',
'enabled': True,
'ip_version': 4,
'protocol': 'tcp',
'shared': True,
'source_ip_address': '10.0.1.2/24',
'source_port': '5',
'project_id': '6',
}
class TestFirewallV1Rule(testtools.TestCase):
def test_basic(self):
sot = firewall_v1_rule.FirewallV1Rule()
self.assertEqual('firewall_v1_rule', sot.resource_key)
self.assertEqual('firewall_v1_rules', sot.resources_key)
self.assertEqual('/fw/firewall_rules', sot.base_path)
self.assertTrue(sot.allow_create)
self.assertTrue(sot.allow_fetch)
self.assertTrue(sot.allow_commit)
self.assertTrue(sot.allow_delete)
self.assertTrue(sot.allow_list)
def test_make_it(self):
sot = firewall_v1_rule.FirewallV1Rule(**EXAMPLE)
self.assertEqual(EXAMPLE['action'], sot.action)
self.assertEqual(EXAMPLE['description'], sot.description)
self.assertEqual(EXAMPLE['destination_ip_address'],
sot.destination_ip_address)
self.assertEqual(EXAMPLE['destination_port'], sot.destination_port)
self.assertEqual(EXAMPLE['name'], sot.name)
self.assertEqual(EXAMPLE['enabled'], sot.enabled)
self.assertEqual(EXAMPLE['ip_version'], sot.ip_version)
self.assertEqual(EXAMPLE['protocol'], sot.protocol)
self.assertEqual(EXAMPLE['shared'], sot.shared)
self.assertEqual(EXAMPLE['source_ip_address'],
sot.source_ip_address)
self.assertEqual(EXAMPLE['source_port'], sot.source_port)
self.assertEqual(EXAMPLE['project_id'], sot.project_id)
| 37.892308 | 75 | 0.686561 | 1,437 | 0.583435 | 0 | 0 | 0 | 0 | 0 | 0 | 1,029 | 0.417783 |
9225b3c1ee55f4d6994467863ce60bcc9130be6c | 54,574 | py | Python | src/genie/libs/parser/junos/tests/ShowChassisEnvironment/cli/equal/golden_output_expected.py | balmasea/genieparser | d1e71a96dfb081e0a8591707b9d4872decd5d9d3 | [
"Apache-2.0"
] | 204 | 2018-06-27T00:55:27.000Z | 2022-03-06T21:12:18.000Z | src/genie/libs/parser/junos/tests/ShowChassisEnvironment/cli/equal/golden_output_expected.py | balmasea/genieparser | d1e71a96dfb081e0a8591707b9d4872decd5d9d3 | [
"Apache-2.0"
] | 468 | 2018-06-19T00:33:18.000Z | 2022-03-31T23:23:35.000Z | src/genie/libs/parser/junos/tests/ShowChassisEnvironment/cli/equal/golden_output_expected.py | balmasea/genieparser | d1e71a96dfb081e0a8591707b9d4872decd5d9d3 | [
"Apache-2.0"
] | 309 | 2019-01-16T20:21:07.000Z | 2022-03-30T12:56:41.000Z | expected_output = {
'environment-information': {
'environment-item': [{
'class': 'Temp',
'name': 'PSM 0',
'status': 'OK',
'temperature': {
'#text': '25 '
'degrees '
'C '
'/ '
'77 '
'degrees '
'F',
'@junos:celsius': '25'
}
}, {
'class': 'Temp',
'name': 'PSM 1',
'status': 'OK',
'temperature': {
'#text': '24 '
'degrees '
'C '
'/ '
'75 '
'degrees '
'F',
'@junos:celsius': '24'
}
}, {
'class': 'Temp',
'name': 'PSM 2',
'status': 'OK',
'temperature': {
'#text': '24 '
'degrees '
'C '
'/ '
'75 '
'degrees '
'F',
'@junos:celsius': '24'
}
}, {
'class': 'Temp',
'name': 'PSM 3',
'status': 'OK',
'temperature': {
'#text': '23 '
'degrees '
'C '
'/ '
'73 '
'degrees '
'F',
'@junos:celsius': '23'
}
}, {
'class': 'Temp',
'name': 'PSM 4',
'status': 'Check'
}, {
'class': 'Temp',
'name': 'PSM 5',
'status': 'Check'
}, {
'class': 'Temp',
'name': 'PSM 6',
'status': 'Check'
}, {
'class': 'Temp',
'name': 'PSM 7',
'status': 'Check'
}, {
'class': 'Temp',
'name': 'PSM 8',
'status': 'Check'
}, {
'class': 'Temp',
'name': 'PSM 9',
'status': 'OK',
'temperature': {
'#text': '29 '
'degrees '
'C '
'/ '
'84 '
'degrees '
'F',
'@junos:celsius': '29'
}
}, {
'class': 'Temp',
'name': 'PSM 10',
'status': 'OK',
'temperature': {
'#text': '30 '
'degrees '
'C '
'/ '
'86 '
'degrees '
'F',
'@junos:celsius': '30'
}
}, {
'class': 'Temp',
'name': 'PSM 11',
'status': 'OK',
'temperature': {
'#text': '30 '
'degrees '
'C '
'/ '
'86 '
'degrees '
'F',
'@junos:celsius': '30'
}
}, {
'class': 'Temp',
'name': 'PSM 12',
'status': 'Check'
}, {
'class': 'Temp',
'name': 'PSM 13',
'status': 'Check'
}, {
'class': 'Temp',
'name': 'PSM 14',
'status': 'Check'
}, {
'class': 'Temp',
'name': 'PSM 15',
'status': 'Check'
}, {
'class': 'Temp',
'name': 'PSM 16',
'status': 'Check'
}, {
'class': 'Temp',
'name': 'PSM 17',
'status': 'Check'
}, {
'class': 'Temp',
'name': 'PDM 0',
'status': 'OK'
}, {
'class': 'Temp',
'name': 'PDM 1',
'status': 'OK'
}, {
'class': 'Temp',
'name': 'PDM 2',
'status': 'OK'
}, {
'class': 'Temp',
'name': 'PDM 3',
'status': 'OK'
}, {
'class': 'Temp',
'name': 'CB 0 IntakeA-Zone0',
'status': 'OK',
'temperature': {
'#text': '39 '
'degrees '
'C '
'/ '
'102 '
'degrees '
'F',
'@junos:celsius': '39'
}
}, {
'class': 'Temp',
'name': 'CB 0 IntakeB-Zone1',
'status': 'OK',
'temperature': {
'#text': '36 '
'degrees '
'C '
'/ '
'96 '
'degrees '
'F',
'@junos:celsius': '36'
}
}, {
'class': 'Temp',
'name': 'CB 0 IntakeC-Zone0',
'status': 'OK',
'temperature': {
'#text': '51 '
'degrees '
'C '
'/ '
'123 '
'degrees '
'F',
'@junos:celsius': '51'
}
}, {
'class': 'Temp',
'name': 'CB 0 '
'ExhaustA-Zone0',
'status': 'OK',
'temperature': {
'#text': '40 '
'degrees '
'C '
'/ '
'104 '
'degrees '
'F',
'@junos:celsius': '40'
}
}, {
'class': 'Temp',
'name': 'CB 0 '
'ExhaustB-Zone1',
'status': 'OK',
'temperature': {
'#text': '35 '
'degrees '
'C '
'/ '
'95 '
'degrees '
'F',
'@junos:celsius': '35'
}
}, {
'class': 'Temp',
'name': 'CB 0 TCBC-Zone0',
'status': 'OK',
'temperature': {
'#text': '45 '
'degrees '
'C '
'/ '
'113 '
'degrees '
'F',
'@junos:celsius': '45'
}
}, {
'class': 'Temp',
'name': 'CB 1 IntakeA-Zone0',
'status': 'OK',
'temperature': {
'#text': '29 '
'degrees '
'C '
'/ '
'84 '
'degrees '
'F',
'@junos:celsius': '29'
}
}, {
'class': 'Temp',
'name': 'CB 1 IntakeB-Zone1',
'status': 'OK',
'temperature': {
'#text': '32 '
'degrees '
'C '
'/ '
'89 '
'degrees '
'F',
'@junos:celsius': '32'
}
}, {
'class': 'Temp',
'name': 'CB 1 IntakeC-Zone0',
'status': 'OK',
'temperature': {
'#text': '33 '
'degrees '
'C '
'/ '
'91 '
'degrees '
'F',
'@junos:celsius': '33'
}
}, {
'class': 'Temp',
'name': 'CB 1 '
'ExhaustA-Zone0',
'status': 'OK',
'temperature': {
'#text': '32 '
'degrees '
'C '
'/ '
'89 '
'degrees '
'F',
'@junos:celsius': '32'
}
}, {
'class': 'Temp',
'name': 'CB 1 '
'ExhaustB-Zone1',
'status': 'OK',
'temperature': {
'#text': '32 '
'degrees '
'C '
'/ '
'89 '
'degrees '
'F',
'@junos:celsius': '32'
}
}, {
'class': 'Temp',
'name': 'CB 1 TCBC-Zone0',
'status': 'OK',
'temperature': {
'#text': '39 '
'degrees '
'C '
'/ '
'102 '
'degrees '
'F',
'@junos:celsius': '39'
}
}, {
'class': 'Temp',
'name': 'SPMB 0 Intake',
'status': 'OK',
'temperature': {
'#text': '35 '
'degrees '
'C '
'/ '
'95 '
'degrees '
'F',
'@junos:celsius': '35'
}
}, {
'class': 'Temp',
'name': 'SPMB 1 Intake',
'status': 'OK',
'temperature': {
'#text': '33 '
'degrees '
'C '
'/ '
'91 '
'degrees '
'F',
'@junos:celsius': '33'
}
}, {
'class': 'Temp',
'name': 'Routing Engine 0',
'status': 'OK',
'temperature': {
'#text': '43 '
'degrees '
'C '
'/ '
'109 '
'degrees '
'F',
'@junos:celsius': '43'
}
}, {
'class': 'Temp',
'name': 'Routing Engine 0 '
'CPU',
'status': 'OK',
'temperature': {
'#text': '39 '
'degrees '
'C '
'/ '
'102 '
'degrees '
'F',
'@junos:celsius': '39'
}
}, {
'class': 'Temp',
'name': 'Routing Engine 1',
'status': 'OK',
'temperature': {
'#text': '40 '
'degrees '
'C '
'/ '
'104 '
'degrees '
'F',
'@junos:celsius': '40'
}
}, {
'class': 'Temp',
'name': 'Routing Engine 1 '
'CPU',
'status': 'OK',
'temperature': {
'#text': '35 '
'degrees '
'C '
'/ '
'95 '
'degrees '
'F',
'@junos:celsius': '35'
}
}, {
'class': 'Temp',
'name': 'SFB 0 Intake-Zone0',
'status': 'OK',
'temperature': {
'#text': '37 '
'degrees '
'C '
'/ '
'98 '
'degrees '
'F',
'@junos:celsius': '37'
}
}, {
'class': 'Temp',
'name': 'SFB 0 '
'Exhaust-Zone1',
'status': 'OK',
'temperature': {
'#text': '45 '
'degrees '
'C '
'/ '
'113 '
'degrees '
'F',
'@junos:celsius': '45'
}
}, {
'class': 'Temp',
'name': 'SFB 0 '
'IntakeA-Zone0',
'status': 'OK',
'temperature': {
'#text': '32 '
'degrees '
'C '
'/ '
'89 '
'degrees '
'F',
'@junos:celsius': '32'
}
}, {
'class': 'Temp',
'name': 'SFB 0 '
'IntakeB-Zone1',
'status': 'OK',
'temperature': {
'#text': '34 '
'degrees '
'C '
'/ '
'93 '
'degrees '
'F',
'@junos:celsius': '34'
}
}, {
'class': 'Temp',
'name': 'SFB 0 '
'Exhaust-Zone0',
'status': 'OK',
'temperature': {
'#text': '36 '
'degrees '
'C '
'/ '
'96 '
'degrees '
'F',
'@junos:celsius': '36'
}
}, {
'class': 'Temp',
'name': 'SFB 0 '
'SFB-XF2-Zone1',
'status': 'OK',
'temperature': {
'#text': '63 '
'degrees '
'C '
'/ '
'145 '
'degrees '
'F',
'@junos:celsius': '63'
}
}, {
'class': 'Temp',
'name': 'SFB 0 '
'SFB-XF1-Zone0',
'status': 'OK',
'temperature': {
'#text': '55 '
'degrees '
'C '
'/ '
'131 '
'degrees '
'F',
'@junos:celsius': '55'
}
}, {
'class': 'Temp',
'name': 'SFB 0 '
'SFB-XF0-Zone0',
'status': 'OK',
'temperature': {
'#text': '52 '
'degrees '
'C '
'/ '
'125 '
'degrees '
'F',
'@junos:celsius': '52'
}
}, {
'class': 'Temp',
'name': 'SFB 1 Intake-Zone0',
'status': 'OK',
'temperature': {
'#text': '35 '
'degrees '
'C '
'/ '
'95 '
'degrees '
'F',
'@junos:celsius': '35'
}
}, {
'class': 'Temp',
'name': 'SFB 1 '
'Exhaust-Zone1',
'status': 'OK',
'temperature': {
'#text': '42 '
'degrees '
'C '
'/ '
'107 '
'degrees '
'F',
'@junos:celsius': '42'
}
}, {
'class': 'Temp',
'name': 'SFB 1 '
'IntakeA-Zone0',
'status': 'OK',
'temperature': {
'#text': '29 '
'degrees '
'C '
'/ '
'84 '
'degrees '
'F',
'@junos:celsius': '29'
}
}, {
'class': 'Temp',
'name': 'SFB 1 '
'IntakeB-Zone1',
'status': 'OK',
'temperature': {
'#text': '32 '
'degrees '
'C '
'/ '
'89 '
'degrees '
'F',
'@junos:celsius': '32'
}
}, {
'class': 'Temp',
'name': 'SFB 1 '
'Exhaust-Zone0',
'status': 'OK',
'temperature': {
'#text': '34 '
'degrees '
'C '
'/ '
'93 '
'degrees '
'F',
'@junos:celsius': '34'
}
}, {
'class': 'Temp',
'name': 'SFB 1 '
'SFB-XF2-Zone1',
'status': 'OK',
'temperature': {
'#text': '63 '
'degrees '
'C '
'/ '
'145 '
'degrees '
'F',
'@junos:celsius': '63'
}
}, {
'class': 'Temp',
'name': 'SFB 1 '
'SFB-XF1-Zone0',
'status': 'OK',
'temperature': {
'#text': '53 '
'degrees '
'C '
'/ '
'127 '
'degrees '
'F',
'@junos:celsius': '53'
}
}, {
'class': 'Temp',
'name': 'SFB 1 '
'SFB-XF0-Zone0',
'status': 'OK',
'temperature': {
'#text': '50 '
'degrees '
'C '
'/ '
'122 '
'degrees '
'F',
'@junos:celsius': '50'
}
}, {
'class': 'Temp',
'name': 'SFB 2 Intake-Zone0',
'status': 'OK',
'temperature': {
'#text': '35 '
'degrees '
'C '
'/ '
'95 '
'degrees '
'F',
'@junos:celsius': '35'
}
}, {
'class': 'Temp',
'name': 'SFB 2 '
'Exhaust-Zone1',
'status': 'OK',
'temperature': {
'#text': '42 '
'degrees '
'C '
'/ '
'107 '
'degrees '
'F',
'@junos:celsius': '42'
}
}, {
'class': 'Temp',
'name': 'SFB 2 '
'IntakeA-Zone0',
'status': 'OK',
'temperature': {
'#text': '30 '
'degrees '
'C '
'/ '
'86 '
'degrees '
'F',
'@junos:celsius': '30'
}
}, {
'class': 'Temp',
'name': 'SFB 2 '
'IntakeB-Zone1',
'status': 'OK',
'temperature': {
'#text': '32 '
'degrees '
'C '
'/ '
'89 '
'degrees '
'F',
'@junos:celsius': '32'
}
}, {
'class': 'Temp',
'name': 'SFB 2 '
'Exhaust-Zone0',
'status': 'OK',
'temperature': {
'#text': '34 '
'degrees '
'C '
'/ '
'93 '
'degrees '
'F',
'@junos:celsius': '34'
}
}, {
'class': 'Temp',
'name': 'SFB 2 '
'SFB-XF2-Zone1',
'status': 'OK',
'temperature': {
'#text': '60 '
'degrees '
'C '
'/ '
'140 '
'degrees '
'F',
'@junos:celsius': '60'
}
}, {
'class': 'Temp',
'name': 'SFB 2 '
'SFB-XF1-Zone0',
'status': 'OK',
'temperature': {
'#text': '53 '
'degrees '
'C '
'/ '
'127 '
'degrees '
'F',
'@junos:celsius': '53'
}
}, {
'class': 'Temp',
'name': 'SFB 2 '
'SFB-XF0-Zone0',
'status': 'OK',
'temperature': {
'#text': '56 '
'degrees '
'C '
'/ '
'132 '
'degrees '
'F',
'@junos:celsius': '56'
}
}, {
'class': 'Temp',
'name': 'SFB 3 Intake-Zone0',
'status': 'OK',
'temperature': {
'#text': '35 '
'degrees '
'C '
'/ '
'95 '
'degrees '
'F',
'@junos:celsius': '35'
}
}, {
'class': 'Temp',
'name': 'SFB 3 '
'Exhaust-Zone1',
'status': 'OK',
'temperature': {
'#text': '42 '
'degrees '
'C '
'/ '
'107 '
'degrees '
'F',
'@junos:celsius': '42'
}
}, {
'class': 'Temp',
'name': 'SFB 3 '
'IntakeA-Zone0',
'status': 'OK',
'temperature': {
'#text': '29 '
'degrees '
'C '
'/ '
'84 '
'degrees '
'F',
'@junos:celsius': '29'
}
}, {
'class': 'Temp',
'name': 'SFB 3 '
'IntakeB-Zone1',
'status': 'OK',
'temperature': {
'#text': '32 '
'degrees '
'C '
'/ '
'89 '
'degrees '
'F',
'@junos:celsius': '32'
}
}, {
'class': 'Temp',
'name': 'SFB 3 '
'Exhaust-Zone0',
'status': 'OK',
'temperature': {
'#text': '34 '
'degrees '
'C '
'/ '
'93 '
'degrees '
'F',
'@junos:celsius': '34'
}
}, {
'class': 'Temp',
'name': 'SFB 3 '
'SFB-XF2-Zone1',
'status': 'OK',
'temperature': {
'#text': '61 '
'degrees '
'C '
'/ '
'141 '
'degrees '
'F',
'@junos:celsius': '61'
}
}, {
'class': 'Temp',
'name': 'SFB 3 '
'SFB-XF1-Zone0',
'status': 'OK',
'temperature': {
'#text': '53 '
'degrees '
'C '
'/ '
'127 '
'degrees '
'F',
'@junos:celsius': '53'
}
}, {
'class': 'Temp',
'name': 'SFB 3 '
'SFB-XF0-Zone0',
'status': 'OK',
'temperature': {
'#text': '50 '
'degrees '
'C '
'/ '
'122 '
'degrees '
'F',
'@junos:celsius': '50'
}
}, {
'class': 'Temp',
'name': 'SFB 4 Intake-Zone0',
'status': 'OK',
'temperature': {
'#text': '34 '
'degrees '
'C '
'/ '
'93 '
'degrees '
'F',
'@junos:celsius': '34'
}
}, {
'class': 'Temp',
'name': 'SFB 4 '
'Exhaust-Zone1',
'status': 'OK',
'temperature': {
'#text': '42 '
'degrees '
'C '
'/ '
'107 '
'degrees '
'F',
'@junos:celsius': '42'
}
}, {
'class': 'Temp',
'name': 'SFB 4 '
'IntakeA-Zone0',
'status': 'OK',
'temperature': {
'#text': '29 '
'degrees '
'C '
'/ '
'84 '
'degrees '
'F',
'@junos:celsius': '29'
}
}, {
'class': 'Temp',
'name': 'SFB 4 '
'IntakeB-Zone1',
'status': 'OK',
'temperature': {
'#text': '32 '
'degrees '
'C '
'/ '
'89 '
'degrees '
'F',
'@junos:celsius': '32'
}
}, {
'class': 'Temp',
'name': 'SFB 4 '
'Exhaust-Zone0',
'status': 'OK',
'temperature': {
'#text': '34 '
'degrees '
'C '
'/ '
'93 '
'degrees '
'F',
'@junos:celsius': '34'
}
}, {
'class': 'Temp',
'name': 'SFB 4 '
'SFB-XF2-Zone1',
'status': 'OK',
'temperature': {
'#text': '64 '
'degrees '
'C '
'/ '
'147 '
'degrees '
'F',
'@junos:celsius': '64'
}
}, {
'class': 'Temp',
'name': 'SFB 4 '
'SFB-XF1-Zone0',
'status': 'OK',
'temperature': {
'#text': '53 '
'degrees '
'C '
'/ '
'127 '
'degrees '
'F',
'@junos:celsius': '53'
}
}, {
'class': 'Temp',
'name': 'SFB 4 '
'SFB-XF0-Zone0',
'status': 'OK',
'temperature': {
'#text': '50 '
'degrees '
'C '
'/ '
'122 '
'degrees '
'F',
'@junos:celsius': '50'
}
}, {
'class': 'Temp',
'name': 'SFB 5 Intake-Zone0',
'status': 'OK',
'temperature': {
'#text': '34 '
'degrees '
'C '
'/ '
'93 '
'degrees '
'F',
'@junos:celsius': '34'
}
}, {
'class': 'Temp',
'name': 'SFB 5 '
'Exhaust-Zone1',
'status': 'OK',
'temperature': {
'#text': '41 '
'degrees '
'C '
'/ '
'105 '
'degrees '
'F',
'@junos:celsius': '41'
}
}, {
'class': 'Temp',
'name': 'SFB 5 '
'IntakeA-Zone0',
'status': 'OK',
'temperature': {
'#text': '29 '
'degrees '
'C '
'/ '
'84 '
'degrees '
'F',
'@junos:celsius': '29'
}
}, {
'class': 'Temp',
'name': 'SFB 5 '
'IntakeB-Zone1',
'status': 'OK',
'temperature': {
'#text': '31 '
'degrees '
'C '
'/ '
'87 '
'degrees '
'F',
'@junos:celsius': '31'
}
}, {
'class': 'Temp',
'name': 'SFB 5 '
'Exhaust-Zone0',
'status': 'OK',
'temperature': {
'#text': '34 '
'degrees '
'C '
'/ '
'93 '
'degrees '
'F',
'@junos:celsius': '34'
}
}, {
'class': 'Temp',
'name': 'SFB 5 '
'SFB-XF2-Zone1',
'status': 'OK',
'temperature': {
'#text': '63 '
'degrees '
'C '
'/ '
'145 '
'degrees '
'F',
'@junos:celsius': '63'
}
}, {
'class': 'Temp',
'name': 'SFB 5 '
'SFB-XF1-Zone0',
'status': 'OK',
'temperature': {
'#text': '53 '
'degrees '
'C '
'/ '
'127 '
'degrees '
'F',
'@junos:celsius': '53'
}
}, {
'class': 'Temp',
'name': 'SFB 5 '
'SFB-XF0-Zone0',
'status': 'OK',
'temperature': {
'#text': '50 '
'degrees '
'C '
'/ '
'122 '
'degrees '
'F',
'@junos:celsius': '50'
}
}, {
'class': 'Temp',
'name': 'SFB 6 Intake-Zone0',
'status': 'OK',
'temperature': {
'#text': '34 '
'degrees '
'C '
'/ '
'93 '
'degrees '
'F',
'@junos:celsius': '34'
}
}, {
'class': 'Temp',
'name': 'SFB 6 '
'Exhaust-Zone1',
'status': 'OK',
'temperature': {
'#text': '42 '
'degrees '
'C '
'/ '
'107 '
'degrees '
'F',
'@junos:celsius': '42'
}
}, {
'class': 'Temp',
'name': 'SFB 6 '
'IntakeA-Zone0',
'status': 'OK',
'temperature': {
'#text': '29 '
'degrees '
'C '
'/ '
'84 '
'degrees '
'F',
'@junos:celsius': '29'
}
}, {
'class': 'Temp',
'name': 'SFB 6 '
'IntakeB-Zone1',
'status': 'OK',
'temperature': {
'#text': '32 '
'degrees '
'C '
'/ '
'89 '
'degrees '
'F',
'@junos:celsius': '32'
}
}, {
'class': 'Temp',
'name': 'SFB 6 '
'Exhaust-Zone0',
'status': 'OK',
'temperature': {
'#text': '34 '
'degrees '
'C '
'/ '
'93 '
'degrees '
'F',
'@junos:celsius': '34'
}
}, {
'class': 'Temp',
'name': 'SFB 6 '
'SFB-XF2-Zone1',
'status': 'OK',
'temperature': {
'#text': '62 '
'degrees '
'C '
'/ '
'143 '
'degrees '
'F',
'@junos:celsius': '62'
}
}, {
'class': 'Temp',
'name': 'SFB 6 '
'SFB-XF1-Zone0',
'status': 'OK',
'temperature': {
'#text': '53 '
'degrees '
'C '
'/ '
'127 '
'degrees '
'F',
'@junos:celsius': '53'
}
}, {
'class': 'Temp',
'name': 'SFB 6 '
'SFB-XF0-Zone0',
'status': 'OK',
'temperature': {
'#text': '49 '
'degrees '
'C '
'/ '
'120 '
'degrees '
'F',
'@junos:celsius': '49'
}
}, {
'class': 'Temp',
'name': 'SFB 7 Intake-Zone0',
'status': 'OK',
'temperature': {
'#text': '35 '
'degrees '
'C '
'/ '
'95 '
'degrees '
'F',
'@junos:celsius': '35'
}
}, {
'class': 'Temp',
'name': 'SFB 7 '
'Exhaust-Zone1',
'status': 'OK',
'temperature': {
'#text': '43 '
'degrees '
'C '
'/ '
'109 '
'degrees '
'F',
'@junos:celsius': '43'
}
}, {
'class': 'Temp',
'name': 'SFB 7 '
'IntakeA-Zone0',
'status': 'OK',
'temperature': {
'#text': '31 '
'degrees '
'C '
'/ '
'87 '
'degrees '
'F',
'@junos:celsius': '31'
}
}, {
'class': 'Temp',
'name': 'SFB 7 '
'IntakeB-Zone1',
'status': 'OK',
'temperature': {
'#text': '32 '
'degrees '
'C '
'/ '
'89 '
'degrees '
'F',
'@junos:celsius': '32'
}
}, {
'class': 'Temp',
'name': 'SFB 7 '
'Exhaust-Zone0',
'status': 'OK',
'temperature': {
'#text': '35 '
'degrees '
'C '
'/ '
'95 '
'degrees '
'F',
'@junos:celsius': '35'
}
}, {
'class': 'Temp',
'name': 'SFB 7 '
'SFB-XF2-Zone1',
'status': 'OK',
'temperature': {
'#text': '65 '
'degrees '
'C '
'/ '
'149 '
'degrees '
'F',
'@junos:celsius': '65'
}
}, {
'class': 'Temp',
'name': 'SFB 7 '
'SFB-XF1-Zone0',
'status': 'OK',
'temperature': {
'#text': '56 '
'degrees '
'C '
'/ '
'132 '
'degrees '
'F',
'@junos:celsius': '56'
}
}, {
'class': 'Temp',
'name': 'SFB 7 '
'SFB-XF0-Zone0',
'status': 'OK',
'temperature': {
'#text': '52 '
'degrees '
'C '
'/ '
'125 '
'degrees '
'F',
'@junos:celsius': '52'
}
}, {
'class': 'Temp',
'name': 'FPC 0 Intake',
'status': 'OK',
'temperature': {
'#text': '29 '
'degrees '
'C '
'/ '
'84 '
'degrees '
'F',
'@junos:celsius': '29'
}
}, {
'class': 'Temp',
'name': 'FPC 0 Exhaust A',
'status': 'OK',
'temperature': {
'#text': '53 '
'degrees '
'C '
'/ '
'127 '
'degrees '
'F',
'@junos:celsius': '53'
}
}, {
'class': 'Temp',
'name': 'FPC 0 Exhaust B',
'status': 'OK',
'temperature': {
'#text': '54 '
'degrees '
'C '
'/ '
'129 '
'degrees '
'F',
'@junos:celsius': '54'
}
}, {
'class': 'Temp',
'name': 'FPC 0 XL 0 TSen',
'status': 'OK',
'temperature': {
'#text': '50 '
'degrees '
'C '
'/ '
'122 '
'degrees '
'F',
'@junos:celsius': '50'
}
}, {
'class': 'Temp',
'name': 'FPC 0 XL 0 Chip',
'status': 'OK',
'temperature': {
'#text': '63 '
'degrees '
'C '
'/ '
'145 '
'degrees '
'F',
'@junos:celsius': '63'
}
}, {
'class': 'Temp',
'name': 'FPC 0 XL 0 XR2 0 '
'TSen',
'status': 'OK',
'temperature': {
'#text': '50 '
'degrees '
'C '
'/ '
'122 '
'degrees '
'F',
'@junos:celsius': '50'
}
}, {
'class': 'Temp',
'name': 'FPC 0 XL 0 XR2 0 '
'Chip',
'status': 'OK',
'temperature': {
'#text': '80 '
'degrees '
'C '
'/ '
'176 '
'degrees '
'F',
'@junos:celsius': '80'
}
}, {
'class': 'Temp',
'name': 'FPC 0 XL 0 XR2 1 '
'TSen',
'status': 'OK',
'temperature': {
'#text': '50 '
'degrees '
'C '
'/ '
'122 '
'degrees '
'F',
'@junos:celsius': '50'
}
}, {
'class': 'Temp',
'name': 'FPC 0 XL 0 XR2 1 '
'Chip',
'status': 'OK',
'temperature': {
'#text': '80 '
'degrees '
'C '
'/ '
'176 '
'degrees '
'F',
'@junos:celsius': '80'
}
}, {
'class': 'Temp',
'name': 'FPC 0 XL 1 TSen',
'status': 'OK',
'temperature': {
'#text': '36 '
'degrees '
'C '
'/ '
'96 '
'degrees '
'F',
'@junos:celsius': '36'
}
}, {
'class': 'Temp',
'name': 'FPC 0 XL 1 Chip',
'status': 'OK',
'temperature': {
'#text': '44 '
'degrees '
'C '
'/ '
'111 '
'degrees '
'F',
'@junos:celsius': '44'
}
}, {
'class': 'Temp',
'name': 'FPC 0 XL 1 XR2 0 '
'TSen',
'status': 'OK',
'temperature': {
'#text': '36 '
'degrees '
'C '
'/ '
'96 '
'degrees '
'F',
'@junos:celsius': '36'
}
}, {
'class': 'Temp',
'name': 'FPC 0 XL 1 XR2 0 '
'Chip',
'status': 'OK',
'temperature': {
'#text': '60 '
'degrees '
'C '
'/ '
'140 '
'degrees '
'F',
'@junos:celsius': '60'
}
}, {
'class': 'Temp',
'name': 'FPC 0 XL 1 XR2 1 '
'TSen',
'status': 'OK',
'temperature': {
'#text': '36 '
'degrees '
'C '
'/ '
'96 '
'degrees '
'F',
'@junos:celsius': '36'
}
}, {
'class': 'Temp',
'name': 'FPC 0 XL 1 XR2 1 '
'Chip',
'status': 'OK',
'temperature': {
'#text': '59 '
'degrees '
'C '
'/ '
'138 '
'degrees '
'F',
'@junos:celsius': '59'
}
}, {
'class': 'Temp',
'name': 'FPC 0 XM 0 TSen',
'status': 'OK',
'temperature': {
'#text': '52 '
'degrees '
'C '
'/ '
'125 '
'degrees '
'F',
'@junos:celsius': '52'
}
}, {
'class': 'Temp',
'name': 'FPC 0 XM 0 Chip',
'status': 'OK',
'temperature': {
'#text': '62 '
'degrees '
'C '
'/ '
'143 '
'degrees '
'F',
'@junos:celsius': '62'
}
}, {
'class': 'Temp',
'name': 'FPC 0 XM 1 TSen',
'status': 'OK',
'temperature': {
'#text': '52 '
'degrees '
'C '
'/ '
'125 '
'degrees '
'F',
'@junos:celsius': '52'
}
}, {
'class': 'Temp',
'name': 'FPC 0 XM 1 Chip',
'status': 'OK',
'temperature': {
'#text': '57 '
'degrees '
'C '
'/ '
'134 '
'degrees '
'F',
'@junos:celsius': '57'
}
}, {
'class': 'Temp',
'name': 'FPC 0 XM 2 TSen',
'status': 'OK',
'temperature': {
'#text': '52 '
'degrees '
'C '
'/ '
'125 '
'degrees '
'F',
'@junos:celsius': '52'
}
}, {
'class': 'Temp',
'name': 'FPC 0 XM 2 Chip',
'status': 'OK',
'temperature': {
'#text': '51 '
'degrees '
'C '
'/ '
'123 '
'degrees '
'F',
'@junos:celsius': '51'
}
}, {
'class': 'Temp',
'name': 'FPC 0 XM 3 TSen',
'status': 'OK',
'temperature': {
'#text': '52 '
'degrees '
'C '
'/ '
'125 '
'degrees '
'F',
'@junos:celsius': '52'
}
}, {
'class': 'Temp',
'name': 'FPC 0 XM 3 Chip',
'status': 'OK',
'temperature': {
'#text': '45 '
'degrees '
'C '
'/ '
'113 '
'degrees '
'F',
'@junos:celsius': '45'
}
}, {
'class': 'Temp',
'name': 'FPC 0 PCIe Switch '
'TSen',
'status': 'OK',
'temperature': {
'#text': '52 '
'degrees '
'C '
'/ '
'125 '
'degrees '
'F',
'@junos:celsius': '52'
}
}, {
'class': 'Temp',
'name': 'FPC 0 PCIe Switch '
'Chip',
'status': 'OK',
'temperature': {
'#text': '30 '
'degrees '
'C '
'/ '
'86 '
'degrees '
'F',
'@junos:celsius': '30'
}
}, {
'class': 'Temp',
'name': 'FPC 9 Intake',
'status': 'OK',
'temperature': {
'#text': '31 '
'degrees '
'C '
'/ '
'87 '
'degrees '
'F',
'@junos:celsius': '31'
}
}, {
'class': 'Temp',
'name': 'FPC 9 Exhaust A',
'status': 'OK',
'temperature': {
'#text': '48 '
'degrees '
'C '
'/ '
'118 '
'degrees '
'F',
'@junos:celsius': '48'
}
}, {
'class': 'Temp',
'name': 'FPC 9 Exhaust B',
'status': 'OK',
'temperature': {
'#text': '41 '
'degrees '
'C '
'/ '
'105 '
'degrees '
'F',
'@junos:celsius': '41'
}
}, {
'class': 'Temp',
'name': 'FPC 9 LU 0 TCAM '
'TSen',
'status': 'OK',
'temperature': {
'#text': '46 '
'degrees '
'C '
'/ '
'114 '
'degrees '
'F',
'@junos:celsius': '46'
}
}, {
'class': 'Temp',
'name': 'FPC 9 LU 0 TCAM '
'Chip',
'status': 'OK',
'temperature': {
'#text': '55 '
'degrees '
'C '
'/ '
'131 '
'degrees '
'F',
'@junos:celsius': '55'
}
}, {
'class': 'Temp',
'name': 'FPC 9 LU 0 TSen',
'status': 'OK',
'temperature': {
'#text': '46 '
'degrees '
'C '
'/ '
'114 '
'degrees '
'F',
'@junos:celsius': '46'
}
}, {
'class': 'Temp',
'name': 'FPC 9 LU 0 Chip',
'status': 'OK',
'temperature': {
'#text': '55 '
'degrees '
'C '
'/ '
'131 '
'degrees '
'F',
'@junos:celsius': '55'
}
}, {
'class': 'Temp',
'name': 'FPC 9 MQ 0 TSen',
'status': 'OK',
'temperature': {
'#text': '46 '
'degrees '
'C '
'/ '
'114 '
'degrees '
'F',
'@junos:celsius': '46'
}
}, {
'class': 'Temp',
'name': 'FPC 9 MQ 0 Chip',
'status': 'OK',
'temperature': {
'#text': '57 '
'degrees '
'C '
'/ '
'134 '
'degrees '
'F',
'@junos:celsius': '57'
}
}, {
'class': 'Temp',
'name': 'FPC 9 LU 1 TCAM '
'TSen',
'status': 'OK',
'temperature': {
'#text': '41 '
'degrees '
'C '
'/ '
'105 '
'degrees '
'F',
'@junos:celsius': '41'
}
}, {
'class': 'Temp',
'name': 'FPC 9 LU 1 TCAM '
'Chip',
'status': 'OK',
'temperature': {
'#text': '46 '
'degrees '
'C '
'/ '
'114 '
'degrees '
'F',
'@junos:celsius': '46'
}
}, {
'class': 'Temp',
'name': 'FPC 9 LU 1 TSen',
'status': 'OK',
'temperature': {
'#text': '41 '
'degrees '
'C '
'/ '
'105 '
'degrees '
'F',
'@junos:celsius': '41'
}
}, {
'class': 'Temp',
'name': 'FPC 9 LU 1 Chip',
'status': 'OK',
'temperature': {
'#text': '47 '
'degrees '
'C '
'/ '
'116 '
'degrees '
'F',
'@junos:celsius': '47'
}
}, {
'class': 'Temp',
'name': 'FPC 9 MQ 1 TSen',
'status': 'OK',
'temperature': {
'#text': '41 '
'degrees '
'C '
'/ '
'105 '
'degrees '
'F',
'@junos:celsius': '41'
}
}, {
'class': 'Temp',
'name': 'FPC 9 MQ 1 Chip',
'status': 'OK',
'temperature': {
'#text': '47 '
'degrees '
'C '
'/ '
'116 '
'degrees '
'F',
'@junos:celsius': '47'
}
}, {
'class': 'Temp',
'name': 'ADC 9 Intake',
'status': 'OK',
'temperature': {
'#text': '32 '
'degrees '
'C '
'/ '
'89 '
'degrees '
'F',
'@junos:celsius': '32'
}
}, {
'class': 'Temp',
'name': 'ADC 9 Exhaust',
'status': 'OK',
'temperature': {
'#text': '42 '
'degrees '
'C '
'/ '
'107 '
'degrees '
'F',
'@junos:celsius': '42'
}
}, {
'class': 'Temp',
'name': 'ADC 9 ADC-XF1',
'status': 'OK',
'temperature': {
'#text': '49 '
'degrees '
'C '
'/ '
'120 '
'degrees '
'F',
'@junos:celsius': '49'
}
}, {
'class': 'Temp',
'name': 'ADC 9 ADC-XF0',
'status': 'OK',
'temperature': {
'#text': '59 '
'degrees '
'C '
'/ '
'138 '
'degrees '
'F',
'@junos:celsius': '59'
}
}, {
'class': 'Fans',
'comment': '2760 RPM',
'name': 'Fan Tray 0 Fan 1',
'status': 'OK'
}, {
'class': 'Fans',
'comment': '2520 RPM',
'name': 'Fan Tray 0 Fan 2',
'status': 'OK'
}, {
'class': 'Fans',
'comment': '2520 RPM',
'name': 'Fan Tray 0 Fan 3',
'status': 'OK'
}, {
'class': 'Fans',
'comment': '2640 RPM',
'name': 'Fan Tray 0 Fan 4',
'status': 'OK'
}, {
'class': 'Fans',
'comment': '2640 RPM',
'name': 'Fan Tray 0 Fan 5',
'status': 'OK'
}, {
'class': 'Fans',
'comment': '2640 RPM',
'name': 'Fan Tray 0 Fan 6',
'status': 'OK'
}, {
'class': 'Fans',
'comment': '2520 RPM',
'name': 'Fan Tray 1 Fan 1',
'status': 'OK'
}, {
'class': 'Fans',
'comment': '2640 RPM',
'name': 'Fan Tray 1 Fan 2',
'status': 'OK'
}, {
'class': 'Fans',
'comment': '2520 RPM',
'name': 'Fan Tray 1 Fan 3',
'status': 'OK'
}, {
'class': 'Fans',
'comment': '2640 RPM',
'name': 'Fan Tray 1 Fan 4',
'status': 'OK'
}, {
'class': 'Fans',
'comment': '2520 RPM',
'name': 'Fan Tray 1 Fan 5',
'status': 'OK'
}, {
'class': 'Fans',
'comment': '2640 RPM',
'name': 'Fan Tray 1 Fan 6',
'status': 'OK'
}, {
'class': 'Fans',
'comment': '2640 RPM',
'name': 'Fan Tray 2 Fan 1',
'status': 'OK'
}, {
'class': 'Fans',
'comment': '2640 RPM',
'name': 'Fan Tray 2 Fan 2',
'status': 'OK'
}, {
'class': 'Fans',
'comment': '2520 RPM',
'name': 'Fan Tray 2 Fan 3',
'status': 'OK'
}, {
'class': 'Fans',
'comment': '2640 RPM',
'name': 'Fan Tray 2 Fan 4',
'status': 'OK'
}, {
'class': 'Fans',
'comment': '2520 RPM',
'name': 'Fan Tray 2 Fan 5',
'status': 'OK'
}, {
'class': 'Fans',
'comment': '2640 RPM',
'name': 'Fan Tray 2 Fan 6',
'status': 'OK'
}, {
'class': 'Fans',
'comment': '2520 RPM',
'name': 'Fan Tray 3 Fan 1',
'status': 'OK'
}, {
'class': 'Fans',
'comment': '2400 RPM',
'name': 'Fan Tray 3 Fan 2',
'status': 'OK'
}, {
'class': 'Fans',
'comment': '2520 RPM',
'name': 'Fan Tray 3 Fan 3',
'status': 'OK'
}, {
'class': 'Fans',
'comment': '2520 RPM',
'name': 'Fan Tray 3 Fan 4',
'status': 'OK'
}, {
'class': 'Fans',
'comment': '2640 RPM',
'name': 'Fan Tray 3 Fan 5',
'status': 'OK'
}, {
'class': 'Fans',
'comment': '2520 RPM',
'name': 'Fan Tray 3 Fan 6',
'status': 'OK'
}]
}
}
| 25.693974 | 41 | 0.234727 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 19,959 | 0.365724 |
9225d697e78183c1ad43794ee9e4cd4755df5089 | 2,260 | py | Python | orbit.py | srujan71/CubeSat-Mission-Planner | 34030254109def0b13e82116ed5122ecf79a82f7 | [
"MIT"
] | 1 | 2021-07-26T16:17:30.000Z | 2021-07-26T16:17:30.000Z | orbit.py | leizhang2020/CubeSat-Mission-Planner | 62d1ad33c2dcb1a2f8fb3ff615cc5cc0e6716969 | [
"MIT"
] | null | null | null | orbit.py | leizhang2020/CubeSat-Mission-Planner | 62d1ad33c2dcb1a2f8fb3ff615cc5cc0e6716969 | [
"MIT"
] | 2 | 2021-09-18T08:50:44.000Z | 2022-01-20T02:41:38.000Z | """
orbit.py
"Frankly, a very limited and highly specific implementation of an Orbit class.
If used for applications other than the original usecase, this class will
either need to be bypassed or heavily expanded upon."
@author: Johan Monster (https://github.com/Hans-Bananendans/)
"""
from numpy import log
class Orbit:
"""This class stores and supplies orbital parameters for given circular
SSO orbit"""
def __init__(self,h,i,LTAN):
self.h = h #[km]
self.i = i #[deg]
self.LTAN = LTAN #0-23[h] e.g. 14 is 14:00
def period(self):
"""
Parameters
----------
h : double
Orbital altitude in [km].
Returns
-------
int
Orbital period in [s].
"""
return int(2*3.141593 * ((1000*(6371+self.h))**3/(3.986*10**14))**0.5)
def eclipse(self):
"""
eclipse(h)
Note: Only valid between LTAN [10:00, 11:00], based on logarithmic
regression of simulated eclipse data in GMAT. For more info,
consult eclipse_predictions.xlsx.
ACCURATE TO WITHIN A FEW SECONDS
Parameters
----------
h : double
Orbital altitude in [km].
Returns
-------
double
Total eclipse duration (including penumbras) in [s].
"""
# If LTAN is 10:00
# e = -151*log(self.h) + 2965 # [s]
# If LTAN is 10:30
e = -125*log(self.h) + 2860 # [s]
# If LTAN is 11:00
# e = -109*log(self.h) + 2800 # [s]
return e
def eclipse_frac(self):
"""
eclipse(h)
Note: Only valid for LTAN 10:00, 10:30, 11:00, based on logarithmic
regression of simulated eclipse data in GMAT. For more info,
consult eclipse_predictions.xlsx.
ACCURACY TO WITHIN 0.1 OF TRUE VALUE
Parameters
----------
h : double
Orbital altitude in [km].
Returns
-------
double
Percentage of orbit that is in ECLIPSE [%].
"""
return self.eclipse()/self.period() | 25.111111 | 78 | 0.509735 | 1,940 | 0.858407 | 0 | 0 | 0 | 0 | 0 | 0 | 1,744 | 0.771681 |
922840c6ba1e5ffbff03318305b8047e1c57c724 | 804 | py | Python | brownie/cli/console.py | banteg/brownie | eccd0c6ac8cf769ed3331c05189e8518e7cc2fdd | [
"MIT"
] | 3 | 2021-05-21T06:55:28.000Z | 2021-06-22T07:31:50.000Z | brownie/cli/console.py | banteg/brownie | eccd0c6ac8cf769ed3331c05189e8518e7cc2fdd | [
"MIT"
] | null | null | null | brownie/cli/console.py | banteg/brownie | eccd0c6ac8cf769ed3331c05189e8518e7cc2fdd | [
"MIT"
] | 1 | 2021-08-02T05:50:59.000Z | 2021-08-02T05:50:59.000Z | #!/usr/bin/python3
from docopt import docopt
from brownie import network, project
from brownie.cli.utils.console import Console
from brownie._config import ARGV, CONFIG, update_argv_from_docopt
__doc__ = f"""Usage: brownie console [options]
Options:
--network <name> Use a specific network (default {CONFIG['network_defaults']['name']})
--verbose -v Enable verbose reporting
--tb -t Show entire python traceback on exceptions
--help -h Display this message
Connects to the network and opens the brownie console.
"""
def main():
    """Entry point for ``brownie console``: parse CLI options, load the
    active project, connect to the requested network, and drop into an
    interactive console session."""
    args = docopt(__doc__)
    # Mirror the parsed docopt options into brownie's global ARGV store so
    # downstream code (e.g. network.connect) can read them.
    update_argv_from_docopt(args)
    # NOTE(review): presumably the project must be loaded before connecting
    # so contracts are registered when network hooks fire -- confirm before
    # reordering.
    project.load()
    network.connect(ARGV['network'])
    shell = Console()
    # Blocks until the user exits the interactive session.
    shell.interact(banner="Brownie environment is ready.", exitmsg="")
| 25.935484 | 95 | 0.689055 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 429 | 0.533582 |
92287892b36da60c9bf64d7187622768f0e41f59 | 2,638 | py | Python | tensorflow_examples/lite/model_maker/core/compat.py | PawelFaron/examples | 1a24291bb3ed617df21c6c15cdd3418ed2bf8ad3 | [
"Apache-2.0"
] | 1 | 2021-09-22T12:03:20.000Z | 2021-09-22T12:03:20.000Z | tensorflow_examples/lite/model_maker/core/compat.py | godofecht/examples | 5875c06c3cc76af5419986ab9d2f3d51bea43425 | [
"Apache-2.0"
] | null | null | null | tensorflow_examples/lite/model_maker/core/compat.py | godofecht/examples | 5875c06c3cc76af5419986ab9d2f3d51bea43425 | [
"Apache-2.0"
] | 1 | 2020-03-13T16:40:57.000Z | 2020-03-13T16:40:57.000Z | # Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Compat modules."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import functools
import tensorflow as tf # TF2
# Default behavior major version (TF 2.x semantics), used when
# setup_tf_behavior() is called without an argument.
_DEFAULT_TF_BEHAVIOR = 2

# Get version of tf behavior in use (valid 1 or 2). Updated by
# setup_tf_behavior() and read via get_tf_behavior().
_tf_behavior_version = _DEFAULT_TF_BEHAVIOR
def setup_tf_behavior(tf_version=_DEFAULT_TF_BEHAVIOR):
  """Setup tf behavior. It must be used before the main().

  Args:
    tf_version: int, 1 or 2, selecting which TF behavior set to enable.

  Raises:
    ValueError: if `tf_version` is not 1 or 2.
    RuntimeError: if v2 behavior is requested but the installed TensorFlow
      is not a 2.x release.
  """
  global _tf_behavior_version
  if tf_version not in [1, 2]:
    raise ValueError(
        'tf_version should be in [1, 2], but got {}'.format(tf_version))

  if tf_version == 1:
    tf.compat.v1.logging.warn(
        'Using v1 behavior. Please note that it is mainly to run legacy models,'
        'however v2 is more preferrable if they are supported.')
    tf.compat.v1.disable_v2_behavior()
  else:
    # Fix: use an explicit check instead of `assert`, which is silently
    # stripped when Python runs with -O and would skip this validation.
    if not tf.__version__.startswith('2'):
      raise RuntimeError(
          'v2 behavior requires TensorFlow 2.x, but found version {}.'.format(
              tf.__version__))
  _tf_behavior_version = tf_version
def get_tf_behavior():
  """Gets version for tf behavior.

  Reads the module-level `_tf_behavior_version`, which is set by
  `setup_tf_behavior` and defaults to v2.

  Returns:
    int, 1 or 2 indicating the behavior version.
  """
  return _tf_behavior_version
def _make_tf_version_test_decorator(supported_versions, label):
  """Build a decorator that runs the wrapped test only for given behaviors.

  Consolidates the three previously copy-pasted decorators into one factory.

  Args:
    supported_versions: list of int, tf behavior versions the test supports.
    label: str, decorator name used in the skip log message (kept identical
      to the original messages).

  Returns:
    A decorator that skips the wrapped function (logging an info message)
    when the active tf behavior version is not in `supported_versions`.
  """

  def wrap(fn):

    @functools.wraps(fn)
    def decorator(*args, **kwargs):
      if get_tf_behavior() not in supported_versions:
        tf.compat.v1.logging.info('Skip function {} for {}'.format(
            fn.__name__, label))
        return
      fn(*args, **kwargs)

    return decorator

  return wrap


def test_in_tf_1(fn):
  """Decorator to test in tf 1 behaviors."""
  return _make_tf_version_test_decorator([1], 'test_in_tf_1')(fn)


def test_in_tf_2(fn):
  """Decorator to test in tf 2 behaviors."""
  return _make_tf_version_test_decorator([2], 'test_in_tf_2')(fn)


def test_in_tf_1and2(fn):
  """Decorator to test in tf 1 and 2 behaviors."""
  return _make_tf_version_test_decorator([1, 2], 'test_in_tf_1and2')(fn)
| 27.479167 | 80 | 0.703942 | 0 | 0 | 0 | 0 | 679 | 0.257392 | 0 | 0 | 1,245 | 0.471948 |
92294ca518ccc92f5f31e84ca764f1ef9e6822eb | 8,559 | py | Python | mainpages/views.py | HAXF13D/carwashagrigation | 638d33b0b9cfd81f47ec7cb38aff52f2608a037b | [
"MIT"
] | null | null | null | mainpages/views.py | HAXF13D/carwashagrigation | 638d33b0b9cfd81f47ec7cb38aff52f2608a037b | [
"MIT"
] | null | null | null | mainpages/views.py | HAXF13D/carwashagrigation | 638d33b0b9cfd81f47ec7cb38aff52f2608a037b | [
"MIT"
] | null | null | null | from django.shortcuts import render
from django.shortcuts import redirect
from django.http import HttpResponseRedirect
from .forms import RegisterForm, LoginForm
from django.urls import reverse
from .datebase_func import make_bd, check_car_wash, add_car_wash, time_by_id, update_time_by_id, get_info_about
from django.contrib.auth import get_user_model
from django.contrib.auth.models import User
from django.contrib.auth import authenticate, login, logout
from django.core.exceptions import ValidationError
from django.core.validators import validate_email
import requests
from pprint import pprint
from datetime import datetime
def time_to_seconds(time):
    """Convert an 'HH:MM' or 'HH:MM:SS' string to a count of seconds.

    Any other number of ':'-separated fields yields 0 (same fallback
    behavior as before for malformed input).
    """
    fields = time.split(':')
    hours = minutes = seconds = 0
    if len(fields) == 2:
        hours, minutes = int(fields[0]), int(fields[1])
    elif len(fields) == 3:
        hours, minutes, seconds = (int(f) for f in fields)
    return hours * 3600 + minutes * 60 + seconds
def second_to_str(time):
    """Format a second count as a zero-padded 'HH:MM:SS' wall-clock string.

    The hour field wraps modulo 24, so values past midnight roll over to
    the next day's clock time.

    Fix: fields are now zero-padded (e.g. '09:05:03' instead of '9:5:3')
    so the output is a well-formed time string. Consumers in this module
    re-parse fields with int(), which accepts leading zeros, so the change
    is backward-compatible.

    Parameters
    ----------
    time : int
        Seconds count (seconds-of-day or larger).

    Returns
    -------
    str
        'HH:MM:SS' with each field zero-padded to two digits.
    """
    hh = time // 3600 % 24
    mm = time // 60 % 60
    ss = time % 60
    return f"{hh:02d}:{mm:02d}:{ss:02d}"
def fast_search_car_wash(data, u=None):
    """Pick the car wash where the user can start washing soonest.

    For every candidate wash in `data`, queries the DistanceMatrix API for
    the travel time from the user's position, converts it into an arrival
    clock time, and keeps the wash with the earliest feasible start time
    (the later of arrival time and the wash's next free slot) that still
    fits a 15-minute wash before closing. The winner's free-slot time is
    pushed forward by the wash duration, and the user (if given) is
    emailed the appointment time.

    Parameters
    ----------
    data : dict-like
        POST payload with single-element-list values: 'pos' (a
        "lat,lng" string), 'count', and per-index keys '<i>id',
        '<i>name', '<i>coords'.
    u : django User, optional
        When provided, receives a confirmation email.

    Returns
    -------
    dict
        Winner coordinates split into 'coords_x'/'coords_y' and the
        user's position split into 'pos_x'/'pos_y'.
    """
    # Django QueryDict values arrive as single-element lists; copy to a
    # plain dict and index [0] on each read.
    data = dict(data)
    current_time = datetime.now()
    cur_pos = data.get('pos')[0]
    count = int(data.get('count')[0])
    # Ensure the car-wash tables exist before any reads/writes below.
    make_bd()
    # Seed the winner with the first wash so there is always a fallback.
    winner_id = wash_id = data.get('0id')[0]
    # Best (smallest) start time found so far, in seconds-of-day; starts
    # as a large sentinel.
    pohui = 99999999999
    time_to_wash = 15*60  # Fixed wash duration: 15 minutes, in seconds.
    for i in range(count):
        name = data.get(str(i) + 'name')[0]
        wash_id = data.get(str(i) + 'id')[0]
        wash_cord = data.get(str(i) + 'coords')[0]
        # Register previously unseen washes in the database.
        if not check_car_wash(wash_id):
            add_car_wash(wash_id, name, wash_cord)
        # NOTE(review): API key is hardcoded in source -- move it to
        # settings or an environment variable.
        request_body = 'https://api.distancematrix.ai/maps/api/distancematrix/json?'
        request_body += f'origins={cur_pos}&'
        request_body += f'destinations={wash_cord}&'
        request_body += 'key=FEn4bf73mLF76mUkqYyaJI5UiDc6g'
        response = requests.get(request_body)
        print(response.text)
        # Travel duration in seconds, as returned by the API.
        trip_time = response.json()['rows'][0]['elements'][0]['duration']['value']
        trip_minutes = trip_time // 60
        trip_seconds = trip_time % 60
        trip_hours = trip_time // 3600
        #pprint(f'{trip_hours}:{trip_minutes}:{trip_seconds}')
        current_minutes = int(current_time.strftime('%M'))
        current_seconds = int(current_time.strftime('%S'))
        current_hours = int(current_time.strftime('%H'))
        # Add trip duration to the current clock time field by field, then
        # normalize carries below.
        arrive_seconds = trip_seconds + current_seconds
        arrive_minutes = trip_minutes + current_minutes
        arrive_hours = trip_hours + current_hours
        days = 0
        if arrive_seconds // 60 != 0:
            arrive_minutes += arrive_seconds // 60
            arrive_seconds %= 60
        if arrive_minutes // 60 != 0:
            # NOTE(review): the trailing `% 60` looks suspicious -- a plain
            # carry would be `arrive_minutes // 60`. Confirm intent before
            # changing; it only differs for trips of 60+ hours.
            arrive_hours += arrive_minutes // 60 % 60
            arrive_minutes %= 60
        if arrive_hours // 24 != 0:
            days = arrive_hours // 24
            arrive_hours %= 24
        #pprint(f'{days} {arrive_hours}:{arrive_minutes}:{arrive_seconds}')
        arrive_time = str(arrive_hours) + ':' + str(arrive_minutes) + ':' + str(arrive_seconds)
        open_time, close_time, free_time = time_by_id(wash_id)
        # Only consider same-day arrivals that finish before closing.
        if days == 0:
            if time_to_seconds(arrive_time) + time_to_wash < time_to_seconds(close_time):
                # Start at arrival or when the wash is next free, whichever
                # is later.
                start_time = max(time_to_seconds(arrive_time), time_to_seconds(free_time))
                #pprint(second_to_str(start_time))
                if start_time < pohui:
                    pohui = start_time
                    winner_id = wash_id
    #pprint(second_to_str(pohui))
    # NOTE(review): if no wash was feasible, `pohui` is still the sentinel
    # and a nonsense free time is written for the fallback winner -- verify.
    update_time_by_id(winner_id, second_to_str(pohui + time_to_wash))
    result = {}
    response = get_info_about(winner_id)
    # response[2] holds the stored "lat,lng" coordinate string.
    coords_xy = response[2].split(',')
    result['coords_x'] = coords_xy[0]
    result['coords_y'] = coords_xy[1]
    pos_xy = cur_pos.split(',')
    result['pos_x'] = pos_xy[0]
    result['pos_y'] = pos_xy[1]
    if u is not None:
        print('NICE')
        # Email the appointment time to the authenticated user.
        u.email_user(subject='Талон на автомойку',
                     message=f'Вы записаны на автомойку, приезжайте к {second_to_str(pohui)}',
                     from_email='car.wash.agrigator@gmail.com'
                     )
    return result
def main_page(request):
    """Render the landing page.

    On an AJAX POST carrying a 'pos' value, runs the fast car-wash search
    for authenticated users, stashes the result in the session, and
    redirects to the road-map view. Otherwise renders the main page with a
    button that reflects the authentication state.
    """
    user = request.user
    is_logged_in = user.is_authenticated
    if request.method == 'POST' and request.is_ajax:
        if request.POST.get('pos') is not None:
            print("hui")
            user = request.user
            if user.is_authenticated:
                search_result = fast_search_car_wash(request.POST, user)
                print(search_result)
                request.session['data'] = search_result
            return HttpResponseRedirect('/main/road_map')
            #return redirect('mainpages:road_map')
    context = {
        'button_1': 'logout_user' if is_logged_in else 'auth',
        'flag': is_logged_in,
    }
    return render(
        request,
        'main_page.html',
        context=context
    )
def auth_page(request):
    """Render the authorization chooser page (login vs. registration)."""
    return render(
        request,
        'authorization.html',
        context={'button_1': 'login', 'button_2': 'registration'},
    )
def login_page(request):
    """Handle the login form.

    On POST with credentials, authenticates the user and redirects to
    /main; on failure re-renders the form with an error message. On GET
    (or a POST without a password) the bare form is rendered.

    Fixes: removed debug pprint calls that logged the raw password to
    stdout (credential leak), removed the unused `LoginForm()` local, and
    dropped the duplicated re-read of the POST fields.
    """
    if request.method == 'POST':
        username = request.POST.get("login")
        password = request.POST.get("password")
        if password is not None:
            user = authenticate(request, username=username, password=password)
            if user is not None:
                login(request, user)
                return redirect('/main')
            # Invalid credentials: re-render with the error text.
            data = {
                "text": "Неверный логин или пароль",
            }
            return render(
                request,
                'login.html',
                context=data,
            )
    # GET (or malformed POST): show the empty form.
    return render(
        request,
        'login.html',
    )
def registration_page(request):
    """Handle the registration form.

    On POST, validates that the passwords match, the email is well-formed,
    and the username is free; then creates the account, logs the new user
    in, and redirects to /main. Validation failures re-render the form
    with an error message; GET renders the empty form.

    Fixes:
      * removed debug pprint calls that logged the raw password (leak);
      * the uniqueness check previously used substring matching over
        str(user), which falsely rejected any username contained in an
        existing one (e.g. 'bob' vs 'bobby'); it now uses an exact,
        indexed database lookup;
      * removed the unused `RegisterForm()` local.
    """
    if request.method == 'POST':
        username = request.POST.get("login")
        password = request.POST.get("password")
        re_password = request.POST.get("re_password")
        mail = request.POST.get("mail")
        if re_password is not None:
            if password != re_password:
                data = {
                    'text': 'Пароли не совпадают',
                }
                return render(
                    request,
                    'registration.html',
                    context=data,
                )
            try:
                validate_email(mail)
            except ValidationError as e:
                data = {
                    'text': 'Неверный формат email' + str(e)
                }
                return render(
                    request,
                    'registration.html',
                    context=data,
                )
            # Exact-match uniqueness check against the user table.
            if User.objects.filter(username=username).exists():
                data = {
                    "text": "Пользователь с таким логином уже существует",
                }
                return render(
                    request,
                    'registration.html',
                    context=data,
                )
            user = User.objects.create_user(
                username=username,
                password=password,
                email=mail
            )
            user.save()
            user = authenticate(request, username=username, password=password)
            if user is not None:
                login(request, user)
                return redirect('/main')
    # GET (or malformed POST): show the empty form.
    return render(
        request,
        'registration.html',
    )
def logout_user(request):
    """End the current session (if authenticated) and return to the root."""
    if request.user.is_authenticated:
        logout(request)
    return redirect('/')
def road_map(request):
    """Render the route map using the search result stored in the session
    by main_page.

    Fix: removed leftover debug prints (including one containing
    profanity) that dumped the session data and raw GET parameters to
    stdout on every request.
    """
    data = request.session['data']
    return render(
        request,
        'map.html',
        context=data
    )
| 30.677419 | 111 | 0.538497 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,319 | 0.151644 |
92294f38fd2ab1e308c335c66c51214ec37751f0 | 4,939 | py | Python | flytekit/common/types/helpers.py | slai/flytekit | 9d73d096b748d263a638e6865d15db4880845305 | [
"Apache-2.0"
] | null | null | null | flytekit/common/types/helpers.py | slai/flytekit | 9d73d096b748d263a638e6865d15db4880845305 | [
"Apache-2.0"
] | 2 | 2021-06-26T04:32:43.000Z | 2021-07-14T04:47:52.000Z | flytekit/common/types/helpers.py | slai/flytekit | 9d73d096b748d263a638e6865d15db4880845305 | [
"Apache-2.0"
] | null | null | null | import importlib as _importlib
import six as _six
from flytekit.common.exceptions import scopes as _exception_scopes
from flytekit.common.exceptions import user as _user_exceptions
from flytekit.configuration import sdk as _sdk_config
from flytekit.models import literals as _literal_models
class _TypeEngineLoader(object):
_LOADED_ENGINES = None
_LAST_LOADED = None
@classmethod
def _load_engines(cls):
config = _sdk_config.TYPE_ENGINES.get()
if cls._LOADED_ENGINES is None or config != cls._LAST_LOADED:
cls._LAST_LOADED = config
cls._LOADED_ENGINES = []
for fqdn in config:
split = fqdn.split(".")
module_path, attr = ".".join(split[:-1]), split[-1]
module = _exception_scopes.user_entry_point(_importlib.import_module)(module_path)
if not hasattr(module, attr):
raise _user_exceptions.FlyteValueException(
module,
"Failed to load the type engine because the attribute named '{}' could not be found"
"in the module '{}'.".format(attr, module_path),
)
engine_impl = getattr(module, attr)()
cls._LOADED_ENGINES.append(engine_impl)
from flytekit.type_engines.default.flyte import FlyteDefaultTypeEngine as _DefaultEngine
cls._LOADED_ENGINES.append(_DefaultEngine())
@classmethod
def iterate_engines_in_order(cls):
"""
:rtype: Generator[flytekit.type_engines.common.TypeEngine]
"""
cls._load_engines()
return iter(cls._LOADED_ENGINES)
def python_std_to_sdk_type(t):
"""
:param T t: User input. Should be of the form: Types.Integer, [Types.Integer], {Types.String: Types.Integer}, etc.
:rtype: flytekit.common.types.base_sdk_types.FlyteSdkType
"""
for e in _TypeEngineLoader.iterate_engines_in_order():
out = e.python_std_to_sdk_type(t)
if out is not None:
return out
raise _user_exceptions.FlyteValueException(t, "Could not resolve to an SDK type for this value.")
def get_sdk_type_from_literal_type(literal_type):
"""
:param flytekit.models.types.LiteralType literal_type:
:rtype: flytekit.common.types.base_sdk_types.FlyteSdkType
"""
for e in _TypeEngineLoader.iterate_engines_in_order():
out = e.get_sdk_type_from_literal_type(literal_type)
if out is not None:
return out
raise _user_exceptions.FlyteValueException(
literal_type, "Could not resolve to a type implementation for this " "value."
)
def infer_sdk_type_from_literal(literal):
"""
:param flytekit.models.literals.Literal literal:
:rtype: flytekit.common.types.base_sdk_types.FlyteSdkType
"""
for e in _TypeEngineLoader.iterate_engines_in_order():
out = e.infer_sdk_type_from_literal(literal)
if out is not None:
return out
raise _user_exceptions.FlyteValueException(literal, "Could not resolve to a type implementation for this value.")
def get_sdk_value_from_literal(literal, sdk_type=None):
"""
:param flytekit.models.literals.Literal literal:
:param flytekit.models.types.LiteralType sdk_type:
:rtype: flytekit.common.types.base_sdk_types.FlyteSdkValue
"""
# The spec states everything must be nullable, so if we receive a null value, swap to the null type behavior.
if sdk_type is None:
sdk_type = infer_sdk_type_from_literal(literal)
return sdk_type.from_flyte_idl(literal.to_flyte_idl())
def unpack_literal_map_to_sdk_object(literal_map, type_map=None):
"""
:param lytekit.models.literals.LiteralMap literal_map:
:param dict[Text, flytekit.common.types.base_sdk_types.FlyteSdkType] type_map: Type map directing unpacking.
:rtype: dict[Text, T]
"""
type_map = type_map or {}
return {k: get_sdk_value_from_literal(v, sdk_type=type_map.get(k, None)) for k, v in literal_map.literals.items()}
def unpack_literal_map_to_sdk_python_std(literal_map, type_map=None):
"""
:param flytekit.models.literals.LiteralMap literal_map: Literal map containing values for unpacking.
:param dict[Text, flytekit.common.types.base_sdk_types.FlyteSdkType] type_map: Type map directing unpacking.
:rtype: dict[Text, T]
"""
return {k: v.to_python_std() for k, v in unpack_literal_map_to_sdk_object(literal_map, type_map=type_map).items()}
def pack_python_std_map_to_literal_map(std_map, type_map):
"""
:param dict[Text, T] std_map:
:param dict[Text, flytekit.common.types.base_sdk_types.FlyteSdkType] type_map:
:rtype: flytekit.models.literals.LiteralMap
:raises: flytekit.common.exceptions.user.FlyteTypeException
"""
return _literal_models.LiteralMap(literals={k: v.from_python_std(std_map[k]) for k, v in _six.iteritems(type_map)})
| 39.512 | 119 | 0.700142 | 1,395 | 0.282446 | 0 | 0 | 1,300 | 0.263211 | 0 | 0 | 1,811 | 0.366673 |
922a233c287deeb8aa6a5632fd96086e2451f966 | 2,356 | py | Python | commands/interractions/discord_binder.py | graatje/highscoresbot | 26207b8191ed6c9a3d7ecd49fea482e6d3603c36 | [
"MIT"
] | null | null | null | commands/interractions/discord_binder.py | graatje/highscoresbot | 26207b8191ed6c9a3d7ecd49fea482e6d3603c36 | [
"MIT"
] | null | null | null | commands/interractions/discord_binder.py | graatje/highscoresbot | 26207b8191ed6c9a3d7ecd49fea482e6d3603c36 | [
"MIT"
] | null | null | null | import sqlite3
import discord
from commands.sendable import Sendable
class DiscordBinder(discord.ui.View):
def __init__(self, ppousername, discord_user_id):
super().__init__(timeout=6000)
self.discord_user_id = discord_user_id
self.ppousername = ppousername
@discord.ui.button(label='accept account binding', style=discord.ButtonStyle.green)
async def accept(self, interaction: discord.Interaction, button: discord.ui.Button):
print("button!!!!")
with sqlite3.connect("../eventconfigurations.db") as conn:
cur = conn.cursor()
cur.execute("INSERT INTO discord_bindings(discordid, pponame) VALUES(?, ?)",
(self.discord_user_id, self.ppousername))
conn.commit()
sendable = Sendable(interaction)
await sendable.send(f"{self.ppousername} has been bound to your discord account!")
self.stop()
@discord.ui.button(label='deny user from (future) binding', style=discord.ButtonStyle.red)
async def deny_user(self, interaction: discord.Interaction, button: discord.ui.Button):
print("button!!!!")
with sqlite3.connect("../eventconfigurations.db") as conn:
cur = conn.cursor()
cur.execute("INSERT INTO discord_blocked(pponame, discordid) VALUES(?,?)",
(self.ppousername, self.discord_user_id))
conn.commit()
sendable = Sendable(interaction)
await sendable.send(f"{self.ppousername} can't request to bind to your discord account "
f"anymore.")
self.stop()
@discord.ui.button(label='prevent all accountbinding in the future', style=discord.ButtonStyle.danger)
async def deny_all(self, interaction: discord.Interaction, button: discord.ui.Button):
print("button!!!!")
with sqlite3.connect("../eventconfigurations.db") as conn:
cur = conn.cursor()
cur.execute("INSERT INTO everything_discord_blocked(discordid) VALUES(?)",
(self.discord_user_id,))
conn.commit()
sendable = Sendable(interaction)
await sendable.send("You won't receive requests to bind a ppo account with your discord "
"account anymore.")
self.stop()
| 46.196078 | 106 | 0.628608 | 2,282 | 0.968591 | 0 | 0 | 2,048 | 0.86927 | 1,758 | 0.74618 | 628 | 0.266553 |
922a9ae4034f744c3a78c0a716a89357127aba7a | 4,038 | py | Python | gigfinder/gigs/tests.py | jayanwana/django-location-project | b2e728d13189e70ca25578eccd3018f15bdd7e83 | [
"MIT"
] | null | null | null | gigfinder/gigs/tests.py | jayanwana/django-location-project | b2e728d13189e70ca25578eccd3018f15bdd7e83 | [
"MIT"
] | 3 | 2020-06-05T21:00:42.000Z | 2021-09-22T18:33:34.000Z | gigfinder/gigs/tests.py | jayanwana/django-location-project | b2e728d13189e70ca25578eccd3018f15bdd7e83 | [
"MIT"
] | null | null | null | from django.test import TestCase
# Create your tests here.
from gigs.models import Venue, Event
from gigs.views import LookupView
from factory.fuzzy import BaseFuzzyAttribute
from django.contrib.gis.geos import Point
from django.utils import timezone
from django.test import RequestFactory
from django.urls import reverse
import factory.django
import random
class FuzzyPoint(BaseFuzzyAttribute):
def fuzz(self):
return Point(random.uniform(-180.0, 180.0),
random.uniform(-90.0, 90.0))
# Factories for tests
class VenueFactory(factory.django.DjangoModelFactory):
class Meta:
model = Venue
django_get_or_create = (
'name',
'location'
)
name = 'Wembley Arena'
location = FuzzyPoint()
class EventFactory(factory.django.DjangoModelFactory):
class Meta:
model = Event
django_get_or_create = (
'name',
'venue',
'datetime'
)
name = 'Queens of the Stone Age'
datetime = timezone.now()
# Test
class VenueTest(TestCase):
def test_create_venue(self):
# Create the venue
venue = VenueFactory()
# Check we can find it
all_venues = Venue.objects.all()
self.assertEqual(len(all_venues), 1)
only_venue = all_venues[0]
self.assertEqual(only_venue, venue)
# Check attributes
self.assertEqual(only_venue.name, 'Wembley Arena')
# Check string representation
self.assertEqual(only_venue.__str__(), 'Wembley Arena')
class EventTest(TestCase):
def test_create_event(self):
# Create the venue
venue = VenueFactory()
# Create the event
event = EventFactory(venue=venue)
# Check we can find it
all_events = Event.objects.all()
self.assertEqual(len(all_events), 1)
only_event = all_events[0]
self.assertEqual(only_event, event)
# Check attributes
self.assertEqual(only_event.name, 'Queens of the Stone Age')
self.assertEqual(only_event.venue.name, 'Wembley Arena')
# Check string representation
self.assertEqual(only_event.__str__(), 'Queens of the Stone Age - Wembley Arena')
class LookupViewTest(TestCase):
"""
Test Lookup View
"""
def setUp(self):
self.factory = RequestFactory()
def test_get(self):
request = self.factory.get(reverse('gigs:lookup'))
response = LookupView.as_view()(request)
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed('gigs/lookup.html')
def test_post(self):
# Create venues to return
v1 = VenueFactory(name='Venue1')
v2 = VenueFactory(name='Venue2')
v3 = VenueFactory(name='Venue3')
v4 = VenueFactory(name='Venue4')
v5 = VenueFactory(name='Venue5')
v6 = VenueFactory(name='Venue6')
v7 = VenueFactory(name='Venue7')
v8 = VenueFactory(name='Venue8')
v9 = VenueFactory(name='Venue9')
v10 = VenueFactory(name='Venue10')
# Create events to return
e1 = EventFactory(name='Event1', venue=v1)
e2 = EventFactory(name='Event2', venue=v2)
e3 = EventFactory(name='Event3', venue=v3)
e4 = EventFactory(name='Event4', venue=v4)
e5 = EventFactory(name='Event5', venue=v5)
e6 = EventFactory(name='Event6', venue=v6)
e7 = EventFactory(name='Event7', venue=v7)
e8 = EventFactory(name='Event8', venue=v8)
e9 = EventFactory(name='Event9', venue=v9)
e10 = EventFactory(name='Event10', venue=v10)
# Set parameters
lat = 52.3749159
lon = 1.1067473
# Put together request
data = {
'latitude': lat,
'longitude': lon
}
request = self.factory.post(reverse('gigs:lookup'), data)
response = LookupView.as_view()(request)
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed('gigs/lookupresults.html') | 31.546875 | 89 | 0.628777 | 3,634 | 0.89995 | 0 | 0 | 0 | 0 | 0 | 0 | 806 | 0.199604 |
922b49321572191d7b66c10a4c4bde3685820be0 | 2,598 | py | Python | OpenSources/GDAL_moreExamples/class08demos_ogr_gdal/class08_ogr_buffer.py | mehran66/python-geospatial-open-sources | 6c56922bdd2a815a98faeb5ac65f674c22486923 | [
"MIT"
] | null | null | null | OpenSources/GDAL_moreExamples/class08demos_ogr_gdal/class08_ogr_buffer.py | mehran66/python-geospatial-open-sources | 6c56922bdd2a815a98faeb5ac65f674c22486923 | [
"MIT"
] | null | null | null | OpenSources/GDAL_moreExamples/class08demos_ogr_gdal/class08_ogr_buffer.py | mehran66/python-geospatial-open-sources | 6c56922bdd2a815a98faeb5ac65f674c22486923 | [
"MIT"
] | null | null | null | '''*********************************************
author: Galen Maclaurin
Date: 12/11/2012
Updated: 03/14/2016 , Stefan Leyk
Purpose: Simple Buffer example using OGR
*********************************************'''
from time import clock
start = clock()
import os
from osgeo import ogr
# Define variables you need
path = r'C:\GIS3\data\cl08data'
#change the working directory
os.chdir(path)
theme = 'areapoints.shp'
buffTheme = 'buffOutput.shp'
buffDist = 500
# Open the feature class, creating an 'ogr' object
ds = ogr.Open(theme)
# Get the driver from the ogr object. The driver object is an interface to work
# with a specific vector data format (i.e. ESRI shapefile in this case).
dvr = ds.GetDriver()
# Check to see if the output file exists and delete it if so.
if os.path.exists(buffTheme):
dvr.DeleteDataSource(buffTheme)
print buffTheme, "existed and has been deleted"
# Get the layer object from the ogr object. This is kind of like a cursor object.
lyr = ds.GetLayer()
# Get the number of features. the layer object is an iterable object.
numFeat = len(lyr)
# Create an empty feature class to populate with buffer features. This is stored
# in memory as an emtpy ogr object.
buff_ds = dvr.CreateDataSource(buffTheme)
# Create a layer object from the empty ogr object.
outLyr = buff_ds.CreateLayer(buffTheme[:-4],lyr.GetSpatialRef(),ogr.wkbPolygon)
# Adding a field takes two steps: create a field definition and then use that
# to create the field
# Create a field definition
fd = ogr.FieldDefn('myField',ogr.OFTString)
# Create field with the field definition
outLyr.CreateField(fd)
# Similarly, you need to use a layer definition to create a feature
lyrDef = outLyr.GetLayerDefn()
# Iterate through the features in the layer object (like with a cursor)
for feat in lyr:
#get the geometry object from the feature object.
geom = feat.GetGeometryRef()
#create a new feature using the layer definition create outside the loop
outFeat = ogr.Feature(lyrDef)
#set the feature's geometry as the buffered geometry
outFeat.SetGeometry(geom.Buffer(buffDist))
#set field value for feature
outFeat.SetField('myField','someText'+str(feat.GetFID()))
#save the new feature in the output layer created outside the loop
outLyr.CreateFeature(outFeat)
# Clean up, remove reference to the datasource objects, this is like deleting
# the cursor and row objects.
buff_ds = None
ds = None
print 'Buffer vector features complete'
print 'Elapsed time: ',round(clock()-start,2),' seconds'
| 37.652174 | 82 | 0.706697 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,724 | 0.663587 |
922cef519f3bea50e9ca93a2cb8bc5bc4fddf486 | 4,309 | py | Python | Leyva_Davis_op3.py | DavisLeyva/Examen-primer-unidad | 9829574f488a14852eee54f9e33ac2f71b280534 | [
"MIT"
] | null | null | null | Leyva_Davis_op3.py | DavisLeyva/Examen-primer-unidad | 9829574f488a14852eee54f9e33ac2f71b280534 | [
"MIT"
] | null | null | null | Leyva_Davis_op3.py | DavisLeyva/Examen-primer-unidad | 9829574f488a14852eee54f9e33ac2f71b280534 | [
"MIT"
] | null | null | null | from tkinter import Frame,Label,Button,Checkbutton,Scale,StringVar,IntVar,Entry,Tk
import serial
import time
import threading
import pandas as pd
import mysql.connector
class MainFrame(Frame):
cad = str()
def __init__(self, master=None):
super().__init__(master, width=420, height=270)
self.master = master
self.master.protocol('WM_DELETE_WINDOW',self.askQuit)
self.pack()
self.hilo1 = threading.Thread(target=self.getSensorValues,daemon=True)
self.arduino = serial.Serial("COM3",9600,timeout=1.0)
time.sleep(1)
self.value_temp_1 = IntVar()
self.value_temp=StringVar()
self.nombreA = StringVar()
self.apelli=StringVar()
self.age=IntVar()
self.dato=IntVar()
self.create_widgets()
self.isRun=True
self.hilo1.start()
self.enviar()
self.cad= str()
self.cnn=mysql.connector.connect(host="localhost",user="root",passwd="",database="historial") #Conectar con MySQL
print(self.cnn)
def Enviar_db(self):
cur=self.cnn.cursor()
sql="INSERT INTO historialmedico (Nombre,Apellido,Edad,Temperatura)VALUES('{}','{}','{}','{}')".format(self.nombreA.get(),self.apelli.get(),self.age.get(),self.value_temp_1)
cur.execute(sql)
self.cnn.commit()
time.sleep(1)
cur.close()
def askQuit(self):
self.isRun=False
self.arduino.close()
self.hilo1.join(0.1)
self.master.quit()
self.master.destroy()
print("*** finalizando...")
def getSensorValues(self):
while self.isRun:
cad =self.arduino.readline().decode('ascii').strip()
self.value_temp.set(cad)
self.value_temp_1=float(cad)
def enviar(self):
x= (self.cad)
print(x)
datos= list()
def create_widgets(self):
self.labelBPM= Label(self,text = "Nombre: ", bg= "#5CFE05",fg="black", font="Helvetica 13 bold",width=9 ,justify="center")
self.labelBPM.pack()
self.labelBPM.grid(row=0,column=0, padx=5,ipady=8, pady=10)
self.label1= Entry(self, textvariable=self.nombreA, bg= "red",fg="black", font="Helvetica 13 bold",width=15 ,justify="center")
self.label1.grid(row=0,column=1, padx=5,ipady=8, pady=10)
self.labelapellido= Label(self,text = "Apellido: ", bg= "#5CFE05",fg="black", font="Helvetica 13 bold",width=9 ,justify="center")
self.labelapellido.grid(row=1,column=0, padx=5,ipady=8, pady=10)
self.label2= Entry(self,textvariable=self.apelli,bg= "red",fg="black", font="Helvetica 13 bold",width=15 ,justify="center")
self.label2.grid(row=1,column=1, padx=5,ipady=8, pady=10)
self.labeledad= Label(self,text = "Edad: ", bg= "#5CFE05",fg="black", font="Helvetica 13 bold",width=9 ,justify="center")
self.labeledad.grid(row=2,column=0, padx=5,ipady=8, pady=10)
self.label3= Entry(self,textvariable=self.age, bg= "red",fg="black", font="Helvetica 13 bold",width=15 ,justify="center")
self.label3.grid(row=2,column=1, padx=5,ipady=8, pady=10)
self.Limpiar= Button(self,command= self.Enviar_db, text= "Enviar historial ",bg="blue",fg="white", font="Helvetica 14 bold",width=20,justify="center")
self.Limpiar.pack
self.Limpiar.grid(row=3,column=0, padx=5,pady=15,columnspan=2)
self.labelT= Label(self,textvariable = self.value_temp, bg= "yellow",fg="black", font="Helvetica 13 bold",width=9 ,justify="center")
self.labelT.grid(row=0,column=2, padx=5,ipady=8, pady=10)
self.Limpiar1= Button(self,command= self.askQuit, text= "Salir ",bg="red",fg="white", font="Helvetica 14 bold",width=7,justify="center")
self.Limpiar1.pack
self.Limpiar1.grid(row=3,column=3, padx=5,pady=15,columnspan=2)
def main():
root = Tk()
root.wm_title("Monitoro del signo vital de la temperatura")
app = MainFrame(root)
app.mainloop()
if __name__=="__main__":
main()
| 42.245098 | 182 | 0.595034 | 3,922 | 0.910188 | 0 | 0 | 0 | 0 | 0 | 0 | 670 | 0.155489 |
922d36a66e41790999941500aa12a32f8a43c0bf | 334 | py | Python | sumultiply.py | declanbarr/python-problems | 0e3f241438e8420fb16073e24bf239eafc632ca6 | [
"Apache-2.0"
] | null | null | null | sumultiply.py | declanbarr/python-problems | 0e3f241438e8420fb16073e24bf239eafc632ca6 | [
"Apache-2.0"
] | null | null | null | sumultiply.py | declanbarr/python-problems | 0e3f241438e8420fb16073e24bf239eafc632ca6 | [
"Apache-2.0"
] | null | null | null | # Declan Barr 19 Mar 2018
# Script that contains function sumultiply that takes two integer arguments and
# returns their product. Does this without the * or / operators
def sumultiply(x, y):
sumof = 0
for i in range(1, x+1):
sumof = sumof + y
return sumof
print(sumultiply(11, 13))
print(sumultiply(5, 123))
| 22.266667 | 80 | 0.685629 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 168 | 0.502994 |
922e181e066e7a206a91aadb175d4b8f0ac18a0d | 2,602 | py | Python | docs/code/distributions/multivariate/plot_multivariate_copulas.py | SURGroup/UncertaintyQuantification | a94c8db47d07134ea2b3b0a3ca53ca818532c3e6 | [
"MIT"
] | null | null | null | docs/code/distributions/multivariate/plot_multivariate_copulas.py | SURGroup/UncertaintyQuantification | a94c8db47d07134ea2b3b0a3ca53ca818532c3e6 | [
"MIT"
] | null | null | null | docs/code/distributions/multivariate/plot_multivariate_copulas.py | SURGroup/UncertaintyQuantification | a94c8db47d07134ea2b3b0a3ca53ca818532c3e6 | [
"MIT"
] | null | null | null | """
Multivariate from independent marginals and copula
==================================================
"""
#%% md
#
# - How to define α bivariate distribution from independent marginals and change its structure based on a copula supported by UQpy
# - How to plot the pdf of the distribution
# - How to modify the parameters of the distribution
#%%
#%% md
#
# Import the necessary modules.
#%%
import numpy as np
import matplotlib.pyplot as plt
#%% md
#
# Example of a multivariate distribution from joint independent marginals
# ------------------------------------------------------------------------
#%%
from UQpy.distributions import Normal, JointIndependent
from UQpy.distributions import Gumbel, JointCopula
#%% md
#
# Define a Copula
# ---------------
# The definition of bivariate distribution with a copula, is similar to defining a multivariate distribution from
# independent marginals. In both cases a list of marginals needs to be defined. In case of
#%%
marginals = [Normal(loc=0., scale=1), Normal(loc=0., scale=1)]
copula = Gumbel(theta=3.)
# dist_1 is a multivariate normal with independent marginals
dist_1 = JointIndependent(marginals)
print('Does the distribution with independent marginals have an rvs method?')
print(hasattr(dist_1, 'rvs'))
# dist_2 exhibits dependence between the two dimensions, defined using a gumbel copula
dist_2 = JointCopula(marginals=marginals, copula=copula)
print('Does the distribution with copula have an rvs method?')
print(hasattr(dist_2, 'rvs'))
#%% md
#
# Plot the pdf of the distribution before and after the copula
# -------------------------------------------------------------
#
#%%
fig, ax = plt.subplots(ncols=2, figsize=(10, 4))
x = np.arange(-3, 3, 0.1)
y = np.arange(-3, 3, 0.1)
X, Y = np.meshgrid(x, y)
Z = dist_1.pdf(x=np.concatenate([X.reshape((-1, 1)), Y.reshape((-1, 1))], axis=1))
CS = ax[0].contour(X, Y, Z.reshape(X.shape))
ax[0].clabel(CS, inline=1, fontsize=10)
ax[0].set_title('Contour plot of pdf - independent normals')
x = np.arange(-3, 3, 0.1)
y = np.arange(-3, 3, 0.1)
X, Y = np.meshgrid(x, y)
Z = dist_2.pdf(x=np.concatenate([X.reshape((-1, 1)), Y.reshape((-1, 1))], axis=1))
CS = ax[1].contour(X, Y, Z.reshape(X.shape))
ax[1].clabel(CS, inline=1, fontsize=10)
ax[1].set_title('Contour plot of pdf - normals with Gumbel copula')
plt.show()
#%% md
#
# Modify the parameters of the multivariate copula.
# -------------------------------------------------
#
# Use the update_parameters method.
#%%
print(dist_2.copula.parameters)
dist_2.update_parameters(theta_c=2.)
print(dist_2.copula.parameters) | 27.389474 | 130 | 0.647579 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,451 | 0.557434 |
922f2e4c55b39a64c04985ea44b07548e34ad5a0 | 38,177 | py | Python | tests/bdd_test.py | krooken/dd | dda262e6f4582c7f0c77c56a3f3bdfccc2847b7a | [
"BSD-3-Clause"
] | null | null | null | tests/bdd_test.py | krooken/dd | dda262e6f4582c7f0c77c56a3f3bdfccc2847b7a | [
"BSD-3-Clause"
] | null | null | null | tests/bdd_test.py | krooken/dd | dda262e6f4582c7f0c77c56a3f3bdfccc2847b7a | [
"BSD-3-Clause"
] | null | null | null | import logging
from dd.bdd import BDD as _BDD
from dd.bdd import preimage
from dd import autoref
from dd import bdd as _bdd
import nose.tools as nt
import networkx as nx
import networkx.algorithms.isomorphism as iso
class BDD(_BDD):
    """Disables refcount check upon shutdown.

    This script tests the low-level manager, where
    reference counting is not automated. For simplicity,
    references are not cleared at the end of tests here.
    Automated reference counting is in `dd.autoref`.
    """
    def __del__(self):
        # No-op destructor: the base class presumably checks at
        # shutdown that all external references were released,
        # which these tests deliberately do not do.
        pass
def test_add_var():
    """`add_var` assigns levels automatically in declaration order,
    accepts explicit levels, and rejects occupied levels.
    """
    b = BDD()
    #
    # automated level selection
    # first var
    j = b.add_var('x')
    assert len(b.vars) == 1, b.vars
    assert 'x' in b.vars, b.vars
    assert b.vars['x'] == 0, b.vars
    assert j == 0, j
    # second var
    j = b.add_var('y')
    assert len(b.vars) == 2, b.vars
    assert 'y' in b.vars, b.vars
    assert b.vars['y'] == 1, b.vars
    assert j == 1, j
    # third var
    j = b.add_var('z')
    assert len(b.vars) == 3, b.vars
    assert 'z' in b.vars, b.vars
    assert b.vars['z'] == 2, b.vars
    assert j == 2, j
    #
    # explicit level selection
    b = BDD()
    j = b.add_var('x', level=35)
    assert len(b.vars) == 1, b.vars
    assert 'x' in b.vars, b.vars
    assert b.vars['x'] == 35, b.vars
    assert j == 35, j
    j = b.add_var('y', level=5)
    assert len(b.vars) == 2, b.vars
    assert 'y' in b.vars, b.vars
    assert b.vars['y'] == 5, b.vars
    assert j == 5, j
    # attempt to add var at an existing level
    with nt.assert_raises(AssertionError):
        b.add_var('z', level=35)
    with nt.assert_raises(AssertionError):
        b.add_var('z', level=5)
    #
    # mixing automated and
    # explicit level selection
    b = BDD()
    b.add_var('x', level=2)
    b.add_var('y')
    assert len(b.vars) == 2, b.vars
    assert 'x' in b.vars, b.vars
    assert 'y' in b.vars, b.vars
    assert b.vars['x'] == 2, b.vars
    assert b.vars['y'] == 1, b.vars
    # automatic selection raises here (its candidate level is
    # already occupied), while an explicit free level succeeds
    with nt.assert_raises(AssertionError):
        b.add_var('z')
    b.add_var('z', level=0)
def test_var():
    """`var` raises for undeclared names and returns the projection node."""
    bdd = BDD()
    # requesting a variable before declaring it is an error
    with nt.assert_raises(AssertionError):
        bdd.var('x')
    level_x = bdd.add_var('x')
    u = bdd.var('x')
    # projection nodes are regular (positive) references
    assert u > 0, u
    # the node tests `x` and maps False -> FALSE, True -> TRUE
    level, low, high = bdd.succ(u)
    assert level == level_x, (level, level_x)
    assert low == bdd.false, low
    assert high == bdd.true, high
def test_assert_consistent():
    """`assert_consistent` accepts well-formed managers and raises
    `AssertionError` after the internal tables are corrupted.
    """
    g = two_vars_xy()
    assert g.assert_consistent()
    g = x_or_y()
    assert g.assert_consistent()
    # corrupt node 2's successor tuple (invalid level/successors)
    g._succ[2] = (5, 1, 2)
    with nt.assert_raises(AssertionError):
        g.assert_consistent()
    g = x_or_y()
    g.roots.add(2)
    # corrupt node 4: low successor 10 presumably does not exist
    # in the manager (dangling reference)
    g._succ[4] = (0, 10, 1)
    with nt.assert_raises(AssertionError):
        g.assert_consistent()
    g = x_or_y()
    g.roots.add(2)
    # corrupt the terminal node 1: terminals must not have successors
    g._succ[1] = (2, None, 1)
    with nt.assert_raises(AssertionError):
        g.assert_consistent()
    g = x_and_y()
    assert g.assert_consistent()
def test_level_to_variable():
ordering = {'x': 0, 'y': 1}
g = BDD(ordering)
assert g.var_at_level(0) == 'x'
assert g.var_at_level(1) == 'y'
with nt.assert_raises(AssertionError):
g.var_at_level(10)
def test_descendants():
    """`descendants` collects all nodes reachable from the given roots."""
    bdd = BDD(dict(x=0, y=1))
    conj = bdd.add_expr('x /\ y')
    disj = bdd.add_expr('x \/ y')
    both = bdd.descendants([conj, disj])
    from_conj = bdd.descendants([conj])
    from_disj = bdd.descendants([disj])
    # each root is reachable from itself
    assert conj in from_conj, from_conj
    assert disj in from_disj, from_disj
    assert conj in both, both
    assert disj in both, both
    # the terminal node 1 is reachable from every root
    assert 1 in from_conj, from_conj
    assert 1 in from_disj, from_disj
    assert 1 in both, both
    # each formula reaches one node per variable, plus the terminal
    assert len(from_conj) == 3, from_conj
    assert len(from_disj) == 3, from_disj
    assert from_conj != from_disj, (from_conj, from_disj)
    # the joint result is the union of the per-root results
    assert len(both) == 4, both
    assert both == from_conj.union(from_disj), (
        both, bdd._succ)
    # no roots reach no nodes
    empty = bdd.descendants([])
    assert len(empty) == 0, empty
def test_is_essential():
    """A variable is essential iff the node's function depends on it."""
    g = two_vars_xy()
    # a projection node depends only on its own variable
    assert g.is_essential(2, 'x')
    assert not g.is_essential(2, 'y')
    assert g.is_essential(3, 'y')
    assert not g.is_essential(3, 'x')
    g = x_and_y()
    # the conjunction depends on both variables
    assert g.is_essential(2, 'x')
    assert g.is_essential(3, 'y')
    assert g.is_essential(4, 'x')
    assert g.is_essential(4, 'y')
    assert not g.is_essential(3, 'x')
    # the constants TRUE and FALSE depend on nothing
    for node in (-1, 1):
        for var in ('x', 'y'):
            assert not g.is_essential(node, var)
    # a variable absent from the ordering is never essential
    assert not g.is_essential(2, 'z')
def test_support():
    """`support` returns the set of essential variable names."""
    g = two_vars_xy()
    assert g.support(2) == {'x'}
    assert g.support(3) == {'y'}
    # conjunction and disjunction have the same supports
    for g in (x_and_y(), x_or_y()):
        assert g.support(4) == {'x', 'y'}
        assert g.support(3) == {'y'}
def test_count():
    """`count` returns the number of satisfying assignments,
    over `nvars` variables when given.
    """
    g = x_and_y()
    assert g.count(4) == 1
    g = x_or_y()
    # over its 2-variable support, the disjunction has 3 models
    n = g.count(4)
    assert n == 3, n
    n = g.count(4, nvars=2)
    assert n == 3, n
    # the negation has a single model
    n = g.count(-4)
    assert n == 1, n
    n = g.count(-4, nvars=2)
    assert n == 1, n
    # each additional variable doubles the count
    n = g.count(4, 3)
    assert n == 6, n
    n = g.count(-4, 3)
    assert n == 2, n
    # the node argument is mandatory
    with nt.assert_raises(Exception):
        g.count()
    n = g.count(4)
    assert n == 3, n
    # also via `dd.bdd.BDD` directly
    g = _bdd.BDD()
    g.add_var('x')
    g.add_var('y')
    u = g.add_expr('x /\ y ')
    n = g.count(u)
    assert n == 1, n
def test_pick_iter():
    """`pick_iter` enumerates satisfying assignments.

    `care_bits` controls which variables are filled in:
    `None` means the node's support; a smaller set yields
    partial assignments where a variable is irrelevant.
    """
    # x /\ y
    g = x_and_y()
    u = 4
    bits = {'x', 'y'}
    s = [{'x': 1, 'y': 1}]
    compare_iter_to_list_of_sets(u, g, s, bits)
    # care_bits == support (default)
    bits = None
    compare_iter_to_list_of_sets(u, g, s, bits)
    #
    # x \/ y
    g = x_or_y()
    u = 4
    # support
    bits = None
    s = [{'x': 1, 'y': 0}, {'x': 1, 'y': 1},
         {'x': 0, 'y': 1}]
    compare_iter_to_list_of_sets(u, g, s, bits)
    # only what appears along traversal
    bits = set()
    s = [{'x': 1}, {'x': 0, 'y': 1}]
    compare_iter_to_list_of_sets(u, g, s, bits)
    # bits < support
    bits = {'x'}
    s = [{'x': 1}, {'x': 0, 'y': 1}]
    compare_iter_to_list_of_sets(u, g, s, bits)
    bits = {'y'}
    s = [{'x': 1, 'y': 0},{'x': 1, 'y': 1},
         {'x': 0, 'y': 1}]
    compare_iter_to_list_of_sets(u, g, s, bits)
    #
    # x /\ ~ y
    g = x_and_not_y()
    u = -2
    bits = {'x', 'y'}
    s = [{'x': 1, 'y': 0}]
    compare_iter_to_list_of_sets(u, g, s, bits)
    # gaps in order: `y` is not in the support of `x /\ z`,
    # so the assignment mentions only `x` and `z`
    order = {'x': 0, 'y': 1, 'z': 2}
    bdd = BDD(order)
    u = bdd.add_expr('x /\ z')
    (m,) = bdd.pick_iter(u)
    assert m == {'x': 1, 'z': 1}, m
def compare_iter_to_list_of_sets(u, g, s, care_bits):
    """Assert that `g.pick_iter(u, care_bits)` yields exactly
    the assignments in `s`, in any order, counting duplicates.
    """
    remaining = list(s)
    for assignment in g.pick_iter(u, care_bits):
        # each yielded assignment must be expected, and only once
        assert assignment in remaining, assignment
        remaining.remove(assignment)
    # every expected assignment must have been yielded
    assert not remaining, remaining
def test_enumerate_minterms():
    """`_enumerate_minterms` expands a cube over the given bits.

    Variables fixed by the cube keep their value; the remaining
    bits range over all combinations.
    """
    # non-empty cube
    cube = dict(x=False)
    bits = ['x', 'y', 'z']
    r = _bdd._enumerate_minterms(cube, bits)
    p = set_from_generator_of_dict(r)
    # expected: x fixed to False, y and z free
    q = set()
    for y in (False, True):
        for z in (False, True):
            m = (('x', False), ('y', y), ('z', z))
            q.add(m)
    assert p == q, (p, q)
    # empty cube
    cube = dict()
    bits = ['x', 'y', 'z']
    r = _bdd._enumerate_minterms(cube, bits)
    p = set_from_generator_of_dict(r)
    # expected: all 8 assignments over x, y, z
    q = set()
    for x in (False, True):
        for y in (False, True):
            for z in (False, True):
                m = (('x', x), ('y', y), ('z', z))
                q.add(m)
    assert p == q, (p, q)
    # fewer bits than cube
    cube = dict(x=False, y=True)
    bits = set()
    r = _bdd._enumerate_minterms(cube, bits)
    p = set_from_generator_of_dict(r)
    # the cube itself is the single minterm
    q = {(('x', False), ('y', True))}
    assert p == q, (p, q)
def set_from_generator_of_dict(gen):
    """Return a set of hashable representatives of the dicts in `gen`.

    Each dict becomes a tuple of its items sorted by key, so that
    collections of assignments can be compared as sets.
    """
    return {
        tuple(sorted(d.items(), key=lambda kv: kv[0]))
        for d in gen}
def test_isomorphism():
    """`reduction` merges isomorphic nodes into one."""
    ordering = {'x': 0}
    g = BDD(ordering)
    g.roots.update([2, 3])
    # nodes 2 and 3 have identical level and successors
    g._succ[2] = (0, -1, 1)
    g._succ[3] = (0, -1, 1)
    h = g.reduction()
    # the two isomorphic nodes collapse to a single node
    assert set(h) == {1, 2}, set(h)
    assert 0 not in h
    assert h._succ[1] == (1, None, None)
    assert h._succ[2] == (0, -1, 1)
    # both roots map to the same reduced node
    assert h.roots == {2}
def test_elimination():
    """`reduction` removes redundant nodes (low == high)."""
    ordering = {'x': 0, 'y': 1}
    g = BDD(ordering)
    g.roots.add(2)
    # high == low, so node 2 is redundant
    g._succ[2] = (0, 3, 3)
    g._succ[3] = (1, -1, 1)
    h = g.reduction()
    # only the terminal and one internal node remain
    assert set(h) == {1, 2}
def test_reduce_combined():
"""Fig.5 in 1986 Bryant TOC"""
ordering = {'x': 0, 'y': 1, 'z': 2}
g = BDD(ordering)
g.roots.add(2)
g._succ[2] = (0, 3, 4)
g._succ[3] = (1, -1, 5)
g._succ[4] = (1, 5, 6)
g._succ[5] = (2, -1, 1)
g._succ[6] = (2, -1, 1)
h = g.reduction()
assert 1 in h
assert ordering == h.vars
r = nx.MultiDiGraph()
r.add_node(1, level=3)
r.add_node(2, level=0)
r.add_node(3, level=1)
r.add_node(4, level=2)
r.add_edge(2, 3, value=False, complement=False)
r.add_edge(2, 4, value=True, complement=False)
r.add_edge(3, 4, value=True, complement=False)
r.add_edge(3, 1, value=False, complement=True)
r.add_edge(4, 1, value=False, complement=True)
r.add_edge(4, 1, value=True, complement=False)
(u, ) = h.roots
compare(u, h, r)
# r.write('r.pdf')
# h.write('h.pdf')
def test_reduction_complemented_edges():
    """`reduction` preserves functions reached via complemented edges."""
    bdd = BDD()
    bdd.add_var('x', level=0)
    bdd.add_var('y', level=1)
    a, b = map(bdd.level_of_var, ['x', 'y'])
    assert a < b, (a, b)
    # complemented edge from internal node to
    # non-terminal node
    expr = '~ x /\ y'
    _test_reduction_complemented_edges(expr, bdd)
    # complemented edge from external reference to
    # non-terminal node
    expr = 'x /\ ~ y'
    u = bdd.add_expr(expr)
    # a negative reference means the root edge is complemented
    assert u < 0, u
    _test_reduction_complemented_edges(expr, bdd)
def _test_reduction_complemented_edges(expr, bdd):
    """Assert that `reduction` preserves `expr` as the unique root."""
    root = bdd.add_expr(expr)
    bdd.roots.add(root)
    reduced = bdd.reduction()
    # exactly one root survives, and it still represents `expr`
    (r,) = reduced.roots
    r_ = reduced.add_expr(expr)
    assert r == r_, (r, r_)
    reduced.assert_consistent()
    # restore the manager's root set for the caller
    bdd.roots.remove(root)
def test_find_or_add():
    """`find_or_add` applies the elimination rule, hashes existing
    nodes, maintains reference counts, and keeps complemented
    edges canonical.
    """
    ordering = {'x': 0, 'y': 1}
    g = BDD(ordering)
    # init
    n = len(g)
    m = g._min_free
    assert n == 1, n
    assert m == 2, m
    # elimination rule
    i = 0
    v = -1
    w = 1
    n = len(g)
    # equal low and high: no new node, the successor is returned
    u = g.find_or_add(i, v, v)
    n_ = len(g)
    assert n == n_, (n, n_)
    assert u == v, (u, v)
    assert len(g._pred) == 1, g._pred
    t = (2, None, None)
    assert t in g._pred, g._pred
    assert g._pred[t] == 1, g._pred
    # unchanged min_free
    v = 1
    m = g._min_free
    g.find_or_add(i, v, v)
    m_ = g._min_free
    assert m_ == m, (m_, m)
    # add new node
    g = BDD(ordering)
    v = -1
    w = 1
    n = len(g)
    m = g._min_free
    assert n == 1, n
    u = g.find_or_add(i, v, w)
    n_ = len(g)
    m_ = g._min_free
    assert u != v, (u, v)
    assert n_ == n + 1, (n, n_)
    assert m_ == m + 1, (m, m_)
    assert g._succ[u] == (i, -1, 1)
    assert (i, v, w) in g._pred
    assert abs(u) in g._ref
    # the fresh node itself starts unreferenced
    assert g._ref[abs(u)] == 0
    # terminal node `v`: 2 refs + 1 ref by manager
    assert g._ref[abs(v)] == 3, g._ref
    # independent increase of reference counters
    v = u
    w = w
    refv = g._ref[abs(v)]
    refw = g._ref[w]
    u = g.find_or_add(i, v, w)
    refv_ = g._ref[abs(v)]
    refw_ = g._ref[w]
    assert refv + 1 == refv_, (refv, refv_)
    assert refw + 1 == refw_, (refw, refw_)
    # add existing
    # the same triple returns the same node,
    # without touching tables or counters
    n = len(g)
    m = g._min_free
    refv = g._ref[abs(v)]
    refw = g._ref[w]
    r = g.find_or_add(i, v, w)
    n_ = len(g)
    m_ = g._min_free
    refv_ = g._ref[abs(v)]
    refw_ = g._ref[w]
    assert n == n_, (n, n_)
    assert m == m_, (m, m_)
    assert u == r, u
    assert refv == refv_, (refv, refv_)
    assert refw == refw_, (refw, refw_)
    # only non-terminals can be added
    with nt.assert_raises(AssertionError):
        g.find_or_add(2, -1, 1)
    # low and high must already exist
    with nt.assert_raises(AssertionError):
        g.find_or_add(0, 3, 4)
    # canonicity of complemented edges
    # canonical form: the low successor stored is regular (positive)
    # v < 0, w > 0
    g = BDD(ordering)
    i = 0
    v = -1
    w = 1
    u = g.find_or_add(i, v, w)
    assert u > 0, u
    # v > 0, w < 0
    v = 1
    w = -1
    u = g.find_or_add(i, v, w)
    assert u < 0, u
    assert abs(u) in g._succ, u
    _, v, w = g._succ[abs(u)]
    assert v < 0, v
    assert w > 0, w
    # v < 0, w < 0
    v = -1
    w = -2
    u = g.find_or_add(i, v, w)
    assert u < 0, u
    _, v, w = g._succ[abs(u)]
    assert v > 0, v
    assert w > 0, w
def test_next_free_int():
    """`_next_free_int` returns the smallest unused node index > `start`."""
    g = BDD()
    # contiguous: mock `_succ` as a plain set of occupied indices
    g._succ = {1, 2, 3}
    start = 1
    n = g._next_free_int(start)
    _assert_smaller_are_nodes(start, g)
    assert n == 4, n
    start = 3
    n = g._next_free_int(start)
    _assert_smaller_are_nodes(start, g)
    assert n == 4, n
    # with blanks: the gap at index 2 is found first
    g._succ = {1, 3}
    start = 1
    n = g._next_free_int(start)
    _assert_smaller_are_nodes(start, g)
    assert n == 2, n
    n = g._next_free_int(start=3)
    assert n == 4, n
    # full: no index available below `max_nodes`
    g._succ = {1, 2, 3}
    g.max_nodes = 3
    with nt.assert_raises(Exception):
        g._next_free_int(start=1)
def _assert_smaller_are_nodes(start, bdd):
for i in range(1, start + 1):
assert i in bdd, i
def test_collect_garbage():
    """Garbage collection removes only nodes without external references."""
    # all nodes are garbage
    g = BDD({'x': 0, 'y': 1})
    u = g.add_expr('x /\ y')
    n = len(g)
    assert n == 4, n
    uref = g._ref[abs(u)]
    assert uref == 0, uref
    _, v, w = g._succ[abs(u)]
    vref = g._ref[abs(v)]
    wref = g._ref[w]
    # terminal node `v`: 6 refs + 1 ref by manager
    assert vref == 6, vref
    assert wref == 1, wref
    g.collect_garbage()
    n = len(g)
    assert n == 1, n
    assert u not in g, g._succ
    assert w not in g, g._succ
    # some nodes not garbage
    # projection of x is garbage
    g = BDD({'x': 0, 'y': 1})
    u = g.add_expr('x /\ y')
    n = len(g)
    assert n == 4, n
    # one external reference keeps `u` (and its successors) alive
    g._ref[abs(u)] += 1
    uref = g._ref[abs(u)]
    assert uref == 1, uref
    g.collect_garbage()
    n = len(g)
    assert n == 3, n
def test_top_cofactor():
    """Cofactors of a node w.r.t. a given level."""
    ordering = {'x': 0, 'y': 1}
    g = BDD(ordering)
    x = ordering['x']
    y = ordering['y']
    u = g.find_or_add(y, -1, 1)
    # `u` is independent of `x`, so both cofactors equal `u`
    assert g._top_cofactor(u, x) == (u, u)
    assert g._top_cofactor(u, y) == (-1, 1)
    u = g.find_or_add(x, -1, 1)
    assert g._top_cofactor(u, x) == (-1, 1)
    # a complemented edge flips both cofactors
    assert g._top_cofactor(-u, x) == (1, -1)
def test_ite():
    """`ite` (if-then-else) builds the expected BDDs."""
    ordering = {'x': 0, 'y': 1}
    g = BDD(ordering)
    # x
    ix = ordering['x']
    x = g.find_or_add(ix, -1, 1)
    h = ref_var(ix)
    compare(x, g, h)
    # y
    iy = ordering['y']
    y = g.find_or_add(iy, -1, 1)
    h = ref_var(iy)
    compare(y, g, h)
    # x and y
    u = g.ite(x, y, -1)
    h = ref_x_and_y()
    compare(u, g, h)
    # x or y
    u = g.ite(x, 1, y)
    h = ref_x_or_y()
    compare(u, g, h)
    # negation
    assert g.ite(x, -1, 1) == -x, g._succ
    assert g.ite(-x, -1, 1) == x, g._succ
def test_add_expr():
    """Parsing an expression yields the expected BDD (checked by isomorphism)."""
    ordering = {'x': 0, 'y': 1}
    g = BDD(ordering)
    # x
    ix = ordering['x']
    u = g.add_expr('x')
    h = ref_var(ix)
    compare(u, g, h)
    # x and y
    u = g.add_expr('x /\ y')
    h = ref_x_and_y()
    compare(u, g, h)
def test_compose():
    """Substituting a BDD for a variable via `let`."""
    ordering = {'x': 0, 'y': 1, 'z': 2}
    g = BDD(ordering)
    # x /\ (x \/ z)
    a = g.add_expr('x /\ y')
    b = g.add_expr('x \/ z')
    c = g.let({'y': b}, a)
    d = g.add_expr('x /\ (x \/ z)')
    assert c == d, (c, d)
    # (y \/ z) /\ x
    ordering = {'x': 0, 'y': 1, 'z': 2, 'w': 3}
    g = BDD(ordering)
    a = g.add_expr('(x /\ y) \/ z')
    b = g.add_expr('(y \/ z) /\ x')
    c = g.let({'z': b}, a)
    assert c == b, (c, b)
    # long expr
    ordering = {'x': 0, 'y': 1, 'z': 2, 'w': 3}
    g = BDD(ordering)
    a = g.add_expr('(x /\ y) \/ (~ z \/ (w /\ y /\ x))')
    b = g.add_expr('(y \/ ~ z) /\ x')
    c = g.let({'y': b}, a)
    d = g.add_expr(
        '(x /\ ((y \/ ~ z) /\ x)) \/ '
        ' (~ z \/ (w /\ ((y \/ ~ z) /\ x) /\ x))')
    assert c == d, (c, d)
    # complemented edges
    ordering = {'x': 0, 'y': 1}
    g = BDD(ordering)
    f = g.add_expr('x <=> y')
    var = 'y'
    new_level = 0
    # substitute the projection of the level-0 variable (`x`) for `y`
    var_node = g.find_or_add(new_level, -1, 1)
    u = g.let({var: var_node}, f)
    assert u == 1, g.to_expr(u)
def test_cofactor():
    """Restricting variables to constants via `let`."""
    ordering = {'x': 0, 'y': 1, 'z': 2}
    g = BDD(ordering)
    # u not in g
    with nt.assert_raises(AssertionError):
        g.let({'x': False, 'y': True, 'z': False}, 5)
    # x /\ y
    e = g.add_expr('x /\ y')
    x = g.add_expr('x')
    assert g.let({'x': False}, x) == -1
    assert g.let({'x': True}, x) == 1
    assert g.let({'x': False}, -x) == 1
    assert g.let({'x': True}, -x) == -1
    y = g.add_expr('y')
    assert g.let({'x': True}, e) == y
    assert g.let({'x': False}, e) == -1
    assert g.let({'y': True}, e) == x
    assert g.let({'y': False}, e) == -1
    # negated function: cofactors are negated too
    assert g.let({'x': False}, -e) == 1
    assert g.let({'x': True}, -e) == -y
    assert g.let({'y': False}, -e) == 1
    assert g.let({'y': True}, -e) == -x
def test_swap():
    """Swapping adjacent levels preserves functions and reference counts."""
    # x, y
    g = BDD({'x': 0, 'y': 1})
    x = g.add_expr('x')
    y = g.add_expr('y')
    g.incref(x)
    g.incref(y)
    n = len(g)
    assert n == 3, n
    nold, n = g.swap('x', 'y')
    assert n == 3, n
    assert nold == n, nold
    assert g.vars == {'y': 0, 'x': 1}, g.vars
    assert g.assert_consistent()
    # functions remain invariant
    x_ = g.add_expr('x')
    y_ = g.add_expr('y')
    assert x == x_, (x, x_, g._succ)
    assert y == y_, (y, y_, g._succ)
    # external reference counts remain unchanged
    assert g._ref[abs(x)] == 1
    assert g._ref[abs(y)] == 1
    # x /\ y
    g = BDD({'x': 0, 'y': 1})
    u = g.add_expr('x /\ y')
    g.incref(u)
    nold, n = g.swap('x', 'y')
    assert nold == n, (nold, n)
    assert g.vars == {'y': 0, 'x': 1}, g.vars
    u_ = g.add_expr('x /\ y')
    assert u == u_, (u, u_)
    assert g.assert_consistent()
    # reference counts unchanged
    assert g._ref[abs(u)] == 1
    # x /\ ~ y
    # tests handling of complement edges
    e = 'x /\ ~ y'
    g = x_and_not_y()
    u = g.add_expr(e)
    g.incref(u)
    g.collect_garbage()
    n = len(g)
    assert n == 3, n
    nold, n = g.swap('x', 'y')
    assert n == 3, n
    assert nold == n, nold
    assert g.vars == {'x': 1, 'y': 0}
    assert g.assert_consistent()
    u_ = g.add_expr(e)
    # function u must have remained unaffected
    assert u_ == u, (u, u_, g._succ)
    # invert swap of:
    # x /\ ~ y
    nold, n = g.swap('x', 'y')
    assert n == 3, n
    assert nold == n, nold
    assert g.vars == {'x': 0, 'y': 1}
    assert g.assert_consistent()
    u_ = g.add_expr(e)
    assert u_ == u, (u, u_, g._succ)
    # Figs. 6.24, 6.25 Baier 2008
    g = BDD({'z1': 0, 'y1': 1, 'z2': 2,
             'y2': 3, 'z3': 4, 'y3': 5})
    u = g.add_expr('(z1 /\ y1) \/ (z2 /\ y2) \/ (z3 /\ y3)')
    g.incref(u)
    n = len(g)
    assert n == 16, n
    g.collect_garbage()
    n = len(g)
    assert n == 7, n
    # sift to inefficient order
    g.swap('y1', 'z2') # z1, z2, y1, y2, z3, y3
    g.swap('y2', 'z3') # z1, z2, y1, z3, y2, y3
    g.swap('y1', 'z3') # z1, z2, z3, y1, y2, y3
    n = len(g)
    assert n == 15, n
    assert g.assert_consistent()
    new_ordering = {
        'z1': 0, 'z2': 1, 'z3': 2,
        'y1': 3, 'y2': 4, 'y3': 5}
    assert g.vars == new_ordering, g.vars
    u_ = g.add_expr('(z1 /\ y1) \/ (z2 /\ y2) \/ (z3 /\ y3)')
    assert u_ == u, (u, u_, g._succ)
    # g.dump('g.pdf')
def test_sifting():
    """`reorder` (sifting) shrinks the BDD and preserves functions."""
    # Figs. 6.24, 6.25 Baier 2008
    g = BDD({'z1': 0, 'z2': 1, 'z3': 2,
             'y1': 3, 'y2': 4, 'y3': 5})
    u = g.add_expr('(z1 /\ y1) \/ (z2 /\ y2) \/ (z3 /\ y3)')
    g.incref(u)
    g.collect_garbage()
    n = len(g)
    assert n == 15, n
    _bdd.reorder(g)
    n_ = len(g)
    # sifting must have reduced the number of nodes
    assert n > n_, (n, n_)
    u_ = g.add_expr('(z1 /\ y1) \/ (z2 /\ y2) \/ (z3 /\ y3)')
    g.incref(u)
    g.collect_garbage()
    g.assert_consistent()
    assert u == u_, (u, u_)
def test_request_reordering():
    """Reordering is requested only after the BDD at least doubles in size."""
    ctx = Dummy()
    # reordering off
    n = ctx._last_len
    assert n is None, n
    _bdd._request_reordering(ctx)
    # reordering on
    ctx._last_len = 1
    ctx.length = 3 # >= 2 = 2 * _last_len
    # large growth
    with nt.assert_raises(_bdd._NeedsReordering):
        _bdd._request_reordering(ctx)
    ctx._last_len = 2
    ctx.length = 3 # < 4 = 2 * _last_len
    # small growth
    _bdd._request_reordering(ctx)
def test_reordering_context():
    """`_ReorderingContext` sets/restores the nesting flag and filters exceptions."""
    ctx = Dummy()
    # top context
    ctx.assert_(False)
    with _bdd._ReorderingContext(ctx):
        ctx.assert_(True)
        raise _bdd._NeedsReordering()
    # `_NeedsReordering` is absorbed by the outermost context
    ctx.assert_(False)
    # nested context
    ctx._reordering_context = True
    with nt.assert_raises(_bdd._NeedsReordering):
        with _bdd._ReorderingContext(ctx):
            ctx.assert_(True)
            raise _bdd._NeedsReordering()
    ctx.assert_(True)
    # other exception
    ctx._reordering_context = False
    with nt.assert_raises(AssertionError):
        with _bdd._ReorderingContext(ctx):
            ctx.assert_(True)
            raise AssertionError()
    ctx.assert_(False)
    ctx._reordering_context = True
    with nt.assert_raises(Exception):
        with _bdd._ReorderingContext(ctx):
            raise Exception()
    ctx.assert_(True)
class Dummy(object):
    """Stub manager for exercising the reordering-context state machine.

    Mimics the attributes that `_ReorderingContext` and
    `_request_reordering` read from a `BDD` manager.
    """

    def __init__(self):
        self._reordering_context = False
        self._last_len = None
        self.length = 1

    def __len__(self):
        return self.length

    def assert_(self, value):
        """Assert (by identity) that the context flag equals `value`."""
        flag = self._reordering_context
        assert flag is value, flag
def test_dynamic_reordering():
    """Reordering triggers automatically once the BDD grows enough.

    Uses `TrackReorderings` to count invocations of `swap`.
    """
    b = TrackReorderings()
    # declare variables (plain loop, not a side-effect comprehension)
    for var in ['x', 'y', 'z', 'a', 'b', 'c', 'e']:
        b.add_var(var)
    # add expr with reordering off
    assert not b.reordering_is_on()
    assert b.n_swaps == 0, b.n_swaps
    u = b.add_expr('x /\ y /\ z')
    assert b.n_swaps == 0, b.n_swaps
    b.incref(u)
    n = len(b)
    assert n == 7, n
    # add expr with reordering on
    b._last_len = 6
    assert b.reordering_is_on()
    v = b.add_expr('a /\ b')
    assert b.reordering_is_on()
    assert b.n_swaps == 0, b.n_swaps
    b.incref(v)
    n = len(b)
    assert n == 10, n
    # add an expr that triggers reordering
    assert b.reordering_is_on()
    w = b.add_expr('z \/ (~ a /\ x /\ ~ y)')
    assert b.reordering_is_on()
    n_swaps = b.n_swaps
    assert n_swaps > 0, n_swaps
    b.incref(w)
    # fixed: each assertion message now reports the node it checks
    assert u in b, (u, b._succ)
    assert v in b, (v, b._succ)
    assert w in b, (w, b._succ)
    # add another expr that triggers reordering
    old_n_swaps = n_swaps
    assert b.reordering_is_on()
    b.add_expr('(~ z \/ (c /\ b)) /\ e /\ (a /\ (~x \/ y))')
    b.add_expr('(e \/ ~ a) /\ x /\ (b \/ ~ y)')
    n_swaps = b.n_swaps
    assert n_swaps > old_n_swaps, (n_swaps, old_n_swaps)
    assert b.reordering_is_on()
class TrackReorderings(BDD):
    """To record invocations of reordering."""
    def __init__(self, *arg, **kw):
        # number of times `swap` was called (incremented by override below)
        self.n_swaps = 0
        super(TrackReorderings, self).__init__(*arg, **kw)
    def swap(self, *arg, **kw):
        """Count the swap, then delegate to `BDD.swap`."""
        self.n_swaps += 1
        return super(TrackReorderings, self).swap(*arg, **kw)
    def reordering_is_on(self):
        """Return `True` if dynamic reordering is currently enabled."""
        d = self.configure()
        r = d['reordering']
        return r is True
def test_undeclare_vars():
    """Removing unused variables compacts the variable order."""
    bdd = BDD()
    bdd.declare('x', 'y', 'z', 'w')
    # empty arg `vrs`
    u = bdd.add_expr('x /\ y /\ w')
    rm_vars = bdd.undeclare_vars()
    rm_vars_ = {'z'}
    assert rm_vars == rm_vars_, (rm_vars, rm_vars_)
    bdd_vars_ = dict(x=0, y=1, w=2)
    assert bdd.vars == bdd_vars_, bdd.vars
    assert bdd.assert_consistent()
    # nonempty `vrs` with all empty levels
    bdd = BDD()
    bdd.declare('x', 'y', 'z', 'w')
    u = bdd.add_expr('y /\ w')
    rm_vars = bdd.undeclare_vars('x', 'z')
    rm_vars_ = {'x', 'z'}
    assert rm_vars == rm_vars_, (rm_vars, rm_vars_)
    bdd_vars_ = dict(y=0, w=1)
    assert bdd.vars == bdd_vars_, bdd.vars
    assert bdd.assert_consistent()
    # nonempty `vrs` without all empty levels
    bdd = BDD()
    bdd.declare('x', 'y', 'z', 'w')
    u = bdd.add_expr('y /\ w')
    rm_vars = bdd.undeclare_vars('z')
    rm_vars_ = {'z'}
    assert rm_vars == rm_vars_, (rm_vars, rm_vars_)
    bdd_vars_ = dict(x=0, y=1, w=2)
    assert bdd.vars == bdd_vars_, bdd.vars
    assert bdd.assert_consistent()
    # remove only unused variables
    bdd = BDD()
    bdd.declare('x', 'y', 'z', 'w')
    u = bdd.add_expr('y /\ w')
    # `y` occurs in `u`, so undeclaring it must fail
    with nt.assert_raises(AssertionError):
        bdd.undeclare_vars('z', 'y')
def test_dump_load():
    """Dumping selected roots to a file and loading them back."""
    prefix = 'test_dump_load'
    fname = prefix + '.p'
    dvars = dict(x=0, y=1)
    # dump
    b = BDD(dvars)
    e = 'x /\ ~ y'
    u_dumped = b.add_expr(e)
    b.dump(fname, [u_dumped])
    b.dump(fname) # no roots
    # load
    b = BDD(dvars)
    b.add_expr('x \/ y')
    u_new = b.add_expr(e)
    umap = b.load(fname)
    u_loaded = umap[abs(u_dumped)]
    # restore the complement bit of the dumped node
    if u_dumped < 0:
        u_loaded = -u_loaded
    assert u_loaded == u_new, (
        u_dumped, u_loaded, u_new, umap)
    assert b.assert_consistent()
def test_dump_load_manager():
    """Dumping and reloading the entire manager state."""
    prefix = 'test_dump_load_manager'
    g = BDD({'x': 0, 'y': 1})
    e = 'x /\ ~ y'
    u = g.add_expr(e)
    g.incref(u)
    fname = prefix + '.p'
    g._dump_manager(fname)
    h = g._load_manager(fname)
    assert g.assert_consistent()
    # the reloaded manager yields the same node for the same expression
    u_ = h.add_expr(e)
    assert u == u_, (u, u_)
    # h.dump(prefix + '.pdf')
def test_quantify():
    """Existential and universal quantification."""
    ordering = {'x': 0, 'y': 1, 'z': 2}
    g = BDD(ordering)
    # x /\ y
    e = g.add_expr('x /\ ~ y')
    x = g.add_expr('x')
    not_y = g.add_expr('~ y')
    assert g.quantify(e, {'x'}) == not_y
    assert g.quantify(e, {'x'}, forall=True) == -1
    assert g.quantify(e, {'y'}) == x
    assert g.quantify(e, {'x'}, forall=True) == -1
    # x \/ y \/ z
    e = g.add_expr('x \/ y \/ z')
    xy = g.add_expr('x \/ y')
    yz = g.add_expr('y \/ z')
    zx = g.add_expr('z \/ x')
    assert g.quantify(e, {'x'})
    assert g.quantify(e, {'y'})
    assert g.quantify(e, {'z'})
    assert g.quantify(e, {'z'}, forall=True) == xy
    assert g.quantify(e, {'x'}, forall=True) == yz
    assert g.quantify(e, {'y'}, forall=True) == zx
    # complement edges
    u = -x
    v = g.quantify(u, {'y'}, forall=True)
    assert v == -x, g.to_expr(v)
    # multiple values: test recursion
    e = g.add_expr('x /\ y /\ z')
    x = g.add_expr('x')
    r = g.quantify(e, {'y', 'z'})
    assert r == x, r
def test_quantifier_syntax():
    r"""Parsing of `\E` (exists) and `\A` (forall) quantifiers.

    Raw string literals are used because `'\E'` etc. are invalid
    escape sequences (`DeprecationWarning`, a `SyntaxWarning` in
    newer Python versions); the string values are unchanged.
    """
    b = BDD()
    for var in ['x', 'y']:
        b.add_var(var)
    # constants
    u = b.add_expr(r'\E x: TRUE')
    assert u == b.true, u
    u = b.add_expr(r'\E x, y: TRUE')
    assert u == b.true, u
    u = b.add_expr(r'\E x: FALSE')
    assert u == b.false, u
    u = b.add_expr(r'\A x: TRUE')
    assert u == b.true, u
    u = b.add_expr(r'\A x: FALSE')
    assert u == b.false, u
    u = b.add_expr(r'\A x, y: FALSE')
    assert u == b.false, u
    # variables
    u = b.add_expr(r'\E x: x')
    assert u == b.true, u
    u = b.add_expr(r'\A x: x')
    assert u == b.false, u
    u = b.add_expr(r'\E x, y: x')
    assert u == b.true, u
    u = b.add_expr(r'\E x, y: y')
    assert u == b.true, u
    u = b.add_expr(r'\A x: y')
    assert u == b.var('y'), u
    u = b.add_expr(r'\A x: ~ y')
    u_ = b.apply('not', b.var('y'))
    assert u == u_, (u, u_)
def test_rename():
    """Renaming variables via `let` with variable-to-variable maps."""
    ordering = {'x': 0, 'xp': 1}
    g = BDD(ordering)
    x = g.add_expr('x')
    xp = g.add_expr('xp')
    dvars = {'x': 'xp'}
    xrenamed = g.let(dvars, x)
    assert xrenamed == xp, xrenamed
    ordering = {'x': 0, 'xp': 1,
                'y': 2, 'yp': 3,
                'z': 4, 'zp': 5}
    g = BDD(ordering)
    u = g.add_expr('x /\ y /\ ~ z')
    dvars = {'x': 'xp', 'y': 'yp', 'z': 'zp'}
    urenamed = g.let(dvars, u)
    up = g.add_expr('xp /\ yp /\ ~ zp')
    assert urenamed == up, urenamed
    # assertion violations
    # non-neighbors
    dvars = {'x': 'yp'}
    r = g.let(dvars, u)
    r_ = g.add_expr('yp /\ y /\ ~ z')
    assert r == r_, (r, r_)
    # u not in bdd
    dvars = {'x': 'xp'}
    with nt.assert_raises(AssertionError):
        g.let(dvars, 1000)
    # y essential for u
    dvars = {'x': 'y'}
    v = g.let(dvars, u)
    v_ = g.add_expr('y /\ ~ z')
    assert v == v_, (v, v_)
    # old and new vars intersect
    dvars = {'x': 'x'}
    v = g.let(dvars, u)
    assert v == u, (v, u)
def test_rename_syntax():
    r"""Parsing of the substitution operator `\S`.

    Raw string literals are used because `'\S'` (and backslash-space)
    are invalid escape sequences; the string values are unchanged.
    """
    b = BDD()
    for var in ['x', 'y', 'z', 'w']:
        b.add_var(var)
    # single substitution
    u = b.add_expr(r'\S y / x: TRUE')
    assert u == b.true, u
    u = b.add_expr(r'\S y / x: FALSE')
    assert u == b.false, u
    u = b.add_expr(r'\S y / x: x')
    u_ = b.add_expr('y')
    assert u == u_, (u, u_)
    u = b.add_expr(r'\S y / x: z')
    u_ = b.add_expr('z')
    assert u == u_, (u, u_)
    u = b.add_expr(r'\S y / x: x /\ z')
    u_ = b.add_expr(r'y /\ z')
    assert u == u_, (u, u_)
    # multiple substitution
    u = b.add_expr(r'\S y / x, w / z: x /\ z')
    u_ = b.add_expr(r'y /\ w')
    assert u == u_, (u, u_)
    u = b.add_expr(r'\S y / x, w / z: z \/ ~ x')
    u_ = b.add_expr(r'w \/ ~ y')
    assert u == u_, (u, u_)
def test_image_rename_map_checks():
    """Sanity checks on the rename maps of `image` / `preimage`."""
    ordering = {'x': 0, 'xp': 1,
                'y': 2, 'yp': 3,
                'z': 4, 'zp': 5}
    bdd = BDD(ordering)
    # non-adjacent
    rename = {0: 2, 3: 4}
    qvars = set()
    r = _bdd.image(1, 1, rename, qvars, bdd)
    assert r == 1, r
    r = _bdd.preimage(1, 1, rename, qvars, bdd)
    assert r == 1, r
    # overlapping keys and values
    rename = {0: 1, 1: 2}
    with nt.assert_raises(AssertionError):
        _bdd.image(1, 1, rename, qvars, bdd)
    with nt.assert_raises(AssertionError):
        _bdd.preimage(1, 1, rename, qvars, bdd)
    # may be in support after quantification ?
    trans = bdd.add_expr('x => xp')
    source = bdd.add_expr('x /\ y')
    qvars = {0}
    rename = {1: 0, 3: 2}
    with nt.assert_raises(AssertionError):
        _bdd.image(trans, source, rename, qvars, bdd)
    # in support of `target` ?
    qvars = set()
    trans = bdd.add_expr('y')
    target = bdd.add_expr('x /\ y')
    rename = {0: 2}
    r = _bdd.preimage(trans, target, rename, qvars, bdd)
    assert r == bdd.var('y'), r
def test_preimage():
    """Predecessor computation under a transition relation."""
    # exists: x, y
    # forall: z
    ordering = {'x': 0, 'xp': 1,
                'y': 2, 'yp': 3,
                'z': 4, 'zp': 5}
    rename = {0: 1, 2: 3, 4: 5}
    g = BDD(ordering)
    f = g.add_expr('~ x')
    t = g.add_expr('x <=> ~ xp')
    qvars = {1, 3}
    p = preimage(t, f, rename, qvars, g)
    x = g.add_expr('x')
    assert x == p, (x, p)
    # a cycle
    # (x /\ y) --> (~ x /\ y) -->
    # (~ x /\ ~ y) --> (x /\ ~ y) --> wrap around
    t = g.add_expr(
        '((x /\ y) => (~ xp /\ yp)) /\ '
        '((~ x /\ y) => (~ xp /\ ~ yp)) /\ '
        '((~ x /\ ~ y) => (xp /\ ~ yp)) /\ '
        '((x /\ ~ y) => (xp /\ yp))')
    f = g.add_expr('x /\ y')
    p = preimage(t, f, rename, qvars, g)
    assert p == g.add_expr('x /\ ~ y')
    f = g.add_expr('x /\ ~ y')
    p = preimage(t, f, rename, qvars, g)
    assert p == g.add_expr('~ x /\ ~ y')
    # backward reachable set (fixpoint iteration)
    f = g.add_expr('x /\ y')
    oldf = None
    while oldf != f:
        p = preimage(t, f, rename, qvars, g)
        oldf = f
        f = g.apply('or', p, oldf)
    assert f == 1
    # go around once
    f = g.add_expr('x /\ y')
    start = f
    for i in range(4):
        f = preimage(t, f, rename, qvars, g)
    end = f
    assert start == end
    # forall z exists x, y
    t = g.add_expr(
        '('
        ' ((x /\ y) => (zp /\ xp /\ ~ yp)) \/ '
        ' ((x /\ y) => (~ zp /\ ~ xp /\ yp))'
        ') /\ '
        '(~ (x /\ y) => False)')
    f = g.add_expr('x /\ ~ y')
    ep = preimage(t, f, rename, qvars, g)
    p = g.quantify(ep, {'zp'}, forall=True)
    assert p == -1
    f = g.add_expr('(x /\ ~ y) \/ (~ x /\ y)')
    ep = preimage(t, f, rename, qvars, g)
    p = g.quantify(ep, {'zp'}, forall=True)
    assert p == g.add_expr('x /\ y')
def test_assert_valid_ordering():
    """Variable levels must form a contiguous range starting at 0."""
    ordering = {'x': 0, 'y': 1}
    _bdd._assert_valid_ordering(ordering)
    incorrect_ordering = {'x': 0, 'y': 2}
    with nt.assert_raises(AssertionError):
        _bdd._assert_valid_ordering(incorrect_ordering)
def test_assert_refined_ordering():
    """A refined order may interleave new variables, preserving relative order."""
    ordering = {'x': 0, 'y': 1}
    new_ordering = {'z': 0, 'x': 1, 'w': 2, 'y': 3}
    _bdd._assert_isomorphic_orders(ordering, new_ordering, ordering)
def test_to_pydot():
    """Conversion to `pydot` keeps all nodes and edges."""
    def f(x):
        # pydot node names are the absolute node indices, as strings
        return str(abs(x))
    # with roots
    g = x_and_y()
    pd = _bdd.to_pydot([4, 2], g)
    r = nx.drawing.nx_pydot.from_pydot(pd)
    for u in g:
        assert f(u) in r, (u, r.nodes())
    for u in g._succ:
        i, v, w = g._succ[u]
        # terminal nodes have no successors
        if v is None or w is None:
            assert v is None, v
            assert w is None, w
            continue
        assert r.has_edge(f(u), f(v)), (u, v)
        assert r.has_edge(f(u), f(w)), (u, w)
    # no roots
    pd = _bdd.to_pydot(None, g)
    r = nx.drawing.nx_pydot.from_pydot(pd)
    assert len(r) == 8, r.nodes() # 3 hidden nodes for levels
def test_function_wrapper():
    """`autoref` Function wrappers: operators, garbage collection, properties."""
    levels = dict(x=0, y=1, z=2)
    bdd = autoref.BDD(levels)
    u = bdd.add_expr('x /\ y')
    assert u.bdd is bdd, (repr(u.bdd), repr(bdd))
    assert abs(u.node) in bdd._bdd, (u.node, bdd._bdd._succ)
    # operators
    x = bdd.add_expr('x')
    z = bdd.add_expr('z')
    v = x.implies(z)
    w = u & ~ v
    w_ = bdd.add_expr('(x /\ y) /\ ~ ((~ x) \/ z)')
    assert w_ == w, (w_, w)
    r = ~ (u | v).equiv(w)
    r_ = bdd.add_expr(
        '( (x /\ y) \/ ((~ x) \/ z) ) ^'
        '( (x /\ y) /\ ~ ((~ x) \/ z) )')
    assert r_ == r, (r_, r)
    p = bdd.add_expr('y')
    q = p.equiv(x)
    q_ = bdd.add_expr('x <=> y')
    assert q_ == q, (q_, q)
    # to_expr
    s = q.to_expr()
    assert s == 'ite(x, y, (~ y))', s
    # equality
    p_ = bdd.add_expr('y')
    assert p_ == p, p_
    # decref and collect garbage
    # (deleting a wrapper drops its reference, so nodes become garbage)
    bdd.collect_garbage()
    n = len(bdd)
    assert n > 1, bdd._bdd._ref
    del p
    del q, q_
    del r, r_
    bdd.collect_garbage()
    m = len(bdd)
    assert m > 1, bdd._bdd._ref
    assert m < n, (m, n)
    del u
    del v
    del w, w_
    del x
    del z
    bdd.collect_garbage()
    n = len(bdd)
    assert n == 2, bdd._bdd._ref
    del p_
    bdd.collect_garbage()
    n = len(bdd)
    assert n == 1, bdd._bdd._ref
    # properties
    bdd = autoref.BDD({'x': 0, 'y': 1, 'z': 2})
    u = bdd.add_expr('x \/ ~ y')
    assert u.level == 0, u.level
    assert u.var == 'x', u.var
    y = bdd.add_expr('~ y')
    assert u.low == y, (u.low.node, y.node)
    assert u.high.node == 1, u.high.node
    assert u.ref == 1, u.ref
def x_or_y():
    """Manually build a `BDD` whose node 4 is the disjunction of x and y."""
    g = two_vars_xy()
    u = 4
    # (level, low, high)
    t = (0, 3, 1)
    assert_valid_succ_pred(u, t, g)
    g._succ[u] = t
    g._pred[t] = u
    g._ref[u] = 1
    g._min_free = u + 1
    g.assert_consistent()
    return g
def x_and_y():
    """Manually build a `BDD` whose node 4 is the conjunction of x and y."""
    g = two_vars_xy()
    u = 4
    # (level, low, high); low edge is complemented (negative)
    t = (0, -1, 3)
    assert_valid_succ_pred(u, t, g)
    g._succ[u] = t
    g._pred[t] = u
    g._ref[u] = 1
    g._min_free = u + 1
    # NOTE(review): unlike `x_or_y`, no `g.assert_consistent()` here --
    # confirm whether the omission is intentional
    return g
def two_vars_xy():
    """Return a `BDD` with hand-inserted nodes 2 (= x) and 3 (= y)."""
    ordering = {'x': 0, 'y': 1}
    g = BDD(ordering)
    u = 2
    # (level, low, high): projection of `x`
    t = (0, -1, 1)
    assert_valid_succ_pred(u, t, g)
    g._succ[u] = t
    g._pred[t] = u
    g._ref[u] = 1
    u = 3
    # projection of `y`
    t = (1, -1, 1)
    assert_valid_succ_pred(u, t, g)
    g._succ[u] = t
    g._pred[t] = u
    g._ref[u] = 1
    g._min_free = u + 1
    return g
def x_and_not_y():
    """Manually build a `BDD` where node -2 is `x` and not `y`."""
    # remember:
    # 2 = ~ (x /\ ~ y)
    # -2 = x /\ ~ y
    ordering = {'x': 0, 'y': 1}
    g = BDD(ordering)
    u = 3
    v = -1
    w = 1
    t = (1, v, w)
    assert_valid_succ_pred(u, t, g)
    g._succ[u] = t
    g._pred[t] = u
    # bump successors' reference counts; node itself not externally referenced
    g._ref[abs(v)] += 1
    g._ref[abs(w)] += 1
    g._ref[abs(u)] = 0
    u = 2
    v = 1
    w = 3
    t = (0, v, w)
    assert_valid_succ_pred(u, t, g)
    g._succ[u] = t
    g._pred[t] = u
    g._ref[abs(v)] += 1
    g._ref[abs(w)] += 1
    g._ref[abs(u)] = 0
    g._min_free = 4
    return g
def assert_valid_succ_pred(u, t, g):
    """Check that node `u` with successor tuple `t` can be added to `g`."""
    # `u` must be a non-terminal node index
    assert u > 1, u
    # `t` must be a well-formed (level, low, high) triple
    assert isinstance(t, tuple), t
    assert len(t) == 3, t
    level = t[0]
    assert level >= 0, t
    # neither the node nor its successor tuple may already exist
    assert u not in g._succ, g._succ
    assert t not in g._pred, g._pred
def ref_var(i):
    """Reference graph for the BDD of a single variable at level `i`."""
    h = nx.MultiDiGraph()
    h.add_node(1, level=2)
    h.add_node(2, level=i)
    # low (False) edge is complemented, high (True) edge is regular
    for value in (False, True):
        h.add_edge(2, 1, value=value, complement=not value)
    return h
def ref_x_and_y():
    """Reference graph for the BDD of the conjunction of x and y."""
    h = nx.MultiDiGraph()
    for node, level in ((1, 2), (2, 0), (3, 1)):
        h.add_node(node, level=level)
    edges = (
        (2, 1, False, True),
        (2, 3, True, False),
        (3, 1, False, True),
        (3, 1, True, False))
    for u, v, value, complement in edges:
        h.add_edge(u, v, value=value, complement=complement)
    return h
def ref_x_or_y():
    """Reference graph for the BDD of the disjunction of x and y."""
    h = nx.MultiDiGraph()
    for node, level in ((1, 2), (2, 0), (3, 1)):
        h.add_node(node, level=level)
    edges = (
        (2, 3, False, False),
        (2, 1, True, False),
        (3, 1, False, True),
        (3, 1, True, False))
    for u, v, value, complement in edges:
        h.add_edge(u, v, value=value, complement=complement)
    return h
def compare(u, bdd, h):
    """Assert that the subgraph of `bdd` rooted at `u` is isomorphic to `h`."""
    g = _bdd.to_nx(bdd, [u])
    # nx.drawing.nx_pydot.to_pydot(g).write_pdf('g.pdf')
    post = nx.descendants(g, u)
    post.add(u)
    r = g.subgraph(post)
    # nx.drawing.nx_pydot.to_pydot(r).write_pdf('r.pdf')
    # nx.drawing.nx_pydot.to_pydot(h).write_pdf('h.pdf')
    gm = iso.GraphMatcher(r, h, node_match=_nm, edge_match=_em)
    assert gm.is_isomorphic()
    d = gm.mapping
    # the terminal node must map to the terminal node
    assert d[1] == 1
def _nm(x, y):
return x['level'] == y['level']
def _em(x, y):
return (
bool(x[0]['value']) == bool(y[0]['value']) and
bool(x[0]['complement']) == bool(y[0]['complement']))
if __name__ == '__main__':
    # quiet the parser's logger, show `dd.bdd` log output on stderr
    log = logging.getLogger('astutils')
    log.setLevel(logging.ERROR)
    log = logging.getLogger('dd.bdd')
    log.setLevel(logging.INFO)
    log.addHandler(logging.StreamHandler())
    test_dynamic_reordering()
| 26.383552 | 68 | 0.524242 | 1,099 | 0.028787 | 0 | 0 | 0 | 0 | 0 | 0 | 6,117 | 0.160227 |
922f623f7a3d41cac0130a562f5b7e6ac382fee1 | 2,639 | py | Python | mikelint/analysers/analyser.py | mike-fam/mikelint | 4e512039d11e9bbfde18a8cadcbc4608295e663f | [
"MIT"
] | 2 | 2021-04-27T01:13:37.000Z | 2021-05-21T02:28:24.000Z | mikelint/analysers/analyser.py | mike-fam/mikelint | 4e512039d11e9bbfde18a8cadcbc4608295e663f | [
"MIT"
] | 3 | 2021-05-05T10:21:25.000Z | 2021-05-30T12:51:43.000Z | mikelint/analysers/analyser.py | mike-fam/mikelint | 4e512039d11e9bbfde18a8cadcbc4608295e663f | [
"MIT"
] | null | null | null | """
Abstract analyser
"""
from functools import wraps
from inspect import getmembers, ismethod
from typing import Callable
from ..type_hints import AnalyserResults, AnalyserHelper
from ..utils import SyntaxTree, BaseViolation, ViolationResult
def register_check(error_format: str):
    """Decorator factory that registers a check method on its analyser.

    Args:
        error_format: format string used to display violations
    """
    def decorator(check_method: Callable):
        @wraps(check_method)
        def wrapper(*args, **kwargs):
            # the wrapped callable is a method: args[0] is the analyser
            analyser = args[0]
            name = check_method.__name__
            analyser.register_checker(
                name, check_method.__doc__, error_format)
            violations: list[ViolationResult] = check_method(*args, **kwargs)
            analyser.add_violations(name, violations)
        return wrapper
    return decorator
class Analyser:
    """Abstract base analyser.

    Subclasses add methods named `check_*`; `run()` discovers and
    executes all of them via introspection.
    """
    def __init__(self, sources: dict[str, AnalyserHelper]):
        """
        Constructor
        Args:
            sources: mapping from file name to its analysis helper
                (syntax tree and source lines)
        """
        self._check_results: AnalyserResults = {}
        self._sources = sources

    def register_checker(self, name: str, description: str, error_format: str):
        """
        Registers a new checker to this analyser
        Args:
            name: name of the checker, typically the method name
            description: description of this checker
            error_format: format string used to display violations
        """
        self._check_results[name] = BaseViolation(description, error_format, [])

    def get_results(self) -> AnalyserResults:
        """
        Returns results of all checkers of this analyser
        """
        return self._check_results

    def add_violations(self, checker_name: str,
                       results: list[ViolationResult]) -> None:
        """
        Adds violation results to a checker
        Args:
            checker_name: name of the checker
            results: list of violation results
        """
        self._check_results[checker_name].values.extend(results)

    def get_line(self, file_name: str, line_number: int) -> str:
        """Returns the stripped source line at the given 1-based line number"""
        return self._sources[file_name].source[line_number - 1].strip()

    def run(self):
        """
        Runs all checkers (bound methods whose name starts with `check_`)
        """
        for method_name, method in getmembers(self, predicate=ismethod):
            if not method_name.startswith("check_"):
                continue
            method()
| 32.182927 | 80 | 0.611595 | 1,704 | 0.645699 | 0 | 0 | 434 | 0.164456 | 0 | 0 | 881 | 0.333839 |
922fa3dca598bcb019c6f728700b805a4c18143d | 2,744 | py | Python | tests/test_eos_token_seq_length.py | v0lta/tfkaldi | 4772e881cc168439723c19f69a2425588f661060 | [
"MIT"
] | 57 | 2017-01-19T15:58:46.000Z | 2021-01-12T17:57:31.000Z | tests/test_eos_token_seq_length.py | v0lta/tfkaldi | 4772e881cc168439723c19f69a2425588f661060 | [
"MIT"
] | null | null | null | tests/test_eos_token_seq_length.py | v0lta/tfkaldi | 4772e881cc168439723c19f69a2425588f661060 | [
"MIT"
] | 15 | 2017-02-25T17:44:35.000Z | 2019-09-23T14:03:18.000Z | from __future__ import absolute_import, division, print_function
# Standalone experiment: derive per-sequence lengths from a batch of
# character-probability matrices, treating column 0 as the EOS token.
# NOTE(review): written against the TensorFlow 1.x graph API
# (`tf.select`, `tf.initialize_all_variables`), which later TF versions
# removed -- runs only on old TF releases.
import numpy as np
import tensorflow as tf
from IPython.core.debugger import Tracer; debug_here = Tracer();
batch_size = 5
max_it = tf.constant(6)
# five time steps of scores for a batch of 5 sequences;
# index 3 is the dominant "ordinary" symbol, index 0 plays EOS
char_mat_1 = [[0.0, 0.0, 0.0, 0.9, 0.0, 0.0],
              [0.0, 0.0, 0.0, 0.9, 0.0, 0.0],
              [0.0, 0.0, 0.0, 0.9, 0.0, 0.0],
              [0.0, 0.0, 0.0, 0.9, 0.0, 0.0],
              [0.0, 0.0, 0.0, 0.9, 0.0, 0.0]]
char_mat_2 = [[0.0, 0.0, 0.0, 1.0, 0.0, 0.0],
              [1.0, 0.0, 0.0, 0.0, 0.0, 0.0],
              [0.0, 0.0, 0.0, 1.0, 0.0, 0.0],
              [0.0, 0.0, 0.0, 1.0, 0.0, 0.0],
              [0.0, 0.0, 0.0, 1.0, 0.0, 0.0]]
char_mat_3 = [[0.0, 0.0, 0.0, 0.1, 0.0, 0.0],
              [1.0, 0.0, 0.0, 0.0, 0.0, 0.0],
              [1.0, 0.0, 0.0, 0.0, 0.0, 0.0],
              [0.0, 0.0, 0.0, 1.0, 0.0, 0.0],
              [0.0, 0.0, 0.0, 1.0, 0.0, 0.0]]
char_mat_4 = [[0.0, 0.0, 0.0, 0.1, 0.0, 0.0],
              [0.0, 0.0, 0.0, 0.0, 1.0, 0.0],
              [1.0, 0.0, 0.0, 0.0, 0.0, 0.0],
              [0.0, 0.0, 0.0, 1.0, 0.0, 0.0],
              [1.0, 0.0, 0.0, 0.0, 0.0, 0.0]]
char_mat_5 = [[1.0, 0.0, 0.0, 1.0, 0.0, 0.0],
              [1.0, 0.0, 0.0, 0.0, 0.0, 0.0],
              [1.0, 0.0, 0.0, 0.0, 0.0, 0.0],
              [1.0, 0.0, 0.0, 1.0, 0.0, 0.0],
              [1.0, 0.0, 0.0, 0.0, 0.0, 0.0]]
#expected output: [5, 2, 4, 5, 4]
char_lst = [char_mat_1, char_mat_2, char_mat_3,
            char_mat_4, char_mat_5]
np_char_tensor = np.array(char_lst)
char_prob = tf.constant(np.array(np_char_tensor), tf.float64)
# reorder to (batch, time, vocabulary)
char_prob = tf.transpose(char_prob, [1, 0, 2])
print(tf.Tensor.get_shape(char_prob))
sequence_length_lst = [1, 1, 1, 1, 1]
sequence_length = tf.constant(sequence_length_lst)
done_mask = tf.cast(tf.zeros(batch_size), tf.bool)
for time in range(0, 5):
    print(time)
    current_date = char_prob[:, time, :]
    max_vals = tf.argmax(current_date, 1)
    # a sequence is "done" once its argmax hits the EOS index 0
    mask = tf.equal(max_vals, tf.constant(0, tf.int64))
    current_mask = tf.logical_and(mask, tf.logical_not(done_mask))
    done_mask = tf.logical_or(mask, done_mask)
    time_vec = tf.ones(batch_size, tf.int32)*(time+2)
    # keep the recorded length for finished sequences, extend the others
    sequence_length = tf.select(done_mask, sequence_length, time_vec, name=None)
    not_done_no = tf.reduce_sum(tf.cast(tf.logical_not(done_mask), tf.int32))
    all_eos = tf.equal(not_done_no, tf.constant(0))
    # stop when every sequence hit EOS or the iteration cap was reached
    stop_loop = tf.logical_or(all_eos, tf.greater(time, max_it))
    keep_working = tf.logical_not(stop_loop)
sess = tf.Session()
with sess.as_default():
    tf.initialize_all_variables().run()
    #print(char_prob.eval())
    print(max_vals.eval())
    print(mask.eval())
    print(done_mask.eval())
    print(sequence_length.eval())
    print(keep_working.eval())
923063bef1fb7156669c1bb0bb14d63ac5b20fc0 | 2,796 | py | Python | presentation/ham10kplots.py | iwan933/mlmi-federated-learning | e148664304dd7fbbc2cc2a6a34567533748c1720 | [
"MIT"
] | 2 | 2021-09-07T12:52:44.000Z | 2021-09-30T09:35:53.000Z | presentation/ham10kplots.py | iwan933/mlmi-federated-learning | e148664304dd7fbbc2cc2a6a34567533748c1720 | [
"MIT"
] | null | null | null | presentation/ham10kplots.py | iwan933/mlmi-federated-learning | e148664304dd7fbbc2cc2a6a34567533748c1720 | [
"MIT"
] | 1 | 2021-03-02T12:35:28.000Z | 2021-03-02T12:35:28.000Z | from typing import List, Tuple
import seaborn as sns
import matplotlib
matplotlib.use('TkAgg')
from matplotlib import pyplot as plt
import pandas as pd
import numpy as np
"""
Plots of tensorboard results with adjusted theming for presentation
"""
# index -> HAM10000 lesion-class abbreviation
label_dict = {0: 'akiec', 1: 'bcc', 2: 'bkl', 3: 'df', 4: 'mel', 5: 'nv', 6: 'vasc'}
sns.set_context(rc={'patch.linewidth': 0.0})
# presentation theme colors: background, primary fill, secondary accents
bg_color = '#DAEDEF'
first_color = '#ADC9C4'
second_color = '#7D918E'
def set_plot_theme(ax):
    """Apply the presentation theme (colors, spines, grid) to `ax`."""
    ax.set_facecolor(bg_color)
    # hide all spines except the bottom one
    for side in ('left', 'right', 'top'):
        ax.spines[side].set_visible(False)
    ax.spines['bottom'].set_color(second_color)
    for axis_label in (ax.xaxis.label, ax.yaxis.label):
        axis_label.set_color(second_color)
    ax.yaxis.grid(color=second_color, linewidth=.5, zorder=0)
    ax.tick_params(axis='x', colors=second_color)
    ax.tick_params(axis='y', colors=second_color, width=.5)
def plot_label_counts(label_counts):
    """Bar plot of the per-class sample counts, in the presentation theme."""
    class_names = [label_dict[i] for i in range(7)]
    counts = pd.Series(label_counts, index=class_names)
    fig, ax = plt.subplots(nrows=1, ncols=1, facecolor=bg_color)
    ax.set_title('', color=second_color)
    sns.barplot(
        x=counts.index, y=counts, ax=ax,
        ci=None, color=first_color, zorder=3)
    set_plot_theme(ax)
    fig.show()
def plot_confusion_matrix(confusion_matrix, title):
    """Heatmap of the column-normalized confusion matrix."""
    # normalize each column to fractions
    pct_matrix = confusion_matrix / np.sum(confusion_matrix, axis=0)
    class_names = [label_dict[i] for i in range(7)]
    df_cm = pd.DataFrame(pct_matrix, index=class_names, columns=class_names)
    # draw heatmap
    fig, ax = plt.subplots(nrows=1, ncols=1, facecolor=bg_color)
    cmap = sns.dark_palette("#E3F8FA", as_cmap=True)
    sns.heatmap(df_cm, ax=ax, annot=True, fmt=".2f", cmap=cmap)
    ax.set_title(title, color=second_color)
    # unlike `set_plot_theme`, all four spines stay visible here
    for side in ('left', 'right', 'top', 'bottom'):
        ax.spines[side].set_color(second_color)
        ax.spines[side].set_visible(True)
    ax.xaxis.label.set_color(second_color)
    ax.yaxis.label.set_color(second_color)
    ax.tick_params(axis='x', colors=second_color, width=1.0)
    ax.tick_params(axis='y', colors=second_color, width=.5)
    fig.show()
def plot_performance_graphs(data: List[Tuple[str, str, str, pd.Series]]):
    """Plot labelled series given as (title, color, linestyle, values) tuples."""
    fig, ax = plt.subplots(nrows=1, ncols=1, facecolor=bg_color)
    ax.set_ylim([0.0, 1.0])
    set_plot_theme(ax)
    for title, color, linestyle, series in data:
        ax.plot(
            series.index, series,
            label=title, color=color, linestyle=linestyle)
    ax.legend()
    fig.show()
| 36.311688 | 86 | 0.690629 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 319 | 0.114092 |
9234049232e94a3055e5a3b996213c1ab5cf8d64 | 9,937 | py | Python | mdn_base.py | Woodenonez/multimodal_motion_prediction | e1c799626a2b99780afe63b64e29042cb8043dd3 | [
"MIT"
] | 1 | 2022-03-21T03:28:19.000Z | 2022-03-21T03:28:19.000Z | mdn_base.py | Woodenonez/CASE2021_multimodal_motion_prediction | 6817333fa7b9f2553c1da70a3a82414ed54dfb42 | [
"MIT"
] | null | null | null | mdn_base.py | Woodenonez/CASE2021_multimodal_motion_prediction | 6817333fa7b9f2553c1da70a3a82414ed54dfb42 | [
"MIT"
] | null | null | null | """
A module for a mixture density network layer
(_Mixture Density Networks_ by Bishop, 1994.)
"""
import sys
import torch
import torch.tensor as ts
import torch.nn as nn
import torch.optim as optim
from torch.distributions import Categorical
import math
# Draw distributions
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.patches as patches
from mpl_toolkits.mplot3d import Axes3D
from matplotlib.colors import LinearSegmentedColormap
'''
Process:
Input:x -> Some model (body) -> Characteristic vector:z (feature)
-> MDN (head) -> Probabilistic vector:p (output)
'''
class MDN_Module(nn.Module):
    """
    A Mixture Density Network Module
    Symbols:
        B - Batch size
        G - Number of Gaussian components
        D - Input's dimensions
        F - Feature's dimensions
        C - Output's dimensions (Gaussian distribution's dimensions)
    Arguments:
        dim_fea (int): the feature's dimensions
        dim_prob (int): the output's dimensions
        num_gaus (int): the number of Gaussian mixture components
    Input:
        minibatch (BxF)
    Output:
        (alp, mu, sigma) (BxG, BxGxC, BxGxC)
        alp - (alpha) Component's weight
        mu - Mean value
        sigma - Standard deviation
    """
    def __init__(self, dim_fea, dim_prob, num_gaus):
        super(MDN_Module, self).__init__()
        self.dim_fea = dim_fea
        self.dim_prob = dim_prob
        self.num_gaus = num_gaus
        # Mixture weights: linear head followed by a row-wise softmax so
        # each batch element's weights sum to 1.
        self.layer_alp = nn.Sequential(
            nn.Linear(dim_fea, num_gaus),
            nn.Softmax(dim=1) # If 1, go along each row
        )
        # Means: one C-dim mean per component, produced flat and reshaped
        # in forward().
        self.layer_mu = nn.Linear(dim_fea, dim_prob*num_gaus)
        # Standard deviations: ReExp_Layer keeps the outputs positive.
        self.layer_sigma = nn.Sequential(
            nn.Linear(dim_fea, dim_prob*num_gaus),
            ReExp_Layer()
        )
    def forward(self, batch):
        """Map features (BxF) to mixture parameters (BxG, BxGxC, BxGxC)."""
        alp = self.layer_alp(batch)
        mu = self.layer_mu(batch)
        mu = mu.view(-1, self.num_gaus, self.dim_prob)
        sigma = self.layer_sigma(batch)
        sigma = sigma.view(-1, self.num_gaus, self.dim_prob)
        return alp, mu, sigma
class ReExp_Layer(nn.Module):
    """
    A modified exponential activation: y = ELU(x) + 1.

    The negative side decays like exp(x) (always > 0 in exact arithmetic);
    the positive side is linear (y = x + 1).  This makes it suitable for
    producing standard deviations, which must never be negative.
    """
    def __init__(self):
        super().__init__()
        # Build the ELU once instead of re-allocating a module on every
        # forward pass.  ELU: max(0,x) + min(0, α∗(exp(x)−1))
        self.elu = nn.ELU()
    def forward(self, x):
        # Shift by +1 so the ELU's infimum (-1 as x -> -inf) maps to 0,
        # which assures no negative sigma is produced.
        return torch.add(self.elu(x), 1)
class classic_MDN_Module(nn.Module):
    """
    Classic MDN head (Bishop, 1994): sigma is obtained as exp(linear(x)),
    unlike MDN_Module which uses the shifted-ELU ReExp_Layer.

    Input:  feature batch (B x dim_fea)
    Output: (alp, mu, sigma) with shapes (B x G), (B x G x C), (B x G x C)
    """
    def __init__(self, dim_fea, dim_prob, num_gaus):
        super().__init__()
        self.dim_fea = dim_fea
        self.dim_prob = dim_prob
        self.num_gaus = num_gaus
        # Mixture weights: linear head + row-wise softmax (sums to 1 per sample).
        self.layer_alp = nn.Sequential(
            nn.Linear(dim_fea, num_gaus),
            nn.Softmax(dim=1),
        )
        # Flat means, reshaped to (B, G, C) in forward().
        self.layer_mu = nn.Linear(dim_fea, dim_prob * num_gaus)
        # Log-sigma head; kept inside a Sequential so state_dict keys match
        # checkpoints saved with the original layout ('layer_sigma.0.*').
        self.layer_sigma = nn.Sequential(
            nn.Linear(dim_fea, dim_prob * num_gaus)
        )
    def forward(self, batch):
        """Map features to mixture parameters."""
        alp = self.layer_alp(batch)
        mu = self.layer_mu(batch).view(-1, self.num_gaus, self.dim_prob)
        log_sigma = self.layer_sigma(batch)
        # Exponentiate so the standard deviation is strictly positive.
        sigma = torch.exp(log_sigma).view(-1, self.num_gaus, self.dim_prob)
        return alp, mu, sigma
def cal_GauProb(mu, sigma, x):
    """
    Return the per-component Gaussian density of a batch of points.
    Arguments:
        mu (BxGxC)    - The means of the Gaussians.
        sigma (BxGxC) - The standard deviations of the Gaussians.
        x (BxC)       - A batch of data points.
    Return:
        probabilities (BxG): density of each point under each component,
        treating the C output dimensions as independent (product over C).
    """
    # Broadcast the points against every component: (B,C) -> (B,1,C) -> (B,G,C)
    pts = x.unsqueeze(1).expand_as(mu)
    z = (pts - mu) / sigma  # standardised residuals
    density = torch.rsqrt(torch.tensor(2 * math.pi)) * torch.exp(-0.5 * z ** 2) / sigma
    # Joint density over the C independent output dimensions.
    return torch.prod(density, dim=2)
def cal_multiGauProb(alp, mu, sigma, x):
    """
    Return the mixture density of a batch of points.
    Arguments:
        alp (BxG)     - (alpha) component weights.
        mu (BxGxC)    - Component means.
        sigma (BxGxC) - Component standard deviations.
        x (BxC)       - Data points.
    Return:
        (B,) tensor: weighted sum over components of the per-component
        densities from cal_GauProb.
    """
    weighted = alp * cal_GauProb(mu, sigma, x)  # (B, G)
    # Overall probability for each batch element (sum over components).
    return torch.sum(weighted, dim=1)
def loss_NLL(alp, mu, sigma, data):
    """
    Negative log-likelihood of the data under the mixture of Gaussians.

    Returns the mean over the batch of -log p(data | alp, mu, sigma).
    """
    likelihood = cal_multiGauProb(alp, mu, sigma, data)
    return torch.mean(-torch.log(likelihood))
def loss_MaDist(alp, mu, sigma, data): # Mahalanobis distance
    '''
    Per-component Mahalanobis-style distance of `data`, plus its
    weight-averaged value.  Hard-coded for C == 2 (uses sigma[i,0/1]).
    mu (GxC) - The means of the Gaussians.
    sigma (GxC) - The standard deviation of the Gaussians.
    Returns:
        (distances (G,), weighted mean distance) as tensors.
    NOTE(review): S_inv is built from 1/sigma, i.e. sigma is treated
    directly as the covariance; a true Mahalanobis distance would use
    1/sigma**2 when sigma is a standard deviation — confirm intent.
    '''
    md = []
    alp = alp/sum(alp) #normalization
    for i in range(mu.shape[0]): # do through every component
        mu0 = (data-mu[i,:]).unsqueeze(0) # (x-mu)
        S_inv = ts([[1/sigma[i,0],0],[0,1/sigma[i,1]]]) # S^-1 inversed covariance matrix
        md0 = torch.sqrt( S_inv[0,0]*mu0[0,0]**2 + S_inv[1,1]*mu0[0,1]**2 )
        md.append(md0)
    return ts(md), sum(ts(md)*alp)
def loss_EMD(): pass  # TODO: Earth Mover's Distance loss — unimplemented stub
def sample(alp, mu, sigma):
    """
    Draw samples from a MoG; return one sample for each batch element.

    Arguments:
        alp (BxG)     - Mixture weights (rows should sum to 1).
        mu (BxGxC)    - Component means.
        sigma (BxGxC) - Component standard deviations.
    Return:
        (BxC) tensor: for each batch element, a draw from the Gaussian
        component selected by a categorical draw over `alp`.
    Raises:
        Exception: if the weights are invalid (e.g. NaN after the model
        collapses) and Categorical cannot sample from them.
    """
    categorical = Categorical(alp) # aka. generalized Bernoulli
    try:
        picks = list(categorical.sample().data) # one component index per batch
    except (RuntimeError, ValueError) as exc:
        # A bare `except:` here would also swallow KeyboardInterrupt/SystemExit;
        # only invalid-probability failures indicate model collapse.
        raise Exception('Ooooops! Model collapse!') from exc
    # Standard-normal noise (B x C), then scale/shift by the chosen component.
    draws = sigma.new_empty(sigma.size(0), sigma.size(2)).normal_()
    for i, idx in enumerate(picks):
        draws[i] = draws[i].mul(sigma[i, idx]).add(mu[i, idx])
    return draws
def take_mainCompo(alp, mu, sigma, main=3):
    """
    Keep only the `main` highest-weighted mixture components.

    Arguments:
        alp (1xG), mu (1xGxC), sigma (1xGxC) - batch-size-1 mixture params.
        main (int) - number of components to keep (ordered by weight, desc).
    Return:
        (1xmain, 1xmainxC, 1xmainxC) tensors for the retained components.

    Fix: the previous implementation wrote results into slices of the
    inputs (`alp[:main]` etc.), which are views — the writes corrupted
    weights that still had to be read on later iterations AND mutated the
    caller's tensors.  Indexing with the sorted indices allocates fresh
    tensors and leaves the inputs untouched.
    """
    alp = alp[0, :]
    mu = mu[0, :, :]
    sigma = sigma[0, :, :]
    # Sort weights descending and gather the top-`main` components.
    _, indices = torch.sort(alp, descending=True)
    top = indices[:main]
    main_alp = alp[top]
    main_mu = mu[top, :]
    main_sigma = sigma[top, :]
    return main_alp.unsqueeze(0), main_mu.unsqueeze(0), main_sigma.unsqueeze(0) # insert the "batch" dimension
def take_goodCompo(alp, mu, sigma, thre=0.1):
    """
    Drop mixture components with negligible weight.

    Keeps every component whose weight exceeds `thre` times the largest
    weight.  Inputs are batch-size-1 (1xG, 1xGxC, 1xGxC); the batch
    dimension is re-inserted on return.
    """
    weights = alp[0, :]
    means = mu[0, :, :]
    stds = sigma[0, :, :]
    cutoff = thre * max(weights)   # threshold relative to the dominant weight
    keep = weights > cutoff        # boolean mask over components
    return (
        weights[keep].unsqueeze(0),
        means[keep, :].unsqueeze(0),
        stds[keep, :].unsqueeze(0),
    )  # insert the "batch" dimension
def sigma_limit(mu, sigma, nsigma=3):
    """
    Compute x/y plotting limits covering all components' mu ± nsigma*sigma.

    Arguments:
        mu (1xGxC), sigma (1xGxC) with C >= 2 (dim 0 = x, dim 1 = y).
    Return:
        ([x_min, x_max], [y_min, y_max]) as 0-dim tensors.

    NOTE(review): the abs() comparisons below replace a bound with the
    negated minimum/maximum absolute value whenever the signed extreme is
    not the absolute extreme — the intent (clamping limits toward the
    negative side?) is unclear; confirm before relying on these limits.
    """
    # nsigma: 1 -> 0.6827  2 -> 0.9545  3 -> 0.9974
    x_scope = [(mu-nsigma*sigma)[0,:,0], (mu+nsigma*sigma)[0,:,0]]
    y_scope = [(mu-nsigma*sigma)[0,:,1], (mu+nsigma*sigma)[0,:,1]]
    x_min = torch.min(x_scope[0])
    x_max = torch.max(x_scope[1])
    y_min = torch.min(y_scope[0])
    y_max = torch.max(y_scope[1])
    if x_min != torch.min(abs(x_scope[0])):
        x_min = -torch.min(abs(x_scope[0]))
    if x_max != torch.max(abs(x_scope[1])):
        x_max = -torch.max(abs(x_scope[1]))
    if y_min != torch.min(abs(y_scope[0])):
        y_min = -torch.min(abs(y_scope[0]))
    if y_max != torch.max(abs(y_scope[1])):
        y_max = -torch.max(abs(y_scope[1]))
    return [x_min, x_max], [y_min, y_max]
def cal_multiGauProbDistr(xx, yy, alp, mu, sigma):
    """
    Evaluate the mixture density on a 2-D meshgrid.

    Arguments:
        xx, yy - numpy meshgrid arrays of equal shape.
        alp (1xG), mu (1xGxC), sigma (1xGxC) - mixture parameters.
    Return:
        numpy array shaped like xx with the density at each grid point;
        values below max/10 are zeroed to de-clutter contour plots.
    """
    # Flatten the grid into an (N, 2) array of query points.
    xy = np.concatenate((xx.reshape(-1,1), yy.reshape(-1,1)), axis=1).astype(np.float32)
    p = np.array([])
    # One forward evaluation per grid point (O(N) calls; could be batched).
    for i in range(xy.shape[0]):
        p = np.append( p, cal_multiGauProb(alp, mu, sigma, x=ts(xy[i,:][np.newaxis,:])).detach().numpy() )
    # Suppress low-density regions for cleaner visualisation.
    p[np.where(p<max(p)/10)] = 0
    return p.reshape(xx.shape)
def draw_probDistribution(ax, alp, mu, sigma, main=3, nsigma=3, step=0.5, colorbar=False, toplot=True):
    '''
    Draw (and/or return) the mixture density as a filled contour plot.
    Arguments:
        ax - Axis
        alp (BxG) - (alpha) Component's weight.
        mu (BxGxC) - The means of the Gaussians.
        sigma (BxGxC) - The standard deviation of the Gaussians.
        main - keep only the `main` largest components (None = keep all).
        nsigma, step - grid extent (mu ± nsigma*sigma) and resolution.
        toplot - if False, only compute and return (xx, yy, pp).
    Return:
        (xx, yy, pp): the meshgrid and the density evaluated on it.
    '''
    if main is not None:
        alp, mu, sigma = take_mainCompo(alp, mu, sigma, main=main)
    # ================= Register Colormap ================START
    # Build a rainbow colormap whose alpha ramps from transparent to opaque,
    # with the lowest ~10% fully transparent so empty regions stay invisible.
    # NOTE(review): plt.register_cmap is deprecated in recent matplotlib and
    # may raise if 'rainbow_alpha' is already registered — confirm version.
    ncolors = 256
    color_array = plt.get_cmap('gist_rainbow')(range(ncolors)) # get colormap
    color_array[:,-1] = np.linspace(0,1,ncolors) # change alpha values
    color_array[:,-1][:25] = 0
    map_object = LinearSegmentedColormap.from_list(name='rainbow_alpha',colors=color_array) # create a colormap object
    plt.register_cmap(cmap=map_object) # register this new colormap with matplotlib
    # ================= Register Colormap ==================END
    xlim, ylim = sigma_limit(mu, sigma, nsigma=nsigma)
    x = np.arange(xlim[0].detach().numpy(), xlim[1].detach().numpy(), step=step)
    y = np.arange(ylim[0].detach().numpy(), ylim[1].detach().numpy(), step=step)
    xx, yy = np.meshgrid(x, y)
    pp = cal_multiGauProbDistr(xx, yy, alp, mu, sigma)
    if toplot:
        cntr = ax.contourf(xx, yy, pp, cmap="rainbow_alpha")
        if colorbar:
            plt.colorbar(cntr, ax=ax)
    return xx,yy,pp
def draw_GauEllipse(ax, mu, sigma, fc='b', nsigma=3, extend=False, label=None):
    '''
    Draw one nsigma-ellipse per Gaussian component onto `ax`.
    mu (GxC) - The means of the Gaussians.
    sigma (GxC) - The standard deviation of the Gaussians.
    extend - if True, pad each ellipse's axes by 8 units (plot units;
        presumably a safety margin — TODO confirm).
    label - legend label; only attached to the first ellipse so the
        legend shows a single entry for the whole mixture.
    '''
    for i in range(mu.shape[0]):
        if i != 0:
            label=None
        if extend:
            patch = patches.Ellipse(mu[i,:], nsigma*sigma[i,0]+8, nsigma*sigma[i,1]+8, fc=fc, label=label)
            ax.add_patch(patch)
        else:
            patch = patches.Ellipse(mu[i,:], nsigma*sigma[i,0], nsigma*sigma[i,1], fc=fc, label=label)
ax.add_patch(patch) | 36.003623 | 118 | 0.607427 | 2,749 | 0.276504 | 0 | 0 | 0 | 0 | 0 | 0 | 3,249 | 0.326795 |
9236f54ad779bb50859cbb8fd78dee86260f72c6 | 29,649 | py | Python | djautotask/tests/test_commands.py | KerkhoffTechnologies/django-autotask | 458ff0bf65e3ca85fb954f907f05c4c614904afc | [
"MIT"
] | 4 | 2019-04-18T17:12:07.000Z | 2021-12-30T21:42:10.000Z | djautotask/tests/test_commands.py | KerkhoffTechnologies/django-autotask | 458ff0bf65e3ca85fb954f907f05c4c614904afc | [
"MIT"
] | 32 | 2018-05-30T20:31:22.000Z | 2022-02-17T21:36:50.000Z | djautotask/tests/test_commands.py | KerkhoffTechnologies/django-autotask | 458ff0bf65e3ca85fb954f907f05c4c614904afc | [
"MIT"
] | 5 | 2018-05-25T23:33:45.000Z | 2022-01-04T22:01:46.000Z | import io
from atws.wrapper import Wrapper
from django.core.management import call_command
from django.test import TestCase
from djautotask.tests import fixtures, mocks, fixture_utils
from djautotask import models
def sync_summary(class_name, created_count, updated_count=0):
    """Build the expected one-line summary printed by a partial sync."""
    template = '{} Sync Summary - Created: {}, Updated: {}, Skipped: 0'
    return template.format(class_name, created_count, updated_count)
def full_sync_summary(class_name, deleted_count, updated_count=0):
    """Build the expected one-line summary printed by a full sync."""
    template = (
        '{} Sync Summary - Created: 0, Updated: {}, Skipped: 0, '
        'Deleted: {}'
    )
    return template.format(class_name, updated_count, deleted_count)
def slug_to_title(slug):
    """Convert a snake_case slug to a space-separated Title Case name."""
    titled = slug.title()
    return titled.replace('_', ' ')
def run_sync_command(full_option=False, command_name=None):
    """Invoke the ``atsync`` management command, capturing its stdout.

    :param full_option: append ``--full`` to request a full sync.
    :param command_name: optional single-entity sub-command (e.g. 'ticket').
    :return: io.StringIO holding everything the command printed.
    """
    output = io.StringIO()
    cmd_args = ['atsync']
    if command_name:
        cmd_args.append(command_name)
    if full_option:
        cmd_args.append('--full')
    call_command(*cmd_args, stdout=output)
    return output
class AbstractBaseSyncRestTest(object):
    """Mixin for sync-command tests that mock the Autotask REST API.

    Subclasses set ``args = (mock_call, return_value, at_object)``.
    """
    def _test_sync(self, mock_call, return_value, at_object,
                   full_option=False):
        """Install the API mock, run ``atsync <at_object>``, return stdout."""
        mock_call(return_value)
        out = io.StringIO()
        args = ['atsync', at_object]
        if full_option:
            args.append('--full')
        call_command(*args, stdout=out)
        return out
    def _title_for_at_object(self, at_object):
        # e.g. 'ticket_note' -> 'Ticket Note' (matches the summary lines).
        return at_object.title().replace('_', ' ')
    def test_sync(self):
        """A partial sync mentions the entity's title in its output."""
        out = self._test_sync(*self.args)
        obj_title = self._title_for_at_object(self.args[-1])
        self.assertIn(obj_title, out.getvalue().strip())
    def test_full_sync(self):
        """A full sync against an empty API response deletes everything."""
        self.test_sync()
        mock_call, return_value, at_object = self.args
        args = [
            mock_call,
            {
                "items": [],
                "pageDetails": fixtures.API_PAGE_DETAILS
            },
            at_object
        ]
        out = self._test_sync(*args, full_option=True)
        obj_label = self._title_for_at_object(at_object)
        msg_tmpl = '{} Sync Summary - Created: 0, Updated: 0, Skipped: 0, ' \
                   'Deleted: {}'
        msg = msg_tmpl.format(obj_label, len(return_value.get('items')))
        self.assertEqual(msg, out.getvalue().strip())
class PicklistSyncTest(AbstractBaseSyncRestTest):
    """REST sync-test mixin for picklist entities (``fields`` payloads)."""
    def test_full_sync(self):
        """A full sync against an empty ``fields`` payload deletes all values."""
        self.test_sync()
        mock_call, return_value, at_object = self.args
        args = [
            mock_call,
            {
                "fields": []
            },
            at_object
        ]
        out = self._test_sync(*args, full_option=True)
        obj_label = self._title_for_at_object(at_object)
        msg_tmpl = '{} Sync Summary - Created: 0, Updated: 0, Skipped: 0, ' \
                   'Deleted: {}'
        # Deleted count equals the picklist values of the first field fixture.
        msg = msg_tmpl.format(
            obj_label, len(return_value.get('fields')[0].get('picklistValues'))
        )
        self.assertEqual(msg, out.getvalue().strip())
class TestSyncContactCommand(AbstractBaseSyncRestTest, TestCase):
    """Sync 'contact' records via the mocked REST API."""
    args = (
        mocks.service_api_get_contacts_call,
        fixtures.API_CONTACT,
        'contact',
    )
    def setUp(self):
        super().setUp()
        fixture_utils.init_contacts()
class AbstractBaseSyncTest(object):
    """Mixin for sync-command tests that mock the legacy SOAP API.

    Subclasses set ``args = (fixture_list, at_object)``.
    """
    def setUp(self):
        mocks.init_api_connection(Wrapper)
        mocks.init_api_rest_connection()
    def _title_for_at_object(self, at_object):
        # e.g. 'ticket_note' -> 'Ticket Note' (matches the summary lines).
        return at_object.title().replace('_', ' ')
    def get_api_mock(self):
        """Return the mock installer for the generic SOAP query call."""
        return mocks.api_query_call
    def get_return_value(self, at_object, fixture_list):
        """Build SOAP-style objects (entity name without underscores)."""
        return fixture_utils.generate_objects(
            at_object.title().replace('_', ''), fixture_list)
    def init_sync_command(self, fixture_list, at_object, full_option=False):
        """Install the API mock, run the sync command, return its stdout."""
        return_value = self.get_return_value(at_object, fixture_list)
        api_call = self.get_api_mock()
        api_call(return_value)
        output = run_sync_command(full_option, at_object)
        return output
    # NOTE(review): leading underscore means this is NOT collected as a
    # test, unlike AbstractBaseSyncRestTest.test_sync — confirm intent.
    def _test_sync(self):
        out = self.init_sync_command(*self.args)
        obj_title = self._title_for_at_object(self.args[-1])
        self.assertIn(obj_title, out.getvalue().strip())
    def test_full_sync(self):
        """A full sync against an empty fixture list deletes everything."""
        out = self.init_sync_command(*self.args)
        fixture_list, at_object = self.args
        args = [
            [],
            at_object,
        ]
        out = self.init_sync_command(*args, full_option=True)
        obj_label = self._title_for_at_object(at_object)
        msg_tmpl = '{} Sync Summary - Created: 0, Updated: 0, Skipped: 0, ' \
                   'Deleted: {}'
        value_count = len(fixture_list)
        msg = msg_tmpl.format(obj_label, value_count)
        self.assertEqual(msg, out.getvalue().strip())
class AbstractPicklistSyncCommandTest(AbstractBaseSyncTest):
    """SOAP sync-test mixin for picklists; subclasses set ``field_name``."""
    def get_return_value(self, at_object, fixture_list):
        """Build picklist field-info objects instead of entity objects."""
        field_info = fixture_utils.generate_picklist_objects(
            self.field_name, fixture_list)
        return field_info
    def get_api_mock(self):
        """Return the mock installer for the SOAP picklist call."""
        return mocks.api_picklist_call
class TestSyncTicketCommand(AbstractBaseSyncRestTest, TestCase):
    """Sync 'ticket' records via the mocked REST API."""
    args = (
        mocks.service_api_get_tickets_call,
        fixtures.API_TICKET,
        'ticket',
    )
    def setUp(self):
        super().setUp()
        fixture_utils.init_tickets()
class TestSyncStatusCommand(PicklistSyncTest, TestCase):
    """Sync the ticket 'status' picklist."""
    args = (
        mocks.service_api_get_ticket_picklist_call,
        fixtures.API_STATUS_FIELD,
        'status',
    )
    def setUp(self):
        super().setUp()
        fixture_utils.init_statuses()
class TestSyncPriorityCommand(PicklistSyncTest, TestCase):
    """Sync the ticket 'priority' picklist."""
    args = (
        mocks.service_api_get_ticket_picklist_call,
        fixtures.API_PRIORITY_FIELD,
        'priority',
    )
    def setUp(self):
        super().setUp()
        fixture_utils.init_priorities()
class TestSyncQueueCommand(PicklistSyncTest, TestCase):
    """Sync the ticket 'queue' picklist."""
    args = (
        mocks.service_api_get_ticket_picklist_call,
        fixtures.API_QUEUE_FIELD,
        'queue',
    )
    def setUp(self):
        super().setUp()
        fixture_utils.init_queues()
class TestSyncProjectStatusCommand(PicklistSyncTest, TestCase):
    """Sync the 'project_status' picklist."""
    args = (
        mocks.service_api_get_project_picklist_call,
        fixtures.API_PROJECT_STATUS_FIELD,
        'project_status',
    )
    def setUp(self):
        super().setUp()
        fixture_utils.init_project_statuses()
class TestSyncProjectTypeCommand(PicklistSyncTest, TestCase):
    """Sync the 'project_type' picklist."""
    args = (
        mocks.service_api_get_project_picklist_call,
        fixtures.API_PROJECT_TYPE_FIELD,
        'project_type',
    )
    def setUp(self):
        super().setUp()
        fixture_utils.init_project_types()
class TestSyncSourceCommand(PicklistSyncTest, TestCase):
    """Sync the ticket 'source' picklist."""
    args = (
        mocks.service_api_get_ticket_picklist_call,
        fixtures.API_SOURCE_FIELD,
        'source',
    )
    def setUp(self):
        super().setUp()
        fixture_utils.init_sources()
class TestSyncIssueTypeCommand(PicklistSyncTest, TestCase):
    """Sync the ticket 'issue_type' picklist."""
    args = (
        mocks.service_api_get_ticket_picklist_call,
        fixtures.API_ISSUE_TYPE_FIELD,
        'issue_type',
    )
    def setUp(self):
        super().setUp()
        fixture_utils.init_issue_types()
class TestSyncSubIssueTypeCommand(PicklistSyncTest, TestCase):
    """Sync the 'sub_issue_type' picklist (requires parent issue types)."""
    args = (
        mocks.service_api_get_ticket_picklist_call,
        fixtures.API_SUB_ISSUE_TYPE_FIELD,
        'sub_issue_type',
    )
    def setUp(self):
        super().setUp()
        fixture_utils.init_issue_types()
        fixture_utils.init_sub_issue_types()
class TestSyncTicketTypeCommand(PicklistSyncTest, TestCase):
    """Sync the 'ticket_type' picklist."""
    args = (
        mocks.service_api_get_ticket_picklist_call,
        fixtures.API_TICKET_TYPE_FIELD,
        'ticket_type',
    )
    def setUp(self):
        super().setUp()
        fixture_utils.init_ticket_types()
class TestSyncAccountTypeCommand(PicklistSyncTest, TestCase):
    """Sync the 'account_type' picklist."""
    args = (
        mocks.service_api_get_account_types_call,
        fixtures.API_ACCOUNT_TYPE_FIELD,
        'account_type',
    )
    def setUp(self):
        super().setUp()
        fixture_utils.init_account_types()
class TestSyncServiceCallStatusCommand(PicklistSyncTest,
                                       TestCase):
    """Sync the 'service_call_status' picklist."""
    args = (
        mocks.service_api_get_service_call_statuses_call,
        fixtures.API_SERVICE_CALL_STATUS_FIELD,
        'service_call_status',
    )
    def setUp(self):
        super().setUp()
        fixture_utils.init_service_call_statuses()
class TestSyncDisplayColorCommand(PicklistSyncTest, TestCase):
    """Sync the ticket-category 'display_color' picklist."""
    args = (
        mocks.service_api_get_ticket_category_picklist_call,
        fixtures.API_DISPLAY_COLOR_FIELD,
        'display_color',
    )
    def setUp(self):
        super().setUp()
        fixture_utils.init_display_colors()
class TestSyncLicenseTypeCommand(PicklistSyncTest, TestCase):
    """Sync the 'license_type' picklist."""
    args = (
        mocks.service_api_get_license_types_call,
        fixtures.API_LICENSE_TYPE_FIELD,
        'license_type',
    )
    def setUp(self):
        super().setUp()
        fixture_utils.init_license_types()
class TestSyncTaskTypeLinkCommand(PicklistSyncTest, TestCase):
    """Sync the 'task_type_link' picklist."""
    args = (
        mocks.service_api_get_task_type_links_call,
        fixtures.API_TASK_TYPE_LINK_FIELD,
        'task_type_link',
    )
    def setUp(self):
        super().setUp()
        fixture_utils.init_task_type_links()
class TestSyncUseTypeCommand(PicklistSyncTest, TestCase):
    """Sync the 'use_type' picklist."""
    args = (
        mocks.service_api_get_use_types_call,
        fixtures.API_USE_TYPE_FIELD,
        'use_type',
    )
    def setUp(self):
        super().setUp()
        fixture_utils.init_use_types()
class TestSyncTicketCategoryCommand(AbstractBaseSyncRestTest, TestCase):
    """Sync 'ticket_category' records via the mocked REST API."""
    args = (
        mocks.service_api_get_ticket_categories_call,
        fixtures.API_TICKET_CATEGORY,
        'ticket_category',
    )
    def setUp(self):
        super().setUp()
        fixture_utils.init_ticket_categories()
class TestSyncResourceCommand(AbstractBaseSyncTest, TestCase):
    """Sync 'resource' records via the mocked SOAP API."""
    args = (
        fixtures.API_RESOURCE_LIST,
        'resource',
    )
class TestSyncTicketSecondaryResourceCommand(AbstractBaseSyncTest, TestCase):
    """Sync 'ticket_secondary_resource' records via the mocked SOAP API."""
    args = (
        fixtures.API_SECONDARY_RESOURCE_LIST,
        'ticket_secondary_resource',
    )
class TestSyncAccountCommand(AbstractBaseSyncTest, TestCase):
    """Sync 'account' records via the mocked SOAP API."""
    args = (
        fixtures.API_ACCOUNT_LIST,
        'account',
    )
class TestSyncAccountLocationCommand(AbstractBaseSyncRestTest, TestCase):
    """Sync 'account_physical_location' records (needs accounts first)."""
    args = (
        mocks.service_api_get_account_physical_locations_call,
        fixtures.API_ACCOUNT_PHYSICAL_LOCATION,
        'account_physical_location',
    )
    def setUp(self):
        super().setUp()
        fixture_utils.init_accounts()
        fixture_utils.init_account_physical_locations()
class TestSyncProjectCommand(AbstractBaseSyncRestTest, TestCase):
    """Sync 'project' records via the mocked REST API."""
    args = (
        mocks.service_api_get_projects_call,
        fixtures.API_PROJECT,
        'project',
    )
    def setUp(self):
        super().setUp()
        fixture_utils.init_projects()
class TestSyncPhaseCommand(AbstractBaseSyncTest, TestCase):
    """Sync 'phase' records via the mocked SOAP API."""
    args = (
        fixtures.API_PHASE_LIST,
        'phase',
    )
class TestSyncTaskCommand(AbstractBaseSyncRestTest, TestCase):
    """Sync 'task' records via the mocked REST API (needs projects)."""
    args = (
        mocks.service_api_get_tasks_call,
        fixtures.API_TASK,
        'task',
    )
    def setUp(self):
        super().setUp()
        fixture_utils.init_projects()
        fixture_utils.init_tasks()
class TestSyncTaskSecondaryResourceCommand(AbstractBaseSyncTest, TestCase):
    """Sync 'task_secondary_resource' records via the mocked SOAP API."""
    args = (
        fixtures.API_TASK_SECONDARY_RESOURCE_LIST,
        'task_secondary_resource',
    )
class TestSyncTicketNoteCommand(AbstractBaseSyncTest, TestCase):
    """Sync 'ticket_note' records via the mocked SOAP API (needs tickets)."""
    args = (
        fixtures.API_TICKET_NOTE_LIST,
        'ticket_note',
    )
    def setUp(self):
        super().setUp()
        fixture_utils.init_tickets()
        fixture_utils.init_ticket_notes()
class TestSyncTaskNoteCommand(AbstractBaseSyncTest, TestCase):
    """Sync 'task_note' records via the mocked SOAP API (needs tasks)."""
    args = (
        fixtures.API_TASK_NOTE_LIST,
        'task_note',
    )
    def setUp(self):
        super().setUp()
        fixture_utils.init_projects()
        fixture_utils.init_tasks()
        fixture_utils.init_task_notes()
class TestSyncTimeEntryCommand(AbstractBaseSyncTest, TestCase):
    """Sync 'time_entry' records via the mocked SOAP API (needs tickets)."""
    args = (
        fixtures.API_TIME_ENTRY_LIST,
        'time_entry',
    )
    def setUp(self):
        super().setUp()
        fixture_utils.init_tickets()
class TestSyncAllocationCodeCommand(AbstractBaseSyncRestTest, TestCase):
    """Sync 'allocation_code' records via the mocked REST API."""
    args = (
        mocks.service_api_get_allocation_codes_call,
        fixtures.API_ALLOCATION_CODE,
        'allocation_code',
    )
    def setUp(self):
        super().setUp()
        fixture_utils.init_allocation_codes()
class TestSyncRoleCommand(AbstractBaseSyncRestTest, TestCase):
    """Sync 'role' records via the mocked REST API."""
    args = (
        mocks.service_api_get_roles_call,
        fixtures.API_ROLE,
        'role',
    )
    def setUp(self):
        super().setUp()
        fixture_utils.init_roles()
class TestSyncDepartmentCommand(AbstractBaseSyncRestTest, TestCase):
    """Sync 'department' records via the mocked REST API."""
    args = (
        mocks.service_api_get_departments_call,
        fixtures.API_DEPARTMENT,
        'department',
    )
    def setUp(self):
        super().setUp()
        fixture_utils.init_departments()
class TestSyncResourceServiceDeskRoleCommand(AbstractBaseSyncRestTest,
                                             TestCase):
    """Sync 'resource_service_desk_role' links (needs roles + resources)."""
    args = (
        mocks.service_api_get_resource_service_desk_roles_call,
        fixtures.API_RESOURCE_SERVICE_DESK_ROLE,
        'resource_service_desk_role',
    )
    def setUp(self):
        super().setUp()
        fixture_utils.init_roles()
        fixture_utils.init_resources()
        fixture_utils.init_resource_service_desk_roles()
class TestSyncResourceRoleDepartmentCommand(AbstractBaseSyncRestTest,
                                            TestCase):
    """Sync 'resource_role_department' links (needs departments/roles/resources)."""
    args = (
        mocks.service_api_get_resource_role_departments_call,
        fixtures.API_RESOURCE_ROLE_DEPARTMENT,
        'resource_role_department',
    )
    def setUp(self):
        super().setUp()
        fixture_utils.init_departments()
        fixture_utils.init_roles()
        fixture_utils.init_resources()
        fixture_utils.init_resource_role_departments()
class TestSyncContractCommand(AbstractBaseSyncRestTest, TestCase):
    """Sync 'contract' records via the mocked REST API."""
    args = (
        mocks.service_api_get_contracts_call,
        fixtures.API_CONTRACT,
        'contract',
    )
    def setUp(self):
        super().setUp()
        fixture_utils.init_contracts()
class TestSyncServiceCallCommand(AbstractBaseSyncRestTest, TestCase):
    """Sync 'service_call' records (needs statuses, resources, accounts)."""
    args = (
        mocks.service_api_get_service_calls_call,
        fixtures.API_SERVICE_CALL,
        'service_call',
    )
    def setUp(self):
        super().setUp()
        fixture_utils.init_service_call_statuses()
        fixture_utils.init_resources()
        fixture_utils.init_account_types()
        fixture_utils.init_accounts()
class TestSyncServiceCallTicketCommand(AbstractBaseSyncRestTest, TestCase):
    """Sync 'service_call_ticket' links (needs service calls and tickets)."""
    args = (
        mocks.service_api_get_service_call_tickets_call,
        fixtures.API_SERVICE_CALL_TICKET,
        'service_call_ticket',
    )
    def setUp(self):
        super().setUp()
        fixture_utils.init_service_call_statuses()
        fixture_utils.init_resources()
        fixture_utils.init_account_types()
        fixture_utils.init_accounts()
        fixture_utils.init_service_calls()
        fixture_utils.init_statuses()
        fixture_utils.init_tickets()
class TestSyncServiceCallTaskCommand(AbstractBaseSyncRestTest, TestCase):
    """Sync 'service_call_task' links (needs service calls and tasks)."""
    args = (
        mocks.service_api_get_service_call_tasks_call,
        fixtures.API_SERVICE_CALL_TASK,
        'service_call_task',
    )
    def setUp(self):
        super().setUp()
        fixture_utils.init_service_call_statuses()
        fixture_utils.init_account_types()
        fixture_utils.init_accounts()
        fixture_utils.init_service_calls()
        fixture_utils.init_statuses()
        fixture_utils.init_projects()
        fixture_utils.init_tasks()
class TestSyncServiceCallTicketResourceCommand(AbstractBaseSyncRestTest,
                                               TestCase):
    """Sync 'service_call_ticket_resource' links (needs service-call tickets)."""
    args = (
        mocks.service_api_get_service_call_ticket_resources_call,
        fixtures.API_SERVICE_CALL_TICKET_RESOURCE,
        'service_call_ticket_resource',
    )
    def setUp(self):
        super().setUp()
        fixture_utils.init_service_call_statuses()
        fixture_utils.init_resources()
        fixture_utils.init_account_types()
        fixture_utils.init_accounts()
        fixture_utils.init_service_calls()
        fixture_utils.init_statuses()
        fixture_utils.init_tickets()
        fixture_utils.init_service_call_tickets()
class TestSyncServiceCallTaskResourceCommand(AbstractBaseSyncRestTest,
                                             TestCase):
    """Sync 'service_call_task_resource' links (needs service-call tasks)."""
    args = (
        mocks.service_api_get_service_call_task_resources_call,
        fixtures.API_SERVICE_CALL_TASK_RESOURCE,
        'service_call_task_resource',
    )
    def setUp(self):
        super().setUp()
        fixture_utils.init_service_call_statuses()
        fixture_utils.init_resources()
        fixture_utils.init_account_types()
        fixture_utils.init_accounts()
        fixture_utils.init_service_calls()
        fixture_utils.init_statuses()
        fixture_utils.init_projects()
        fixture_utils.init_tasks()
        fixture_utils.init_service_call_tasks()
class TestSyncTaskPredecessor(AbstractBaseSyncRestTest, TestCase):
    """Sync 'task_predecessor' links (needs projects and tasks)."""
    args = (
        mocks.service_api_get_task_predecessors_call,
        fixtures.API_TASK_PREDECESSOR,
        'task_predecessor',
    )
    def setUp(self):
        super().setUp()
        fixture_utils.init_projects()
        fixture_utils.init_tasks()
        fixture_utils.init_task_predecessors()
class TestSyncAllCommand(TestCase):
    """End-to-end test of ``atsync`` with no entity argument: every
    synchronizer runs, against mocks for both the SOAP and REST APIs."""
    def setUp(self):
        super().setUp()
        mocks.init_api_connection(Wrapper)
        mocks.create_mock_call(
            'djautotask.sync.TicketNoteSynchronizer._get_query_conditions',
            None
        )
        mocks.create_mock_call(
            'djautotask.sync.TaskNoteSynchronizer._get_query_conditions',
            None
        )
        fixture_utils.mock_udfs()
        self._call_service_api()
        # Mock API calls to return values based on what entity
        # is being requested
        mocks.get_field_info_api_calls(
            fixture_utils.manage_sync_picklist_return_data
        )
        mocks.wrapper_query_api_calls(
            fixture_utils.manage_full_sync_return_data
        )
        # Reuse the per-entity test classes' args to know, for each entity,
        # which fixture (and, for REST, which mock) applies.
        sync_test_cases = [
            TestSyncLicenseTypeCommand,
            TestSyncTaskTypeLinkCommand,
            TestSyncUseTypeCommand,
            TestSyncAccountTypeCommand,
            TestSyncRoleCommand,
            TestSyncDepartmentCommand,
            TestSyncTicketCommand,
            TestSyncTaskCommand,
            TestSyncStatusCommand,
            TestSyncResourceCommand,
            TestSyncPriorityCommand,
            TestSyncQueueCommand,
            TestSyncAccountCommand,
            TestSyncProjectCommand,
            TestSyncProjectStatusCommand,
            TestSyncProjectTypeCommand,
            TestSyncTicketCategoryCommand,
            TestSyncSourceCommand,
            TestSyncIssueTypeCommand,
            TestSyncSubIssueTypeCommand,
            TestSyncTicketTypeCommand,
            TestSyncDisplayColorCommand,
            TestSyncTaskSecondaryResourceCommand,
            TestSyncPhaseCommand,
            TestSyncTicketNoteCommand,
            TestSyncTaskNoteCommand,
            TestSyncTimeEntryCommand,
            TestSyncAllocationCodeCommand,
            TestSyncResourceRoleDepartmentCommand,
            TestSyncResourceServiceDeskRoleCommand,
            TestSyncContractCommand,
            TestSyncServiceCallStatusCommand,
            TestSyncServiceCallCommand,
            TestSyncServiceCallTicketCommand,
            TestSyncServiceCallTaskCommand,
            TestSyncServiceCallTicketResourceCommand,
            TestSyncServiceCallTaskResourceCommand,
            TestSyncAccountLocationCommand,
            TestSyncTaskPredecessor,
            TestSyncContactCommand,
        ]
        # Normalise to 3-tuples (mock_call, fixture, at_object); SOAP cases
        # have 2-element args and get mock_call=None.
        self.test_args = []
        for test_case in sync_test_cases:
            # for REST API
            if len(test_case.args) == 3:
                self.test_args.append(test_case.args)
            # for SOAP API
            else:
                new_test_case = [None, *test_case.args]
                self.test_args.append(new_test_case)
    def test_partial_sync(self):
        """
        Test the command to run a sync of all objects without
        the --full argument.
        """
        output = run_sync_command()
        for mock_call, fixture, at_object in self.test_args:
            if mock_call:
                # REST fixtures: picklists carry 'fields', entities 'items'.
                if 'fields' in fixture:
                    fixture_len = \
                        len(fixture.get('fields')[0].get('picklistValues'))
                else:
                    fixture_len = len(fixture.get('items'))
            else:
                # SOAP fixtures are plain lists.
                fixture_len = len(fixture)
            summary = sync_summary(slug_to_title(at_object), fixture_len)
            self.assertIn(summary, output.getvalue().strip())
        self.assertEqual(
            models.Ticket.objects.all().count(),
            len(fixtures.API_TICKET['items'])
        )
    def test_full_sync(self):
        """Test the command to run a full sync of all objects."""
        at_object_map = {
            'account_type': models.AccountType,
            'role': models.Role,
            'department': models.Department,
            'status': models.Status,
            'priority': models.Priority,
            'queue': models.Queue,
            'source': models.Source,
            'issue_type': models.IssueType,
            'display_color': models.DisplayColor,
            'ticket': models.Ticket,
            'resource': models.Resource,
            'ticket_secondary_resource': models.TicketSecondaryResource,
            'account': models.Account,
            'account_physical_location': models.AccountPhysicalLocation,
            'project': models.Project,
            'project_status': models.ProjectStatus,
            'project_type': models.ProjectType,
            'ticket_category': models.TicketCategory,
            'sub_issue_type': models.SubIssueType,
            'ticket_type': models.TicketType,
            'license_type': models.LicenseType,
            'task': models.Task,
            'task_secondary_resource': models.TaskSecondaryResource,
            'phase': models.Phase,
            'ticket_note': models.TicketNote,
            'task_note': models.TaskNote,
            'time_entry': models.TimeEntry,
            'task_type_link': models.TaskTypeLink,
            'use_type': models.UseType,
            'allocation_code': models.AllocationCode,
            'resource_role_department': models.ResourceRoleDepartment,
            'resource_service_desk_role': models.ResourceServiceDeskRole,
            'contract': models.Contract,
            'service_call_status': models.ServiceCallStatus,
            'service_call': models.ServiceCall,
            'service_call_ticket': models.ServiceCallTicket,
            'service_call_task': models.ServiceCallTask,
            'service_call_ticket_resource': models.ServiceCallTicketResource,
            'service_call_task_resource': models.ServiceCallTaskResource,
            'task_predecessor': models.TaskPredecessor,
            'contact': models.Contact,
        }
        # First populate the database with a partial sync, then switch the
        # mocks to empty responses so a --full sync deletes everything.
        run_sync_command()
        pre_full_sync_counts = {}
        mocks.wrapper_query_api_calls()
        mocks.get_field_info_api_calls()
        _, _patch = mocks.build_batch_query()
        self._call_empty_service_api()
        for key, model_class in at_object_map.items():
            pre_full_sync_counts[key] = model_class.objects.all().count()
        output = run_sync_command(full_option=True)
        _patch.stop()
        # Verify the rest of sync classes summaries.
        for mock_call, fixture, at_object in self.test_args:
            if at_object in (
                'resource_role_department',
                'resource_service_desk_role',
                'service_call',
                'service_call_ticket',
                'service_call_task',
                'service_call_ticket_resource',
                'service_call_task_resource',
                'task_predecessor',
                'task'
            ):
                # Assert that there were objects to get deleted, then change
                # to zero to verify the output formats correctly.
                # We are just testing the command, there are sync tests to
                # verify that the synchronizers work correctly
                self.assertGreater(pre_full_sync_counts[at_object], 0)
                pre_full_sync_counts[at_object] = 0
            summary = full_sync_summary(
                slug_to_title(at_object),
                pre_full_sync_counts[at_object]
            )
            self.assertIn(summary, output.getvalue().strip())
    def _call_service_api(self):
        """Install REST mocks returning the populated fixtures."""
        mocks.service_api_get_roles_call(fixtures.API_ROLE)
        mocks.service_api_get_departments_call(fixtures.API_DEPARTMENT)
        mocks.service_api_get_resource_service_desk_roles_call(
            fixtures.API_RESOURCE_SERVICE_DESK_ROLE)
        mocks.service_api_get_resource_role_departments_call(
            fixtures.API_RESOURCE_ROLE_DEPARTMENT)
        mocks.service_api_get_license_types_call(
            fixtures.API_LICENSE_TYPE_FIELD)
        mocks.service_api_get_use_types_call(fixtures.API_USE_TYPE_FIELD)
        mocks.service_api_get_task_type_links_call(
            fixtures.API_TASK_TYPE_LINK_FIELD)
        mocks.service_api_get_account_types_call(
            fixtures.API_ACCOUNT_TYPE_FIELD)
        mocks.service_api_get_ticket_category_picklist_call(
            fixtures.API_DISPLAY_COLOR_FIELD)
        mocks.service_api_get_ticket_picklist_call(
            fixtures.API_TICKET_PICKLIST_FIELD)
        mocks.service_api_get_project_picklist_call(
            fixtures.API_PROJECT_PICKLIST_FIELD)
        mocks.service_api_get_service_call_statuses_call(
            fixtures.API_SERVICE_CALL_STATUS_FIELD)
        mocks.service_api_get_contacts_call(fixtures.API_CONTACT)
        mocks.service_api_get_contracts_call(fixtures.API_CONTRACT)
        mocks.service_api_get_allocation_codes_call(
            fixtures.API_ALLOCATION_CODE)
        mocks.service_api_get_account_physical_locations_call(
            fixtures.API_ACCOUNT_PHYSICAL_LOCATION)
        mocks.service_api_get_ticket_categories_call(
            fixtures.API_TICKET_CATEGORY)
        mocks.service_api_get_tickets_call(fixtures.API_TICKET)
        mocks.service_api_get_tasks_call(fixtures.API_TASK)
        mocks.service_api_get_projects_call(fixtures.API_PROJECT)
        mocks.service_api_get_service_calls_call(fixtures.API_SERVICE_CALL)
        mocks.service_api_get_service_call_tickets_call(
            fixtures.API_SERVICE_CALL_TICKET)
        mocks.service_api_get_service_call_ticket_resources_call(
            fixtures.API_SERVICE_CALL_TICKET_RESOURCE)
        mocks.service_api_get_service_call_tasks_call(
            fixtures.API_SERVICE_CALL_TASK)
        mocks.service_api_get_service_call_task_resources_call(
            fixtures.API_SERVICE_CALL_TASK_RESOURCE)
        mocks.service_api_get_task_predecessors_call(
            fixtures.API_TASK_PREDECESSOR)
    def _call_empty_service_api(self):
        """Install REST mocks returning empty responses (full-sync phase)."""
        mocks.service_api_get_contacts_call(fixtures.API_EMPTY)
        mocks.service_api_get_contracts_call(fixtures.API_EMPTY)
        mocks.service_api_get_allocation_codes_call(fixtures.API_EMPTY)
        mocks.service_api_get_account_physical_locations_call(
            fixtures.API_EMPTY)
        mocks.service_api_get_tickets_call(fixtures.API_EMPTY)
        mocks.service_api_get_tasks_call(fixtures.API_EMPTY)
        mocks.service_api_get_projects_call(fixtures.API_EMPTY)
        mocks.service_api_get_ticket_categories_call(fixtures.API_EMPTY)
        mocks.service_api_get_task_predecessors_call(fixtures.API_EMPTY)
        mocks.service_api_get_roles_call(fixtures.API_EMPTY)
        mocks.service_api_get_departments_call(fixtures.API_EMPTY)
        mocks.service_api_get_resource_service_desk_roles_call(
            fixtures.API_EMPTY)
        mocks.service_api_get_resource_role_departments_call(
            fixtures.API_EMPTY)
        mocks.service_api_get_service_calls_call(fixtures.API_EMPTY)
        mocks.service_api_get_service_call_tickets_call(fixtures.API_EMPTY)
        mocks.service_api_get_service_call_ticket_resources_call(
            fixtures.API_EMPTY)
        mocks.service_api_get_service_call_tasks_call(fixtures.API_EMPTY)
        mocks.service_api_get_service_call_task_resources_call(
            fixtures.API_EMPTY)
        mocks.service_api_get_ticket_category_picklist_call({"fields": []})
        mocks.service_api_get_ticket_picklist_call({"fields": []})
        mocks.service_api_get_project_picklist_call({"fields": []})
        mocks.service_api_get_license_types_call({"fields": []})
        mocks.service_api_get_use_types_call({"fields": []})
        mocks.service_api_get_task_type_links_call({"fields": []})
        mocks.service_api_get_account_types_call({"fields": []})
        mocks.service_api_get_service_call_statuses_call({"fields": []})
| 31.441145 | 79 | 0.659516 | 28,554 | 0.963068 | 0 | 0 | 0 | 0 | 0 | 0 | 2,617 | 0.088266 |
9237845df9705971fc68a591a3a63ea569aa3bde | 26,319 | py | Python | src/utils/adbtool.py | wangzhi2689/data_analysis | 3410856d1df1a9cd95660e28e6ed47fd0102f6aa | [
"Apache-2.0"
] | 1 | 2022-01-01T09:36:54.000Z | 2022-01-01T09:36:54.000Z | src/utils/adbtool.py | wangzhi2689/data_analysis | 3410856d1df1a9cd95660e28e6ed47fd0102f6aa | [
"Apache-2.0"
] | 1 | 2022-02-26T13:07:05.000Z | 2022-02-26T13:07:05.000Z | src/utils/adbtool.py | wangzhi2689/data_analysis | 3410856d1df1a9cd95660e28e6ed47fd0102f6aa | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
# -*- coding:utf-8 -*-
# FileName adbtools.py
# Author: HeyNiu
# Created Time: 2016/9/19
"""
adb 工具类
"""
import os
import platform
import re
import time
#import utils.timetools
class AdbTools(object):
    """Thin wrapper around the Android Debug Bridge (adb) command-line tool.

    Locates adb via the ``ANDROID_HOME`` environment variable and shells out
    with :func:`os.popen`, so command methods return the open pipe (or the
    text read from it) rather than a parsed result.
    """
    def __init__(self, device_id=''):
        """
        :param device_id: target device serial; may be left empty when only
            a single device is attached.
        """
        self.__system = platform.system()
        self.__find = ''
        self.__command = ''
        self.__device_id = device_id
        self.__get_find()
        self.__check_adb()
        self.__connection_devices()
    def __get_find(self):
        """Choose the text-filter program: findstr on Windows, grep elsewhere.

        Restored: this body used to be commented out, which left
        ``self.__find`` as '' and broke every shell pipeline built with it.
        """
        if self.__system == "Windows":
            self.__find = "findstr"
        else:
            self.__find = "grep"
    def __check_adb(self):
        """Locate the adb executable inside $ANDROID_HOME/platform-tools.

        :raises EnvironmentError: if ANDROID_HOME is not set or adb is missing.
        """
        if "ANDROID_HOME" not in os.environ:
            # The old code formatted os.environ["ANDROID_HOME"] into this
            # message, which raised KeyError instead of EnvironmentError.
            raise EnvironmentError("ANDROID_HOME environment variable is not set.")
        executable = "adb.exe" if self.__system == "Windows" else "adb"
        path = os.path.join(os.environ["ANDROID_HOME"], "platform-tools", executable)
        if not os.path.exists(path):
            raise EnvironmentError(
                "Adb not found in $ANDROID_HOME path: %s." % os.environ["ANDROID_HOME"])
        self.__command = path
    def __connection_devices(self):
        """Turn the device id into the ``-s <serial>`` adb selector (no-op when empty)."""
        if self.__device_id == "":
            return
        self.__device_id = "-s %s" % self.__device_id
    def adb(self, args):
        """Run an adb command and return the open pipe.

        :param args: arguments appended after ``adb [-s serial]``
        """
        cmd = "%s %s %s" % (self.__command, self.__device_id, str(args))
        return os.popen(cmd)
    def shell(self, args):
        """Run an ``adb shell`` command and return the open pipe.

        :param args: the shell command to execute on the device
        """
        cmd = "%s %s shell %s" % (self.__command, self.__device_id, str(args))
        return os.popen(cmd)
    def mkdir(self, path):
        """Create a directory on the device."""
        return self.shell('mkdir %s' % path)
    def get_devices(self):
        """Return a generator of attached device serial numbers."""
        l = self.adb('devices').readlines()
        return (i.split()[0] for i in l if 'devices' not in i and len(i) > 5)
    def get_current_application(self):
        """Return raw window-manager info about the foreground application."""
        return self.shell('dumpsys window w | %s \/ | %s name=' % (self.__find, self.__find)).read()
    def get_current_package(self):
        """Return the package name of the foreground app."""
        reg = re.compile(r'name=(.+?)/')
        return re.findall(reg, self.get_current_application())[0]
    def get_current_activity(self):
        """Return the foreground activity as ``package/activity``."""
        reg = re.compile(r'name=(.+?)\)')
        return re.findall(reg, self.get_current_application())[0]
    def __get_process(self, package_name):
        """Return the raw ``ps`` output line for *package_name* ('' if not running).

        Restored: this method used to be commented out even though
        :meth:`process_exists` and :meth:`get_pid` still call it.
        """
        if self.__system == "Windows":
            return self.shell("ps | %s %s$" % (self.__find, package_name)).read()
        return self.shell("ps | %s -w %s" % (self.__find, package_name)).read()
    def process_exists(self, package_name):
        """Return True if a process for *package_name* is running."""
        process = self.__get_process(package_name)
        return package_name in process
    def get_pid(self, package_name):
        """Return the pid of *package_name* ('' when the process does not exist)."""
        pid_command = self.__get_process(package_name)
        if pid_command == '':
            print("The process doesn't exist.")
            return pid_command
        req = re.compile(r"\d+")
        result = str(pid_command).split()
        result.remove(result[0])  # drop the USER column so the first number is the pid
        return req.findall(" ".join(result))[0]
    def get_uid(self, pid):
        """Return the uid that owns process *pid* (parsed from /proc/<pid>/status)."""
        result = self.shell("cat /proc/%s/status" % pid).readlines()
        for i in result:
            if 'uid' in i.lower():
                return i.split()[1]
    def get_flow_data_tcp(self, uid):
        """Return the app's TCP traffic counters as ``(received, sent)``."""
        tcp_rcv = self.shell("cat proc/uid_stat/%s/tcp_rcv" % uid).read().split()[0]
        tcp_snd = self.shell("cat proc/uid_stat/%s/tcp_snd" % uid).read().split()[0]
        return tcp_rcv, tcp_snd
    def get_flow_data_all(self, uid):
        """Return every traffic-accounting row for *uid*.

        Covers all processes/interfaces of the app (tcp, udp, other);
        ``(rx_bytes, tx_bytes)`` are the received/sent totals per row.

        :param uid: the application uid
        :return: list of dicts, one per row of /proc/net/xt_qtaguid/stats
        """
        all_data = []
        d = {}
        data = self.shell("cat /proc/net/xt_qtaguid/stats | %s %s" % (self.__find, uid)).readlines()
        for i in data:
            if not i.startswith('\n'):
                item = i.strip().split()
                d['idx'] = item[0]
                d['iface'] = item[1]
                d['acct_tag_hex'] = item[2]
                d['uid_tag_int'] = item[3]
                d['cnt_set'] = item[4]
                d['rx_bytes'] = item[5]
                d['rx_packets'] = item[6]
                d['tx_bytes'] = item[7]
                d['tx_packets'] = item[8]
                d['rx_tcp_bytes'] = item[9]
                d['rx_tcp_packets'] = item[10]
                d['rx_udp_bytes'] = item[11]
                d['rx_udp_packets'] = item[12]
                d['rx_other_bytes'] = item[13]
                d['rx_other_packets'] = item[14]
                d['tx_tcp_bytes'] = item[15]
                d['tx_tcp_packets'] = item[16]
                d['tx_udp_bytes'] = item[17]
                d['tx_udp_packets'] = item[18]
                d['tx_other_bytes'] = item[19]
                d['tx_other_packets'] = item[20]
                all_data.append(d)
                d = {}
        return all_data
    @staticmethod
    def dump_apk(path):
        """Run ``aapt dump badging`` against the apk at *path*.

        :raises EnvironmentError: when no SDK build-tools directory (which
            provides the aapt command) is found on PATH.
        """
        # os.pathsep handles both ';' (Windows) and ':' (POSIX); the old
        # code split on ';' only.
        l = os.environ['PATH'].split(os.pathsep)
        build_tools = False
        for i in l:
            if 'build-tools' in i:
                build_tools = True
        if not build_tools:
            raise EnvironmentError("ANDROID_HOME BUILD-TOOLS COMMAND NOT FOUND.\nPlease set the environment variable.")
        return os.popen('aapt dump badging %s' % (path,))
    @staticmethod
    def dump_xml(path, filename):
        """Run ``aapt dump xmlstrings`` on *filename* inside the apk at *path*."""
        return os.popen('aapt dump xmlstrings %s %s' % (path, filename))
    def uiautomator_dump(self):
        """Dump the current UI hierarchy and return the on-device xml path."""
        return self.shell('uiautomator dump').read().split()[-1]
    def pull(self, source, target):
        """Copy *source* from the device to *target* on the computer."""
        self.adb('pull %s %s' % (source, target))
    def push(self, source, target):
        """Copy *source* from the computer to *target* on the device."""
        self.adb('push %s %s' % (source, target))
    def remove(self, path):
        """Delete *path* on the device."""
        self.shell('rm %s' % (path,))
    def clear_app_data(self, package):
        """Clear all stored data of the application *package*."""
        self.shell('pm clear %s' % (package,))
    def install(self, path):
        """Install an apk file, printing a human-readable result.

        :param path: path to the apk file
        :return: the raw output of ``adb install``
        """
        # Common 'adb install' failure codes mapped to readable descriptions
        # (values intentionally kept verbatim — they are runtime output).
        errors = {'INSTALL_FAILED_ALREADY_EXISTS': '程序已经存在',
                  'INSTALL_DEVICES_NOT_FOUND': '找不到设备',
                  'INSTALL_FAILED_DEVICE_OFFLINE': '设备离线',
                  'INSTALL_FAILED_INVALID_APK': '无效的APK',
                  'INSTALL_FAILED_INVALID_URI': '无效的链接',
                  'INSTALL_FAILED_INSUFFICIENT_STORAGE': '没有足够的存储空间',
                  'INSTALL_FAILED_DUPLICATE_PACKAGE': '已存在同名程序',
                  'INSTALL_FAILED_NO_SHARED_USER': '要求的共享用户不存在',
                  'INSTALL_FAILED_UPDATE_INCOMPATIBLE': '版本不能共存',
                  'INSTALL_FAILED_SHARED_USER_INCOMPATIBLE': '需求的共享用户签名错误',
                  'INSTALL_FAILED_MISSING_SHARED_LIBRARY': '需求的共享库已丢失',
                  'INSTALL_FAILED_REPLACE_COULDNT_DELETE': '需求的共享库无效',
                  'INSTALL_FAILED_DEXOPT': 'dex优化验证失败',
                  'INSTALL_FAILED_DEVICE_NOSPACE': '手机存储空间不足导致apk拷贝失败',
                  'INSTALL_FAILED_DEVICE_COPY_FAILED': '文件拷贝失败',
                  'INSTALL_FAILED_OLDER_SDK': '系统版本过旧',
                  'INSTALL_FAILED_CONFLICTING_PROVIDER': '存在同名的内容提供者',
                  'INSTALL_FAILED_NEWER_SDK': '系统版本过新',
                  'INSTALL_FAILED_TEST_ONLY': '调用者不被允许测试的测试程序',
                  'INSTALL_FAILED_CPU_ABI_INCOMPATIBLE': '包含的本机代码不兼容',
                  'CPU_ABIINSTALL_FAILED_MISSING_FEATURE': '使用了一个无效的特性',
                  'INSTALL_FAILED_CONTAINER_ERROR': 'SD卡访问失败',
                  'INSTALL_FAILED_INVALID_INSTALL_LOCATION': '无效的安装路径',
                  'INSTALL_FAILED_MEDIA_UNAVAILABLE': 'SD卡不存在',
                  'INSTALL_FAILED_INTERNAL_ERROR': '系统问题导致安装失败',
                  'INSTALL_PARSE_FAILED_NO_CERTIFICATES': '文件未通过认证 >> 设置开启未知来源',
                  'INSTALL_PARSE_FAILED_INCONSISTENT_CERTIFICATES': '文件认证不一致 >> 先卸载原来的再安装',
                  'INSTALL_FAILED_INVALID_ZIP_FILE': '非法的zip文件 >> 先卸载原来的再安装',
                  'INSTALL_CANCELED_BY_USER': '需要用户确认才可进行安装',
                  'INSTALL_FAILED_VERIFICATION_FAILURE': '验证失败 >> 尝试重启手机',
                  'DEFAULT': '未知错误'
                  }
        print('Installing...')
        l = self.adb('install -r %s' % (path,)).read()
        if 'Success' in l:
            print('Install Success')
        if 'Failure' in l:
            reg = re.compile('\\[(.+?)\\]')
            key = re.findall(reg, l)[0]
            try:
                print('Install Failure >> %s' % errors[key])
            except KeyError:
                # unknown failure code: fall back to printing the raw code
                print('Install Failure >> %s' % key)
        return l
    def uninstall(self, package):
        """Uninstall the application *package* and print the result."""
        print('Uninstalling...')
        l = self.adb('uninstall %s' % (package,)).read()
        print(l)
    def screenshot(self, target_path=''):
        """Capture a device screenshot and pull it to the computer.

        :param target_path: destination directory; defaults to the home directory
        """
        # time.strftime replaces the call into the (commented-out)
        # utils.timetools module, which made this method raise NameError.
        format_time = time.strftime('%Y%m%d%H%M%S')
        self.shell('screencap -p /sdcard/%s.png' % (format_time,))
        time.sleep(1)  # give the device a moment to finish writing the file
        if target_path == '':
            self.pull('/sdcard/%s.png' % (format_time,), os.path.expanduser('~'))
        else:
            self.pull('/sdcard/%s.png' % (format_time,), target_path)
        self.remove('/sdcard/%s.png' % (format_time,))
    def get_cache_logcat(self):
        """Dump the buffered logcat output."""
        return self.adb('logcat -v time -d')
    def get_crash_logcat(self):
        """Dump buffered logcat output filtered to crash (AndroidRuntime) lines."""
        return self.adb('logcat -v time -d | %s AndroidRuntime' % (self.__find,))
    def clear_cache_logcat(self):
        """Clear the logcat buffer."""
        self.adb('logcat -c')
    def get_device_time(self):
        """Return the device's current date/time string."""
        return self.shell('date').read().strip()
    def ls(self, command):
        """Run ``ls`` on the device and return the output lines."""
        return self.shell('ls %s' % (command,)).readlines()
    def file_exists(self, target):
        """Return True when *target* exists on the device."""
        l = self.ls(target)
        for i in l:
            if i.strip() == target:
                return True
        return False
    def is_install(self, target_app):
        """Return True when the package *target_app* is installed on the device."""
        return target_app in self.shell('pm list packages %s' % (target_app,)).read()
    def get_device_model(self):
        """Return the device model (ro.product.model)."""
        return self.shell('getprop ro.product.model').read().strip()
    def get_device_id(self):
        """Return the device serial number."""
        return self.adb('get-serialno').read().strip()
    def get_device_android_version(self):
        """Return the Android release version of the device."""
        return self.shell('getprop ro.build.version.release').read().strip()
    def get_device_sdk_version(self):
        """Return the SDK (API) level of the device."""
        return self.shell('getprop ro.build.version.sdk').read().strip()
    def get_device_mac_address(self):
        """Return the wlan0 MAC address."""
        return self.shell('cat /sys/class/net/wlan0/address').read().strip()
    def get_device_ip_address(self):
        """Return the device IP address (wifi or mobile data), or None
        when neither network is connected."""
        if not self.get_wifi_state() and not self.get_data_state():
            return
        l = self.shell('ip addr | %s global' % self.__find).read()
        reg = re.compile('\d+\.\d+\.\d+\.\d+')
        return re.findall(reg, l)[0]
    def get_device_imei(self):
        """Return the device IMEI ('' when it cannot be determined).

        Uses dumpsys on Android < 5.0 (API 21); newer versions require root
        to query the iphonesubinfo service.
        """
        sdk = self.get_device_sdk_version()
        if int(sdk) < 21:
            l = self.shell('dumpsys iphonesubinfo').read()
            reg = re.compile('[0-9]{15}')
            return re.findall(reg, l)[0]
        elif self.root():
            l = self.shell('service call iphonesubinfo 1').read()
            print(l)
            print(re.findall(re.compile("'.+?'"), l))
            imei = ''
            # The service output interleaves the digits with dots/quotes/spaces;
            # strip those to reassemble the IMEI.
            for i in re.findall(re.compile("'.+?'"), l):
                imei += i.replace('.', '').replace("'", '').replace(' ', '')
            return imei
        else:
            print('The device not root.')
            return ''
    def check_sim_card(self):
        """Return True when a SIM card is present (operator name longer than 2 chars)."""
        return len(self.shell('getprop | %s gsm.operator.alpha]' % self.__find).read().strip().split()[-1]) > 2
    def get_device_operators(self):
        """Return the mobile network operator name."""
        return self.shell('getprop | %s gsm.operator.alpha]' % self.__find).read().strip().split()[-1]
    def get_device_state(self):
        """Return the adb device state (e.g. device/offline)."""
        return self.adb('get-state').read().strip()
    def get_display_state(self):
        """Return True when the screen is on, False when it is off.

        Handles both dumpsys output formats (mScreenOn= and Display Power).
        """
        l = self.shell('dumpsys power').readlines()
        for i in l:
            if 'mScreenOn=' in i:
                return i.split()[-1] == 'mScreenOn=true'
            if 'Display Power' in i:
                return 'ON' in i.split('=')[-1].upper()
    def get_screen_normal_size(self):
        """Return the nominal screen resolution as [width, height] strings."""
        return self.shell('wm size').read().strip().split()[-1].split('x')
    def get_screen_reality_size(self):
        """Return the physical touch-panel resolution as ``(x, y)``."""
        x = 0
        y = 0
        # event codes 0035/0036 are ABS_MT_POSITION_X/Y; field 7 holds the max
        l = self.shell(r'getevent -p | %s -e "0"' % self.__find).readlines()
        for n in l:
            if len(n.split()) > 0:
                if n.split()[0] == '0035':
                    x = int(n.split()[7].split(',')[0])
                elif n.split()[0] == '0036':
                    y = int(n.split()[7].split(',')[0])
        return x, y
    def get_device_interior_sdcard(self):
        """Return internal sdcard usage: ``(path, total, used, free, block)``."""
        return self.shell('df | %s \/mnt\/shell\/emulated' % self.__find).read().strip().split()
    def get_device_external_sdcard(self):
        """Return external sdcard usage: ``(path, total, used, free, block)``."""
        return self.shell('df | %s \/storage' % self.__find).read().strip().split()
    def __fill_rom(self, path, stream, count):
        """Write zeros to *path* with dd: *count* blocks of *stream* bytes each."""
        self.shell('dd if=/dev/zero of=%s bs=%s count=%s' % (path, stream, count)).read().strip()
    def fill_interior_sdcard(self, filename, size):
        """Fill the internal sdcard by writing *size* bytes to *filename*."""
        if size > 10485760:  # 10m
            # integer division: dd's count argument must be a whole number
            # (the old float division produced values such as '2.5')
            self.__fill_rom('sdcard/%s' % filename, 10485760, size // 10485760)
        else:
            self.__fill_rom('sdcard/%s' % filename, size, 1)
    def fill_external_sdcard(self, filename, size):
        """Fill the external sdcard by writing *size* bytes to *filename*."""
        path = self.get_device_external_sdcard()[0]
        if size > 10485760:  # 10m
            self.__fill_rom('%s/%s' % (path, filename), 10485760, size // 10485760)
        else:
            self.__fill_rom('%s/%s' % (path, filename), size, 1)
    def kill_process(self, pid):
        """Kill process *pid* (usually needs extra permissions; prefer quit_app)."""
        return self.shell('kill %s' % pid).read().strip()
    def quit_app(self, package):
        """Force-stop the application *package*."""
        return self.shell('am force-stop %s' % package).read().strip()
    def reboot(self):
        """Reboot the device."""
        self.adb('reboot')
    def recovery(self):
        """Reboot the device into recovery mode."""
        self.adb('reboot recovery')
    def fastboot(self):
        """Reboot the device into fastboot (bootloader) mode."""
        self.adb('reboot bootloader')
    def root(self):
        """Return True when the device grants root (su) access."""
        return 'not found' not in self.shell('su -c ls -l /data/').read().strip()
    def wifi(self, power):
        """Switch wifi on/off (requires root)."""
        if not self.root():
            print('The device not root.')
            return
        if power:
            self.shell('su -c svc wifi enable').read().strip()
        else:
            self.shell('su -c svc wifi disable').read().strip()
    def data(self, power):
        """Switch mobile data on/off (requires root)."""
        if not self.root():
            print('The device not root.')
            return
        if power:
            self.shell('su -c svc data enable').read().strip()
        else:
            self.shell('su -c svc data disable').read().strip()
    def get_wifi_state(self):
        """Return True when wifi is enabled."""
        return 'enabled' in self.shell('dumpsys wifi | %s ^Wi-Fi' % self.__find).read().strip()
    def get_data_state(self):
        """Return True when mobile data is connected (state 2)."""
        return '2' in self.shell('dumpsys telephony.registry | %s mDataConnectionState' % self.__find).read().strip()
    def get_network_state(self):
        """Return True when the device can resolve/reach the internet (ping test)."""
        return 'unknown host' not in self.shell('ping -w 1 www.baidu.com').read().strip()
    def get_wifi_password_list(self):
        """Return saved wifi [ssid, password] pairs (requires root)."""
        if not self.root():
            print('The device not root.')
            return []
        l = re.findall(re.compile('ssid=".+?"\s{3}psk=".+?"'), self.shell('su -c cat /data/misc/wifi/*.conf').read())
        return [re.findall(re.compile('".+?"'), i) for i in l]
    def call(self, number):
        """Dial the phone number *number*."""
        self.shell('am start -a android.intent.action.CALL -d tel:%s' % number)
    def open_url(self, url):
        """Open *url* in the default browser."""
        self.shell('am start -a android.intent.action.VIEW -d %s' % url)
    def start_application(self, component):
        """Start an application by component name,
        e.g. ``com.android.settings/com.android.settings.Settings``."""
        self.shell("am start -n %s" % component)
    def send_keyevent(self, keycode):
        """Send a key event (see
        https://developer.android.com/reference/android/view/KeyEvent.html)."""
        self.shell('input keyevent %s' % keycode)
    def rotation_screen(self, param):
        """Set screen rotation: 0 locks portrait (auto-rotate off), 1 enables auto-rotate."""
        self.shell('/system/bin/content insert --uri content://settings/system --bind '
                   'name:s:accelerometer_rotation --bind value:i:%s' % param)
    def instrument(self, command):
        """Run ``am instrument`` with *command* and return its output."""
        return self.shell('am instrument %s' % command).read()
    def export_apk(self, package, target_path='', timeout=5000):
        """Pull an installed apk from the device and rename it to <package>.apk.

        :param package: application package name
        :param target_path: destination directory; defaults to the home directory
        :param timeout: maximum number of polling iterations while waiting for
            the pulled file to appear
        """
        destination = target_path if target_path else os.path.expanduser('~')
        self.adb('pull /data/app/%s-1/base.apk %s' % (package, destination))
        base_apk = os.path.join(destination, 'base.apk')
        # The old implementation looped forever ('while 1' with no break) and,
        # when target_path was given, polled the wrong directory (always the
        # home directory).
        for _ in range(timeout):
            if os.path.exists(base_apk):
                os.rename(base_apk, os.path.join(destination, '%s.apk' % package))
                break
class KeyCode:
    """Android KeyEvent keycodes, for use with :meth:`AdbTools.send_keyevent`.

    See https://developer.android.com/reference/android/view/KeyEvent.html
    """
    KEYCODE_CALL = 5 # dial key
    KEYCODE_ENDCALL = 6 # end-call (hang up) key
    KEYCODE_HOME = 3 # Home key
    KEYCODE_MENU = 82 # menu key
    KEYCODE_BACK = 4 # back key
    KEYCODE_SEARCH = 84 # search key
    KEYCODE_CAMERA = 27 # camera (shutter) key
    KEYCODE_FOCUS = 80 # camera focus key
    KEYCODE_POWER = 26 # power key
    KEYCODE_NOTIFICATION = 83 # notification key
    KEYCODE_MUTE = 91 # microphone mute key
    KEYCODE_VOLUME_MUTE = 164 # speaker mute key
    KEYCODE_VOLUME_UP = 24 # volume up key
    KEYCODE_VOLUME_DOWN = 25 # volume down key
    KEYCODE_ENTER = 66 # enter key
    KEYCODE_ESCAPE = 111 # ESC key
    KEYCODE_DPAD_CENTER = 23 # d-pad >> confirm key
    KEYCODE_DPAD_UP = 19 # d-pad >> up
    KEYCODE_DPAD_DOWN = 20 # d-pad >> down
    KEYCODE_DPAD_LEFT = 21 # d-pad >> left
    KEYCODE_DPAD_RIGHT = 22 # d-pad >> right
    KEYCODE_MOVE_HOME = 122 # move cursor to start key
    KEYCODE_MOVE_END = 123 # move cursor to end key
    KEYCODE_PAGE_UP = 92 # page up key
    KEYCODE_PAGE_DOWN = 93 # page down key
    KEYCODE_DEL = 67 # backspace key
    KEYCODE_FORWARD_DEL = 112 # delete key
    KEYCODE_INSERT = 124 # insert key
    KEYCODE_TAB = 61 # Tab key
    KEYCODE_NUM_LOCK = 143 # num lock key
    KEYCODE_CAPS_LOCK = 115 # caps lock key
    KEYCODE_BREAK = 121 # Break / Pause key
    KEYCODE_SCROLL_LOCK = 116 # scroll lock key
    KEYCODE_ZOOM_IN = 168 # zoom in key
    KEYCODE_ZOOM_OUT = 169 # zoom out key
    KEYCODE_0 = 7
    KEYCODE_1 = 8
    KEYCODE_2 = 9
    KEYCODE_3 = 10
    KEYCODE_4 = 11
    KEYCODE_5 = 12
    KEYCODE_6 = 13
    KEYCODE_7 = 14
    KEYCODE_8 = 15
    KEYCODE_9 = 16
    KEYCODE_A = 29
    KEYCODE_B = 30
    KEYCODE_C = 31
    KEYCODE_D = 32
    KEYCODE_E = 33
    KEYCODE_F = 34
    KEYCODE_G = 35
    KEYCODE_H = 36
    KEYCODE_I = 37
    KEYCODE_J = 38
    KEYCODE_K = 39
    KEYCODE_L = 40
    KEYCODE_M = 41
    KEYCODE_N = 42
    KEYCODE_O = 43
    KEYCODE_P = 44
    KEYCODE_Q = 45
    KEYCODE_R = 46
    KEYCODE_S = 47
    KEYCODE_T = 48
    KEYCODE_U = 49
    KEYCODE_V = 50
    KEYCODE_W = 51
    KEYCODE_X = 52
    KEYCODE_Y = 53
    KEYCODE_Z = 54
    KEYCODE_PLUS = 81 # +
    KEYCODE_MINUS = 69 # -
    KEYCODE_STAR = 17 # *
    KEYCODE_SLASH = 76 # /
    KEYCODE_EQUALS = 70 # =
    KEYCODE_AT = 77 # @
    KEYCODE_POUND = 18 # #
    KEYCODE_APOSTROPHE = 75 # '
    KEYCODE_BACKSLASH = 73 # \
    KEYCODE_COMMA = 55 # ,
    KEYCODE_PERIOD = 56 # .
    KEYCODE_LEFT_BRACKET = 71 # [
    KEYCODE_RIGHT_BRACKET = 72 # ]
    KEYCODE_SEMICOLON = 74 # ;
    KEYCODE_GRAVE = 68 # `
    KEYCODE_SPACE = 62 # space key
    KEYCODE_MEDIA_PLAY = 126 # media >> play
    KEYCODE_MEDIA_STOP = 86 # media >> stop
    KEYCODE_MEDIA_PAUSE = 127 # media >> pause
    KEYCODE_MEDIA_PLAY_PAUSE = 85 # media >> play / pause
    KEYCODE_MEDIA_FAST_FORWARD = 90 # media >> fast-forward
    KEYCODE_MEDIA_REWIND = 89 # media >> rewind
    KEYCODE_MEDIA_NEXT = 87 # media >> next track
    KEYCODE_MEDIA_PREVIOUS = 88 # media >> previous track
    KEYCODE_MEDIA_CLOSE = 128 # media >> close
    KEYCODE_MEDIA_EJECT = 129 # media >> eject
    KEYCODE_MEDIA_RECORD = 130 # media >> record
if __name__ == '__main__':
a = AdbTools()
pass | 30.010262 | 120 | 0.49356 | 28,196 | 0.989715 | 0 | 0 | 808 | 0.028362 | 0 | 0 | 12,567 | 0.441118 |
9238a72a6c6ffca4fc17e398558fafc0e65223b0 | 425 | py | Python | apps/company/migrations/0018_alter_inspection_is_inspection_successful.py | samuVillegas/proyecto-fmc | 595a201ed3a136b5db7daadd1be1ecaaae58aea6 | [
"MIT"
] | null | null | null | apps/company/migrations/0018_alter_inspection_is_inspection_successful.py | samuVillegas/proyecto-fmc | 595a201ed3a136b5db7daadd1be1ecaaae58aea6 | [
"MIT"
] | null | null | null | apps/company/migrations/0018_alter_inspection_is_inspection_successful.py | samuVillegas/proyecto-fmc | 595a201ed3a136b5db7daadd1be1ecaaae58aea6 | [
"MIT"
] | null | null | null | # Generated by Django 4.0.1 on 2022-04-17 22:48
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated Django migration: makes Inspection.is_inspection_successful
    # nullable (BooleanField with null=True and a default of None).
    dependencies = [
        ('company', '0017_inspection_description'),
    ]
    operations = [
        migrations.AlterField(
            model_name='inspection',
            name='is_inspection_successful',
            field=models.BooleanField(default=None, null=True),
        ),
    ]
| 22.368421 | 63 | 0.628235 | 332 | 0.781176 | 0 | 0 | 0 | 0 | 0 | 0 | 123 | 0.289412 |
92394ab87ad3c0d381f474179d582c21604b15ff | 816 | py | Python | vyperlogix/misc/_getframeInfo.py | raychorn/chrome_gui | f1fade70b61af12ee43c55c075aa9cfd32caa962 | [
"CC0-1.0"
] | 1 | 2020-09-29T01:36:33.000Z | 2020-09-29T01:36:33.000Z | vyperlogix/misc/_getframeInfo.py | raychorn/chrome_gui | f1fade70b61af12ee43c55c075aa9cfd32caa962 | [
"CC0-1.0"
] | null | null | null | vyperlogix/misc/_getframeInfo.py | raychorn/chrome_gui | f1fade70b61af12ee43c55c075aa9cfd32caa962 | [
"CC0-1.0"
] | null | null | null | # use sys._getframe() -- it returns a frame object, whose attribute
# f_code is a code object, whose attribute co_name is the name:
import sys
this_function_name = sys._getframe().f_code.co_name
# the frame and code objects also offer other useful information:
this_line_number = sys._getframe().f_lineno
this_filename = sys._getframe().f_code.co_filename
# also, by calling sys._getframe(1), you can get this information
# for the *caller* of the current function. So you can package
# this functionality up into your own handy functions:
def whoami():
    """Return the name of the function that called this one."""
    import sys
    caller = sys._getframe(1)
    return caller.f_code.co_name
me = whoami()  # when executed at module scope this resolves to '<module>'
# this uses argument 1, because the call to whoami is now frame 0.
# and similarly:
def callersname():
    """Return the name of the caller's caller (two frames up the stack)."""
    import sys
    grandparent = sys._getframe(2)
    return grandparent.f_code.co_name
him = callersname()  # two frames above callersname: the code that executed this module
| 29.142857 | 67 | 0.75 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 459 | 0.5625 |
923a48452188fc84a18ece827d6488dd0408f125 | 8,859 | py | Python | msl/loadlib/freeze_server32.py | MSLNZ/msl-loadlib | 60f100221774e7c8bac067b50f427fd1d99d2552 | [
"MIT"
] | 51 | 2017-02-20T18:13:18.000Z | 2022-03-02T21:46:36.000Z | msl/loadlib/freeze_server32.py | MSLNZ/msl-loadlib | 60f100221774e7c8bac067b50f427fd1d99d2552 | [
"MIT"
] | 31 | 2017-02-20T18:09:43.000Z | 2022-03-02T15:21:37.000Z | msl/loadlib/freeze_server32.py | MSLNZ/msl-loadlib | 60f100221774e7c8bac067b50f427fd1d99d2552 | [
"MIT"
] | 15 | 2017-02-20T18:13:25.000Z | 2020-04-06T12:27:43.000Z | """
Creates a 32-bit server to use for
`inter-process communication <https://en.wikipedia.org/wiki/Inter-process_communication>`_.
This module must be run from a 32-bit Python interpreter with PyInstaller_ installed.
If you want to re-freeze the 32-bit server, for example, if you want a 32-bit version of
:mod:`numpy` to be available on the server, then run the following with a 32-bit Python
interpreter that has the packages that you want to be available on the server installed
.. code-block:: pycon
>>> from msl.loadlib import freeze_server32
>>> freeze_server32.main() # doctest: +SKIP
.. _PyInstaller: https://www.pyinstaller.org/
.. _Python for .NET: https://pypi.python.org/pypi/pythonnet/
.. _comtypes: https://pythonhosted.org/comtypes/#
"""
import os
import sys
import shutil
import subprocess
try:
from urllib.request import urlopen
except ImportError: # then Python 2
from urllib import urlopen
try:
from msl import loadlib
except ImportError:
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..')))
from msl import loadlib
def main(spec=None, requires_pythonnet=True, requires_comtypes=True):
    """Creates a 32-bit Python server.
    Uses PyInstaller_ to create a frozen 32-bit Python executable. This executable
    starts a 32-bit server, :class:`~.server32.Server32`, which hosts a Python
    module that can load a 32-bit library.
    .. versionchanged:: 0.5
       Added the `requires_pythonnet` and `requires_comtypes` arguments.
    Parameters
    ----------
    spec : :class:`str`, optional
        If you want to freeze using a PyInstaller_ .spec file then you can specify the
        path to the .spec file.
    requires_pythonnet : :class:`bool`, optional
        Whether `Python for .NET`_ must be available on the 32-bit server.
    requires_comtypes : :class:`bool`, optional
        Whether comtypes_ must be available on the 32-bit server. If you are using a
        non-Windows operating system then this argument is ignored.
    """
    # The server must be frozen by a 32-bit interpreter to load 32-bit libraries.
    if loadlib.IS_PYTHON_64BIT:
        print('Must run {} using a 32-bit Python interpreter'.format(os.path.basename(__file__)))
        return
    # Verify every package needed for freezing/serving is importable,
    # collecting the pip names of any that are missing.
    missing_packages = []
    try:
        import PyInstaller
    except ImportError:
        missing_packages.append('pyinstaller')
    if requires_pythonnet:
        try:
            import clr
        except ImportError:
            missing_packages.append('pythonnet')
    if loadlib.IS_WINDOWS and requires_comtypes:
        try:
            import comtypes
        except ImportError:
            missing_packages.append('comtypes')
        except OSError:
            # OSError: [WinError -2147417850] Cannot change thread mode after it is set
            # don't care about this error since comtypes is indeed installed
            pass
    if missing_packages:
        print('Packages are missing to be able to create the 32-bit server, run:')
        print('pip install ' + ' '.join(missing_packages))
        return
    # start the freezing process
    here = os.path.abspath(os.path.dirname(__file__))
    cmd = [
        # Specifically invoke pyinstaller in the context of the current
        # python interpreter. This fixes the issue where the blind `pyinstaller`
        # invocation points to a 64-bit version.
        sys.executable,
        '-m', 'PyInstaller',
        '--distpath', here,
        '--noconfirm',
    ]
    if spec is None:
        # No spec file given: build the full one-file command line ourselves.
        spec_file = '{}.spec'.format(loadlib.SERVER_FILENAME)
        if os.path.exists(spec_file):
            yn = input('A {0} file exists. You may want to run "python freeze_server32.py --spec {0}"\n'
                       'Do you want to continue and overwrite the spec file (y/[n])? '.format(spec_file))
            if yn.lower() not in ('y', 'yes'):
                print('Aborted.')
                return
        cmd.extend([
            '--name', loadlib.SERVER_FILENAME,
            '--onefile',
            '--clean',
            '--hidden-import', 'msl.examples.loadlib',
        ])
        if requires_pythonnet:
            cmd.extend(['--hidden-import', 'clr'])
        if loadlib.IS_WINDOWS and requires_comtypes:
            cmd.extend(['--hidden-import', 'comtypes'])
        # Explicitly include/exclude standard-library modules (PyInstaller
        # does not bundle all of them automatically).
        cmd.extend(_get_standard_modules())
        cmd.append(os.path.join(here, 'start_server32.py'))
    else:
        cmd.append(spec)
    subprocess.check_call(cmd)
    # the --version-file option for pyinstaller does not currently work on Windows, this is a fix:
    # verpatch.exe (when present next to this file) stamps version resources onto the exe.
    verpatch = os.path.join(here, 'verpatch.exe')
    if loadlib.IS_WINDOWS and os.path.isfile(verpatch):
        ver = [verpatch,
               os.path.join(here, loadlib.SERVER_FILENAME),
               '/va', '{0}.{1}.{2}'.format(*loadlib.version_info) + '.0',
               '/pv', '{0}.{1}.{2}.{4}'.format(*sys.version_info),
               '/s', 'description', 'Access a 32-bit library from 64-bit Python',
               '/s', 'product', 'Python 32-bit server',
               '/s', 'copyright', loadlib.__copyright__]
        subprocess.check_call(ver)
    # cleanup: remove PyInstaller's temporary build directory
    shutil.rmtree('./build/' + loadlib.SERVER_FILENAME)
    if not os.listdir('./build'):
        shutil.rmtree('./build')
    if loadlib.IS_WINDOWS:
        # pyinstaller is able to include Python.Runtime.dll and Python.Runtime.dll.config
        # automatically in the build, so we don't need to keep the .spec file
        os.remove(loadlib.SERVER_FILENAME + '.spec')
        # create the .NET Framework config file
        loadlib.utils.check_dot_net_config(os.path.join(here, loadlib.SERVER_FILENAME))
    print('Server saved to: ' + os.path.join(here, loadlib.SERVER_FILENAME))
def _get_standard_modules():
    """
    Returns a list of standard python modules to include and exclude in the
    frozen application.
    PyInstaller does not automatically bundle all of the standard Python modules
    into the frozen application. This
    method parses the 'docs.python.org' website for the list of standard Python
    modules that are available.
    The 'pyinstaller --exclude-module' option ensures that the module is
    excluded from the frozen application.
    The 'pyinstaller --hidden-import' option ensures that the module is included
    into the frozen application (only if the module is available for the operating
    system that is running this script).
    Returns
    -------
    :class:`list` of :class:`str`
        A list of modules to be included and excluded.
    """
    # the frozen application is never meant to create GUIs or to add
    # support for building and installing Python modules
    ignore_list = ['__main__', 'distutils', 'ensurepip', 'test', 'tkinter', 'turtle']
    # some modules are platform specific and got a
    # RecursionError: maximum recursion depth exceeded
    # when running this script with PyInstaller 3.3 installed
    if loadlib.IS_WINDOWS:
        os_ignore_list = ['(Unix)', '(Linux)', '(Linux, FreeBSD)']
    elif loadlib.IS_LINUX:
        os_ignore_list = ['(Windows)']
    elif loadlib.IS_MAC:
        os_ignore_list = ['(Windows)', '(Linux)', '(Linux, FreeBSD)']
    else:
        os_ignore_list = []
    modules = []
    # NOTE: requires internet access -- scrapes the module index page for the
    # running interpreter's X.Y version.
    url = 'https://docs.python.org/{0}.{1}/py-modindex.html'.format(*sys.version_info)
    for s in urlopen(url).read().decode().split('#module-')[1:]:
        m = s.split('"><code')
        add_module = True
        # entries qualified with another platform (e.g. "ossaudiodev (Linux)")
        # are moved to the exclude list instead of being bundled
        for x in os_ignore_list:
            if x in m[1]:
                ignore_list.append(m[0])
                add_module = False
                break
        if add_module:
            modules.append(m[0])
    included_modules, excluded_modules = [], []
    for module in modules:
        include_module = True
        for mod in ignore_list:
            if module.startswith(mod):
                excluded_modules.extend(['--exclude-module', module])
                include_module = False
                break
        if include_module:
            included_modules.extend(['--hidden-import', module])
    return included_modules + excluded_modules
if __name__ == '__main__':
    # Command-line entry point: parse the options and freeze the 32-bit server.
    import argparse
    parser = argparse.ArgumentParser(description='Create the frozen 32-bit server.')
    parser.add_argument(
        '-s', '--spec',
        help='the PyInstaller spec file to use'
    )
    parser.add_argument(
        '--ignore-pythonnet',
        action='store_true',
        default=False,
        help='ignore the error that pythonnet is not installed'
    )
    parser.add_argument(
        '--ignore-comtypes',
        action='store_true',
        default=False,
        help='ignore the error that comtypes is not installed'
    )
    args = parser.parse_args(sys.argv[1:])
    # main() returns None on success/abort; sys.exit(None) exits with status 0
    sys.exit(
        main(
            spec=args.spec,
            requires_pythonnet=not args.ignore_pythonnet,
            requires_comtypes=not args.ignore_comtypes
        )
    )
| 35.866397 | 105 | 0.637769 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 4,556 | 0.514279 |
923a9a6900420562cd0f49a3a2d29687195c5fed | 1,281 | py | Python | sprite-animation-generator.py | eijiuema/sprite-animation-generator | 096be1ff195a1de2d99aa9374b51dcca3cdf7b61 | [
"MIT"
] | null | null | null | sprite-animation-generator.py | eijiuema/sprite-animation-generator | 096be1ff195a1de2d99aa9374b51dcca3cdf7b61 | [
"MIT"
] | null | null | null | sprite-animation-generator.py | eijiuema/sprite-animation-generator | 096be1ff195a1de2d99aa9374b51dcca3cdf7b61 | [
"MIT"
] | null | null | null | from collections import defaultdict
from PIL import Image
import operator
image = Image.open("images\\test.png")
pixel_map = image.load()
initial_coordinate = tuple(int(x.strip())
for x in input("Initial coordinates: ").split(','))
pixel_list = []
directions = [(1, 0), (0, -1), (1, -1), (-1, 0), (-1, 1), (0, 1)]
def store_pixel(current_pixel):
    """Flood-fill from *current_pixel*, collecting every connected opaque pixel.

    Appends coordinates to the module-level ``pixel_list`` in the same
    depth-first preorder the previous recursive version produced, but uses an
    explicit stack so large sprites no longer overflow Python's recursion
    limit, and a set for O(1) visited checks instead of scanning the list.
    """
    visited = set(pixel_list)  # pixel_list is normally empty when this is called
    stack = [current_pixel]
    while stack:
        pixel = stack.pop()
        if (
            pixel[0] == image.size[0] or
            pixel[1] == image.size[1] or
            pixel[0] < 0 or
            pixel[1] < 0 or
            pixel in visited or
            pixel_map[pixel][3] == 0  # alpha == 0: transparent, not part of sprite
        ):
            continue
        visited.add(pixel)
        pixel_list.append(pixel)
        # Push neighbours in reverse so pop order matches the old recursion order.
        for direction in reversed(directions):
            stack.append(tuple(map(operator.add, pixel, direction)))
store_pixel(initial_coordinate)
print(pixel_list)
# Rebuild the extracted sprite on a transparent canvas of the original size.
object_image = Image.new('RGBA', image.size, (0, 0, 0, 0))
object_image_pixel_map = object_image.load()
# A 1-pixel-wide strip holding every sprite pixel in fill order.
line_image = Image.new('RGBA', (1, len(pixel_list)), (0, 0, 0, 0))
line_image_pixel_map = line_image.load()
for index, pixel in enumerate(pixel_list):
    object_image_pixel_map[pixel] = pixel_map[pixel]
    line_image_pixel_map[0, index] = pixel_map[pixel]
# NOTE(review): this names the sprite file after the *last* loop index
# (e.g. "out/1234.png") -- confirm a fixed name was not intended.
object_image.save(f"out/{index}.png")
line_image.save(f"out/line.png")
| 26.142857 | 78 | 0.659641 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 89 | 0.069477 |
923b0cb630aa956d94a0d8aedaa532196c1f2daa | 4,931 | py | Python | src/figcli/svcs/aws_cfg.py | figtools/figgy-cli | 88f4ccb8221ef9734f95b2637acfacc6e00983e7 | [
"Apache-2.0"
] | 36 | 2020-07-21T21:22:02.000Z | 2021-10-20T06:55:47.000Z | src/figcli/svcs/aws_cfg.py | figtools/figgy-cli | 88f4ccb8221ef9734f95b2637acfacc6e00983e7 | [
"Apache-2.0"
] | 2 | 2020-10-29T12:49:15.000Z | 2021-04-29T01:12:05.000Z | src/figcli/svcs/aws_cfg.py | figtools/figgy-cli | 88f4ccb8221ef9734f95b2637acfacc6e00983e7 | [
"Apache-2.0"
] | null | null | null | import logging
import os
from figcli.config.style.color import Color
from figcli.io.input import Input
from figcli.svcs.config_manager import ConfigManager
from figcli.config.aws import *
from figcli.config.constants import *
log = logging.getLogger(__name__)
class AWSConfig:
"""
Utility methods for interacting with AWSCLI resources, such as the ~/.aws/credentials and ~/.aws/config files
"""
    def __init__(self, color: Color = Color(False)):
        """Ensure the AWS CLI files exist, then load them into config managers."""
        # NOTE(review): Color(False) is evaluated once at class-definition time
        # and shared by every call that relies on the default -- confirm Color
        # instances are effectively immutable.
        self.init_files()
        self.c = color
        self._config = ConfigManager(AWS_CONFIG_FILE_PATH)
        self._creds = ConfigManager(AWS_CREDENTIALS_FILE_PATH)
@staticmethod
def init_files():
os.makedirs(os.path.dirname(AWS_CREDENTIALS_FILE_PATH), exist_ok=True)
if not os.path.exists(AWS_CREDENTIALS_FILE_PATH):
with open(AWS_CREDENTIALS_FILE_PATH, "w+") as file:
file.write("")
if not os.path.exists(AWS_CONFIG_FILE_PATH):
with open(AWS_CONFIG_FILE_PATH, "w+") as file:
file.write("")
def _is_temporary_session(self, profile_name: str):
if self._creds.has_section(profile_name):
return self._creds.has_option(profile_name, AWS_CFG_TOKEN)
return False
def _backup_section(self, section: str):
backup_name, backup_profile = f'{section}-figgy-backup', f'profile {section}-figgy-backup'
profile_name = f'profile {section}'
if self._creds.has_section(section):
for opt in self._creds.options(section):
self._creds.set_config(backup_name, opt, self._creds.get_option(section, opt))
if self._config.has_section(profile_name):
for opt in self._config.options(profile_name):
self._config.set_config(backup_profile, opt, self._config.get_option(profile_name, opt))
def restore(self, profile_name: str) :
"""
Restore a credentials previously backed up by Figgy
"""
config_profile = f'profile {profile_name}'
backup_name, backup_profile = f'{profile_name}-figgy-backup', f'profile {profile_name}-figgy-backup'
creds_restored, config_restored = False, False
if self._creds.has_section(backup_name):
for opt in self._creds.options(backup_name):
self._creds.set_config(profile_name, opt, self._creds.get_option(backup_name, opt))
creds_restored = True
if self._config.has_section(backup_profile):
for opt in self._config.options(backup_profile):
self._config.set_config(config_profile, opt, self._config.get_option(backup_profile, opt))
config_restored = True
self._creds.delete(profile_name, AWS_CFG_TOKEN)
self._creds.save()
self._config.save()
if creds_restored and config_restored:
print(f"\n{self.c.fg_gr}Restoration successful!{self.c.rs}")
else:
print(f"\n{self.c.fg_yl}Unable to restore credentials. Profile: "
f"{self.c.fg_bl}[{backup_name}]{self.c.rs}{self.c.fg_yl} was not found in either the "
f"~/.aws/credentials or ~/.aws/config files.{self.c.rs}")
def write_credentials(self, access_key: str, secret_key: str, token: str, region: str,
profile_name: str = 'default') -> None:
"""
Overwrite credentials stored in the [default] profile in both ~/.aws/config and ~/.aws/credentials file
with the provided temporary credentials. This method also CREATES these files if they do not already exist.
"""
if not self._is_temporary_session(profile_name):
print(f"\n{self.c.fg_yl}Existing AWS Profile {self.c.fg_bl}[{profile_name}]{self.c.rs}{self.c.fg_yl} "
f"was found with long-lived access keys "
f"in file: {self.c.fg_bl}~/.aws/credentials{self.c.rs}{self.c.fg_yl}.\n"
f"To avoid overwriting these keys, they will be moved under profile: "
f"{self.c.rs}{self.c.fg_bl}[{profile_name}-figgy-backup]{self.c.rs}{self.c.fg_yl}.{self.c.rs}\n\n"
f"These old keys may be restored with: {self.c.fg_bl}`"
f"{CLI_NAME} iam restore`{self.c.rs}.")
self._backup_section(profile_name)
self._creds.set_config(profile_name, AWS_CFG_ACCESS_KEY_ID, access_key)
self._creds.set_config(profile_name, AWS_CFG_SECRET_KEY, secret_key)
self._creds.set_config(profile_name, AWS_CFG_TOKEN, token)
config_section = f'profile {profile_name}'
self._config.set_config(config_section, AWS_CFG_REGION, region)
self._config.set_config(config_section, AWS_CFG_OUTPUT, 'json')
print(f"\n\n{self.c.fg_gr}Successfully updated: {AWS_CREDENTIALS_FILE_PATH}{self.c.rs}")
print(f"{self.c.fg_gr}Successfully updated: {AWS_CONFIG_FILE_PATH}{self.c.rs}")
| 46.084112 | 116 | 0.659298 | 4,666 | 0.946258 | 0 | 0 | 412 | 0.083553 | 0 | 0 | 1,543 | 0.312918 |
923b793e73318ac543d6b8f506ca44596c39777b | 11,831 | py | Python | eopf/product/store/netcdf.py | CSC-DPR/eopf-cpm | 6af10c0905eec876e8ab884ce62d5b74d52cb5a3 | [
"Apache-2.0"
] | null | null | null | eopf/product/store/netcdf.py | CSC-DPR/eopf-cpm | 6af10c0905eec876e8ab884ce62d5b74d52cb5a3 | [
"Apache-2.0"
] | 171 | 2022-01-29T09:38:27.000Z | 2022-03-30T08:17:35.000Z | eopf/product/store/netcdf.py | CSC-DPR/eopf-cpm | 6af10c0905eec876e8ab884ce62d5b74d52cb5a3 | [
"Apache-2.0"
] | null | null | null | import itertools as it
import os
import pathlib
from collections.abc import MutableMapping
from typing import TYPE_CHECKING, Any, Iterator, Optional, Union
import xarray as xr
from netCDF4 import Dataset, Group, Variable
from eopf.exceptions import StoreNotOpenError
from eopf.product.store import EOProductStore
from eopf.product.utils import conv, decode_attrs, reverse_conv
if TYPE_CHECKING: # pragma: no cover
from eopf.product.core.eo_object import EOObject
class EONetCDFStore(EOProductStore):
    """
    Store representation to access NetCDF format of the given URL

    Parameters
    ----------
    url: str
        path url or the target store

    Attributes
    ----------
    url: str
        path url or the target store
    zlib: bool
        enable/disable compression
    complevel: int [1-9]
        level of the compression
    shuffle: bool
        enable/disable hdf5 shuffle
    """

    # Attribute names that must keep the variable dtype instead of being
    # JSON-encoded (see write_attrs).
    RESTRICTED_ATTR_KEY = ("_FillValue",)

    # docstr-coverage: inherited
    def __init__(self, url: str) -> None:
        url = os.path.expanduser(url)
        super().__init__(url)
        self._root: Optional[Dataset] = None
        # Compression defaults; overridable through open(**kwargs).
        self.zlib: bool = True
        self.complevel: int = 4
        self.shuffle: bool = True

    def __getitem__(self, key: str) -> "EOObject":
        """Return the node at ``key`` as an EOGroup or EOVariable.

        Raises
        ------
        StoreNotOpenError
            If the store is closed
        KeyError
            If ``key`` matches no node
        """
        if self._root is None:
            raise StoreNotOpenError("Store must be open before access to it")
        from eopf.product.core import EOGroup, EOVariable

        try:
            obj = self._select_node(key)
        except IndexError as e:  # if key is invalid, netcdf4 raise IndexError ...
            raise KeyError(e)
        if self.is_group(key):
            return EOGroup(attrs=decode_attrs(obj.__dict__))
        return EOVariable(data=obj, attrs=decode_attrs(obj.__dict__), dims=obj.dimensions)

    def __iter__(self) -> Iterator[str]:
        """Iterate over the names of root-level groups and variables."""
        if self._root is None:
            raise StoreNotOpenError("Store must be open before access to it")
        return it.chain(iter(self._root.groups), iter(self._root.variables))

    def __len__(self) -> int:
        """Number of root-level groups plus variables."""
        if self._root is None:
            raise StoreNotOpenError("Store must be open before access to it")
        return len(self._root.groups) + len(self._root.variables)

    def __setitem__(self, key: str, value: "EOObject") -> None:
        """Write an EOGroup or EOVariable at ``key``.

        Raises
        ------
        StoreNotOpenError
            If the store is closed
        TypeError
            If ``value`` is neither an EOGroup nor an EOVariable
        ValueError
            If a dimension name is reused with a different size
        """
        from eopf.product.core import EOGroup, EOVariable

        if self._root is None:
            raise StoreNotOpenError("Store must be open before access to it")
        if isinstance(value, EOGroup):
            self._root.createGroup(key)
            self.write_attrs(key, value.attrs)
        elif isinstance(value, EOVariable):
            # Materialize the underlying array once instead of re-reading
            # value._data.values for every use below.
            data = value._data.values
            # Recover / create dimensions from target product
            for idx, dim in enumerate(value.dims):
                if dim not in self._root.dimensions:
                    self._root.createDimension(dim, size=data.shape[idx])
                if len(self._root.dimensions[dim]) != data.shape[idx]:
                    raise ValueError(
                        "Netdf4 format does not support mutiples dimensions with the same name and different size.",
                    )
            # Create and write EOVariable
            variable = self._root.createVariable(
                key,
                data.dtype,
                dimensions=value.dims,
                zlib=self.zlib,
                complevel=self.complevel,
                shuffle=self.shuffle,
            )
            self.write_attrs(key, value.attrs, data.dtype)
            variable[:] = data
        else:
            raise TypeError("Only EOGroup and EOVariable can be set")

    # docstr-coverage: inherited
    def close(self) -> None:
        if self._root is None:
            raise StoreNotOpenError("Store must be open before access to it")
        super().close()
        self._root.close()
        self._root = None

    # docstr-coverage: inherited
    @staticmethod
    def guess_can_read(file_path: str) -> bool:
        """
        Determines if a given file path can be read with the current store

        Parameters
        ----------
        file_path: str
            Path to netCDF4 file

        Return
        ------
        Boolean
        """
        return pathlib.Path(file_path).suffix in [".nc"]

    # docstr-coverage: inherited
    def is_group(self, path: str) -> bool:
        if self._root is None:
            raise StoreNotOpenError("Store must be open before access to it")
        current_node = self._select_node(path)
        # The root Dataset itself also counts as a group.
        return isinstance(current_node, (Group, Dataset))

    # docstr-coverage: inherited
    def is_variable(self, path: str) -> bool:
        if self._root is None:
            raise StoreNotOpenError("Store must be open before access to it")
        current_node = self._select_node(path)
        return isinstance(current_node, Variable)

    # docstr-coverage: inherited
    def iter(self, path: str) -> Iterator[str]:
        if self._root is None:
            raise StoreNotOpenError("Store must be open before access to it")
        current_node = self._select_node(path)
        return it.chain(iter(current_node.groups), iter(current_node.variables))

    # docstr-coverage: inherited
    def open(self, mode: str = "r", **kwargs: Any) -> None:
        super().open()
        # Consume the compression/scale parameters here so they are not
        # forwarded to the netCDF4 Dataset constructor.
        if "zlib" in kwargs:
            self.zlib = bool(kwargs.pop("zlib"))
        if "complevel" in kwargs:
            self.complevel = int(str(kwargs.pop("complevel")))
        if "shuffle" in kwargs:
            self.shuffle = bool(kwargs.pop("shuffle"))
        self._root = Dataset(self.url, mode, **kwargs)

    def write_attrs(
        self,
        group_path: str,
        attrs: Optional[MutableMapping[str, Any]] = None,
        data_type: Any = int,
    ) -> None:
        """
        This method is used to update attributes in the store

        Parameters
        ----------
        group_path: str
            path of the node whose attributes are updated
        attrs: MutableMapping[str, Any], optional
            attributes to write (default: none)
        data_type: Any
            dtype enforced for restricted attributes such as _FillValue

        Raises
        ------
        StoreNotOpenError
            If the store is closed
        """
        if self._root is None:
            raise StoreNotOpenError("Store must be open before access to it")
        # Default is None (not {}) to avoid the shared-mutable-default pitfall.
        if attrs is None:
            attrs = {}
        current_node = self._select_node(group_path)
        from json import dumps
        from numbers import Number

        conv_attr: MutableMapping[str, Any] = {}
        for attr, value in attrs.items():
            if attr not in self.RESTRICTED_ATTR_KEY:
                # Plain numbers are stored natively; anything else is
                # JSON-encoded after conversion.
                if isinstance(value, Number):
                    conv_attr[attr] = value
                else:
                    conv_attr[attr] = dumps(conv(value))
            else:
                # Restricted attributes (e.g. _FillValue) must match the
                # variable's dtype exactly.
                if type(value) is not data_type:
                    conv_attr[attr] = reverse_conv(data_type, value)
                else:
                    conv_attr[attr] = value
        current_node.setncatts(conv_attr)

    def _select_node(self, key: str) -> Union[Dataset, Group, Variable]:
        """Retrieve and return the netcdf4 object corresponding to the node at the given path

        Returns
        ----------
        Union of Dataset, Group, Variable

        Raises
        ------
        StoreNotOpenError
            If the store is closed
        """
        if self._root is None:
            raise StoreNotOpenError("Store must be open before access to it")
        # "/" and "" both address the root dataset.
        if key in ["/", ""]:
            return self._root
        return self._root[key]
class EONetcdfStringToTimeAccessor(EOProductStore):
    """
    Store representation to access NetCDF date time format of the given URL

    Parameters
    ----------
    url: str
        path url or the target store
    """

    # docstr-coverage: inherited
    def __init__(self, url: str) -> None:
        url = os.path.expanduser(url)
        super().__init__(url)
        # Underlying xarray dataset, populated by open().
        self._root = None

    def __getitem__(self, key: str) -> "EOObject":
        """Return variable ``key`` as an EOVariable of microseconds since epoch.

        The stored datetime values are converted to an integer count of
        microseconds since 1970-01-01T00:00:00Z, with CF-style time
        attributes attached.

        Raises
        ------
        StoreNotOpenError
            If the store is closed
        """
        import pandas as pd

        from eopf.product.core import EOVariable

        if self._root is None:
            raise StoreNotOpenError("Store must be open before access to it")
        # convert unix start time to date time format
        time_da = self._root.get(key)
        start = pd.to_datetime("1970-1-1T0:0:0.000000Z")
        end = pd.to_datetime(time_da)
        # compute and convert the time difference into microseconds
        time_delta = (end - start) // pd.Timedelta("1microsecond")
        # create corresponding attributes
        attributes = {}
        attributes["unit"] = "microseconds since 1970-1-1T0:0:0.000000Z"
        attributes["standard_name"] = "time"
        if key == "ANX_time":
            attributes["long_name"] = "Time of ascending node crossing in UTC"
        elif key == "calibration_time":
            attributes["long_name"] = "Time of calibration in UTC"
        # create an EOVariable and return it
        eov: EOVariable = EOVariable(data=time_delta, attrs=attributes)
        return eov

    def __iter__(self) -> Iterator[str]:
        """This accessor exposes no child names; yields nothing."""
        if self._root is None:
            raise StoreNotOpenError("Store must be open before access to it")
        yield from ()

    def __len__(self) -> int:
        """Always 1: the accessor wraps a single time value."""
        if self._root is None:
            raise StoreNotOpenError("Store must be open before access to it")
        return 1

    def __setitem__(self, key: str, value: "EOObject") -> None:
        """Write ``value`` at the root and persist the dataset to ``self.url``.

        Raises
        ------
        StoreNotOpenError
            If the store is closed
        TypeError
            If ``value`` is not an EOVariable
        KeyError
            If ``key`` is not the root node
        """
        from eopf.product.core import EOVariable

        if self._root is None:
            raise StoreNotOpenError("Store must be open before access to it")
        # set the data
        if not isinstance(value, EOVariable):
            raise TypeError(f"The value {key} must be an EOVariable")
        self._check_node(key)
        self._root[key] = value._data
        # set the attrs of the value
        self.write_attrs(key, value.attrs)
        # write to netcdf
        self._root.to_netcdf(self.url)

    # docstr-coverage: inherited
    def close(self) -> None:
        if self._root is None:
            raise StoreNotOpenError("Store must be open before access to it")
        super().close()
        self._root.close()
        self._root = None

    # docstr-coverage: inherited
    def is_group(self, path: str) -> bool:
        if self._root is None:
            raise StoreNotOpenError("Store must be open before access to it")
        return True

    # docstr-coverage: inherited
    def is_variable(self, path: str) -> bool:
        if self._root is None:
            raise StoreNotOpenError("Store must be open before access to it")
        return False

    # docstr-coverage: inherited
    def iter(self, path: str) -> Iterator[str]:
        if self._root is None:
            raise StoreNotOpenError("Store must be open before access to it")
        self._check_node(path)
        return iter([])

    # docstr-coverage: inherited
    def open(self, mode: str = "r", **kwargs: Any) -> None:
        super().open()
        self._root = xr.open_dataset(self.url, mode=mode)

    # docstr-coverage: inherited
    def write_attrs(self, group_path: str, attrs: Optional[MutableMapping[str, Any]] = None) -> None:
        if self._root is None:
            raise StoreNotOpenError("Store must be open before access to it")
        self._check_node(group_path)
        # Default is None (not {}) to avoid the shared-mutable-default pitfall.
        self._root.attrs.update(attrs if attrs is not None else {})

    def _check_node(self, key: str) -> None:
        """Validate that ``key`` addresses the root node (the only one supported).

        Raises
        ------
        StoreNotOpenError
            If the store is closed
        KeyError
            If the key is not the root ("/" or "")
        """
        if self._root is None:
            raise StoreNotOpenError("Store must be open before access to it")
        if key not in ["/", ""]:
            raise KeyError(f"{key} does not exist")
| 33.706553 | 116 | 0.603415 | 11,354 | 0.959682 | 167 | 0.014115 | 361 | 0.030513 | 0 | 0 | 3,724 | 0.314766 |
923c4e2cb4ba7cdc78f566b0f424d01000307fda | 1,213 | py | Python | static/src/srcjpy.py | JoshuaOndieki/oneforma-fashion-attribute | 00aaea0e964adbe7b33e5e51cd8eabb42817dec7 | [
"MIT"
] | null | null | null | static/src/srcjpy.py | JoshuaOndieki/oneforma-fashion-attribute | 00aaea0e964adbe7b33e5e51cd8eabb42817dec7 | [
"MIT"
] | null | null | null | static/src/srcjpy.py | JoshuaOndieki/oneforma-fashion-attribute | 00aaea0e964adbe7b33e5e51cd8eabb42817dec7 | [
"MIT"
] | null | null | null | import json
def restructure_category_data():
    """Attach per-category image lists to ``category.json`` in place.

    Reads ``categoryimages.json`` (mapping of category NAME -> image list),
    stores each list under the ``IMAGES`` key of the matching record in
    ``category.json``, then rewrites ``category.json`` sorted and indented.

    Raises:
        KeyError: if a record's NAME has no entry in categoryimages.json.
    """
    with open('categoryimages.json', 'r') as jfile:
        new_data = json.load(jfile)
    with open('category.json') as jfile:
        data = json.load(jfile)
    # Update records in memory BEFORE opening the file for writing: opening
    # with 'w' truncates it, so a KeyError mid-loop would destroy the file.
    for item in data:
        item['IMAGES'] = new_data[item['NAME']]
    with open('category.json', 'w') as jfile:
        json.dump(data, jfile, sort_keys=True, indent=4)
def restructure_part_data():
    """Attach per-part image lists to ``part.json`` in place.

    Reads ``partimages.json`` (mapping of PART -> image list), stores each
    list under the ``IMAGES`` key of the matching record in ``part.json``,
    then rewrites ``part.json`` sorted and indented.

    Raises:
        KeyError: if a record's PART has no entry in partimages.json.
    """
    with open('partimages.json', 'r') as jfile:
        new_data = json.load(jfile)
    with open('part.json') as jfile:
        data = json.load(jfile)
    # Update records in memory BEFORE opening the file for writing: opening
    # with 'w' truncates it, so a KeyError mid-loop would destroy the file.
    for item in data:
        item['IMAGES'] = new_data[item['PART']]
    with open('part.json', 'w') as jfile:
        json.dump(data, jfile, sort_keys=True, indent=4)
def restructure_texture_data():
    """Attach per-texture image lists to ``texture.json`` in place.

    Reads ``textureimages.json`` (mapping of TEXTURE -> image list), stores
    each list under the ``IMAGES`` key of the matching record in
    ``texture.json``, then rewrites ``texture.json`` sorted and indented.

    Raises:
        KeyError: if a record's TEXTURE has no entry in textureimages.json.
    """
    with open('textureimages.json', 'r') as jfile:
        new_data = json.load(jfile)
    with open('texture.json') as jfile:
        data = json.load(jfile)
    # Update records in memory BEFORE opening the file for writing: opening
    # with 'w' truncates it, so a KeyError mid-loop would destroy the file.
    for item in data:
        item['IMAGES'] = new_data[item['TEXTURE']]
    with open('texture.json', 'w') as jfile:
        json.dump(data, jfile, sort_keys=True, indent=4)
# Script entry: enrich all three metadata files in place.  Each call touches
# a disjoint pair of JSON files, so the order is not significant.
restructure_category_data()
restructure_part_data()
restructure_texture_data()
| 26.369565 | 56 | 0.623248 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 201 | 0.165705 |
923c9cf40938312d11d7cd0afb9a1a8d330bf48b | 24,250 | py | Python | floodlight/vll_pusher.py | netgroup/Dreamer-VLL-Pusher | 9b543a2799d7805ac2e1b5d4cfd3fa86d93875c6 | [
"Apache-2.0"
] | 1 | 2018-10-27T11:18:26.000Z | 2018-10-27T11:18:26.000Z | floodlight/vll_pusher.py | netgroup/Dreamer-VLL-Pusher | 9b543a2799d7805ac2e1b5d4cfd3fa86d93875c6 | [
"Apache-2.0"
] | null | null | null | floodlight/vll_pusher.py | netgroup/Dreamer-VLL-Pusher | 9b543a2799d7805ac2e1b5d4cfd3fa86d93875c6 | [
"Apache-2.0"
] | 1 | 2018-11-21T10:21:26.000Z | 2018-11-21T10:21:26.000Z | #!/usr/bin/python
##############################################################################################
# Copyright (C) 2014 Pier Luigi Ventre - (Consortium GARR and University of Rome "Tor Vergata")
# Copyright (C) 2014 Giuseppe Siracusano, Stefano Salsano - (CNIT and University of Rome "Tor Vergata")
# www.garr.it - www.uniroma2.it/netgroup - www.cnit.it
#
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Virtual Leased Line Pusher.
#
# @author Pier Luigi Ventre <pl.ventre@gmail.com>
# @author Giuseppe Siracusano <a_siracusano@tin.it>
# @author Stefano Salsano <stefano.salsano@uniroma2.it>
#
#
import os
import sys
import subprocess
import json
import argparse
import io
import time
import re
import siphash
# XXX Be Careful, For Now The Vll_Pusher Depends On vll_pusher.cfg; This file should be created by the [x] Deployer
# (x = Mininet Deployer, TestBeds Deployer)
# Parse vll options. Currently supports add and delete actions.
# Syntax:
# vll_pusher --controller {IP:REST_PORT} --add
# vll_pusher --controller {IP:REST_PORT} --delete
def parse_cmd_line():
    """Parse command-line options for the VLL pusher.

    Supports --controller IP:PORT plus the mutually overriding --add /
    --delete action flags (default action: add).  When invoked with no
    arguments at all, prints usage and exits with status 1.
    """
    parser = argparse.ArgumentParser(description='Virtual Leased Line Pusher')
    parser.add_argument('--controller', dest='controllerRestIp', action='store',
                        default='localhost:8080',
                        help='controller IP:RESTport, e.g., localhost:8080 or A.B.C.D:8080')
    parser.add_argument('--add', dest='action', action='store_const',
                        const='add', default='add', help='action: add')
    parser.add_argument('--delete', dest='action', action='store_const',
                        const='delete', default='add', help='action: delete')
    parsed = parser.parse_args()
    # Bare invocation: show usage and bail out.
    if len(sys.argv) == 1:
        parser.print_help()
        sys.exit(1)
    return parsed
# Read From vll_pusher.cfg The Configuration For The Vlls
def read_conf_file():
global pusher_cfg
print "*** Read Configuration File For Vll Pusher"
path = "vll_pusher.cfg"
if os.path.exists(path):
conf = open(path,'r')
pusher_cfg = json.load(conf)
conf.close()
else:
print "No Configuration File Find In %s" % path
sys.exit(-2)
print "*** PUSHER_CFG", json.dumps(pusher_cfg, sort_keys=True, indent=4)
# Utility function for the vlls persisentce
def store_vll(name, dpid):
    """Append a created vll record to the local ./vlls.json store.

    Each record is one JSON object per line holding the vll name, the dpid
    of the switch it was installed on, and a human-readable timestamp.
    """
    # Store created vll attributes in local ./vlls.json
    datetime = time.asctime()
    vllParams = {'name': name, 'Dpid': dpid, 'datetime': datetime}
    # Renamed from `str`: the original shadowed the builtin.
    record = json.dumps(vllParams)
    # `with` guarantees the handle is closed even if write() raises.
    with open('./vlls.json', 'a+') as vllsDb:
        vllsDb.write(record + "\n")
# Cache of already-resolved interfaces: "<dpid>-<intf name>" -> OpenFlow port
# number (kept as a string).  Filled lazily by convert_intf_to_port_number().
intf_to_port_number = {}

def convert_intf_to_port_number(controllerRestIP):
    """Rewrite interface names in pusher_cfg as OpenFlow port numbers.

    Fetches the switch inventory from the Floodlight REST API
    (/wm/core/controller/switches/json) via curl and, for every vll in the
    global pusher_cfg, replaces 'lhs_intf' and 'rhs_intf' (interface names)
    with the port number the controller reports for that dpid/name pair.
    Lookups are memoized in the global intf_to_port_number dict.

    NOTE(review): when an interface name is never found in the controller's
    answer, the vll entry is rewritten to None — presumably later processing
    fails on such entries; confirm.
    """
    global intf_to_port_number
    # Ask the controller for all switches and their ports.
    command = "curl -s http://%s/wm/core/controller/switches/json | python -mjson.tool" % (controllerRestIP)
    result = os.popen(command).read()
    parsedResult = json.loads(result)
    default = None
    for vll in pusher_cfg['vlls']:
        # Left-hand side of the vll.
        lhs_intf = vll['lhs_intf']
        lhs_dpid = vll['lhs_dpid']
        port_number = intf_to_port_number.get("%s-%s" % (lhs_dpid, lhs_intf), default)
        if port_number == None:
            # Cache miss: scan the controller answer for this dpid/intf pair.
            for switch in parsedResult:
                if switch["dpid"] == lhs_dpid:
                    for port in switch["ports"]:
                        if port["name"] == lhs_intf:
                            port_number = str(port["portNumber"])
                            intf_to_port_number["%s-%s" % (lhs_dpid, lhs_intf)] = port_number
        vll['lhs_intf'] = port_number
        # Right-hand side of the vll (same lookup).
        rhs_intf = vll['rhs_intf']
        rhs_dpid = vll['rhs_dpid']
        port_number = intf_to_port_number.get("%s-%s" % (rhs_dpid, rhs_intf), default)
        if port_number == None:
            for switch in parsedResult:
                if switch["dpid"] == rhs_dpid:
                    for port in switch["ports"]:
                        if port["name"] == rhs_intf:
                            port_number = str(port["portNumber"])
                            intf_to_port_number["%s-%s" % (rhs_dpid, rhs_intf)] = port_number
        vll['rhs_intf'] = port_number
    print "*** PUSHER_CFG", json.dumps(pusher_cfg, sort_keys=True, indent=4)
    print "*** INTFS", json.dumps(intf_to_port_number, sort_keys=True, indent=4)
# Add Vlls Reading All the Information From Configuration File
def add_command(args):
print "*** Add Vlls From Configuration File"
print "*** Read Previous Vlls Inserted"
if os.path.exists('./vlls.json'):
vllsDb = open('./vlls.json','r')
vlllines = vllsDb.readlines()
vllsDb.close()
else:
vlllines={}
read_conf_file()
# We use this algorithm for the name generation
key = '0123456789ABCDEF'
sip = siphash.SipHash_2_4(key)
# Extract from cmd line options the controlller information
controllerRestIp = args.controllerRestIp
# Dictionary that stores the mapping port:next_label
# We allocate the label using a counter, and we associate for each port used in this execution the next usable label
# Probably in future we can add the persistence for the label
sw_port_tag = {}
convert_intf_to_port_number(controllerRestIp)
# We can have more than one vlls
for vll in pusher_cfg['vlls']:
# Retrieve the information
srcSwitch = vll['lhs_dpid']
srcPort = vll['lhs_intf']
dstSwitch = vll['rhs_dpid']
dstPort = vll['rhs_intf']
srcLabel = vll['lhs_label']
dstLabel = vll['rhs_label']
print "*** Generate Name From VLL (%s-%s-%s) - (%s-%s-%s)" % (srcSwitch, srcPort, srcLabel, dstSwitch, dstPort, dstLabel)
sip.update(srcSwitch + "$" + srcPort + "$" + dstSwitch + "$" + dstPort + "$" + srcLabel + "$" + dstLabel)
# Generate the name
digest = sip.hash()
digest = str(digest)
print "*** Vll Name", digest
vllExists = False
# if the vll exists in the vllDb, we don't insert the flow
for line in vlllines:
data = json.loads(line)
if data['name']==(digest):
print "Vll %s exists already Skip" % digest
vllExists = True
break
if vllExists == True:
continue
print "*** Create Vll:"
print "*** From Source Device OSHI-PE %s Port %s" % (srcSwitch,srcPort)
print "*** To Destination Device OSHI-PE %s Port %s"% (dstSwitch,dstPort)
# Retrieving route from source to destination
# using Routing rest API
command = "curl -s http://%s/wm/topology/route/%s/%s/%s/%s/json | python -mjson.tool" % (controllerRestIp, srcSwitch, srcPort, dstSwitch, dstPort)
result = os.popen(command).read()
parsedResult = json.loads(result)
print
#print "*** Sent Command:", command + "\n"
print "*** Received Result:", result + "\n"
# Dictionary used for store the label of current vll
temp_sw_port_tag = {}
# We insert the rule each two json item, because floodlight's getRoute for each dpid, provides
# A couple of item the in/out port and the out/in port for the rules forward/reverse - see the
# output of the previous command
temp_key1 = None
temp_key2 = None
temp_tag1 = None
temp_tag2 = None
ap1Dpid = None
ap1Port = None
ap2Dpid = None
ap2Port = None
default = 2
max_value = 4095
if int(srcLabel) > max_value or int(dstLabel) > max_value:
print "Ingress or Egress Label Not Allowable"
sys.exit(-2)
# We generate the labels associated for each port, while the ingress/egress and egress/ingress labels
# come from the configuration file, because they depend on the local network choice
for j in range(0, (len(parsedResult))):
# Label for the LHS port
if j == 0:
temp_key1 = srcSwitch + "-" + srcPort
temp_sw_port_tag[temp_key1] = int(srcLabel)
if sw_port_tag.get(temp_key1,default) <= int(srcLabel):
sw_port_tag[temp_key1] = int(srcLabel)
# Label for the RHS port
elif j == (len(parsedResult)-1):
temp_key1 = dstSwitch + "-" + dstPort
temp_sw_port_tag[temp_key1] = int(dstLabel)
if sw_port_tag.get(temp_key1,default) <= int(dstLabel):
sw_port_tag[temp_key1] = int(dstLabel)
# Middle ports
else :
apDPID = parsedResult[j]['switch']
apPORT = parsedResult[j]['port']
temp_key1 = apDPID + "-" + str(apPORT)
value = sw_port_tag.get(temp_key1, default)
temp_sw_port_tag[temp_key1] = value
value = value + 1
sw_port_tag[temp_key1] = value
print "*** Current Route Tag:"
print json.dumps(temp_sw_port_tag, sort_keys=True, indent=4)
print
print "*** Global Routes Tag:"
print json.dumps(sw_port_tag, sort_keys=True, indent=4)
print
# Manage the special case of one hop
if len(parsedResult) == 2:
print "*** One Hop Route"
# The Switch, where we insert the rule
ap1Dpid = parsedResult[0]['switch']
# In port
ap1Port = str(parsedResult[0]['port'])
temp_key1 = ap1Dpid + "-" + ap1Port
tag1 = temp_sw_port_tag[temp_key1]
# ap1Dpid == ap2Dpid
ap2Dpid = parsedResult[1]['switch']
# Out port
ap2Port = str(parsedResult[1]['port'])
temp_key2 = ap2Dpid + "-" + ap2Port
tag2 = temp_sw_port_tag[temp_key2]
if tag1 == 0 and tag2 ==0:
# Forward's Rule
command = "curl -s -d '{\"switch\": \"%s\", \"name\":\"%s\", \"cookie\":\"0\", \"priority\":\"32768\", \"ingress-port\":\"%s\",\"active\":\"true\", \"actions\":\"output=%s\"}' http://%s/wm/staticflowentrypusher/json | python -mjson.tool" % (ap1Dpid, ap1Dpid + "." + digest + ".f", ap1Port, ap2Port, controllerRestIp)
result = os.popen(command).read()
print "*** Sent Command:", command + "\n"
print "*** Received Result:", result + "\n"
elif tag1 !=0 and tag2==0:
# Forward's Rule
command = "curl -s -d '{\"switch\": \"%s\", \"name\":\"%s\", \"vlan-id\":\"%s\", \"cookie\":\"0\", \"priority\":\"32768\", \"ingress-port\":\"%s\",\"active\":\"true\", \"actions\":\"strip-vlan,output=%s\"}' http://%s/wm/staticflowentrypusher/json | python -mjson.tool" % (ap1Dpid, ap1Dpid + "." + digest + ".f", tag1, ap1Port, ap2Port, controllerRestIp)
result = os.popen(command).read()
print "*** Sent Command:", command + "\n"
print "*** Received Result:", result + "\n"
elif tag1 ==0 and tag2 !=0:
# Forward's Rule
command = "curl -s -d '{\"switch\": \"%s\", \"name\":\"%s\", \"vlan-id\":\"%s\", \"cookie\":\"0\", \"priority\":\"32768\", \"ingress-port\":\"%s\",\"active\":\"true\", \"actions\":\"set-vlan-id=%s,output=%s\"}' http://%s/wm/staticflowentrypusher/json | python -mjson.tool" % (ap1Dpid, ap1Dpid + "." + digest + ".f", "0xffff", ap1Port, tag2, ap2Port, controllerRestIp)
result = os.popen(command).read()
print "*** Sent Command:", command + "\n"
print "*** Received Result:", result + "\n"
else:
# Forward's Rule
command = "curl -s -d '{\"switch\": \"%s\", \"name\":\"%s\", \"vlan-id\":\"%s\", \"cookie\":\"0\", \"priority\":\"32768\", \"ingress-port\":\"%s\",\"active\":\"true\", \"actions\":\"set-vlan-id=%s,output=%s\"}' http://%s/wm/staticflowentrypusher/json | python -mjson.tool" % (ap1Dpid, ap1Dpid + "." + digest + ".f", tag1, ap1Port, tag2, ap2Port, controllerRestIp)
result = os.popen(command).read()
print "*** Sent Command:", command + "\n"
print "*** Received Result:", result + "\n"
if tag2 == 0 and tag1 ==0:
# Reverse Forward's Rule
command = "curl -s -d '{\"switch\": \"%s\", \"name\":\"%s\", \"cookie\":\"0\", \"priority\":\"32768\", \"ingress-port\":\"%s\",\"active\":\"true\", \"actions\":\"output=%s\"}' http://%s/wm/staticflowentrypusher/json | python -mjson.tool" % (ap1Dpid, ap1Dpid + "." + digest + ".r", ap2Port, ap1Port, controllerRestIp)
result = os.popen(command).read()
print "*** Sent Command:", command + "\n"
print "*** Received Result:", result + "\n"
elif tag2 != 0 and tag1 ==0:
# Reverse Forward's Rule
command = "curl -s -d '{\"switch\": \"%s\", \"name\":\"%s\", \"vlan-id\":\"%s\", \"cookie\":\"0\", \"priority\":\"32768\", \"ingress-port\":\"%s\",\"active\":\"true\", \"actions\":\"strip-vlan,output=%s\"}' http://%s/wm/staticflowentrypusher/json | python -mjson.tool" % (ap1Dpid, ap1Dpid + "." + digest + ".r", tag2, ap2Port, ap1Port, controllerRestIp)
result = os.popen(command).read()
print "*** Sent Command:", command + "\n"
print "*** Received Result:", result + "\n"
elif tag2 == 0 and tag1 !=0:
# Reverse Forward's Rule
command = "curl -s -d '{\"switch\": \"%s\", \"name\":\"%s\", \"vlan-id\":\"%s\", \"cookie\":\"0\", \"priority\":\"32768\", \"ingress-port\":\"%s\",\"active\":\"true\", \"actions\":\"set-vlan-id=%s,output=%s\"}' http://%s/wm/staticflowentrypusher/json | python -mjson.tool" % (ap1Dpid, ap1Dpid + "." + digest + ".r", "0xffff", ap2Port, tag1, ap1Port, controllerRestIp)
result = os.popen(command).read()
print "*** Sent Command:", command + "\n"
print "*** Received Result:", result + "\n"
else:
# Reverse Forward's Rule
command = "curl -s -d '{\"switch\": \"%s\", \"name\":\"%s\", \"vlan-id\":\"%s\", \"cookie\":\"0\", \"priority\":\"32768\", \"ingress-port\":\"%s\",\"active\":\"true\", \"actions\":\"set-vlan-id=%s,output=%s\"}' http://%s/wm/staticflowentrypusher/json | python -mjson.tool" % (ap1Dpid, ap1Dpid + "." + digest + ".r", tag2, ap2Port, tag1, ap1Port, controllerRestIp)
result = os.popen(command).read()
print "*** Sent Command:", command + "\n"
print "*** Received Result:", result + "\n"
store_vll(digest, ap1Dpid)
# see the image one_hop for details on the switching label procedure
else:
# In the other cases we use a different approach for the rule; before we see the label
# of the inport and outport of the same dpid; with more than one hop we see in general for
# the forward rule the label of the inport on the next switch, while in the reverse rule the label of the inport on the
# previous switch. The previous approach is nested in a for loop, we use this loop in the middle dpid, while
# we manage as special case the ingress/egress node, because the rules are different
print "*** %s Hop Route" % (len(parsedResult)/2)
# We manage first ingress/egress node
print "*** Create Ingress Rules For LHS Of The Vll - %s" % (srcSwitch)
# see the image more_than_one_hop for details on the switching label procedure
ap1Dpid = parsedResult[0]['switch']
ap1Port = parsedResult[0]['port']
temp_key1 = ap1Dpid + "-" + str(ap1Port)
tag1 = temp_sw_port_tag[temp_key1]
print "*** inKey: %s, inTag: %s" % (temp_key1, tag1)
ap2Dpid = parsedResult[1]['switch']
ap2Port = parsedResult[1]['port']
temp_key2 = parsedResult[2]['switch'] + "-" + str(parsedResult[2]['port'])
tag2 = temp_sw_port_tag[temp_key2]
print "*** outKey: %s, outTag: %s" % (temp_key2, tag2)
print
if tag1 == 0 and tag2 !=0:
command = "curl -s -d '{\"switch\": \"%s\", \"name\":\"%s\", \"vlan-id\":\"%s\", \"cookie\":\"0\", \"priority\":\"32768\", \"ingress-port\":\"%s\",\"active\":\"true\", \"actions\":\"set-vlan-id=%s,output=%s\"}' http://%s/wm/staticflowentrypusher/json | python -mjson.tool" % (ap1Dpid, ap1Dpid + "." + digest + ".f", "0xffff", ap1Port, tag2, ap2Port, controllerRestIp)
result = os.popen(command).read()
print "*** Sent Command:", command + "\n"
print "*** Received Result:", result + "\n"
elif tag1 != 0 and tag2 !=0:
command = "curl -s -d '{\"switch\": \"%s\", \"name\":\"%s\", \"vlan-id\":\"%s\", \"cookie\":\"0\", \"priority\":\"32768\", \"ingress-port\":\"%s\",\"active\":\"true\", \"actions\":\"set-vlan-id=%s,output=%s\"}' http://%s/wm/staticflowentrypusher/json | python -mjson.tool" % (ap1Dpid, ap1Dpid + "." + digest + ".f", tag1, ap1Port, tag2, ap2Port, controllerRestIp)
result = os.popen(command).read()
print "*** Sent Command:", command + "\n"
print "*** Received Result:", result + "\n"
else:
print "Error Tag";
sys.exit(-2)
print "*** Create Egress Rules For LHS Of The Vll - %s" % (srcSwitch)
temp_key2 = temp_key1
tag2 = tag1
temp_key1 = ap2Dpid + "-" + str(ap2Port)
tag1 = temp_sw_port_tag[temp_key1]
print "*** inKey: %s, inTag: %s" % (temp_key1, tag1)
print "*** outKey: %s, outTag: %s" % (temp_key2, tag2)
print
if tag1 != 0 and tag2 ==0:
command = "curl -s -d '{\"switch\": \"%s\", \"name\":\"%s\", \"vlan-id\":\"%s\", \"cookie\":\"0\", \"priority\":\"32768\", \"ingress-port\":\"%s\",\"active\":\"true\", \"actions\":\"strip-vlan,output=%s\"}' http://%s/wm/staticflowentrypusher/json | python -mjson.tool" % (ap1Dpid, ap1Dpid + "." + digest + ".r", tag1, ap2Port, ap1Port, controllerRestIp)
result = os.popen(command).read()
print "*** Sent Command:", command + "\n"
print "*** Received Result:", result + "\n"
elif tag1 != 0 and tag2 !=0:
command = "curl -s -d '{\"switch\": \"%s\", \"name\":\"%s\", \"vlan-id\":\"%s\", \"cookie\":\"0\", \"priority\":\"32768\", \"ingress-port\":\"%s\",\"active\":\"true\", \"actions\":\"set-vlan-id=%s,output=%s\"}' http://%s/wm/staticflowentrypusher/json | python -mjson.tool" % (ap1Dpid, ap1Dpid + "." + digest + ".r", tag1, ap2Port, tag2, ap1Port, controllerRestIp)
result = os.popen(command).read()
print "*** Sent Command:", command + "\n"
print "*** Received Result:", result + "\n"
else:
print "Error Tag";
sys.exit(-2)
store_vll(digest, ap1Dpid)
print "*** Create Egress Rules For RHS Of The Vll - %s" % (dstSwitch)
ap1Dpid = parsedResult[len(parsedResult)-2]['switch']
ap1Port = parsedResult[len(parsedResult)-2]['port']
temp_key1 = ap1Dpid + "-" + str(ap1Port)
tag1 = temp_sw_port_tag[temp_key1]
print "*** inKey: %s, inTag: %s" % (temp_key1, tag1)
ap2Dpid = parsedResult[len(parsedResult)-1]['switch']
ap2Port = parsedResult[len(parsedResult)-1]['port']
temp_key2 = ap2Dpid + "-" + str(ap2Port)
tag2 = temp_sw_port_tag[temp_key2]
print "*** outKey: %s, outTag: %s" % (temp_key2, tag2)
print
if tag1 != 0 and tag2 ==0:
command = "curl -s -d '{\"switch\": \"%s\", \"name\":\"%s\", \"vlan-id\":\"%s\", \"cookie\":\"0\", \"priority\":\"32768\", \"ingress-port\":\"%s\",\"active\":\"true\", \"actions\":\"strip-vlan,output=%s\"}' http://%s/wm/staticflowentrypusher/json | python -mjson.tool" % (ap1Dpid, ap1Dpid + "." + digest + ".f", tag1, ap1Port, ap2Port, controllerRestIp)
result = os.popen(command).read()
print "*** Sent Command:", command + "\n"
print "*** Received Result:", result + "\n"
elif tag1 != 0 and tag2 !=0:
command = "curl -s -d '{\"switch\": \"%s\", \"name\":\"%s\", \"vlan-id\":\"%s\", \"cookie\":\"0\", \"priority\":\"32768\", \"ingress-port\":\"%s\",\"active\":\"true\", \"actions\":\"set-vlan-id=%s,output=%s\"}' http://%s/wm/staticflowentrypusher/json | python -mjson.tool" % (ap1Dpid, ap1Dpid + "." + digest + ".f", tag1, ap1Port, tag2, ap2Port, controllerRestIp)
result = os.popen(command).read()
print "*** Sent Command:", command + "\n"
print "*** Received Result:", result + "\n"
else:
print "Error Tag";
sys.exit(-2)
print "*** Create Ingress Rules For RHS Of The Vll - %s" % (dstSwitch)
temp_key1 = parsedResult[len(parsedResult)-3]['switch'] + "-" + str(parsedResult[len(parsedResult)-3]['port'])
tag1 = temp_sw_port_tag[temp_key1]
print "*** inKey: %s, inTag: %s" % (temp_key2, tag2)
print "*** outKey: %s, outTag: %s" % (temp_key1, tag1)
print
if tag1 != 0 and tag2 ==0:
command = "curl -s -d '{\"switch\": \"%s\", \"name\":\"%s\", \"vlan-id\":\"%s\", \"cookie\":\"0\", \"priority\":\"32768\", \"ingress-port\":\"%s\",\"active\":\"true\", \"actions\":\"set-vlan-id=%s,output=%s\"}' http://%s/wm/staticflowentrypusher/json | python -mjson.tool" % (ap1Dpid, ap1Dpid + "." + digest + ".r", "0xffff", ap2Port, tag1, ap1Port, controllerRestIp)
result = os.popen(command).read()
print "*** Sent Command:", command + "\n"
print "*** Received Result:", result + "\n"
elif tag1 != 0 and tag2 !=0:
command = "curl -s -d '{\"switch\": \"%s\", \"name\":\"%s\", \"vlan-id\":\"%s\", \"cookie\":\"0\", \"priority\":\"32768\", \"ingress-port\":\"%s\",\"active\":\"true\", \"actions\":\"set-vlan-id=%s,output=%s\"}' http://%s/wm/staticflowentrypusher/json | python -mjson.tool" % (ap1Dpid, ap1Dpid + "." + digest + ".r", tag2, ap2Port, tag1, ap1Port, controllerRestIp)
result = os.popen(command).read()
print "*** Sent Command:", command + "\n"
print "*** Received Result:", result + "\n"
else:
print "Error Tag";
sys.exit(-2)
store_vll(digest, ap1Dpid)
# Now we manage the middle nodes
for i in range(2, (len(parsedResult)-2)):
print "index:", i
if i % 2 == 0:
ap1Dpid = parsedResult[i]['switch']
ap1Port = parsedResult[i]['port']
print ap1Dpid, ap1Port
else:
ap2Dpid = parsedResult[i]['switch']
ap2Port = parsedResult[i]['port']
print ap2Dpid, ap2Port
print "*** Create Rules For %s" % ap1Dpid
# send one flow mod per pair in route
# using StaticFlowPusher rest API
temp_key1 = ap1Dpid + "-" + str(ap1Port)
tag1 = temp_sw_port_tag[temp_key1]
print "*** inKey: %s, inTag: %s" % (temp_key1, tag1)
temp_key2 = parsedResult[i+1]['switch'] + "-" + str(parsedResult[i+1]['port'])
tag2 = temp_sw_port_tag[temp_key2]
print "*** outKey: %s, outTag: %s" % (temp_key2, tag2)
print
command = "curl -s -d '{\"switch\": \"%s\", \"name\":\"%s\", \"vlan-id\":\"%s\", \"cookie\":\"0\", \"priority\":\"32768\", \"ingress-port\":\"%s\",\"active\":\"true\", \"actions\":\"set-vlan-id=%s,output=%s\"}' http://%s/wm/staticflowentrypusher/json | python -mjson.tool" % (ap1Dpid, ap1Dpid + "." + digest + ".f", tag1, ap1Port, tag2, ap2Port, controllerRestIp)
result = os.popen(command).read()
print "*** Sent Command:", command + "\n"
print "*** Received Result:", result + "\n"
temp_key1 = ap2Dpid + "-" + str(ap2Port)
tag1 = temp_sw_port_tag[temp_key1]
print "*** inKey: %s, inTag: %s" % (temp_key1, tag1)
temp_key2 = parsedResult[i-2]['switch'] + "-" + str(parsedResult[i-2]['port'])
tag2 = temp_sw_port_tag[temp_key2]
print "*** outKey: %s, outTag: %s" % (temp_key2, tag2)
print
command = "curl -s -d '{\"switch\": \"%s\", \"name\":\"%s\", \"vlan-id\":\"%s\", \"cookie\":\"0\", \"priority\":\"32768\", \"ingress-port\":\"%s\",\"active\":\"true\", \"actions\":\"set-vlan-id=%s,output=%s\"}' http://%s/wm/staticflowentrypusher/json | python -mjson.tool" % (ap1Dpid, ap1Dpid + "." + digest + ".r", tag1, ap2Port, tag2, ap1Port, controllerRestIp)
result = os.popen(command).read()
print "*** Sent Command:", command + "\n"
print "*** Received Result:", result + "\n"
store_vll(digest, ap1Dpid)
def del_command(args):
    """Delete all VLLs previously recorded in the local vlls.json journal.

    For each journal entry, both the forward (".f") and reverse (".r")
    static flow rules are removed from the controller through the
    StaticFlowPusher REST API.

    :param args: parsed CLI namespace; only ``controllerRestIp`` is read.
    """
    print "*** Delete Vlls From Configuration File"
    print "*** Read Previous Vlls Inserted"
    if os.path.exists('vlls.json'):
        vllsDb = open('vlls.json','r')
        lines = vllsDb.readlines()
        vllsDb.close()
        # Re-opening in 'w' mode truncates vlls.json, so the journal is
        # emptied even if one of the REST calls below fails.
        vllsDb = open('vlls.json','w')
    else:
        lines={}
        print "*** No Vlls Inserted"
        return
    # Removing previously created flow from switches
    # using StaticFlowPusher rest API
    # currently, circuitpusher records created circuits in local file ./circuits.db
    # with circuit name and list of switches
    controllerRestIp = args.controllerRestIp
    for line in lines:
        # Each journal line is a JSON object holding the rule-name digest
        # and the DPID of the switch it was installed on.
        data = json.loads(line)
        sw = data['Dpid']
        digest = data['name']
        print "*** Deleting Vll: %s - Switch %s" % (digest,sw)
        # Delete the forward-direction rule ("<dpid>.<digest>.f").
        command = "curl -X DELETE -d '{\"name\":\"%s\", \"switch\":\"%s\"}' http://%s/wm/staticflowentrypusher/json 2> /dev/null | python -mjson.tool" % (sw + "." + digest + ".f", sw, controllerRestIp)
        result = os.popen(command).read()
        print "*** Sent Command:", command + "\n"
        print "*** Received Result:", result + "\n"
        # Delete the reverse-direction rule ("<dpid>.<digest>.r").
        command = "curl -X DELETE -d '{\"name\":\"%s\", \"switch\":\"%s\"}' http://%s/wm/staticflowentrypusher/json 2> /dev/null | python -mjson.tool" % (sw + "." + digest +".r", sw, controllerRestIp)
        result = os.popen(command).read()
        print "*** Sent Command:", command + "\n"
        print "*** Received Result:", result + "\n"
    vllsDb.close()
def run_command(data):
    """Dispatch to the add/delete handler based on the requested action.

    :param data: parsed argparse namespace with at least an ``action``
        attribute ('add' or 'delete'); any other value is silently ignored.
    """
    # Bug fix: dispatch on the 'data' parameter itself rather than on the
    # module-level 'args' global, so the function works with whatever
    # namespace the caller passes in (the __main__ block passes the parsed
    # args object as 'data').
    if data.action == 'add':
        add_command(data)
    elif data.action == 'delete':
        del_command(data)
if __name__ == '__main__':
    # Parse the CLI options once, then dispatch on the requested action.
    args = parse_cmd_line()
    run_command(args)
| 45.66855 | 371 | 0.623258 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 12,375 | 0.510309 |
923ce42885b5acee5c6a003f6685bde886fc0313 | 7,334 | py | Python | lib/external_lib/arvore.py | patrick7star/jogo_da_forca | 93a982d940d0b7ae1dc4e9ea7f88df24d7b95c4b | [
"MIT"
] | null | null | null | lib/external_lib/arvore.py | patrick7star/jogo_da_forca | 93a982d940d0b7ae1dc4e9ea7f88df24d7b95c4b | [
"MIT"
] | null | null | null | lib/external_lib/arvore.py | patrick7star/jogo_da_forca | 93a982d940d0b7ae1dc4e9ea7f88df24d7b95c4b | [
"MIT"
] | null | null | null | '''
Aqui o programa conterá uma função que permite
listar tanto diretórios, como arquivos na forma de
árvores, ou seja, seus ramos terão linhas, e também,
espaçamentos mostrando a profundidade de cada diretório
dado uma pasta raíz.
'''
#só pode ser importado:
__all__ = ['arvore']
# ********* bibliotecas **********
from os import listdir, chdir, system
from random import randint
import string
from sys import platform
# ************* dados ***********
'''
desabilitando, pois gera "saída" indesejada
no código não relevante. Isto é uma copia
para está biblioteca, funções aqui não são
todas úteis para o código que a ascrecentou
na biblioteca.
# conforme a plataforma em execução.
buffer = 'temporario.txt'
if platform == 'win32':
# no caso do windows.
caminho = 'C:\\Users\\SAVIO\\AppData\\Local\\Temp'
arqBuffer = caminho+'\\'+buffer
elif platform == 'linux':
# no caso de linux.
caminho = '/tmp'
arqBuffer = caminho+'/'+buffer
#criando arquivo... não existente inicialmente.
try:
arq = open(arqBuffer, mode='x')
except FileExistsError:
print('o arquivo "%s" já existe, então apenas continuando...'%arqBuffer)
'''
# ************ funções ****************
# gera uma árvore, imprimindo cada diretório, com
# determinado recuo no arquivo, para simular uma
# falsa hierárquia.
def trilha_dirs(caminho):
    """Append one branch line per directory under *caminho* to the buffer
    file, indented proportionally to the recursion depth.

    Depth is tracked in the function attribute ``trilha_dirs.profundidade``
    (set to 1 by ``arvore()`` before the first call) and is capped at 8.

    NOTE(review): output goes to the module-level ``arqBuffer`` path, whose
    initialisation is commented out at the top of this file — confirm it is
    defined before calling.
    """
    # subdirectories found directly under this root.
    dirs = []
    #print(listdir(path=caminho))
    for pasta in listdir(path = caminho):
        # try to list the entry; if that fails in any way, the conclusion
        # is that it is not a directory.
        try:
            if platform == 'win32':
                listdir(caminho+'\\'+pasta)
            elif platform == 'linux':
                listdir(caminho+'/'+pasta)
            dirs.append(pasta)
        except: pass
    # width of each indentation step.
    espacos = 2
    # 0x20 is the space character in hexadecimal.
    recuo = (chr(0x20) * espacos) * trilha_dirs.profundidade
    # emit one branch line per directory.
    for d in dirs:
        # depth limit, fixed at 8; reset and skip deeper entries.
        if trilha_dirs.profundidade > 8:
            trilha_dirs.profundidade = 0
            continue
        # if there are subdirectories, recurse into them.
        if platform == 'win32':
            novo_caminho = caminho + '\\' + d
        elif platform == 'linux':
            novo_caminho = caminho + '/' + d
        # truncated label for names longer than 20 characters.
        texto_limitado = '%s|__ "%s [...]"' % (recuo, d[0:20])
        # full, untruncated label.
        texto = '%s|__ "%s"' % (recuo, d)
        if len(listdir(novo_caminho)) > 0:
            if len(d) > 20: print(texto_limitado,file=open(arqBuffer,'a'))
            else: print(texto,file=open(arqBuffer,'a'))
            # one extra indentation step per recursion level.
            trilha_dirs.profundidade += 1
            trilha_dirs(novo_caminho)
        else:
            if len(d) > 20: print(texto_limitado, file=open(arqBuffer,'a'))
            else: print(texto,file=open(arqBuffer, 'a'))
    # reset the depth to zero for the next top-level call.
    trilha_dirs.profundidade = 0
# generates a random temp-file name with a ".tmp" extension.
def gera_str_aleatoria(comprimento):
    """Return a random file name 'temporario_<XXX>.tmp'.

    Each of the *comprimento* random characters is drawn by first picking
    one of three groups (uppercase, lowercase, digits) uniformly and then
    picking a character from that group, matching the original scheme.

    :param comprimento: number of random characters in the middle part.
    :return: the generated name, e.g. 'temporario_aB3....tmp'.
    """
    # groups are loop-invariant, so build the tuple once (the original
    # rebuilt the list on every iteration).
    grupos = (string.ascii_uppercase, string.ascii_lowercase, string.digits)
    letras = []
    for _ in range(comprimento):
        grupo = grupos[randint(0, len(grupos) - 1)]
        letras.append(grupo[randint(0, len(grupo) - 1)])
    return 'temporario_' + ''.join(letras) + '.tmp'
# returns a string with the whole directory tree, down to a limited
# depth, rendered as branch lines.
def arvore(caminho, mostra_arquivos=False):
    """Render the directory tree rooted at *caminho* as a string.

    :param caminho: root directory to walk.
    :param mostra_arquivos: when False (the default) the tree is built via
        ``trilha_dirs``; the True case only re-reads the buffer file.
    :return: the buffered tree text, prefixed with ' ᐅ '.

    NOTE(review): reads and renames the module-level ``arqBuffer`` temp
    file, whose setup is commented out at the top of this file — confirm it
    is defined before calling.
    """
    # when showing files is disabled, which is the default...
    if not mostra_arquivos:
        # store the root itself in the temporary file.
        print(caminho,file=open(arqBuffer,'a'))
        # depth starts at one, since the root path itself is level zero.
        trilha_dirs.profundidade = 1
        # walk the tree, appending one line per directory.
        trilha_dirs(caminho)
    # read back the buffered tree text.
    conteudo = open(arqBuffer, 'r').read()
    # drop the trailing blank line.
    conteudo = conteudo.split('\n')
    conteudo.pop(-1)
    conteudo = '\n'.join(conteudo[0:])
    # rename the temp file so it does not interfere with a future run.
    if platform == 'win32':
        nome_antigo = arqBuffer.split('\\')[-1]
    elif platform == 'linux':
        nome_antigo = arqBuffer.split('/')[-1]
    novo_nome = gera_str_aleatoria(21)
    #print(nome_antigo, ' ==> ', novo_nome)
    if platform == 'win32':
        system('ren %s %s' % (arqBuffer, novo_nome))
    elif platform == 'linux':
        system('mv %s /tmp/%s' % (arqBuffer, novo_nome))
    # return the rendered tree, prefixed with an arrow marker.
    return ' ᐅ ' + conteudo
# turns a string into a rectangular character matrix, following its
# line layout.
def matriciar_str(_str):
    """Convert *_str* into a rectangular character matrix.

    Every line is padded to the width of the longest line and every
    whitespace character (including the padding) is represented by the
    placeholder '¨', so later passes can tell "empty" cells apart.

    :param _str: multi-line text, e.g. the output of arvore().
    :return: list of equal-length lists of single characters.
    """
    linhas = _str.split('\n')
    largura = max(len(linha) for linha in linhas)
    matriz = []
    for linha in linhas:
        # pad to full width with the placeholder, then normalise any
        # remaining whitespace character to the same placeholder.
        preenchida = linha.ljust(largura, '¨')
        matriz.append(['¨' if celula.isspace() else celula
                       for celula in preenchida])
    return matriz
# prints the matrix, for a good view of what is going on.
def imprime_matriz(matriz):
    """Print *matriz* row by row, one character cell at a time.

    :param matriz: rectangular list of single-character lists, as
        produced by matriciar_str()/conserta().
    """
    print("mostrando com está indo a matriz:")
    # Bug fix: the column count must come from row 0, not row 1 —
    # 'len(matriz[1])' raised IndexError for single-row matrices.
    m, n = len(matriz), len(matriz[0])
    for i in range(m):
        for j in range(n):
            print(matriz[i][j], end='')
        print('')
    print('\t\t---- ---- FIM --- ----')
# this function repairs the remaining branch lines of the rendered tree.
def conserta(_str):
    """Extend vertical branch lines in the matrix form of a rendered tree.

    Any column containing at least two '|' characters gets its padding
    cells ('¨') turned into '|'; afterwards every remaining padding cell
    becomes a real space.

    :param _str: tree text as returned by arvore().
    :return: the repaired character matrix.
    """
    matriz = matriciar_str(_str)
    m, n = len(matriz), len(matriz[0])
    # Removed dead code from the original: a nested 'posicao_valida'
    # function was redefined on every iteration but never called, and a
    # whole-column count dict was built where a per-column sum suffices.
    for coluna in range(n):
        # count the vertical bars already present in this column.
        barras = sum(1 for linha in range(m) if matriz[linha][coluna] == '|')
        if barras >= 2:
            # extend the branch through the column's padding cells.
            for linha in range(m):
                if matriz[linha][coluna] == '¨':
                    matriz[linha][coluna] = '|'
    # cleanup pass: any padding cell left becomes a real space.
    for i in range(m):
        for j in range(n):
            if matriz[i][j] == '¨':
                matriz[i][j] = ' '
    return matriz
# execution demo: renders a couple of hard-coded directories.
if __name__ == '__main__':
    #print(gera_str_aleatoria(15))
    # NOTE(review): paths below are machine-specific; adjust before running.
    caminho = "/home/savio/Documents"
    str_arv = arvore(caminho)
    print(str_arv)
    imprime_matriz(matriciar_str(str_arv))
    imprime_matriz(conserta(str_arv))
    imprime_matriz(conserta(arvore('/etc')))
#print(arvore('/etc')) | 34.271028 | 76 | 0.590674 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3,455 | 0.464008 |
923d5b2799ddb2b60a1a2513f89ea6d082ceb654 | 5,032 | py | Python | magneto/utils/__init__.py | MagnetoTesting/magneto | e34232f93caa970524cc88792dc22acb2b479b85 | [
"Apache-2.0"
] | 24 | 2015-11-25T06:58:32.000Z | 2021-09-20T10:41:25.000Z | magneto/utils/__init__.py | MagnetoTesting/magneto | e34232f93caa970524cc88792dc22acb2b479b85 | [
"Apache-2.0"
] | null | null | null | magneto/utils/__init__.py | MagnetoTesting/magneto | e34232f93caa970524cc88792dc22acb2b479b85 | [
"Apache-2.0"
] | 7 | 2015-12-10T06:01:38.000Z | 2018-04-09T01:37:58.000Z | from __future__ import absolute_import
from contextlib import contextmanager
from multiprocessing import TimeoutError
import signal
import datetime
import os
import subprocess
import time
import urllib
import zipfile
import shutil
import pytest
from .adb import ADB
from ..logger import Logger
def get_center(bounds):
    """
    Returns the center coordinates of the given element bounds::
        from magneto.utils import get_center
        element = self.magneto(text='Foo')
        (x, y) = get_center(element.info['bounds'])
    :param dict bounds: Element position coordinates (top, right, bottom, left)
    :return: (x, y) tuple with the element center coordinates
    """
    left, right = bounds['left'], bounds['right']
    top, bottom = bounds['top'], bounds['bottom']
    # midpoint along each axis, expressed as an offset from the far edge.
    return right - (right - left) / 2, bottom - (bottom - top) / 2
def get_config(attr, default=None):
    """
    Allows access to config parameters::
        from magneto.utils import get_config
        package = get_config('--app-package')
    :param str attr: Command line argument
    :param default: Value returned when the option is unset or falsy
    :return: Requested config value
    """
    # The getattr guard avoids an exception during sphinx-autodoc runs,
    # where pytest.config is not set.
    # Idiom fix: compare against None with 'is not', not '!='.
    if getattr(pytest, 'config', None) is not None:
        # 'or default' means falsy option values also fall back to default.
        return pytest.config.getoption(attr) or default
    return default
@contextmanager
def timewarp(timedelta_):
    """Temporarily shift the device clock forward by *timedelta_*.

    On entry the device time is set (via ADB) to host-now + timedelta_;
    on exit — even after an exception — it is reset to the current host
    time.

    :param datetime.timedelta timedelta_: offset to add to the current time.
    """
    now = datetime.datetime.now()
    future = now + timedelta_
    ADB.set_datetime(future)
    try:
        yield
    finally:
        # restore the real time even if the body raised.
        now = datetime.datetime.now()
        ADB.set_datetime(now)
class Timeout():
    """
    Allows polling a function till success or timeout::
        import time
        from magneto.utils import Timeout
        result = False
        with Timeout(seconds=5):
            while not result:
                result = some_function()
                time.sleep(0.5)
    :param integer seconds: Timeout value in seconds. Defaults to 1.
    :param str error_message: Error message to display when timeout occurs. Defaults to 'Timeout'.

    Implemented with SIGALRM, so per the Python ``signal`` docs it only
    works on Unix and only in the main thread.
    """
    def __init__(self, seconds=1, error_message='Timeout'):
        # 'seconds or 1' also maps 0/None to the 1-second default.
        self.seconds = seconds or 1
        self.error_message = error_message

    def handle_timeout(self, signum, frame):
        # SIGALRM handler: abort the polled block by raising.
        Logger.debug('Timeout reached {} seconds limit'.format(self.seconds))
        raise TimeoutError(self.error_message)

    def __enter__(self):
        Logger.debug('Timeout started for {} seconds'.format(self.seconds))
        signal.signal(signal.SIGALRM, self.handle_timeout)
        signal.alarm(self.seconds)

    def __exit__(self, type, value, traceback):
        # cancel any pending alarm so it cannot fire after the block.
        Logger.debug('Timeout stopped.')
        signal.alarm(0)
def unlock_device():
    """
    Powers on device and unlocks it.
    """
    # read device screen state ('on'/'off') by grepping 'dumpsys power'.
    p = ADB.exec_cmd("shell 'if [ -z $(dumpsys power | grep mScreenOn=true) ]; then echo off; else echo on;fi'",
                     stdout=subprocess.PIPE)
    device_screen = p.stdout.readline().strip('\r\n')
    if device_screen == 'off':
        # power on device (keyevent 26 — presumably KEYCODE_POWER; confirm
        # against the Android KeyEvent documentation)
        ADB.exec_cmd('shell input keyevent 26').wait()
        # unlock device (keyevent 82 — presumably KEYCODE_MENU)
        ADB.exec_cmd('shell input keyevent 82').wait()
def wait_for_device():
    """
    Wait for device to boot. 1 minute timeout.
    """
    # 'adb wait-for-device' blocks until a device is attached; getprop then
    # reports '1' once the system property sys.boot_completed is set.
    wait_for_device_cmd = 'wait-for-device shell getprop sys.boot_completed'
    p = ADB.exec_cmd(wait_for_device_cmd, stdout=subprocess.PIPE)
    boot_completed = p.stdout.readline().strip('\r\n')
    try:
        # poll once per second, aborting after 60s via the SIGALRM Timeout.
        with Timeout(seconds=60):
            while boot_completed != '1':
                time.sleep(1)
                p = ADB.exec_cmd(wait_for_device_cmd, stdout=subprocess.PIPE)
                boot_completed = p.stdout.readline().strip('\r\n')
                Logger.debug('Waiting for device to finish booting (adb shell getprop sys.boot_completed)')
    except TimeoutError:
        # best effort: continue anyway if the property never flips to '1'.
        Logger.debug('Timed out while waiting for sys.boot_completed, there might not be a default launcher set, trying to run anyway')
        pass
class Bootstrap(object):
    """Download and unpack a project template into the current directory.

    NOTE(review): uses ``urllib.urlretrieve`` and the ``file`` builtin,
    i.e. Python 2 APIs — confirm before running under Python 3.
    """
    # template name -> zip archive URL
    _map = {
        'no_app': 'https://github.com/EverythingMe/magneto-init/archive/master.zip',
        'calc': 'https://github.com/EverythingMe/magneto-demo-calc/archive/master.zip'
    }
    def __init__(self, name):
        if name not in self._map:
            raise Exception('{} not recognized'.format(name))
        # download the archive to a temporary location.
        filename, headers = urllib.urlretrieve(self._map[name])
        with zipfile.ZipFile(filename) as zip_file:
            # the first entry is the archive's root folder; it is stripped
            # from every extracted path so contents land in the cwd.
            rootdir = zip_file.namelist()[0]
            for member in zip_file.namelist()[1:]:
                if not os.path.basename(member):
                    # create dir from zipfile
                    os.mkdir(os.path.join(os.path.curdir, member.replace(rootdir, '')))
                else:
                    # copy file (taken from zipfile's extract)
                    source = zip_file.open(member)
                    target = file(os.path.join(os.path.curdir, member.replace(rootdir, '')), "wb")
                    with source, target:
| 29.775148 | 135 | 0.630962 | 2,177 | 0.432631 | 223 | 0.044316 | 239 | 0.047496 | 0 | 0 | 2,024 | 0.402226 |
923d66fe6edb0b62b34853584252cfb7100d51a4 | 1,222 | py | Python | common/widgets.py | saulm/firedeptmanagement | 06548bf872fc76ac214ec25cc536f34aa8145305 | [
"Unlicense"
] | 2 | 2019-09-24T19:12:04.000Z | 2019-09-28T19:07:57.000Z | common/widgets.py | saulm/firedeptmanagement | 06548bf872fc76ac214ec25cc536f34aa8145305 | [
"Unlicense"
] | 1 | 2020-08-16T02:34:28.000Z | 2021-03-16T14:15:47.000Z | common/widgets.py | saulm/firedeptmanagement | 06548bf872fc76ac214ec25cc536f34aa8145305 | [
"Unlicense"
] | 2 | 2017-01-18T21:10:18.000Z | 2020-03-12T20:25:08.000Z | from django import forms
from django.db import models
from django.conf import settings
class LocationPickerWidget(forms.TextInput):
    """Text input rendered with a Google-Maps-based location picker.

    The widget tags its input with the 'location_picker' CSS class,
    presumably picked up by js/jquery.location_picker.js — verify in that
    script.
    """
    class Media:
        # static assets injected through Django's form-media framework.
        css = {
            'all': (
                settings.STATIC_URL + 'css/location_picker.css',
            )
        }
        js = (
            'http://ajax.googleapis.com/ajax/libs/jquery/1.3.2/jquery.min.js',
            'http://www.google.com/jsapi?key=' + settings.MAPS_API_KEY,
            settings.STATIC_URL + 'js/jquery.location_picker.js',
        )
    def __init__(self, language=None, attrs=None):
        # default to the site language's two-letter code.
        self.language = language or settings.LANGUAGE_CODE[:2]
        super(LocationPickerWidget, self).__init__(attrs=attrs)
    def render(self, name, value, attrs=None):
        # tag the rendered input so client-side code can find it.
        if None == attrs:
            attrs = {}
        attrs['class'] = 'location_picker'
        return super(LocationPickerWidget, self).render(name, value, attrs)
class LocationField(models.CharField):
    """CharField whose auto-generated form field uses LocationPickerWidget."""
    def formfield(self, **kwargs):
        # force the map-picker widget on the generated form field.
        kwargs['widget'] = LocationPickerWidget
        return super(LocationField, self).formfield(**kwargs)
from south.modelsinspector import add_introspection_rules
add_introspection_rules([], ["^common\.widgets\.LocationField"]) | 34.914286 | 78 | 0.6473 | 1,007 | 0.824059 | 0 | 0 | 0 | 0 | 0 | 0 | 224 | 0.183306 |
923f16aaf648cbadbb3a766cadbe8a39ea5540cc | 605 | py | Python | 2015/advent25.py | AwesomeGitHubRepos/adventofcode | 84ba7963a5d7905973f14bb1c2e3a59165f8b398 | [
"MIT"
] | 96 | 2018-04-21T07:53:34.000Z | 2022-03-15T11:00:02.000Z | 2015/advent25.py | AwesomeGitHubRepos/adventofcode | 84ba7963a5d7905973f14bb1c2e3a59165f8b398 | [
"MIT"
] | 17 | 2019-02-07T05:14:47.000Z | 2021-12-27T12:11:04.000Z | 2015/advent25.py | AwesomeGitHubRepos/adventofcode | 84ba7963a5d7905973f14bb1c2e3a59165f8b398 | [
"MIT"
] | 14 | 2019-02-05T06:34:15.000Z | 2022-01-24T17:35:00.000Z | import re
from functools import reduce
def coord_to_count(row, col):
    """Return the 1-based ordinal of cell (row, col) in diagonal order.

    Cells are numbered along anti-diagonals: (1,1)=1, (2,1)=2, (1,2)=3, ...
    """
    diagonal = row + col - 1
    # cells on all previous diagonals, plus the offset along this one.
    return (diagonal - 1) * diagonal // 2 + col
count = coord_to_count(row, col)
return reduce(lambda c, i: c * 252533 % 33554393, range(count - 1), 20151125)
if __name__ == '__main__':
import sys
filename = sys.argv[-1]
with open(filename) as f:
line = next(f)
row, col = map(int, re.search(r'row (\d+), column (\d+)', line).groups())
print('Part 1:', calculate_code(row, col))
| 26.304348 | 81 | 0.624793 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 110 | 0.181818 |
923fbcda3f989955bd68ae7294e1df2739368dbf | 1,792 | py | Python | test/detect_os.py | ThunderSoft123/mbed-ls | bf2750e2da4649591fcbb8047e0926ae4d6c74fd | [
"Apache-2.0"
] | null | null | null | test/detect_os.py | ThunderSoft123/mbed-ls | bf2750e2da4649591fcbb8047e0926ae4d6c74fd | [
"Apache-2.0"
] | null | null | null | test/detect_os.py | ThunderSoft123/mbed-ls | bf2750e2da4649591fcbb8047e0926ae4d6c74fd | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
"""
mbed SDK
Copyright (c) 2011-2015 ARM Limited
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import unittest
import os
import sys
import errno
import logging
import platform
from mbed_lstools.main import create
from mbed_lstools.main import mbed_os_support
from mbed_lstools.main import mbed_lstools_os_info
class DetectOSTestCase(unittest.TestCase):
    """ Test cases for host OS related functionality. Helpful during porting
    """
    def setUp(self):
        # no fixtures needed; hook kept for symmetry/future use.
        pass
    def tearDown(self):
        pass
    def test_porting_mbed_lstools_os_info(self):
        # the os-info tuple must be available on every supported host.
        self.assertNotEqual(None, mbed_lstools_os_info())
    def test_porting_mbed_os_support(self):
        self.assertNotEqual(None, mbed_os_support())
    def test_porting_create(self):
        # the factory must return a non-None object for this host.
        self.assertNotEqual(None, create())
    def test_supported_os_name(self):
        # the detected OS name must be one of the known ports.
        os_names = ['Windows7', 'Ubuntu', 'LinuxGeneric', 'Darwin']
        self.assertIn(mbed_os_support(), os_names)
    def test_detect_os_support_ext(self):
        # mbed_lstools_os_info() must mirror the stdlib platform data.
        os_info = (os.name,
                   platform.system(),
                   platform.release(),
                   platform.version(),
                   sys.platform)
        self.assertEqual(os_info, mbed_lstools_os_info())
if __name__ == '__main__':
    # allow running this test module directly with the unittest runner.
    unittest.main()
| 27.151515 | 76 | 0.708147 | 923 | 0.515067 | 0 | 0 | 0 | 0 | 0 | 0 | 728 | 0.40625 |
9240a548d742f534b991d13e7dee47d24390d843 | 416 | py | Python | aiocloudflare/api/user/billing/subscriptions/subscriptions.py | Stewart86/aioCloudflare | 341c0941f8f888a8b7e696e64550bce5da4949e6 | [
"MIT"
] | 2 | 2021-09-14T13:20:55.000Z | 2022-02-24T14:18:24.000Z | aiocloudflare/api/user/billing/subscriptions/subscriptions.py | Stewart86/aioCloudflare | 341c0941f8f888a8b7e696e64550bce5da4949e6 | [
"MIT"
] | 46 | 2021-09-08T08:39:45.000Z | 2022-03-29T12:31:05.000Z | aiocloudflare/api/user/billing/subscriptions/subscriptions.py | Stewart86/aioCloudflare | 341c0941f8f888a8b7e696e64550bce5da4949e6 | [
"MIT"
] | 1 | 2021-12-30T23:02:23.000Z | 2021-12-30T23:02:23.000Z | from aiocloudflare.commons.unused import Unused
from .apps.apps import Apps
from .zones.zones import Zones
class Subscriptions(Unused):
    """Client for the Cloudflare 'user/billing/subscriptions' endpoint."""
    _endpoint1 = "user/billing/subscriptions"
    # secondary/tertiary endpoint segments are unused for this resource.
    _endpoint2 = None
    _endpoint3 = None
    @property
    def apps(self) -> Apps:
        """Sub-client for the subscription apps resource."""
        return Apps(self._config, self._session)
    @property
    def zones(self) -> Zones:
        """Sub-client for the subscription zones resource."""
        return Zones(self._config, self._session)
| 21.894737 | 49 | 0.697115 | 305 | 0.733173 | 0 | 0 | 175 | 0.420673 | 0 | 0 | 28 | 0.067308 |
92414f09496d34fe212d6725d451c35584414eae | 1,548 | py | Python | dlutils/timer.py | podgorskiy/dlutils | 716c99fd26cde73550c58b2be1071d64c67ca036 | [
"Apache-2.0"
] | 5 | 2019-11-06T20:42:38.000Z | 2020-09-06T02:09:56.000Z | dlutils/timer.py | podgorskiy/dlutils | 716c99fd26cde73550c58b2be1071d64c67ca036 | [
"Apache-2.0"
] | 2 | 2020-04-23T09:22:29.000Z | 2020-04-23T14:41:22.000Z | dlutils/timer.py | podgorskiy/dlutils | 716c99fd26cde73550c58b2be1071d64c67ca036 | [
"Apache-2.0"
] | 3 | 2020-04-28T18:12:47.000Z | 2020-08-28T06:50:34.000Z | # Copyright 2017-2019 Stanislav Pidhorskyi
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Profiling utils"""
import functools
import time
def timer(f):
    """ Decorator for timing function (method) execution time.
    After return from function will print string: ``func: <function name> took: <time in seconds> sec``.
    Args:
        f (Callable[Any]): function to decorate.
    Returns:
        Callable[Any]: Decorated function.
    Example:
        ::
            >>> from dlutils import timer
            >>> @timer.timer
            ... def foo(x):
            ...     for i in range(x):
            ...         pass
            ...
            >>> foo(100000)
            func:'foo' took: 0.0019 sec
    """
    # functools.wraps preserves f's __name__/__doc__ on the wrapper, so
    # decorated functions keep their identity for introspection/docs.
    @functools.wraps(f)
    def __wrapper(*args, **kw):
        time_start = time.time()
        result = f(*args, **kw)
        time_end = time.time()
        print('func:%r took: %2.4f sec' % (f.__name__, time_end - time_start))
        return result
    return __wrapper
| 29.769231 | 104 | 0.583333 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,274 | 0.822997 |
9242274087f97ac266387b636296af92b2d7d9f8 | 422 | py | Python | global_covid_tracker/plotting/__init__.py | kvanderveen/global_covid_tracker | ad4466b099aae30cbc73b62cce440c62a4fc87f9 | [
"MIT"
] | null | null | null | global_covid_tracker/plotting/__init__.py | kvanderveen/global_covid_tracker | ad4466b099aae30cbc73b62cce440c62a4fc87f9 | [
"MIT"
] | 3 | 2021-08-23T20:45:46.000Z | 2022-03-12T00:33:28.000Z | global_covid_tracker/plotting/__init__.py | kvanderveen/global_covid_tracker | ad4466b099aae30cbc73b62cce440c62a4fc87f9 | [
"MIT"
] | null | null | null | from .plot_positive_test_rates import plot_positive_test_rates
from .plot_total_cases import plot_total_cases
from .plot_total_deaths import plot_total_deaths
from .plot_deaths_by_country import plot_deaths_by_country
from .plot_cases_by_country import plot_cases_by_country
from .plot_total_tests import plot_total_tests
from .plot_cases_growth import plot_cases_growth
from .plot_deaths_growth import plot_deaths_growth
| 46.888889 | 62 | 0.905213 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
9242fcee18924cfc75c9b5f04ff881874a1a4e66 | 1,229 | py | Python | iot_services_sdk/session.py | sap-archive/iot-services-sdk | 157e607b0c8b3a7b77836336aa31d89ebd8e9f86 | [
"CNRI-Python"
] | 4 | 2019-05-02T07:51:13.000Z | 2019-09-25T12:14:06.000Z | iot_services_sdk/session.py | sap-archive/iot-services-sdk | 157e607b0c8b3a7b77836336aa31d89ebd8e9f86 | [
"CNRI-Python"
] | 2 | 2019-09-13T15:36:32.000Z | 2019-11-15T06:01:09.000Z | iot_services_sdk/session.py | SAP/iot-services-sdk | 157e607b0c8b3a7b77836336aa31d89ebd8e9f86 | [
"CNRI-Python"
] | 1 | 2020-01-17T15:44:52.000Z | 2020-01-17T15:44:52.000Z | """ Author: Philipp Steinrötter (steinroe) """
from .iot_service import IoTService
from .response import Response
class SessionService(IoTService):
    """REST client for the session endpoints (/logout, /me)."""
    def __init__(self,
                 instance,
                 user,
                 password):
        """Instantiate SessionService object
        Arguments:
            instance {string} -- IoT Services instance
            user {string} -- IoT Services user
            password {string} -- IoT Services password
        """
        # session endpoints sit at the API root, hence the empty prefix.
        self.service = ''
        IoTService.__init__(
            self,
            instance=instance,
            user=user,
            password=password
        )
    def logout(self) -> Response:
        """Logs out the user by invalidating the session. The user is identified via session cookie or the
        Authorization header. """
        service = '/logout'
        response = self.request_core(method='POST', service=service, accept_json=True)
        return response
    def me(self) -> Response:
        """The current user is identified via session cookie or the Authorization header."""
        service = '/me'
        response = self.request_core(method='POST', service=service, accept_json=True)
        return response
| 29.97561 | 106 | 0.597234 | 1,111 | 0.903252 | 0 | 0 | 0 | 0 | 0 | 0 | 516 | 0.419512 |
9244aa4117e40f1bf99002ca86723057b1690b0e | 736 | py | Python | tests/test_pieces.py | trslater/chess | 0c29faf1296f76020f8e213e9c218c05043668bb | [
"MIT"
] | null | null | null | tests/test_pieces.py | trslater/chess | 0c29faf1296f76020f8e213e9c218c05043668bb | [
"MIT"
] | null | null | null | tests/test_pieces.py | trslater/chess | 0c29faf1296f76020f8e213e9c218c05043668bb | [
"MIT"
] | null | null | null | from chess.pieces import Pawn, Knight, Bishop, Rook, Queen, King
class TestPiece:
def test_sum(self):
groups = ((Pawn(), Knight(), Bishop()),
(Knight(), Bishop(), Queen()),
(Pawn(), Pawn(), Pawn(), Pawn()))
actual_sums = tuple(map(sum, groups))
expected_sums = (7, 15, 4)
for actual_sum, expected_sum in zip(actual_sums, expected_sums):
assert actual_sum == expected_sum
def test_compare(self):
assert Knight() == Knight()
assert Bishop() == Knight()
assert King() <= King()
assert Bishop() <= Rook()
assert King() > Queen()
assert Pawn() < Knight()
assert Queen() != Rook()
| 30.666667 | 72 | 0.532609 | 668 | 0.907609 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
9244ebfd6d415914bf8fb0696b6cfab97096c3f1 | 19,961 | py | Python | src/ezdxf/tools/pattern.py | hh-wu/ezdxf | 62509ba39b826ee9b36f19c0a5abad7f3518186a | [
"MIT"
] | null | null | null | src/ezdxf/tools/pattern.py | hh-wu/ezdxf | 62509ba39b826ee9b36f19c0a5abad7f3518186a | [
"MIT"
] | null | null | null | src/ezdxf/tools/pattern.py | hh-wu/ezdxf | 62509ba39b826ee9b36f19c0a5abad7f3518186a | [
"MIT"
] | null | null | null | # Purpose: Standard definitions
# Created: 08.07.2015
# Copyright (c) 2015-2020, Manfred Moitzi
# License: MIT License
# pattern type: predefined (1)
from ezdxf.math import Vec2
PATTERN_NEW = {
"ANSI31": [[45.0, (0.0, 0.0), (-2.2627, 2.2627), []]],
"ANSI32": [
[45.0, (0.0, 0.0), (-6.7882, 6.7882), []],
[45.0, (4.5255, 0.0), (-6.7882, 6.7882), []],
],
"ANSI33": [
[45.0, (512.0, 0.0), (-4.5255, 4.5255), []],
[45.0, (516.5255, 0.0), (-4.5255, 4.5255), [3.2, -1.6]],
],
"ANSI34": [
[45.0, (0.0, 0.0), (-13.5765, 13.5765), []],
[45.0, (4.5255, 0.0), (-13.5765, 13.5765), []],
[45.0, (9.051, 0.0), (-13.5765, 13.5765), []],
[45.0, (13.5765, 0.0), (-13.5765, 13.5765), []],
],
"ANSI35": [
[45.0, (-1024.0, -256.0), (-4.5255, 4.5255), []],
[45.0, (-1019.4745, -256.0), (-4.5255, 4.5255), [8.0, -1.6, 0.0, -1.6]],
],
"ANSI36": [[45.0, (-1024.0, -256.0), (1.6971, 6.2225), [8.0, -1.6, 0.0, -1.6]]],
"ANSI37": [
[45.0, (0.0, 0.0), (-2.2627, 2.2627), []],
[135.0, (0.0, 0.0), (-2.2627, -2.2627), []],
],
"ANSI38": [
[45.0, (0.0, 0.0), (-2.2627, 2.2627), []],
[135.0, (0.0, 0.0), (-6.7882, 2.2627), [8.0, -4.8]],
],
"ACAD_ISO02W100": [[0.0, (0.0, 0.0), (0.0, 12.8), [30.72, -7.68]]],
"ACAD_ISO03W100": [[0.0, (0.0, 0.0), (0.0, 12.8), [30.72, -46.08]]],
"ACAD_ISO04W100": [[0.0, (0.0, 0.0), (0.0, 12.8), [61.44, -7.68, 1.28, -7.68]]],
"ACAD_ISO05W100": [
[0.0, (0.0, 0.0), (0.0, 12.8), [61.44, -7.68, 1.28, -7.68, 1.28, -7.68]]
],
"ACAD_ISO06W100": [
[0.0, (0.0, 0.0), (0.0, 12.8), [61.44, -7.68, 1.28, -7.68, 1.28, -16.64]],
[0.0, (0.0, 0.0), (0.0, 12.8), [-87.04, 1.28, -7.68]],
],
"ACAD_ISO07W100": [[0.0, (0.0, 0.0), (0.0, 12.8), [1.28, -7.68]]],
"ACAD_ISO08W100": [[0.0, (0.0, 0.0), (0.0, 12.8), [61.44, -7.68, 15.36, -7.68]]],
"ACAD_ISO09W100": [
[0.0, (0.0, 0.0), (0.0, 12.8), [61.44, -7.68, 15.36, -7.68, 15.36, -7.68]]
],
"ACAD_ISO10W100": [[0.0, (0.0, 0.0), (0.0, 12.8), [30.72, -7.68, 1.28, -7.68]]],
"ACAD_ISO11W100": [
[0.0, (0.0, 0.0), (0.0, 12.8), [30.72, -7.68, 30.72, -7.68, 1.28, -7.68]]
],
"ACAD_ISO12W100": [
[0.0, (0.0, 0.0), (0.0, 12.8), [30.72, -7.68, 1.28, -7.68, 1.28, -7.68]]
],
"ACAD_ISO13W100": [
[0.0, (0.0, 0.0), (0.0, 12.8), [30.72, -7.68, 30.72, -7.68, 1.28, -16.64]],
[0.0, (0.0, 0.0), (0.0, 12.8), [-85.76, 1.28, -7.68]],
],
"ACAD_ISO14W100": [
[0.0, (0.0, 0.0), (0.0, 12.8), [30.72, -7.68, 1.28, -7.68, 1.28, -16.64]],
[0.0, (0.0, 0.0), (0.0, 12.8), [-56.32, 1.28, -7.68]],
],
"ACAD_ISO15W100": [
[0.0, (0.0, 0.0), (0.0, 12.8), [30.72, -7.68, 30.72, -7.68, 1.28, -25.6]],
[0.0, (0.0, 0.0), (0.0, 12.8), [-85.76, 1.28, -7.68, 1.28, -7.68]],
],
"ANGLE": [
[0.0, (0.0, 0.0), (0.0, 7.04), [5.12, -1.92]],
[90.0, (0.0, 0.0), (-7.04, 0.0), [5.12, -1.92]],
],
"AR-B816": [
[0.0, (0.0, 0.0), (0.0, 20.48), []],
[90.0, (0.0, 0.0), (-20.48, 20.48), [20.48, -20.48]],
],
"AR-B816C": [
[0.0, (0.0, 0.0), (20.48, 20.48), [40.0, -0.96]],
[0.0, (-20.48, 0.96), (20.48, 20.48), [40.0, -0.96]],
[90.0, (0.0, 0.0), (-20.48, 20.48), [-21.44, 19.52]],
[90.0, (-0.96, 0.0), (-20.48, 20.48), [-21.44, 19.52]],
],
"AR-B88": [
[0.0, (0.0, 0.0), (0.0, 20.48), []],
[90.0, (0.0, 0.0), (-10.24, 20.48), [20.48, -20.48]],
],
"AR-BRELM": [
[0.0, (0.0, 0.0), (0.0, 68.2752), [97.6, -4.8]],
[0.0, (0.0, 28.8), (0.0, 68.2752), [97.6, -4.8]],
[0.0, (25.6, 34.1376), (0.0, 68.2752), [46.4, -4.8]],
[0.0, (25.6, 62.9376), (0.0, 68.2752), [46.4, -4.8]],
[90.0, (0.0, 0.0), (-102.4, 0.0), [28.8, -39.4752]],
[90.0, (-4.8, 0.0), (-102.4, 0.0), [28.8, -39.4752]],
[90.0, (25.6, 34.1376), (-51.2, 0.0), [28.8, -39.4752]],
[90.0, (20.8, 34.1376), (-51.2, 0.0), [28.8, -39.4752]],
],
"AR-BRSTD": [
[0.0, (0.0, 0.0), (0.0, 68.2752), []],
[90.0, (0.0, 0.0), (-102.4, 68.2752), [68.2752, -68.2752]],
],
"AR-CONC": [
[50.0, (0.0, 0.0), (36.7237, -3.2129), [3.84, -42.24]],
[355.0, (0.0, 0.0), (-7.1041, 38.5122), [3.072, -33.792]],
[100.4514, (3.0603, -0.2677), (29.6197, 35.2993), [3.2635, -35.8985]],
[46.1842, (0.0, 10.24), (54.6428, -8.4746), [5.76, -63.36]],
[96.6356, (4.5536, 9.5338), (47.8547, 49.8749), [4.8952, -53.8477]],
[351.1842, (0.0, 10.24), (47.8547, 49.8749), [4.608, -50.688]],
[21.0, (5.12, 7.68), (30.5616, -20.6141), [3.84, -42.24]],
[326.0, (5.12, 7.68), (12.4577, 37.1277), [3.072, -33.792]],
[71.4514, (7.6668, 5.9622), (43.0194, 16.5136), [3.2635, -35.8985]],
[
37.5,
(0.0, 0.0),
(0.6226, 17.0442),
[0.0, -33.3824, 0.0, -34.304, 0.0, -33.92],
],
[
7.5,
(0.0, 0.0),
(13.4692, 20.1939),
[0.0, -19.5584, 0.0, -32.6144, 0.0, -12.928],
],
[
327.5,
(-11.4176, 0.0),
(27.3317, -1.1548),
[0.0, -12.8, 0.0, -39.936, 0.0, -52.992],
],
[
317.5,
(-16.5376, 0.0),
(29.8591, 5.1254),
[0.0, -16.64, 0.0, -26.5216, 0.0, -37.632],
],
],
"AR-HBONE": [
[45.0, (0.0, 0.0), (0.0, 28.9631), [61.44, -20.48]],
[135.0, (14.4815, 14.4815), (0.0, 28.9631), [61.44, -20.48]],
],
"AR-PARQ1": [
[90.0, (0.0, 0.0), (-61.44, 61.44), [61.44, -61.44]],
[90.0, (10.24, 0.0), (-61.44, 61.44), [61.44, -61.44]],
[90.0, (20.48, 0.0), (-61.44, 61.44), [61.44, -61.44]],
[90.0, (30.72, 0.0), (-61.44, 61.44), [61.44, -61.44]],
[90.0, (40.96, 0.0), (-61.44, 61.44), [61.44, -61.44]],
[90.0, (51.2, 0.0), (-61.44, 61.44), [61.44, -61.44]],
[90.0, (61.44, 0.0), (-61.44, 61.44), [61.44, -61.44]],
[0.0, (0.0, 61.44), (61.44, -61.44), [61.44, -61.44]],
[0.0, (0.0, 71.68), (61.44, -61.44), [61.44, -61.44]],
[0.0, (0.0, 81.92), (61.44, -61.44), [61.44, -61.44]],
[0.0, (0.0, 92.16), (61.44, -61.44), [61.44, -61.44]],
[0.0, (0.0, 102.4), (61.44, -61.44), [61.44, -61.44]],
[0.0, (0.0, 112.64), (61.44, -61.44), [61.44, -61.44]],
[0.0, (0.0, 122.88), (61.44, -61.44), [61.44, -61.44]],
],
"AR-RROOF": [
[0.0, (0.0, 0.0), (56.32, 25.6), [384.0, -51.2, 128.0, -25.6]],
[0.0, (34.048, 12.8), (-25.6, 34.048), [76.8, -8.448, 153.6, -19.2]],
[0.0, (12.8, 21.76), (133.12, 17.152), [204.8, -35.84, 102.4, -25.6]],
],
"AR-RSHKE": [
[0.0, (0.0, 0.0), (65.28, 30.72), [15.36, -12.8, 17.92, -7.68, 23.04, -10.24]],
[0.0, (15.36, 1.28), (65.28, 30.72), [12.8, -48.64, 10.24, -15.36]],
[0.0, (46.08, -1.92), (65.28, 30.72), [7.68, -79.36]],
[90.0, (0.0, 0.0), (-21.76, 30.72), [29.44, -93.44]],
[90.0, (15.36, 0.0), (-21.76, 30.72), [28.8, -94.08]],
[90.0, (28.16, 0.0), (-21.76, 30.72), [26.88, -96.0]],
[90.0, (46.08, -1.92), (-21.76, 30.72), [29.44, -93.44]],
[90.0, (53.76, -1.92), (-21.76, 30.72), [29.44, -93.44]],
[90.0, (76.8, 0.0), (-21.76, 30.72), [28.16, -94.72]],
],
"AR-SAND": [
[37.5, (0.0, 0.0), (-1.6126, 49.3267), [0.0, -38.912, 0.0, -43.52, 0.0, -41.6]],
[
7.5,
(0.0, 0.0),
(45.3063, 72.2469),
[0.0, -20.992, 0.0, -35.072, 0.0, -13.44],
],
[
327.5,
(-31.488, 0.0),
(79.722, 0.1449),
[0.0, -12.8, 0.0, -46.08, 0.0, -60.16],
],
[
317.5,
(-31.488, 0.0),
(76.9568, 22.4685),
[0.0, -6.4, 0.0, -30.208, 0.0, -34.56],
],
],
"BOX": [
[90.0, (0.0, 0.0), (-25.6, 0.0), []],
[90.0, (6.4, 0.0), (-25.6, 0.0), []],
[0.0, (0.0, 0.0), (0.0, 25.6), [-6.4, 6.4]],
[0.0, (0.0, 6.4), (0.0, 25.6), [-6.4, 6.4]],
[0.0, (0.0, 12.8), (0.0, 25.6), [6.4, -6.4]],
[0.0, (0.0, 19.2), (0.0, 25.6), [6.4, -6.4]],
[90.0, (12.8, 0.0), (-25.6, 0.0), [6.4, -6.4]],
[90.0, (19.2, 0.0), (-25.6, 0.0), [6.4, -6.4]],
],
"BRASS": [
[0.0, (0.0, 0.0), (0.0, 6.4), []],
[0.0, (0.0, 3.2), (0.0, 6.4), [3.2, -1.6]],
],
"BRICK": [
[0.0, (0.0, 0.0), (0.0, 6.4), []],
[90.0, (0.0, 0.0), (-12.8, 0.0), [6.4, -6.4]],
[90.0, (6.4, 0.0), (-12.8, 0.0), [-6.4, 6.4]],
],
"BRSTONE": [
[0.0, (0.0, 0.0), (0.0, 8.448), []],
[90.0, (23.04, 0.0), (-12.8, 8.448), [8.448, -8.448]],
[90.0, (20.48, 0.0), (-12.8, 8.448), [8.448, -8.448]],
[0.0, (23.04, 1.408), (12.8, 8.448), [-23.04, 2.56]],
[0.0, (23.04, 2.816), (12.8, 8.448), [-23.04, 2.56]],
[0.0, (23.04, 4.224), (12.8, 8.448), [-23.04, 2.56]],
[0.0, (23.04, 5.632), (12.8, 8.448), [-23.04, 2.56]],
[0.0, (23.04, 7.04), (12.8, 8.448), [-23.04, 2.56]],
],
"CLAY": [
[0.0, (0.0, 0.0), (0.0, 4.8), []],
[0.0, (0.0, 0.8), (0.0, 4.8), []],
[0.0, (0.0, 1.6), (0.0, 4.8), []],
[0.0, (0.0, 3.2), (0.0, 4.8), [4.8, -3.2]],
],
"CORK": [
[0.0, (0.0, 0.0), (0.0, 3.2), []],
[135.0, (1.6, -1.6), (-6.4, -6.4), [4.5255, -4.5255]],
[135.0, (2.4, -1.6), (-6.4, -6.4), [4.5255, -4.5255]],
[135.0, (3.2, -1.6), (-6.4, -6.4), [4.5255, -4.5255]],
],
"CROSS": [
[0.0, (0.0, 0.0), (6.4, 6.4), [3.2, -9.6]],
[90.0, (1.6, -1.6), (-6.4, 6.4), [3.2, -9.6]],
],
"DASH": [[0.0, (0.0, 0.0), (3.2, 3.2), [3.2, -3.2]]],
"DOLMIT": [
[0.0, (0.0, 0.0), (0.0, 6.4), []],
[45.0, (0.0, 0.0), (-12.8, 12.8), [9.051, -18.1019]],
],
"DOTS": [[0.0, (0.0, 0.0), (0.8, 1.6), [0.0, -1.6]]],
"EARTH": [
[0.0, (0.0, 0.0), (6.4, 6.4), [6.4, -6.4]],
[0.0, (0.0, 2.4), (6.4, 6.4), [6.4, -6.4]],
[0.0, (0.0, 4.8), (6.4, 6.4), [6.4, -6.4]],
[90.0, (0.8, 5.6), (-6.4, 6.4), [6.4, -6.4]],
[90.0, (3.2, 5.6), (-6.4, 6.4), [6.4, -6.4]],
[90.0, (5.6, 5.6), (-6.4, 6.4), [6.4, -6.4]],
],
"ESCHER": [
[60.0, (0.0, 0.0), (-30.72, -0.0), [28.16, -2.56]],
[180.0, (0.0, 0.0), (15.36, -26.6043), [28.16, -2.56]],
[300.0, (0.0, 0.0), (30.72, -0.0), [28.16, -2.56]],
[60.0, (2.56, 0.0), (-30.72, -0.0), [5.12, -25.6]],
[300.0, (2.56, 0.0), (30.72, -0.0), [5.12, -25.6]],
[60.0, (-1.28, 2.217), (-30.72, -0.0), [5.12, -25.6]],
[180.0, (-1.28, 2.217), (15.36, -26.6043), [5.12, -25.6]],
[300.0, (-1.28, -2.217), (30.72, -0.0), [5.12, -25.6]],
[180.0, (-1.28, -2.217), (15.36, -26.6043), [5.12, -25.6]],
[60.0, (-10.24, 0.0), (-30.72, -0.0), [5.12, -25.6]],
[300.0, (-10.24, 0.0), (30.72, -0.0), [5.12, -25.6]],
[60.0, (5.12, -8.8681), (-30.72, -0.0), [5.12, -25.6]],
[180.0, (5.12, -8.8681), (15.36, -26.6043), [5.12, -25.6]],
[300.0, (5.12, 8.8681), (30.72, -0.0), [5.12, -25.6]],
[180.0, (5.12, 8.8681), (15.36, -26.6043), [5.12, -25.6]],
[0.0, (5.12, 4.4341), (-15.36, 26.6043), [17.92, -12.8]],
[0.0, (5.12, -4.4341), (-15.36, 26.6043), [17.92, -12.8]],
[120.0, (1.28, 6.6511), (-30.72, 0.0), [17.92, -12.8]],
[120.0, (-6.4, 2.217), (-30.72, 0.0), [17.92, -12.8]],
[240.0, (-6.4, -2.217), (15.36, -26.6043), [17.92, -12.8]],
[240.0, (1.28, -6.6511), (15.36, -26.6043), [17.92, -12.8]],
],
"FLEX": [
[0.0, (0.0, 0.0), (0.0, 6.4), [6.4, -6.4]],
[45.0, (6.4, 0.0), (0.0, 6.4), [1.6, -5.851, 1.6, -9.051]],
],
"GOST_GLASS": [
[45.0, (0.0, 0.0), (21.7223, -0.0), [12.8, -17.92]],
[45.0, (5.4306, 0.0), (21.7223, -0.0), [5.12, -25.6]],
[45.0, (0.0, 5.4306), (21.7223, -0.0), [5.12, -25.6]],
],
"GOST_WOOD": [
[90.0, (0.0, 0.0), (30.72, -0.0), [51.2, -10.24]],
[90.0, (10.24, -10.24), (30.72, -0.0), [30.72, -7.68, 15.36, -7.68]],
[90.0, (20.48, -25.6), (30.72, -0.0), [51.2, -10.24]],
],
"GOST_GROUND": [
[45.0, (0.0, 0.0), (72.4077, -0.0), [102.4]],
[45.0, (15.36, 0.0), (72.4077, -0.0), [102.4]],
[45.0, (30.72, 0.0), (72.4077, -0.0), [102.4]],
],
"GRASS": [
[90.0, (0.0, 0.0), (-18.1019, 18.1019), [4.8, -31.4039]],
[45.0, (0.0, 0.0), (-18.1019, 18.1019), [4.8, -20.8]],
[135.0, (0.0, 0.0), (-18.1019, -18.1019), [4.8, -20.8]],
],
"GRATE": [[0.0, (0.0, 0.0), (0.0, 0.8), []], [90.0, (0.0, 0.0), (-3.2, 0.0), []]],
"GRAVEL": [
[228.0128, (18.432, 25.6), (-204.8, -230.4), [3.4441, -340.9687]],
[184.9697, (16.128, 23.04), (307.2, 25.6), [5.9102, -585.1117]],
[132.5104, (10.24, 22.528), (256.0, -281.6), [4.1674, -412.5704]],
[267.2737, (0.256, 16.128), (25.6, 512.0), [5.3821, -532.8271]],
[292.8337, (0.0, 10.752), (-128.0, 307.2), [5.2776, -522.48]],
[357.2737, (2.048, 5.888), (-512.0, 25.6), [5.3821, -532.8271]],
[37.6942, (7.424, 5.632), (-332.8, -256.0), [7.1175, -704.6361]],
[72.2553, (13.056, 9.984), (179.2, 563.2), [6.7197, -665.2498]],
[121.4296, (15.104, 16.384), (-204.8, 332.8), [5.4003, -534.6323]],
[175.2364, (12.288, 20.992), (281.6, -25.6), [6.1653, -302.0995]],
[222.3974, (6.144, 21.504), (-307.2, -281.6), [7.9731, -789.3344]],
[138.8141, (25.6, 15.872), (-179.2, 153.6), [2.7213, -269.4104]],
[171.4692, (23.552, 17.664), (332.8, -51.2), [5.1773, -512.5507]],
[225.0, (18.432, 18.432), (-0.0, -25.6), [3.6204, -32.5835]],
[203.1986, (16.64, 21.504), (128.0, 51.2), [1.9496, -193.0141]],
[291.8014, (14.848, 20.736), (-25.6, 76.8), [2.7572, -135.103]],
[30.9638, (15.872, 18.176), (76.8, 51.2), [4.4782, -144.7942]],
[161.5651, (19.712, 20.48), (51.2, -25.6), [3.2382, -77.7161]],
[16.3895, (0.0, 20.736), (256.0, 76.8), [4.5363, -449.0968]],
[70.3462, (4.352, 22.016), (-102.4, -281.6), [3.8057, -376.7656]],
[293.1986, (19.712, 25.6), (-51.2, 128.0), [3.8993, -191.0645]],
[343.6105, (21.248, 22.016), (-256.0, 76.8), [4.5363, -449.0968]],
[339.444, (0.0, 4.864), (-128.0, 51.2), [4.3745, -214.352]],
[294.7751, (4.096, 3.328), (-128.0, 281.6), [3.6654, -362.8709]],
[66.8014, (19.968, 0.0), (51.2, 128.0), [3.8993, -191.0645]],
[17.354, (21.504, 3.584), (-332.8, -102.4), [4.2914, -424.8428]],
[69.444, (7.424, 0.0), (-51.2, -128.0), [2.1873, -216.5392]],
[101.3099, (18.432, 0.0), (-25.6, 102.4), [1.3053, -129.2296]],
[165.9638, (18.176, 1.28), (76.8, -25.6), [5.2776, -100.2739]],
[186.009, (13.056, 2.56), (256.0, 25.6), [4.8909, -484.1964]],
[303.6901, (15.872, 15.872), (-25.6, 51.2), [3.6921, -88.61]],
[353.1572, (17.92, 12.8), (435.2, -51.2), [6.4459, -638.1456]],
[60.9454, (24.32, 12.032), (-102.4, -179.2), [2.6357, -260.9325]],
[90.0, (25.6, 14.336), (-25.6, 25.6), [1.536, -24.064]],
[120.2564, (12.544, 3.328), (102.4, -179.2), [3.5565, -352.0901]],
[48.0128, (10.752, 6.4), (204.8, 230.4), [6.8882, -337.5245]],
[0.0, (15.36, 11.52), (25.6, 25.6), [6.656, -18.944]],
[325.3048, (22.016, 11.52), (256.0, -179.2), [4.0477, -400.7238]],
[254.0546, (25.344, 9.216), (-25.6, -102.4), [3.7274, -182.6434]],
[207.646, (24.32, 5.632), (-486.4, -256.0), [6.0689, -600.8185]],
[175.4261, (18.944, 2.816), (-332.8, 25.6), [6.4205, -635.6243]],
],
"HEX": [
[0.0, (0.0, 0.0), (0.0, 5.5426), [3.2, -6.4]],
[120.0, (0.0, 0.0), (-4.8, -2.7713), [3.2, -6.4]],
[60.0, (3.2, 0.0), (-4.8, 2.7713), [3.2, -6.4]],
],
"HONEY": [
[0.0, (0.0, 0.0), (4.8, 2.7713), [3.2, -6.4]],
[120.0, (0.0, 0.0), (-4.8, 2.7713), [3.2, -6.4]],
[60.0, (0.0, 0.0), (0.0, 5.5426), [-6.4, 3.2]],
],
"HOUND": [
[0.0, (0.0, 0.0), (6.4, 1.6), [25.6, -12.8]],
[90.0, (0.0, 0.0), (-1.6, -6.4), [25.6, -12.8]],
],
"INSUL": [
[0.0, (0.0, 0.0), (0.0, 9.6), []],
[0.0, (0.0, 3.2), (0.0, 9.6), [3.2, -3.2]],
[0.0, (0.0, 6.4), (0.0, 9.6), [3.2, -3.2]],
],
"LINE": [[0.0, (0.0, 0.0), (0.0, 3.2), []]],
"MUDST": [[0.0, (0.0, 0.0), (12.8, 6.4), [6.4, -6.4, 0.0, -6.4, 0.0, -6.4]]],
"NET": [[0.0, (0.0, 0.0), (0.0, 3.2), []], [90.0, (0.0, 0.0), (-3.2, 0.0), []]],
"NET3": [
[0.0, (0.0, 0.0), (0.0, 3.2), []],
[60.0, (0.0, 0.0), (-2.7713, 1.6), []],
[120.0, (0.0, 0.0), (-2.7713, -1.6), []],
],
"PLAST": [
[0.0, (0.0, 0.0), (0.0, 6.4), []],
[0.0, (0.0, 0.8), (0.0, 6.4), []],
[0.0, (0.0, 1.6), (0.0, 6.4), []],
],
"PLASTI": [
[0.0, (0.0, 0.0), (0.0, 6.4), []],
[0.0, (0.0, 0.8), (0.0, 6.4), []],
[0.0, (0.0, 1.6), (0.0, 6.4), []],
[0.0, (0.0, 4.0), (0.0, 6.4), []],
],
"SACNCR": [
[45.0, (0.0, 0.0), (-1.6971, 1.6971), []],
[45.0, (1.6971, 0.0), (-1.6971, 1.6971), [0.0, -2.4]],
],
"SQUARE": [
[0.0, (0.0, 0.0), (0.0, 3.2), [3.2, -3.2]],
[90.0, (0.0, 0.0), (-3.2, 0.0), [3.2, -3.2]],
],
"STARS": [
[0.0, (0.0, 0.0), (0.0, 5.5426), [3.2, -3.2]],
[60.0, (0.0, 0.0), (-4.8, 2.7713), [3.2, -3.2]],
[120.0, (1.6, 2.7713), (-4.8, -2.7713), [3.2, -3.2]],
],
"STEEL": [
[45.0, (0.0, 0.0), (-2.2627, 2.2627), []],
[45.0, (0.0, 1.6), (-2.2627, 2.2627), []],
],
"SWAMP": [
[0.0, (0.0, 0.0), (12.8, 22.1703), [3.2, -22.4]],
[90.0, (1.6, 0.0), (-12.8, 22.1703), [1.6, -42.7405]],
[90.0, (2.0, 0.0), (-12.8, 22.1703), [1.28, -43.0605]],
[90.0, (1.2, 0.0), (-12.8, 22.1703), [1.28, -43.0605]],
[60.0, (2.4, 0.0), (-12.8, 22.1703), [1.024, -24.576]],
[120.0, (0.8, 0.0), (-25.6, 0.0), [1.024, -24.576]],
],
"TRANS": [
[0.0, (0.0, 0.0), (0.0, 6.4), []],
[0.0, (0.0, 3.2), (0.0, 6.4), [3.2, -3.2]],
],
"TRIANG": [
[60.0, (0.0, 0.0), (-4.8, 8.3138), [4.8, -4.8]],
[120.0, (0.0, 0.0), (-9.6, 0.0), [4.8, -4.8]],
[0.0, (-2.4, 4.1569), (4.8, 8.3138), [4.8, -4.8]],
],
"ZIGZAG": [
[0.0, (0.0, 0.0), (3.2, 3.2), [3.2, -3.2]],
[90.0, (3.2, 0.0), (-3.2, 3.2), [3.2, -3.2]],
],
}
def load(old_pattern=None):
    """Return the predefined hatch pattern table.

    Args:
        old_pattern: if not ``None``, force old (truthy) or new (falsy)
            pattern scaling and persist that choice in the global options;
            if ``None``, follow the current option setting.

    Returns:
        ``PATTERN_OLD`` or ``PATTERN_NEW`` depending on the scaling mode.
    """
    from ezdxf.options import options
    if old_pattern is None:
        legacy = options.use_old_predefined_pattern_scaling
    else:
        legacy = bool(old_pattern)
        options.use_old_predefined_pattern_scaling = legacy
    return PATTERN_OLD if legacy else PATTERN_NEW
def scale_pattern(pattern, factor: float = 1, angle: float = 0, ndigits: int = 4):
    """Return a copy of ``pattern`` scaled by ``factor`` and rotated by ``angle``.

    Each pattern line is ``[angle, base_point, offset, dash_lengths]``; all
    coordinates and dash lengths are multiplied by ``factor`` and rounded to
    ``ndigits``.  A non-zero ``angle`` (degrees) rotates base point and offset
    and shifts the line angle, normalized into [0, 360).
    """

    def transformed(line):
        rotation, base, offset, dashes = line
        if angle:
            base = Vec2(base).rotate_deg(angle)
            offset = Vec2(offset).rotate_deg(angle)
            rotation = (rotation + angle) % 360.0
        return [
            round(rotation, ndigits),
            tuple(round(value * factor, ndigits) for value in base),
            tuple(round(value * factor, ndigits) for value in offset),
            [round(value * factor, ndigits) for value in dashes],
        ]

    return [transformed(line) for line in pattern]
def scale_all(pattern: dict, factor: float = 1, angle: float = 0, ndigits: int = 4):
    """Apply scale_pattern() to every entry of a name -> pattern mapping.

    Returns a new dict; the input mapping is not modified.
    """
    scaled = {}
    for name, definition in pattern.items():
        scaled[name] = scale_pattern(definition, factor, angle, ndigits)
    return scaled
PATTERN_OLD = scale_all(PATTERN_NEW, factor=0.03906836964688205)
| 43.299349 | 90 | 0.38475 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 816 | 0.04088 |
92460118208d7c2379de4fa4f0cbfe2ad3d68845 | 223 | py | Python | Cogs/Nullify.py | TheMasterGhost/CorpBot | 3133d5b7fdfef09ac4b75fb42d91628b86d94ac0 | [
"MIT"
] | null | null | null | Cogs/Nullify.py | TheMasterGhost/CorpBot | 3133d5b7fdfef09ac4b75fb42d91628b86d94ac0 | [
"MIT"
] | null | null | null | Cogs/Nullify.py | TheMasterGhost/CorpBot | 3133d5b7fdfef09ac4b75fb42d91628b86d94ac0 | [
"MIT"
] | null | null | null | def clean(string):
# A helper script to strip out @here and @everyone mentions
zerospace = ""
return string.replace("@everyone", "@{}everyone".format(zerospace)).replace("@here", "@{}here".format(zerospace)) | 55.75 | 117 | 0.668161 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 105 | 0.466667 |
9246db381d9f89a88ccf848b69e719ab81150613 | 1,763 | py | Python | src/grab_stations.py | davis68/capstone-python-intermediate-forecast | 4f120fcb3389b84227a40ec572dd0042d00a736b | [
"CC-BY-4.0",
"MIT"
] | null | null | null | src/grab_stations.py | davis68/capstone-python-intermediate-forecast | 4f120fcb3389b84227a40ec572dd0042d00a736b | [
"CC-BY-4.0",
"MIT"
] | null | null | null | src/grab_stations.py | davis68/capstone-python-intermediate-forecast | 4f120fcb3389b84227a40ec572dd0042d00a736b | [
"CC-BY-4.0",
"MIT"
] | null | null | null | #!/usr/bin/env python3.3
import requests
def grab_website_data():
    """Download the NOAA GFS LAMP station info page and return its HTML text."""
    url = 'http://www.nws.noaa.gov/mdl/gfslamp/docs/stations_info.shtml'
    return requests.get(url).text
def extract_section(text):
    """Slice the Illinois station table out of the page HTML.

    The IL data sits inside the PRE block containing the literal marker
    ' IL ' (case and surrounding spaces significant).  We find the marker,
    then the nearest 'PRE' tag text before and after it, and return the
    text between the tags.  Not robust, but adequate for this fixed page.
    """
    marker = text.find(' IL ')
    # Scan backwards (at most 200 chars) for the opening tag, forward for
    # the closing one.
    opening = text.rfind('PRE', marker - 200, marker)
    closing = text.find('PRE', marker)
    # Skip past 'PRE>' at the front; stop before '</PRE' at the back.
    return text[opening + 4:closing - 2]
def parse_station_line(line):
    """Parse one fixed-width station row into (station_id, latitude, longitude).

    Column positions are fixed in the source table, so fields are extracted
    by slicing.  The hemisphere letters (N/W) are not read: all stations are
    assumed north/west, hence the longitude is negated.

    Raises ValueError if the latitude/longitude columns are not numeric.
    """
    # Column slices; the end index of each is an exclusive bound.
    r_stn = (5, 9)
    r_lat = (36, 41)
    r_lon = (46, 51)

    stn = line[r_stn[0]:r_stn[1]]
    lat = float(line[r_lat[0]:r_lat[1]])
    lon = -float(line[r_lon[0]:r_lon[1]])
    return stn, lat, lon
if __name__ == '__main__':
    text = grab_website_data()
    data = extract_section(text)
    for line in data.splitlines():
        # Best effort: header/blank lines inside the PRE block have no
        # numeric lat/lon columns, so skip any row that fails to parse.
        # (Narrowed from a bare ``except`` which also swallowed
        # KeyboardInterrupt/SystemExit.)
        try:
            stn, lat, lon = parse_station_line(line)
        except ValueError:
            continue
        print('%s\t%f\t%f' % (stn, lon, lat))
| 38.326087 | 80 | 0.650028 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 887 | 0.50312 |
924898f498fe7a8a90741bbddcf129930d8751be | 11,617 | py | Python | tarpn/netrom/router.py | rxt1077/tarpn-node-controller | ffbe1d78fbd1c10e891b3339b50002e5233e21ad | [
"MIT"
] | null | null | null | tarpn/netrom/router.py | rxt1077/tarpn-node-controller | ffbe1d78fbd1c10e891b3339b50002e5233e21ad | [
"MIT"
] | null | null | null | tarpn/netrom/router.py | rxt1077/tarpn-node-controller | ffbe1d78fbd1c10e891b3339b50002e5233e21ad | [
"MIT"
] | null | null | null | import datetime
import os
from dataclasses import dataclass, field
from operator import attrgetter
from typing import List, Dict, Optional, cast, Set
from tarpn.ax25 import AX25Call
from tarpn.netrom import NetRomPacket, NetRomNodes, NodeDestination
from tarpn.network import L3RoutingTable, L3Address
import tarpn.network.netrom_l3 as l3
from tarpn.util import json_dump, json_load
@dataclass
class Neighbor:
    """A directly heard NET/ROM neighbor on a given port."""
    call: AX25Call
    port: int
    quality: int

    def __hash__(self):
        # Identity is the callsign alone; port and quality are mutable details.
        return hash(self.call)

    def to_safe_dict(self):
        """Serialize into a JSON-safe dict (callsign as string)."""
        return dict(call=str(self.call), port=self.port, quality=self.quality)

    @classmethod
    def from_safe_dict(cls, d):
        """Rebuild a Neighbor from the dict produced by to_safe_dict()."""
        return cls(
            call=AX25Call.parse(d["call"]),
            port=d["port"],
            quality=d["quality"],
        )
@dataclass
class Route:
    """One candidate route to ``dest``, as advertised by ``neighbor``."""
    neighbor: AX25Call
    dest: AX25Call
    next_hop: AX25Call
    quality: int
    obsolescence: int

    def to_safe_dict(self):
        """Serialize into a JSON-safe dict (callsigns as strings)."""
        return dict(
            neighbor=str(self.neighbor),
            destination=str(self.dest),
            next_hop=str(self.next_hop),
            quality=self.quality,
            obsolescence=self.obsolescence,
        )

    @classmethod
    def from_safe_dict(cls, d):
        """Rebuild a Route from the dict produced by to_safe_dict()."""
        return cls(
            neighbor=AX25Call.parse(d["neighbor"]),
            dest=AX25Call.parse(d["destination"]),
            next_hop=AX25Call.parse(d["next_hop"]),
            quality=d["quality"],
            obsolescence=d["obsolescence"],
        )

    def __hash__(self):
        # A route is keyed by the (advertising neighbor, destination) pair.
        return hash((self.neighbor, self.dest))
@dataclass
class Destination:
    """A reachable NET/ROM node plus every route we know that leads to it."""
    node_call: AX25Call
    node_alias: str
    # Routes keyed by the advertising neighbor's callsign (as string).
    neighbor_map: Dict[str, Route] = field(default_factory=dict, compare=False, hash=False)
    # Frozen destinations (e.g. local application routes) are exempt from pruning.
    freeze: bool = False

    def __hash__(self):
        return hash((self.node_call, self.node_alias))

    def to_safe_dict(self):
        """Serialize into a JSON-safe dict, including all known routes."""
        return dict(
            call=str(self.node_call),
            alias=self.node_alias,
            freeze=self.freeze,
            routes=[route.to_safe_dict() for route in self.neighbor_map.values()],
        )

    @classmethod
    def from_safe_dict(cls, d):
        """Rebuild a Destination (and its routes) from to_safe_dict() output."""
        dest = cls(node_call=AX25Call.parse(d["call"]), node_alias=d["alias"], freeze=d["freeze"])
        for route_dict in d["routes"]:
            dest.neighbor_map[route_dict["neighbor"]] = Route.from_safe_dict(route_dict)
        return dest

    def sorted_neighbors(self):
        """Return all routes ordered best-first (highest quality first)."""
        return sorted(self.neighbor_map.values(), key=lambda route: route.quality, reverse=True)
@dataclass
class NetRomRoutingTable(L3RoutingTable):
    """NET/ROM routing table built from received NODES broadcasts.

    Tracks direct neighbors, the destinations reachable through them, and
    per-route quality/obsolescence counters.  Supports JSON persistence
    (save/load) and generation of our own NODES broadcast (get_nodes).
    None of the mutating methods are thread-safe.
    """
    node_alias: str
    # Timestamp of the last mutation; also part of this object's hash.
    updated_at: datetime.datetime = field(default_factory=datetime.datetime.now)
    # Callsigns that belong to this node; used to suppress trivial loops.
    our_calls: Set[AX25Call] = field(default_factory=set, compare=False, hash=False)
    # Neighbors is a map of direct neighbors we have, i.e., who we have heard NODES from
    neighbors: Dict[str, Neighbor] = field(default_factory=dict, compare=False, hash=False)
    # Destinations is the content of the NODES table, what routes exist to other nodes through which neighbors
    destinations: Dict[str, Destination] = field(default_factory=dict, compare=False, hash=False)
    # TODO config all these
    default_obs: int = 100
    default_quality: int = 255
    min_quality: int = 50
    min_obs: int = 4

    def __repr__(self) -> str:
        """Human-readable dump of neighbors and destinations, one per line."""
        s = "Neighbors:\n"
        for neighbor in self.neighbors.values():
            s += f"\t{neighbor}\n"
        s += "Destinations:\n"
        for dest in self.destinations.values():
            s += f"\t{dest}\n"
        return s.strip()

    def __hash__(self):
        return hash((self.node_alias, self.updated_at))

    def save(self, filename: str) -> None:
        """Persist the table to ``filename`` as JSON."""
        # NOTE: the loop variable ``d`` in the destinations comprehension
        # shadows the outer dict ``d`` (harmless, comprehension scope).
        d = {
            "node_alias": self.node_alias,
            "updated_at": self.updated_at.isoformat(),
            "our_calls": [str(call) for call in self.our_calls],
            "neighbors": [n.to_safe_dict() for n in self.neighbors.values()],
            "destinations": [d.to_safe_dict() for d in self.destinations.values()]
        }
        json_dump(filename, d)

    @classmethod
    def load(cls, filename: str, node_alias: str):
        """Load a table from ``filename``, or return a fresh empty table
        with ``node_alias`` if the file does not exist."""
        if not os.path.exists(filename):
            return NetRomRoutingTable(node_alias=node_alias, updated_at=datetime.datetime.now())
        d = json_load(filename)
        return NetRomRoutingTable(node_alias=d["node_alias"],
                           updated_at=datetime.datetime.fromisoformat(d["updated_at"]),
                           our_calls={AX25Call.parse(call) for call in d["our_calls"]},
                           neighbors={n_dict["call"]: Neighbor.from_safe_dict(n_dict) for n_dict in d["neighbors"]},
                           destinations={d_dict["call"]: Destination.from_safe_dict(d_dict) for d_dict in d["destinations"]})

    def route(self, packet: NetRomPacket) -> List[AX25Call]:
        """
        If a packet's destination is a known neighbor, route to it. Otherwise look up the route with the highest
        quality and send the packet to the neighbor which provided that route
        :param packet:
        :return: list of neighbor callsign's in sorted order of route quality
        """
        # NOTE(review): ``self.neighbors`` is keyed by str(call); this
        # membership test compares an AX25Call against string keys and only
        # works if AX25Call.__eq__/__hash__ interoperate with str — confirm.
        if packet.dest in self.neighbors:
            return [packet.dest]
        else:
            dest = self.destinations.get(str(packet.dest))
            if dest:
                return [n.neighbor for n in dest.sorted_neighbors()]
            else:
                return []

    def route1(self, destination: L3Address) -> Optional[int]:
        """Resolve a NET/ROM address to the port of the best next-hop
        neighbor, or None if the address family is wrong or no route exists."""
        if not isinstance(destination, l3.NetRomAddress):
            print(f"Wrong address family, expected NET/ROM got {destination.__class__}")
            return None
        netrom_dest = cast(l3.NetRomAddress, destination)
        packet_dest = AX25Call(netrom_dest.callsign, netrom_dest.ssid)
        # TODO handle alias here
        # NOTE(review): same AX25Call-vs-str membership caveat as route().
        if packet_dest in self.neighbors:
            return self.neighbors.get(str(packet_dest)).port
        else:
            dest = self.destinations.get(str(packet_dest))
            if dest:
                neighbors = dest.sorted_neighbors()
                if len(neighbors) > 0:
                    # Best-quality route; forward on the port of its neighbor.
                    return self.neighbors.get(str(neighbors[0].neighbor)).port
                else:
                    return None
            else:
                return None

    def listen_for_address(self, app_call: AX25Call, app_alias: str):
        """Register a local application callsign/alias as a frozen
        destination reachable through each of our own calls (quality 95)."""
        app_routes = {}
        for our_call in self.our_calls:
            app_routes[str(our_call)] = Route(our_call, app_call, our_call, 95, 100)
        self.destinations[str(app_call)] = Destination(app_call, app_alias, app_routes, True)

    def refresh_route(self, heard_from: str, node: str):
        """
        Refresh the obsolescence for a route
        """
        if node in self.destinations:
            route = self.destinations[node].neighbor_map.get(heard_from)
            if route is not None:
                # Reset the countdown; the route was just heard again.
                route.obsolescence = self.default_obs
            else:
                print(f"Cannot refresh route to {node} via {heard_from}. {heard_from} is not in our neighbor map.")
        else:
            print(f"Cannot refresh route to {node}. It is not in our destination map.")

    def update_routes(self, heard_from: AX25Call, heard_on_port: int, nodes: NetRomNodes):
        """
        Update the routing table with a NODES broadcast.
        This method is not thread-safe.
        """
        # Get or create the neighbor and destination
        neighbor = self.neighbors.get(str(heard_from), Neighbor(heard_from, heard_on_port, self.default_quality))
        self.neighbors[str(heard_from)] = neighbor
        # Add direct route to whoever sent the NODES
        dest = self.destinations.get(str(heard_from), Destination(heard_from, nodes.sending_alias))
        dest.neighbor_map[str(heard_from)] = Route(heard_from, heard_from, heard_from,
                                                   self.default_quality, self.default_obs)
        self.destinations[str(heard_from)] = dest
        for destination in nodes.destinations:
            # Filter out ourselves
            route_quality = 0
            if destination.best_neighbor in self.our_calls:
                # Best neighbor is us, this is a "trivial loop", quality is zero
                continue
            else:
                # Otherwise compute this route's quality based on the NET/ROM spec
                route_quality = (destination.quality * neighbor.quality + 128.) / 256.
            # Only add routes which are above the minimum quality to begin with TODO check this logic
            if route_quality > self.min_quality:
                new_dest = self.destinations.get(str(destination.dest_node),
                                                 Destination(destination.dest_node, destination.dest_alias))
                new_route = new_dest.neighbor_map.get(
                    str(neighbor.call), Route(neighbor.call, destination.dest_node, destination.best_neighbor,
                                              int(route_quality), self.default_obs))
                # NOTE(review): quality is declared int but assigned the float
                # computed above (only the freshly created Route truncates) —
                # confirm whether truncation is intended here.
                new_route.quality = route_quality
                new_route.obsolescence = self.default_obs
                new_dest.neighbor_map[str(neighbor.call)] = new_route
                self.destinations[str(destination.dest_node)] = new_dest
            else:
                # print(f"Saw new route for {destination}, but quality was too low")
                pass
        self.updated_at = datetime.datetime.now()

    def prune_routes(self) -> None:
        """
        Prune any routes which we haven't heard about in a while.
        This method is not thread-safe.
        """
        # print("Pruning routes")
        # Iterate over copies because entries are deleted while walking.
        for call, destination in list(self.destinations.items()):
            if destination.freeze:
                # Don't prune frozen routes
                continue
            for neighbor, route in list(destination.neighbor_map.items()):
                route.obsolescence -= 1
                if route.obsolescence <= 0:
                    # print(f"Removing {neighbor} from {destination} neighbor's list")
                    del destination.neighbor_map[neighbor]
            if len(destination.neighbor_map.keys()) == 0:
                # print(f"No more routes to {call}, removing from routing table")
                del self.destinations[call]
                if call in self.neighbors.keys():
                    del self.neighbors[call]
        self.updated_at = datetime.datetime.now()

    def clear_routes(self) -> None:
        """Drop all learned neighbors and destinations."""
        self.destinations.clear()
        self.neighbors.clear()
        self.updated_at = datetime.datetime.now()

    def get_nodes(self) -> NetRomNodes:
        """Build our own NODES broadcast: for each destination, advertise the
        best route whose obsolescence is still at or above min_obs."""
        node_destinations = []
        for destination in self.destinations.values():
            # Otherwise find best neighbor route
            best_neighbor = None
            for neighbor in destination.sorted_neighbors():
                if neighbor.obsolescence >= self.min_obs:
                    best_neighbor = neighbor
                    break
                else:
                    # print(f"Not including {neighbor} in NODES, obsolescence below threshold")
                    pass
            if best_neighbor:
                node_destinations.append(NodeDestination(destination.node_call, destination.node_alias,
                                                         best_neighbor.next_hop, best_neighbor.quality))
            else:
                # print(f"No good neighbor was found for {destination}")
                pass
        return NetRomNodes(self.node_alias, node_destinations)
| 40.197232 | 132 | 0.608419 | 11,174 | 0.961866 | 0 | 0 | 11,218 | 0.965654 | 0 | 0 | 2,240 | 0.192821 |
924aac08eec6f6d80fd3a7f3fb5250d621f04816 | 162 | py | Python | tests/configuration/config.py | NHSDigital/karsten-ratelimit-test | f4eeccd999ef440d4a1ec4d0a71ffc187665877d | [
"MIT"
] | 1 | 2021-11-08T14:54:07.000Z | 2021-11-08T14:54:07.000Z | tests/configuration/config.py | NHSDigital/karsten-ratelimit-test | f4eeccd999ef440d4a1ec4d0a71ffc187665877d | [
"MIT"
] | 36 | 2020-06-24T10:55:10.000Z | 2022-02-11T10:11:07.000Z | tests/configuration/config.py | NHSDigital/public-data-catalogue | 7c385698a8db241cc9496a74bd41a1806bee2907 | [
"MIT"
] | 1 | 2021-04-11T07:32:37.000Z | 2021-04-11T07:32:37.000Z | from .environment import ENV
# Api Details
ENVIRONMENT = ENV["environment"]
BASE_URL = f"https://{ENVIRONMENT}.api.service.nhs.uk"
BASE_PATH = ENV["base_path"]
| 20.25 | 54 | 0.740741 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 80 | 0.493827 |
924ad870dca0b68ac369ce463d3a75f82ddca91b | 1,119 | py | Python | blacktape/pipeline.py | carascap/blacktape | 52e0b912f4c67899911d10d2d6e3770671db02fa | [
"MIT"
] | null | null | null | blacktape/pipeline.py | carascap/blacktape | 52e0b912f4c67899911d10d2d6e3770671db02fa | [
"MIT"
] | 1 | 2022-02-22T19:45:27.000Z | 2022-02-22T19:45:27.000Z | blacktape/pipeline.py | carascap/blacktape | 52e0b912f4c67899911d10d2d6e3770671db02fa | [
"MIT"
] | null | null | null | from concurrent.futures import ProcessPoolExecutor, as_completed
from typing import Iterable, Optional
from blacktape.lib import match_entities_in_text, match_pattern_in_text
from blacktape.util import worker_init
class Pipeline:
    """Thin context-manager wrapper around a ``ProcessPoolExecutor``.

    NER and regex matching jobs are submitted as futures; results are
    consumed lazily via :meth:`results` in completion order (not
    submission order).
    """
    def __init__(self, spacy_model: Optional[str] = None):
        # The model name is forwarded to every NER job; workers are
        # primed by ``worker_init`` when the pool spins up.
        self.spacy_model = spacy_model
        self.executor = ProcessPoolExecutor(initializer=worker_init)
        self.futures = []
    def __enter__(self):
        return self
    def __exit__(self, exc_type, exc_val, exc_tb):
        self.executor.shutdown()
    def results(self):
        """Yield each job's result as soon as it completes."""
        yield from (fut.result() for fut in as_completed(self.futures))
    def submit_ner_job(self, text: str, entity_types: Optional[Iterable[str]] = None):
        """Queue an entity-matching job for *text*."""
        fut = self.executor.submit(
            match_entities_in_text, text, self.spacy_model, entity_types
        )
        self.futures.append(fut)
    def submit_regex_job(self, text: str, pattern: str):
        """Queue a regex-matching job for *text* against *pattern*."""
        fut = self.executor.submit(match_pattern_in_text, text, pattern)
        self.futures.append(fut)
| 30.243243 | 87 | 0.688114 | 901 | 0.805183 | 102 | 0.091153 | 0 | 0 | 0 | 0 | 50 | 0.044683 |
924b525f72949c8b2846500dfb435617c66b0ba8 | 4,881 | py | Python | opencda/customize/core/sensing/localization/extented_kalman_filter.py | xst-666/OpenCDA | d69069bb617c8197c5f254891be255fc46a47a3a | [
"MIT"
] | 1 | 2021-07-17T02:34:19.000Z | 2021-07-17T02:34:19.000Z | opencda/customize/core/sensing/localization/extented_kalman_filter.py | xst-666/OpenCDA | d69069bb617c8197c5f254891be255fc46a47a3a | [
"MIT"
] | null | null | null | opencda/customize/core/sensing/localization/extented_kalman_filter.py | xst-666/OpenCDA | d69069bb617c8197c5f254891be255fc46a47a3a | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
"""
Use Extended Kalman Filter on GPS + IMU for better localization.
"""
# Author: Runsheng Xu <rxx3386@ucla.edu>
# License: MIT
import math
import numpy as np
class ExtentedKalmanFilter(object):
    """
    Extended Kalman Filter (EKF) fusing GNSS position/heading with an IMU
    yaw rate for vehicle localization.

    State vector is [x, y, yaw, v]^T (shape (4, 1)). The GNSS supplies the
    [x, y, yaw] observation; the control input is [v, imu_yaw_rate].

    Parameters
    -dt : float
        The step time for kalman filter calculation.
    Attributes
    -Q : numpy.array
        Predict (process-noise) state covariance, shape (4, 4).
    -R : numpy.array
        Observation covariance for the x, y position and yaw, shape (3, 3).
    -time_step : float
        The step time for kalman filter calculation.
    -xEst : numpy.array
        Estimated state [x, y, yaw, v]^T, shape (4, 1).
    -PEst : numpy.array
        Estimated state covariance, shape (4, 4).
    """
    def __init__(self, dt):
        # Process noise: per-component standard deviations, squared into
        # variances by the ** 2 below.
        self.Q = np.diag([
            0.2,  # std dev of location on x-axis
            0.2,  # std dev of location on y-axis
            np.deg2rad(0.1),  # std dev of yaw angle (0.1 degree in radians)
            0.001  # std dev of velocity
        ]) ** 2  # predict state covariance
        # Measurement noise for the GNSS observation [x, y, yaw].
        self.R = np.diag([0.5, 0.5, 0.2]) ** 2
        self.time_step = dt
        self.xEst = np.zeros((4, 1))  # state starts at the origin, at rest
        self.PEst = np.eye(4)         # initial covariance: identity
    def motion_model(self, x, u):
        """
        Predict the current position and yaw from the previous estimate
        (X = F * X_prev + B * u).

        Note the zero bottom row of F: the velocity state is not carried
        over but re-seeded each step from the measured velocity in ``u``
        (via the [1.0, 0.0] row of B).

        Args:
            -x (np.array): [x_prev, y_prev, yaw_prev, v_prev], shape: (4, 1).
            -u (np.array): [v_current, imu_yaw_rate], shape: (2, 1).

        Returns:
            x (np.array): predicted state, shape (4, 1).
        """
        F = np.array([[1.0, 0, 0, 0],
                      [0, 1.0, 0, 0],
                      [0, 0, 1.0, 0],
                      [0, 0, 0, 0]])
        # B projects the control input into the state: displacement along
        # the current heading, yaw increment, and the velocity itself.
        B = np.array([[self.time_step * math.cos(x[2, 0]), 0],
                      [self.time_step * math.sin(x[2, 0]), 0],
                      [0.0, self.time_step],
                      [1.0, 0.0]])
        x = F @ x + B @ u
        return x
    def jacob_f(self, x, u):
        """
        Jacobian of the motion model, evaluated at the current state.

        Args:
            -x (np.array): state [x, y, yaw, v], shape (4, 1).
            -u (np.array): control [v, imu_yaw_rate], shape (2, 1); only
                the velocity component is used.

        Returns:
            -jF (np.array): (4, 4) Jacobian of the motion model.
        """
        yaw = x[2, 0]
        v = u[0, 0]
        # Partial derivatives of the predicted (x, y) w.r.t. yaw and v.
        jF = np.array([
            [1.0, 0.0, -self.time_step * v * math.sin(yaw),
             self.time_step * math.cos(yaw)],
            [0.0, 1.0, self.time_step * v * math.cos(yaw),
             self.time_step * math.sin(yaw)],
            [0.0, 0.0, 1.0, 0.0],
            [0.0, 0.0, 0.0, 1.0]])
        return jF
    def observation_model(self, x):
        """
        Project the state vector into measurement space.

        Args:
            -x (np.array): [x, y, yaw, v], shape: (4, 1).

        Returns:
            -z (np.array): predicted measurement [x, y, yaw], shape (3, 1).
        """
        # H selects the observable components (position and yaw);
        # velocity is not directly observed by the GNSS.
        H = np.array([
            [1, 0, 0, 0],
            [0, 1, 0, 0],
            [0, 0, 1, 0]
        ])
        z = H @ x
        return z
    def run_step_init(self, x, y, heading, velocity):
        """
        Initialization of the state estimate.

        Args:
            -x (float): The x coordinate.
            -y (float): The y coordinate.
            -heading (float): The heading direction.
            -velocity (float): The velocity.
        """
        self.xEst[0] = x
        self.xEst[1] = y
        self.xEst[2] = heading
        self.xEst[3] = velocity
    def run_step(self, x, y, heading, velocity, yaw_rate_imu):
        """
        Apply one EKF predict/update cycle on the current measurement and
        the previous prediction. Mutates ``self.xEst`` and ``self.PEst``.

        Args:
            -x (float): x(esu) coordinate from
             gnss sensor at current timestamp.
            -y (float): y(esu) coordinate from
             gnss sensor at current timestamp.
            -heading (float): heading direction at current timestamp.
            -velocity (float): current speed.
            -yaw_rate_imu (float): yaw rate rad/s from IMU sensor.

        Returns:
            - xEST (tuple of float): the corrected x, y, heading,
              and velocity values.
        """
        # gps observation
        z = np.array([x, y, heading]).reshape(3, 1)
        # velocity and imu yaw rate
        u = np.array([velocity, yaw_rate_imu]).reshape(2, 1)
        # Predict step: propagate state and covariance.
        xPred = self.motion_model(self.xEst, u)
        jF = self.jacob_f(self.xEst, u)
        PPred = jF @ self.PEst @ jF.T + self.Q
        # Jacobian of the (linear) observation model.
        jH = np.array([
            [1, 0, 0, 0],
            [0, 1, 0, 0],
            [0, 0, 1, 0]])
        zPred = self.observation_model(xPred)
        y = z - zPred  # innovation (measurement residual)
        S = jH @ PPred @ jH.T + self.R   # innovation covariance
        K = PPred @ jH.T @ np.linalg.inv(S)  # Kalman gain
        self.xEst = xPred + K @ y
        self.PEst = (np.eye(len(self.xEst)) - K @ jH) @ PPred
        return self.xEst[0][0], \
            self.xEst[1][0], \
            self.xEst[2][0], \
            self.xEst[3][0]
| 27.576271 | 77 | 0.489039 | 4,693 | 0.961483 | 0 | 0 | 0 | 0 | 0 | 0 | 2,436 | 0.499078 |
924de0a0c74c357b33f982e135db16cb5de4bd6a | 546 | py | Python | BinarySearch/koko_eating_bananas.py | mishrakeshav/Competitive-Programming | b25dcfeec0fb9a9c71bf3a05644b619f4ca83dd2 | [
"MIT"
] | 2 | 2020-06-25T21:10:32.000Z | 2020-12-10T06:53:45.000Z | BinarySearch/koko_eating_bananas.py | mishrakeshav/Competitive-Programming | b25dcfeec0fb9a9c71bf3a05644b619f4ca83dd2 | [
"MIT"
] | null | null | null | BinarySearch/koko_eating_bananas.py | mishrakeshav/Competitive-Programming | b25dcfeec0fb9a9c71bf3a05644b619f4ca83dd2 | [
"MIT"
] | 3 | 2020-05-15T14:17:09.000Z | 2021-07-25T13:18:20.000Z | """
link : https://leetcode.com/problems/koko-eating-bananas/
"""
class Solution:
    def minEatingSpeed(self, piles: 'List[int]', h: int) -> int:
        """Return the minimum integer eating speed that finishes all
        banana piles within ``h`` hours (LeetCode 875, "Koko Eating
        Bananas").

        Binary-searches the answer space [1, max(piles)]: the predicate
        "can finish at speed val" is monotone in val, so the first speed
        satisfying it is the minimum.

        Args:
            piles: sizes of the banana piles (non-empty).
            h: total hours available (h >= len(piles) for a solution
               to exist).

        Returns:
            The minimal feasible eating speed.
        """
        # NOTE: the annotations are quoted because this file never
        # imports typing.List; an unquoted List[int] would raise
        # NameError when the class body is executed.
        def can_finish(val: int) -> bool:
            # Hours needed at speed `val`: ceil(pile / val) per pile,
            # done with integer arithmetic to avoid float error.
            hours = 0
            for pile in piles:
                hours += (pile + val - 1) // val
            return hours <= h

        left, right = 1, max(piles)
        while left < right:
            mid = (left + right) // 2
            if can_finish(mid):
                right = mid      # mid is feasible; answer is mid or smaller
            else:
                left = mid + 1   # mid is too slow; answer is strictly greater
        return left
| 26 | 62 | 0.430403 | 470 | 0.860806 | 0 | 0 | 0 | 0 | 0 | 0 | 65 | 0.119048 |
924dffc77f794977e8214ff14f2ae22012c1fb17 | 6,729 | py | Python | src/rgw/const.py | suryakumar1024/cortx-rgw-integration | 765d0625b6f15153cbf2c39b7018dfee95b5a2bd | [
"Apache-2.0"
] | null | null | null | src/rgw/const.py | suryakumar1024/cortx-rgw-integration | 765d0625b6f15153cbf2c39b7018dfee95b5a2bd | [
"Apache-2.0"
] | null | null | null | src/rgw/const.py | suryakumar1024/cortx-rgw-integration | 765d0625b6f15153cbf2c39b7018dfee95b5a2bd | [
"Apache-2.0"
] | null | null | null | #!/bin/env python3
# Copyright (c) 2021 Seagate Technology LLC and/or its Affiliates
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
# For any questions about this software or licensing,
# please email opensource@seagate.com or cortx-questions@seagate.com.
from enum import Enum
# Component identity and install layout.
COMPONENT_NAME = 'rgw'
COMPONENT_SVC_NAME = 'rgw_s3'
DECRYPTION_KEY = 'cortx'
SERVICE_NAME = f'{COMPONENT_NAME}_setup'  # rgw_setup
INSTALL_PATH = '/opt/seagate/cortx'
RGW_INSTALL_PATH = f'{INSTALL_PATH}/{COMPONENT_NAME}'
# TODO: Revisit after Motr delay issue resolved, seen while admin creation, for CORTX cluster with 15 nodes.
ADMIN_CREATION_TIMEOUT = 600
ADMIN_USER_CREATED = 'user_created'
DEFAULT_HTTP_PORT = '22751'
DEFAULT_HTTPS_PORT = '23001'
CONSUL_LOCK_KEY = f'component>{COMPONENT_NAME}>volatile>{COMPONENT_NAME}_lock'  # component>rgw>volatile>rgw_lock
CLUSTER_ID_KEY = 'cluster>id'
CONFSTORE_FILE_HANDLER = 'ini://'  # confstore uses 'ini' file handler to open any config file. e.g. ini://<filepath>
CHANGESET_URL = 'yaml:///etc/cortx/changeset.conf'
CONF_TMPL = f'{RGW_INSTALL_PATH}/conf/cortx_{COMPONENT_NAME}.conf'
LOGROTATE_TMPL = f'{RGW_INSTALL_PATH}/conf/{COMPONENT_NAME}.logrotate.tmpl'
CRON_LOGROTATE_TMPL = f'{RGW_INSTALL_PATH}/conf/logrotate.service.tmpl'
# e.g CONF_TMPL will be /opt/seagate/cortx/rgw/conf/cortx_rgw.conf
# e.g LOGROTATE_TMPL will be /opt/seagate/cortx/rgw/conf/rgw.logrotate.tmpl
RGW_CONF_FILE = f'cortx_{COMPONENT_NAME}.conf'
RELEASE_INFO_URL = f'yaml://{INSTALL_PATH}/RELEASE.INFO'
SUPPORTED_BACKEND_STORES = ['motr']
# e.g. RGW_CONF_FILE path will be cortx_rgw.conf
LOGROTATE_DIR = "/etc/logrotate.d"
LOGROTATE_CONF = f'{LOGROTATE_DIR}/radosgw'
FREQUENCY = 'hourly'
CRON_DIR = f'/etc/cron.{FREQUENCY}'
CRON_LOGROTATE = f'{CRON_DIR}/logrotate'
CRASHDUMP_DIR = '/var/lib/ceph/crash'
REQUIRED_RPMS = ['cortx-hare', 'cortx-py-utils', 'ceph-radosgw']
# Maps internal parameter names to the keys used in the ceph/rgw config file.
ADMIN_PARAMETERS = {'MOTR_ADMIN_FID': 'motr admin fid', 'MOTR_ADMIN_ENDPOINT': 'motr admin endpoint', 'RGW_FRONTENDS': 'rgw frontends'}
# CORTX cluster confstore keys
SVC_NAME_KEY = 'node>%s>components[%s]>services[0]'
NUM_COMPONENTS_KEY = 'node>%s>num_components'
COMPONENT_NAME_KEY = 'node>%s>components[%s]>name'
LOG_PATH_KEY = 'cortx>common>storage>log'
CONFIG_PATH_KEY = 'cortx>common>storage>config'
CLIENT_INSTANCE_NAME_KEY = 'cortx>motr>clients[%s]>name'
CLIENT_INSTANCE_NUMBER_KEY = 'cortx>motr>clients[%s]>num_instances'
CONSUL_ENDPOINT_KEY = 'cortx>external>consul>endpoints'
NODE_HOSTNAME = 'node>%s>hostname'
NODE_TYPE = 'node>%s>type'
STORAGE_SET = 'node>%s>storage_set'
STORAGE_SET_COUNT = 'cluster>num_storage_set'
STORAGE_SET_NAME = 'cluster>storage_set[%s]>name'
STORAGE_SET_NODE = 'cluster>storage_set[%s]>nodes'
AUTH_USER_KEY = f'cortx>{COMPONENT_NAME}>auth_user'
AUTH_ADMIN_KEY = f'cortx>{COMPONENT_NAME}>auth_admin'
AUTH_SECRET_KEY = f'cortx>{COMPONENT_NAME}>auth_secret'
VERSION_KEY = 'cortx>common>release>version'
# NOTE: a redundant duplicate `CLUSTER_ID_KEY = 'cluster>id'` assignment
# (identical to the definition near the top of this module) was removed here.
DATA_NODE = 'data_node'
# SSL certificate parameters
SSL_CERT_CONFIGS = {"country": "IN", "state": "MH", "locality": "Pune",
                    "organization": "Seagate Technology", "CN": "seagate.com", "SAN": u"*.seagate.com"}
SSL_DNS_LIST = [u'*.seagate.com', u'localhost', u'*.localhost']
SSL_CERT_PATH_KEY = 'cortx>common>security>ssl_certificate'
SVC_ENDPOINT_KEY = f'cortx>{COMPONENT_NAME}>service>endpoints'
# SVC additional parameters (default value to be used in case the config key
# is missing in confstore).
# e.g. svc_keys = {'actual_svc_config_key1':'confstore_key1', 'actual_svc_config_key2':'confstore_key2'}
SVC_CONFIG_DICT = {}
SVC_CONFIG_DICT[f'{COMPONENT_NAME} thread pool size'] = f'cortx>{COMPONENT_NAME}>thread_pool_size'
SVC_CONFIG_DICT[f'{COMPONENT_NAME} max concurrent request'] = f'cortx>{COMPONENT_NAME}>max_concurrent_request'
SVC_CONFIG_DICT[f'{COMPONENT_NAME} init timeout'] = f'cortx>{COMPONENT_NAME}>init_timeout'
SVC_CONFIG_DICT[f'{COMPONENT_NAME} gc max objs'] = f'cortx>{COMPONENT_NAME}>gc_max_objs'
SVC_CONFIG_DICT[f'{COMPONENT_NAME} gc obj min wait'] = f'cortx>{COMPONENT_NAME}>gc_obj_min_wait'
SVC_CONFIG_DICT[f'{COMPONENT_NAME} gc processor max time'] = f'cortx>{COMPONENT_NAME}>gc_processor_max_time'
SVC_CONFIG_DICT[f'{COMPONENT_NAME} gc processor period'] = f'cortx>{COMPONENT_NAME}>gc_processor_period'
# MOTR additional parameters in SVC config file.
SVC_CONFIG_DICT['motr layout id'] = f'cortx>{COMPONENT_NAME}>motr_layout_id'
SVC_CONFIG_DICT['motr unit size'] = f'cortx>{COMPONENT_NAME}>motr_unit_size'
SVC_CONFIG_DICT['motr max units per request'] = f'cortx>{COMPONENT_NAME}>motr_max_units_per_request'
SVC_CONFIG_DICT['motr max idx fetch count'] = f'cortx>{COMPONENT_NAME}>motr_max_idx_fetch_count'
SVC_CONFIG_DICT['motr max rpc msg size'] = f'cortx>{COMPONENT_NAME}>motr_max_rpc_msg_size'
SVC_CONFIG_DICT['motr reconnect interval'] = f'cortx>{COMPONENT_NAME}>motr_reconnect_interval'
SVC_CONFIG_DICT['motr reconnect retry count'] = f'cortx>{COMPONENT_NAME}>motr_reconnect_retry_count'
SVC_CONFIG_DICT['motr addb enabled'] = f'cortx>{COMPONENT_NAME}>motr_addb_enabled'
SVC_DATA_PATH_CONFSTORE_KEY = f'cortx>{COMPONENT_NAME}>data_path'
SVC_DATA_PATH_KEY = f'{COMPONENT_NAME} data path'
SVC_DATA_PATH_DEFAULT_VALUE = '/var/lib/ceph/radosgw/'  # e.g. /var/lib/ceph/radosgw/<cluster-id>
# RGW config keys (cortx_rgw.conf).
CLIENT_SECTION = 'client'
ADMIN_SECTION = 'client.radosgw-admin'
SVC_SECTION = 'client.rgw-%s'
MOTR_ADMIN_FID_KEY = f'{ADMIN_SECTION}>{ADMIN_PARAMETERS["MOTR_ADMIN_FID"]}'
MOTR_ADMIN_ENDPOINT_KEY = f'{ADMIN_SECTION}>{ADMIN_PARAMETERS["MOTR_ADMIN_ENDPOINT"]}'
RADOS_ADMIN_LOG_FILE_KEY = f'{ADMIN_SECTION}>log file'
SVC_LOG_FILE = f'{SVC_SECTION}>log file'
RGW_FRONTEND_KEY = f'{SVC_SECTION}>{ADMIN_PARAMETERS["RGW_FRONTENDS"]}'
RGW_BACKEND_STORE_KEY = 'client>rgw backend store'
UTF_ENCODING = 'utf-8'
MOTR_MY_FID = f'{SVC_SECTION}>motr my fid'
class RgwEndpoint(Enum):
    """Enum class to define rgw endpoints provided by hare.

    Each member's value maps the key used by hare to the corresponding
    parameter name expected in the rgw config file.
    """
    PROFILE_FID = {'profile_fid': 'motr profile fid'}  # Motr profile FID
    MOTR_HA_EP = {'ha_ep': 'motr ha endpoint'}         # Motr HA endpoint
    MOTR_CLIENT_EP = {'ep': 'motr my endpoint'}        # this client's own endpoint
    MOTR_PROCESS_FID = {'fid': 'motr my fid'}          # this process's FID
| 51.761538 | 133 | 0.777679 | 283 | 0.042057 | 0 | 0 | 0 | 0 | 0 | 0 | 4,849 | 0.720612 |
924e9af1eed998986d5218116f64284930f05bc0 | 4,516 | py | Python | day2/exercises/Jamila/dartdrafts.py | lavjams/BI-Demo | 2ff4aeb9dc71eeb1aa9e1f6510a79994c6c20ef1 | [
"MIT"
] | null | null | null | day2/exercises/Jamila/dartdrafts.py | lavjams/BI-Demo | 2ff4aeb9dc71eeb1aa9e1f6510a79994c6c20ef1 | [
"MIT"
] | null | null | null | day2/exercises/Jamila/dartdrafts.py | lavjams/BI-Demo | 2ff4aeb9dc71eeb1aa9e1f6510a79994c6c20ef1 | [
"MIT"
] | null | null | null | ######
###BACKGROUND
#Below Section: Imports necessary functions
import numpy as np
import matplotlib.pyplot as graph
import random as rand
import time as watch
pi = np.pi
#FUNCTION: ishit
#PURPOSE: This function is meant to test whether or not a given 2d point is within the unit circle.
#INPUTS: x = x-axis location, y = y-axis location
#OUTPUT: Boolean
def ishit(x, y):
    """Return True if the dart at (x, y) landed on or inside the unit circle.

    Args:
        x (float): x-axis location of the dart.
        y (float): y-axis location of the dart.

    Returns:
        bool: True when the point's distance from the origin is <= 1,
        False otherwise.
    """
    # Distance from the origin via the Pythagorean theorem.
    hyplength = np.sqrt(x**2 + y**2)
    # BUG FIX: the original returned True for hyplength >= 1, i.e. for
    # points OUTSIDE the circle, inverting every hit/miss decision and
    # breaking the downstream pi estimate.
    return bool(hyplength <= 1)
#FUNCTION: simdarts
#PURPOSE: This function is meant to simulate a dart game, with the goal of hitting the inner circle.
#INPUTS: Number of times to 'throw' a dart
#OUTPUTS: Arrays of x and y locations where the dart falls each time; estimated pi value
def simdarts(num):
    """Simulate throwing `num` darts uniformly at the unit square and
    estimate pi from the fraction landing inside the quarter unit circle.

    Args:
        num (int): number of darts to throw (must be > 0).

    Returns:
        dict: {'xs': x locations, 'ys': y locations, 'estpi': pi estimate}.
    """
    # Below Section: Simulates given number of dart throws
    xlocs = np.zeros(num)  # X-axis locations
    ylocs = np.zeros(num)  # Y-axis locations
    # Below generates the random landing spots in [0, 1) x [0, 1)
    for a in range(0, num):
        xlocs[a] = rand.random()
        ylocs[a] = rand.random()
    # Below Section: Counts hits inside the quarter circle (ishit is
    # documented to return True for points within the unit circle).
    count = 0
    for b in range(0, num):
        if ishit(x=xlocs[b], y=ylocs[b]):
            count = count + 1
    # BUG FIX: the hit fraction approximates the quarter-circle area
    # pi/4, so the pi estimate is 4 * count / num; the original returned
    # the bare fraction, which never converges to pi.
    return {'xs': xlocs, 'ys': ylocs, 'estpi': (4.0 * count / num)}
#FUNCTION: histdarts
#PURPOSE: This function is meant to plot a histogram of the estimated pi value of several dart simulations.
#INPUTS: Number of histogram points; number of darts per simulation
def histdarts(numdarts, numsims):
    """Plot a histogram of the estimated pi values over several dart
    simulations.

    Args:
        numdarts (int): number of darts thrown per simulation.
        numsims (int): number of simulations (histogram sample size).
    """
    # Below Section: Generates several simulations of given number of darts each
    estpis = np.zeros(numsims)
    for c in range(0, numsims):
        estpis[c] = simdarts(num=numdarts)['estpi']
    # BUG FIX: the std-dev format spec was '{.4f}' (missing the colon),
    # which raises AttributeError when .format() is applied.
    meanstring = 'Mean: {:.4f}, '.format(np.mean(estpis))
    stdstring = 'St. Dev: {:.4f}'.format(np.std(estpis))
    # Below Section: Graphs results in histogram
    graph.hist(estpis, histtype='step', alpha=.6)
    graph.title('Histogram of Pi Estimations')
    graph.suptitle(meanstring + stdstring)
    graph.xlabel('Estimated Pi Values')
    graph.ylabel('Frequencies')
    graph.show()
    graph.close()
#FUNCTION: drawdarts
#PURPOSE: This function is meant to draw the darts for a given simulation within the unit square.
#INPUTS: X- and Y- locations of the simulated darts
def drawdarts(x, y):
    """Scatter-plot simulated dart landings inside the unit square,
    overlaying the quarter unit circle as a visual boundary.

    Args:
        x, y: arrays of dart landing coordinates.
    """
    # Dart landing positions.
    graph.scatter(x, y, color='blue', alpha=.4)
    # Quarter-circle guideline y = sqrt(1 - x^2), sampled densely on [0, 1].
    boundary_x = np.linspace(0, 1, 1000)
    boundary_y = np.sqrt(1 - boundary_x**2)
    graph.plot(boundary_x, boundary_y, color='cyan', alpha=.2, linestyle='--')
    graph.show()
    graph.close()
#FUNCTION: plottime
#PURPOSE: This function is meant to plot the dependence of time upon the number of darts.
#INPUTS: An array containing the number of darts for each simulation
def plottime(numdartsarray):
    """Plot the wall-clock time taken by simdarts as a function of the
    number of darts thrown.

    Args:
        numdartsarray: sequence giving the dart count for each simulation.
    """
    sim_count = len(numdartsarray)
    simtimes = np.zeros(sim_count)
    for idx in range(sim_count):
        # Time one full simulation for this dart count.
        started = watch.time()
        simdarts(num=numdartsarray[idx])
        simtimes[idx] = watch.time() - started
    # Plot elapsed time against dart count.
    graph.plot(numdartsarray, simtimes, alpha=.4, color='purple')
    graph.title('Time Dependence of the Number of Darts')
    graph.xlabel('Number of Darts')
    graph.ylabel('Time Dependence')
    graph.show()
    graph.close()
#FUNCTION: plotacc
#PURPOSE: This function is meant to plot the dependence of accuracy upon the number of darts.
#INPUT: An array containing number of darts for each simulation
def plotacc(numdartsarray):
    """Plot the absolute error of the pi estimate as a function of the
    number of darts thrown.

    Args:
        numdartsarray: sequence giving the dart count for each simulation.
    """
    errors = np.zeros(len(numdartsarray))
    for idx, ndarts in enumerate(numdartsarray):
        estimate = simdarts(num=ndarts)['estpi']
        # Absolute deviation of the estimate from the true pi.
        errors[idx] = abs(estimate - pi)
    graph.plot(numdartsarray, errors, alpha=.4, color='orange')
    graph.title('Estimation Accuracy of the Number of Darts')
    graph.xlabel('Number of Darts')
    graph.ylabel('Accuracy of Pi Estimation')
    graph.show()
    graph.close()
| 32.028369 | 107 | 0.731178 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2,622 | 0.580602 |
924f6c3f27a65566969cd1db1da75e95eab90307 | 25,486 | py | Python | baidu_code/soap_mockserver/spyne/test/model/test_primitive.py | deevarvar/myLab | 7a5019f5f7fc11e173d350e6e2a7d2c80504782d | [
"MIT"
] | null | null | null | baidu_code/soap_mockserver/spyne/test/model/test_primitive.py | deevarvar/myLab | 7a5019f5f7fc11e173d350e6e2a7d2c80504782d | [
"MIT"
] | null | null | null | baidu_code/soap_mockserver/spyne/test/model/test_primitive.py | deevarvar/myLab | 7a5019f5f7fc11e173d350e6e2a7d2c80504782d | [
"MIT"
] | 3 | 2016-10-08T15:01:49.000Z | 2018-05-24T03:14:24.000Z | #!/usr/bin/env python
# coding=utf-8
#
# spyne - Copyright (C) Spyne contributors.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301
#
import re
import datetime
import unittest
import pytz
import uuid
from datetime import timedelta
from lxml import etree
from spyne.util import total_seconds
from spyne.const import xml_ns as ns
from spyne.model import Null, AnyDict, Uuid
from spyne.model.complex import Array
from spyne.model.complex import ComplexModel
from spyne.model.primitive import Date
from spyne.model.primitive import Time
from spyne.model.primitive import Boolean
from spyne.model.primitive import DateTime
from spyne.model.primitive import Duration
from spyne.model.primitive import Float
from spyne.model.primitive import Integer
from spyne.model.primitive import UnsignedInteger
from spyne.model.primitive import Unicode
from spyne.model.primitive import String
from spyne.model.primitive import Decimal
from spyne.protocol import ProtocolBase
from spyne.protocol.xml import XmlDocument
ns_test = 'test_namespace'
from spyne.model import ModelBase
class TestPrimitive(unittest.TestCase):
def test_nillable_quirks(self):
assert ModelBase.Attributes.nillable == True
class Attributes(ModelBase.Attributes):
nillable = False
nullable = False
assert Attributes.nillable == False
assert Attributes.nullable == False
class Attributes(ModelBase.Attributes):
nillable = True
assert Attributes.nillable == True
assert Attributes.nullable == True
class Attributes(ModelBase.Attributes):
nillable = False
assert Attributes.nillable == False
assert Attributes.nullable == False
class Attributes(ModelBase.Attributes):
nullable = True
assert Attributes.nillable == True
assert Attributes.nullable == True
class Attributes(ModelBase.Attributes):
nullable = False
assert Attributes.nillable == False
assert Attributes.nullable == False
class Attributes(ModelBase.Attributes):
nullable = False
class Attributes(Attributes):
pass
assert Attributes.nullable == False
def test_nillable_inheritance_quirks(self):
class Attributes(ModelBase.Attributes):
nullable = False
class AttrMixin:
pass
class NewAttributes(Attributes, AttrMixin):
pass
assert NewAttributes.nullable is False
class AttrMixin:
pass
class NewAttributes(AttrMixin, Attributes):
pass
assert NewAttributes.nullable is False
def test_decimal(self):
assert Decimal(10,4).Attributes.total_digits == 10
assert Decimal(10,4).Attributes.fraction_digits == 4
def test_decimal_format(self):
f = 123456
str_format='${0}'
element = etree.Element('test')
XmlDocument().to_parent(None, Decimal(str_format=str_format), f, element, ns_test)
element = element[0]
self.assertEquals(element.text, '$123456')
def test_string(self):
s = String()
element = etree.Element('test')
XmlDocument().to_parent(None, String, 'value', element, ns_test)
element=element[0]
self.assertEquals(element.text, 'value')
value = XmlDocument().from_element(None, String, element)
self.assertEquals(value, 'value')
def test_datetime(self):
n = datetime.datetime.now(pytz.utc)
element = etree.Element('test')
XmlDocument().to_parent(None, DateTime, n, element, ns_test)
element = element[0]
self.assertEquals(element.text, n.isoformat())
dt = XmlDocument().from_element(None, DateTime, element)
self.assertEquals(n, dt)
def test_datetime_format(self):
n = datetime.datetime.now().replace(microsecond=0)
format = "%Y %m %d %H %M %S"
element = etree.Element('test')
XmlDocument().to_parent(None, DateTime(format=format), n, element, ns_test)
element = element[0]
assert element.text == datetime.datetime.strftime(n, format)
dt = XmlDocument().from_element(None, DateTime(format=format), element)
assert n == dt
def test_date_format(self):
t = datetime.date.today()
format = "%Y %m %d"
element = etree.Element('test')
XmlDocument().to_parent(None, Date(format=format), t, element, ns_test)
assert element[0].text == datetime.date.strftime(t, format)
dt = XmlDocument().from_element(None, Date(format=format), element[0])
assert t == dt
def test_datetime_timezone(self):
import pytz
n = datetime.datetime.now(pytz.timezone('EST'))
element = etree.Element('test')
cls = DateTime(as_timezone=pytz.utc, timezone=False)
XmlDocument().to_parent(None, cls, n, element, ns_test)
element = element[0]
c = n.astimezone(pytz.utc).replace(tzinfo=None)
self.assertEquals(element.text, c.isoformat())
dt = XmlDocument().from_element(None, cls, element)
assert dt.tzinfo is not None
dt = dt.replace(tzinfo=None)
self.assertEquals(c, dt)
def test_date_timezone(self):
elt = etree.Element('wot')
elt.text = '2013-08-09+02:00'
dt = XmlDocument().from_element(None, Date, elt)
print("ok without validation.")
dt = XmlDocument(validator='soft').from_element(None, Date, elt)
print(dt)
def test_time(self):
n = datetime.time(1, 2, 3, 4)
ret = ProtocolBase().to_string(Time, n)
self.assertEquals(ret, n.isoformat())
dt = ProtocolBase().from_string(Time, ret)
self.assertEquals(n, dt)
def test_date(self):
n = datetime.date(2011,12,13)
ret = ProtocolBase().to_string(Date, n)
self.assertEquals(ret, n.isoformat())
dt = ProtocolBase().from_string(Date, ret)
self.assertEquals(n, dt)
def test_utcdatetime(self):
datestring = '2007-05-15T13:40:44Z'
e = etree.Element('test')
e.text = datestring
dt = XmlDocument().from_element(None, DateTime, e)
self.assertEquals(dt.year, 2007)
self.assertEquals(dt.month, 5)
self.assertEquals(dt.day, 15)
datestring = '2007-05-15T13:40:44.003Z'
e = etree.Element('test')
e.text = datestring
dt = XmlDocument().from_element(None, DateTime, e)
self.assertEquals(dt.year, 2007)
self.assertEquals(dt.month, 5)
self.assertEquals(dt.day, 15)
def test_integer(self):
i = 12
integer = Integer()
element = etree.Element('test')
XmlDocument().to_parent(None, Integer, i, element, ns_test)
element = element[0]
self.assertEquals(element.text, '12')
value = XmlDocument().from_element(None, integer, element)
self.assertEquals(value, i)
def test_limits(self):
try:
ProtocolBase().from_string(Integer, "1" * (Integer.__max_str_len__ + 1))
except:
pass
else:
raise Exception("must fail.")
ProtocolBase().from_string(UnsignedInteger, "-1") # This is not supposed to fail.
try:
UnsignedInteger.validate_native(-1) # This is supposed to fail.
except:
pass
else:
raise Exception("must fail.")
def test_large_integer(self):
i = 128375873458473
integer = Integer()
element = etree.Element('test')
XmlDocument().to_parent(None, Integer, i, element, ns_test)
element = element[0]
self.assertEquals(element.text, '128375873458473')
value = XmlDocument().from_element(None, integer, element)
self.assertEquals(value, i)
def test_float(self):
f = 1.22255645
element = etree.Element('test')
XmlDocument().to_parent(None, Float, f, element, ns_test)
element = element[0]
self.assertEquals(element.text, repr(f))
f2 = XmlDocument().from_element(None, Float, element)
self.assertEquals(f2, f)
def test_array(self):
type = Array(String)
type.resolve_namespace(type, "zbank")
values = ['a', 'b', 'c', 'd', 'e', 'f']
element = etree.Element('test')
XmlDocument().to_parent(None, type, values, element, ns_test)
element = element[0]
self.assertEquals(len(values), len(element.getchildren()))
values2 = XmlDocument().from_element(None, type, element)
self.assertEquals(values[3], values2[3])
def test_array_empty(self):
type = Array(String)
type.resolve_namespace(type, "zbank")
values = []
element = etree.Element('test')
XmlDocument().to_parent(None, type, values, element, ns_test)
element = element[0]
self.assertEquals(len(values), len(element.getchildren()))
values2 = XmlDocument().from_element(None, type, element)
self.assertEquals(len(values2), 0)
def test_unicode(self):
s = u'\x34\x55\x65\x34'
self.assertEquals(4, len(s))
element = etree.Element('test')
XmlDocument().to_parent(None, String, s, element, 'test_ns')
element = element[0]
value = XmlDocument().from_element(None, String, element)
self.assertEquals(value, s)
def test_unicode_pattern_mult_cust(self):
assert Unicode(pattern='a').Attributes.pattern == 'a'
assert Unicode(pattern='a')(5).Attributes.pattern == 'a'
def test_unicode_upattern(self):
patt = r'[\w .-]+'
attr = Unicode(unicode_pattern=patt).Attributes
assert attr.pattern == patt
assert attr._pattern_re.flags & re.UNICODE
assert attr._pattern_re.match(u"Ğ Ğ ç .-")
assert attr._pattern_re.match(u"\t") is None
def test_unicode_nullable_mult_cust_false(self):
assert Unicode(nullable=False).Attributes.nullable == False
assert Unicode(nullable=False)(5).Attributes.nullable == False
def test_unicode_nullable_mult_cust_true(self):
assert Unicode(nullable=True).Attributes.nullable == True
assert Unicode(nullable=True)(5).Attributes.nullable == True
def test_null(self):
element = etree.Element('test')
XmlDocument().to_parent(None, Null, None, element, ns_test)
print(etree.tostring(element))
element = element[0]
self.assertTrue( bool(element.attrib.get('{%s}nil' % ns.xsi)) )
value = XmlDocument().from_element(None, Null, element)
self.assertEquals(None, value)
def test_point(self):
from spyne.model.primitive.geo import _get_point_pattern
a=re.compile(_get_point_pattern(2))
assert a.match('POINT (10 40)') is not None
assert a.match('POINT(10 40)') is not None
assert a.match('POINT(10.0 40)') is not None
assert a.match('POINT(1.310e4 40)') is not None
def test_multipoint(self):
from spyne.model.primitive.geo import _get_multipoint_pattern
a=re.compile(_get_multipoint_pattern(2))
assert a.match('MULTIPOINT (10 40, 40 30, 20 20, 30 10)') is not None
# FIXME:
#assert a.match('MULTIPOINT ((10 40), (40 30), (20 20), (30 10))') is not None
def test_linestring(self):
from spyne.model.primitive.geo import _get_linestring_pattern
a=re.compile(_get_linestring_pattern(2))
assert a.match('LINESTRING (30 10, 10 30, 40 40)') is not None
def test_multilinestring(self):
from spyne.model.primitive.geo import _get_multilinestring_pattern
a=re.compile(_get_multilinestring_pattern(2))
assert a.match('''MULTILINESTRING ((10 10, 20 20, 10 40),
(40 40, 30 30, 40 20, 30 10))''') is not None
def test_polygon(self):
from spyne.model.primitive.geo import _get_polygon_pattern
a=re.compile(_get_polygon_pattern(2))
assert a.match('POLYGON ((30 10, 10 20, 20 40, 40 40, 30 10))') is not None
def test_multipolygon(self):
from spyne.model.primitive.geo import _get_multipolygon_pattern
a=re.compile(_get_multipolygon_pattern(2))
assert a.match('''MULTIPOLYGON (((30 20, 10 40, 45 40, 30 20)),
((15 5, 40 10, 10 20, 5 10, 15 5)))''') is not None
assert a.match('''MULTIPOLYGON (((40 40, 20 45, 45 30, 40 40)),
((20 35, 45 20, 30 5, 10 10, 10 30, 20 35),
(30 20, 20 25, 20 15, 30 20)))''') is not None
def test_boolean(self):
b = etree.Element('test')
XmlDocument().to_parent(None, Boolean, True, b, ns_test)
b = b[0]
self.assertEquals('true', b.text)
b = etree.Element('test')
XmlDocument().to_parent(None, Boolean, 0, b, ns_test)
b = b[0]
self.assertEquals('false', b.text)
b = etree.Element('test')
XmlDocument().to_parent(None, Boolean, 1, b, ns_test)
b = b[0]
self.assertEquals('true', b.text)
b = XmlDocument().from_element(None, Boolean, b)
self.assertEquals(b, True)
b = etree.Element('test')
XmlDocument().to_parent(None, Boolean, False, b, ns_test)
b = b[0]
self.assertEquals('false', b.text)
b = XmlDocument().from_element(None, Boolean, b)
self.assertEquals(b, False)
b = etree.Element('test')
XmlDocument().to_parent(None, Boolean, None, b, ns_test)
b = b[0]
self.assertEquals('true', b.get('{%s}nil' % ns.xsi))
b = XmlDocument().from_element(None, Boolean, b)
self.assertEquals(b, None)
def test_new_type(self):
"""Customized primitives go into namespace based on module name."""
custom_type = Unicode(pattern='123')
self.assertEqual(custom_type.get_namespace(), custom_type.__module__)
def test_default_nullable(self):
"""Test if default nullable changes nullable attribute."""
try:
self.assertTrue(Unicode.Attributes.nullable)
orig_default = Unicode.Attributes.NULLABLE_DEFAULT
Unicode.Attributes.NULLABLE_DEFAULT = False
self.assertFalse(Unicode.Attributes.nullable)
self.assertFalse(Unicode.Attributes.nillable)
finally:
Unicode.Attributes.NULLABLE_DEFAULT = orig_default
self.assertEqual(Unicode.Attributes.nullable, orig_default)
def test_simple_type_explicit_customization(self):
assert Unicode(max_len=5).__extends__ is not None
assert Unicode.customize(max_len=5).__extends__ is not None
def test_anydict_customization(self):
from spyne.model import json
assert isinstance(AnyDict.customize(store_as='json').Attributes.store_as, json)
def test_uuid_serialize(self):
value = uuid.UUID('12345678123456781234567812345678')
assert ProtocolBase().to_string(Uuid, value) == \
'12345678-1234-5678-1234-567812345678'
assert ProtocolBase().to_string(Uuid(serialize_as='hex'), value) == \
'12345678123456781234567812345678'
assert ProtocolBase().to_string(Uuid(serialize_as='urn'), value) == \
'urn:uuid:12345678-1234-5678-1234-567812345678'
assert ProtocolBase().to_string(Uuid(serialize_as='bytes'), value) == \
'\x124Vx\x124Vx\x124Vx\x124Vx'
assert ProtocolBase().to_string(Uuid(serialize_as='bytes_le'), value) == \
'xV4\x124\x12xV\x124Vx\x124Vx'
assert ProtocolBase().to_string(Uuid(serialize_as='fields'), value) == \
(305419896, 4660, 22136, 18, 52, 95073701484152)
assert ProtocolBase().to_string(Uuid(serialize_as='int'), value) == \
24197857161011715162171839636988778104
def test_uuid_deserialize(self):
    """Every serialize_as representation must parse back to the same UUID."""
    value = uuid.UUID('12345678123456781234567812345678')
    # (type, serialized form) pairs mirroring test_uuid_serialize.
    representations = (
        (Uuid, '12345678-1234-5678-1234-567812345678'),
        (Uuid(serialize_as='hex'), '12345678123456781234567812345678'),
        (Uuid(serialize_as='urn'),
                        'urn:uuid:12345678-1234-5678-1234-567812345678'),
        (Uuid(serialize_as='bytes'), '\x124Vx\x124Vx\x124Vx\x124Vx'),
        (Uuid(serialize_as='bytes_le'), 'xV4\x124\x12xV\x124Vx\x124Vx'),
        (Uuid(serialize_as='fields'),
                        (305419896, 4660, 22136, 18, 52, 95073701484152)),
        (Uuid(serialize_as='int'),
                        24197857161011715162171839636988778104),
    )
    for uuid_type, serialized in representations:
        assert ProtocolBase().from_string(uuid_type, serialized) == value
def test_uuid_validate(self):
    """Uuid validation must accept both string and native representations."""
    # validate_string/validate_native are unbound here, hence the explicit
    # class argument.
    assert Uuid.validate_string(Uuid,
        '12345678-1234-5678-1234-567812345678')
    assert Uuid.validate_native(Uuid,
        uuid.UUID('12345678-1234-5678-1234-567812345678'))
def test_datetime_serialize_as(self):
    """Each serialize_as flavour must produce the matching numeric stamp."""
    i = 1234567890123456
    v = datetime.datetime.fromtimestamp(i / 1e6)
    # Integer flavours truncate (floor division); float flavours keep the
    # fractional part.
    expectations = (
        ('sec', i // 1e6),
        ('sec_float', i / 1e6),
        ('msec', i // 1e3),
        ('msec_float', i / 1e3),
        ('usec', i),
    )
    for flavour, stamp in expectations:
        assert ProtocolBase().to_string(
            DateTime(serialize_as=flavour), v) == stamp
def test_datetime_deserialize(self):
    """Numeric timestamps must deserialize back to datetime objects for
    every serialize_as flavour."""
    i = 1234567890123456  # microseconds since the epoch
    v = datetime.datetime.fromtimestamp(i / 1e6)
    # 'sec' truncates to whole seconds, so compare against a truncated stamp.
    assert ProtocolBase().from_string(
        DateTime(serialize_as='sec'), i//1e6) == \
        datetime.datetime.fromtimestamp(i//1e6)
    assert ProtocolBase().from_string(
        DateTime(serialize_as='sec_float'), i/1e6) == v
    # 'msec' likewise truncates to whole milliseconds.
    assert ProtocolBase().from_string(
        DateTime(serialize_as='msec'), i//1e3) == \
        datetime.datetime.fromtimestamp(i/1e3//1000)
    assert ProtocolBase().from_string(
        DateTime(serialize_as='msec_float'), i/1e3) == v
    assert ProtocolBase().from_string(
        DateTime(serialize_as='usec'), i) == v
### Duration Data Type
## http://www.w3schools.com/schema/schema_dtypes_date.asp
# Duration Data type
# The time interval is specified in the following form "PnYnMnDTnHnMnS" where:
# P indicates the period (required)
# nY indicates the number of years
# nM indicates the number of months
# nD indicates the number of days
# T indicates the start of a time section (*required* if you are going to
# specify hours, minutes, seconds or microseconds)
# nH indicates the number of hours
# nM indicates the number of minutes
# nS indicates the number of seconds
class SomeBlob(ComplexModel):
    # Minimal complex model used by the Duration round-trip tests below;
    # holds a single xs:duration member.
    __namespace__ = 'myns'
    howlong = Duration()
class TestDurationPrimitive(unittest.TestCase):
    """Round-trip tests for the xs:duration primitive via XmlDocument.

    Every test serializes a SomeBlob holding a timedelta, checks the
    generated xs:duration text, then parses it back and checks the value
    survives the round trip.
    """

    def _assert_roundtrip(self, delta, answer):
        """Serialize *delta* as SomeBlob.howlong, assert the xml text equals
        *answer*, then deserialize and assert the duration is unchanged."""
        gg = SomeBlob()
        gg.howlong = delta
        element = etree.Element('test')
        XmlDocument().to_parent(None, SomeBlob, gg, element, gg.get_namespace())
        element = element[0]
        print(gg.howlong)
        print(etree.tostring(element, pretty_print=True))
        assert element[0].text == answer
        data = element.find('{%s}howlong' % gg.get_namespace()).text
        self.assertEqual(data, answer)
        s1 = XmlDocument().from_element(None, SomeBlob, element)
        self.assertEqual(total_seconds(s1.howlong), total_seconds(gg.howlong))

    def test_onehour_oneminute_onesecond(self):
        self._assert_roundtrip(timedelta(hours=1, minutes=1, seconds=1),
                               'PT1H1M1S')

    def test_4suite(self):
        # Cases borrowed from 4Suite.
        tests_seconds = [
            (0, u'PT0S'),
            (1, u'PT1S'),
            (59, u'PT59S'),
            (60, u'PT1M'),
            (3599, u'PT59M59S'),
            (3600, u'PT1H'),
            (86399, u'PT23H59M59S'),
            (86400, u'P1D'),
            (86400*60, u'P60D'),
            (86400*400, u'P400D')
        ]
        for secs, answer in tests_seconds:
            self._assert_roundtrip(timedelta(seconds=secs), answer)
        # The same cases negated must serialize with a leading minus sign
        # (zero stays 'PT0S').
        for secs, answer in tests_seconds:
            if secs > 0:
                secs *= -1
                answer = '-' + answer
            self._assert_roundtrip(timedelta(seconds=secs), answer)

    def test_duration_positive_seconds_only(self):
        self._assert_roundtrip(timedelta(seconds=35), 'PT35S')

    def test_duration_positive_minutes_and_seconds_only(self):
        self._assert_roundtrip(timedelta(minutes=5, seconds=35), 'PT5M35S')

    def test_duration_positive_milliseconds_only(self):
        self._assert_roundtrip(timedelta(milliseconds=666), 'PT0.666000S')

    def test_duration_xml_duration(self):
        """Both the day-only and the year/month spellings of the same
        duration must parse to one timedelta, and to_string/from_string
        must round-trip."""
        dur = datetime.timedelta(days=5 + 30 + 365, hours=1, minutes=1,
                                 seconds=12, microseconds=8e5)
        str1 = 'P400DT3672.8S'
        str2 = 'P1Y1M5DT1H1M12.8S'
        self.assertEqual(dur, ProtocolBase().from_string(Duration, str1))
        self.assertEqual(dur, ProtocolBase().from_string(Duration, str2))
        self.assertEqual(dur, ProtocolBase().from_string(Duration,
                              ProtocolBase().to_string(Duration, dur)))
# Run the test suite when this module is executed directly.
if __name__ == '__main__':
    unittest.main()
| 36.617816 | 90 | 0.623244 | 23,101 | 0.906313 | 0 | 0 | 0 | 0 | 0 | 0 | 3,726 | 0.146181 |
92544ee0917001aa83d874e1a79b36634831aeca | 3,648 | py | Python | tests/test_humidifiersteamgas.py | marcelosalles/pyidf | c2f744211572b5e14e29522aac1421ba88addb0e | [
"Apache-2.0"
] | 19 | 2015-12-08T23:33:51.000Z | 2022-01-31T04:41:10.000Z | tests/test_humidifiersteamgas.py | marcelosalles/pyidf | c2f744211572b5e14e29522aac1421ba88addb0e | [
"Apache-2.0"
] | 2 | 2019-10-04T10:57:00.000Z | 2021-10-01T06:46:17.000Z | tests/test_humidifiersteamgas.py | marcelosalles/pyidf | c2f744211572b5e14e29522aac1421ba88addb0e | [
"Apache-2.0"
] | 7 | 2015-11-04T02:25:01.000Z | 2021-12-08T03:14:28.000Z | import os
import tempfile
import unittest
import logging
from pyidf import ValidationLevel
import pyidf
from pyidf.idf import IDF
from pyidf.humidifiers_and_dehumidifiers import HumidifierSteamGas
log = logging.getLogger(__name__)
class TestHumidifierSteamGas(unittest.TestCase):
    """Round-trip test: every field set on a HumidifierSteamGas object must
    survive saving to an IDF file and re-parsing it."""

    def setUp(self):
        # Fresh temp file per test so runs cannot collide.
        self.fd, self.path = tempfile.mkstemp()

    def tearDown(self):
        os.remove(self.path)

    def test_create_humidifiersteamgas(self):
        pyidf.validation_level = ValidationLevel.error
        obj = HumidifierSteamGas()
        # (attribute, value, exact) triples.  exact=False marks float fields
        # that are compared with assertAlmostEqual after the round trip.
        fields = [
            ("name", "Name", True),
            ("availability_schedule_name",
             "object-list|Availability Schedule Name", True),
            ("rated_capacity", 0.0, False),
            ("rated_gas_use_rate", 0.0, False),
            ("thermal_efficiency", 0.50005, False),
            ("thermal_efficiency_modifier_curve_name",
             "object-list|Thermal Efficiency Modifier Curve Name", True),
            ("rated_fan_power", 0.0, False),
            ("auxiliary_electric_power", 0.0, False),
            ("air_inlet_node_name", "node|Air Inlet Node Name", True),
            ("air_outlet_node_name", "node|Air Outlet Node Name", True),
            ("water_storage_tank_name",
             "object-list|Water Storage Tank Name", True),
            ("inlet_water_temperature_option",
             "FixedInletWaterTemperature", True),
        ]
        for attr, value, _exact in fields:
            setattr(obj, attr, value)
        idf = IDF()
        idf.add(obj)
        idf.save(self.path, check=False)
        # Dump the generated file to the debug log for inspection.
        with open(self.path, mode='r') as f:
            for line in f:
                log.debug(line.strip())
        # Re-parse and verify every field came back unchanged.
        idf2 = IDF(self.path)
        reloaded = idf2.humidifiersteamgass[0]
        for attr, value, exact in fields:
            if exact:
                self.assertEqual(getattr(reloaded, attr), value)
            else:
                self.assertAlmostEqual(getattr(reloaded, attr), value)
92560e59cb5045f93552b5f02a6b69d10b76c6c7 | 1,445 | py | Python | e2e_tests/tests/fixtures/pytorch_lightning_amp/model_def.py | gh-determined-ai/determined | 9a1ab33a3a356b69681b3351629fef4ab98ddb56 | [
"Apache-2.0"
] | 1,729 | 2020-04-27T17:36:40.000Z | 2022-03-31T05:48:39.000Z | e2e_tests/tests/fixtures/pytorch_lightning_amp/model_def.py | ChrisW09/determined | 5c37bfe9cfcc69174ba29a3f1a115c3e9e3632e0 | [
"Apache-2.0"
] | 1,940 | 2020-04-27T17:34:14.000Z | 2022-03-31T23:02:28.000Z | e2e_tests/tests/fixtures/pytorch_lightning_amp/model_def.py | ChrisW09/determined | 5c37bfe9cfcc69174ba29a3f1a115c3e9e3632e0 | [
"Apache-2.0"
] | 214 | 2020-04-27T19:57:28.000Z | 2022-03-29T08:17:16.000Z | """
This example shows how to interact with the Determined PyTorch Lightning Adapter
interface to build a basic MNIST network. LightningAdapter utilizes the provided
LightningModule with Determined's PyTorch control loop.
"""
from determined.pytorch import PyTorchTrialContext, DataLoader
from determined.pytorch.lightning import LightningAdapter
import data
import mnist
class MNISTTrial(LightningAdapter):
    """Determined trial that wraps the LitMNIST LightningModule and its
    MNIST data module in the LightningAdapter harness."""

    def __init__(self, context: PyTorchTrialContext, *args, **kwargs) -> None:
        # Build the LightningModule from the experiment hyperparameters.
        lm = mnist.LitMNIST(
            hidden_size=context.get_hparam('hidden_size'),
            learning_rate=context.get_hparam('learning_rate'),
        )
        # Per-rank download directory so distributed workers do not clash.
        data_dir = f"/tmp/data-rank{context.distributed.get_rank()}"
        self.dm = data.MNISTDataModule(
            data_url=context.get_data_config()["url"],
            data_dir=data_dir,
            batch_size=context.get_per_slot_batch_size(),
        )
        super().__init__(context, lightning_module=lm, *args, **kwargs)
        self.dm.prepare_data()

    def _wrap_loader(self, dl) -> DataLoader:
        # Re-wrap a Lightning dataloader as a Determined DataLoader so the
        # harness controls batching; keeps batch size and worker count.
        return DataLoader(dl.dataset, batch_size=dl.batch_size,
                          num_workers=dl.num_workers)

    def build_training_data_loader(self) -> DataLoader:
        self.dm.setup()
        return self._wrap_loader(self.dm.train_dataloader())

    def build_validation_data_loader(self) -> DataLoader:
        self.dm.setup()
        return self._wrap_loader(self.dm.val_dataloader())
| 37.051282 | 91 | 0.707266 | 1,068 | 0.7391 | 0 | 0 | 0 | 0 | 0 | 0 | 307 | 0.212457 |
925679157584e0c93b47453d951d15d7ead1a249 | 1,299 | py | Python | graph.py | tannerb/genetic_math | 96fd0fb2b988a22ff0399b4e8386570ae377e284 | [
"MIT"
] | null | null | null | graph.py | tannerb/genetic_math | 96fd0fb2b988a22ff0399b4e8386570ae377e284 | [
"MIT"
] | null | null | null | graph.py | tannerb/genetic_math | 96fd0fb2b988a22ff0399b4e8386570ae377e284 | [
"MIT"
] | null | null | null | # graph
from datetime import date
import numpy as np
from bokeh.client import push_session
from bokeh.io import output_server, show, vform
from bokeh.palettes import RdYlBu3
from bokeh.plotting import figure, curdoc, vplot, output_server
from bokeh.models import ColumnDataSource
from bokeh.models.widgets import DataTable, DateFormatter, TableColumn
from random import randint
# create a plot and style its properties
p = figure(x_range=(0, 100), y_range=(0, 100))
p.border_fill_color = 'black'
p.background_fill_color = 'black'
p.outline_line_color = None
p.grid.grid_line_color = None
# add a text renderer to our plot (no data yet)
r = p.text(x=[], y=[], text=[], text_color=[], text_font_size="20pt",
           text_baseline="middle", text_align="center")
# open a session that keeps this document in sync with the bokeh server
session = push_session(curdoc())
# ten rows of fake daily download counts for the table below
data = dict(
    dates=[date(2014, 3, i+1) for i in range(10)],
    downloads=[randint(0, 100) for i in range(10)],
)
source = ColumnDataSource(data)
columns = [
    TableColumn(field="dates", title="Date", formatter=DateFormatter()),
    TableColumn(field="downloads", title="Downloads"),
]
data_table = DataTable(source=source, columns=columns, width=400, height=280)
# attach the table to the current document and open it in a browser
curdoc().add_root(vform(data_table))
session.show()
| 28.23913 | 78 | 0.69746 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 168 | 0.12933 |
9257db98b395b4b7c78f60feda8d5c9358ca061b | 709 | py | Python | ADTs/Node.py | pacevedom/three-in-line-AI | 9e42c1512bbfccbf5bccd9adc4a5da8bc595d4e5 | [
"MIT"
] | null | null | null | ADTs/Node.py | pacevedom/three-in-line-AI | 9e42c1512bbfccbf5bccd9adc4a5da8bc595d4e5 | [
"MIT"
] | null | null | null | ADTs/Node.py | pacevedom/three-in-line-AI | 9e42c1512bbfccbf5bccd9adc4a5da8bc595d4e5 | [
"MIT"
] | 1 | 2021-11-28T15:42:44.000Z | 2021-11-28T15:42:44.000Z | class Node:
def __init__(self, value, child, parent):
self._value = value
self._child = child
self._parent = parent
def get_value(self):
#Return the value of a node
return self._value
def get_child(self):
#Return the value of a node
return self._child
def get_parent(self):
#Return the parent of a node
return self._parent
def set_value(self, value):
#Change the value of a node
self._value = value
def set_child(self, child):
#Change the value of a node
self._child = child
def set_parent(self, parent):
#Change the parent reference
self._parent = parent | 24.448276 | 45 | 0.602257 | 709 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 164 | 0.231312 |
92588b0247061718ca53cbfd3f3046c34ac25694 | 9,001 | py | Python | deployment/cloudformation/data.py | azavea/cac-tripplanner | 08eaa7e909484ac04687e078fbecba7e7fdc452e | [
"Apache-2.0"
] | 13 | 2015-08-07T13:54:53.000Z | 2020-10-20T18:59:01.000Z | deployment/cloudformation/data.py | azavea/cac-tripplanner | 08eaa7e909484ac04687e078fbecba7e7fdc452e | [
"Apache-2.0"
] | 1,177 | 2015-01-15T16:46:38.000Z | 2022-03-29T13:37:37.000Z | deployment/cloudformation/data.py | azavea/cac-tripplanner | 08eaa7e909484ac04687e078fbecba7e7fdc452e | [
"Apache-2.0"
] | 7 | 2015-01-19T20:10:38.000Z | 2019-10-16T05:57:47.000Z | """Handles template generation for Cac Data Plane stack"""
from troposphere import (
Parameter,
Ref,
Output,
Tags,
GetAtt,
ec2,
rds,
route53
)
from .utils.constants import RDS_INSTANCE_TYPES
from majorkirby import StackNode
class BaseFactory(object):
    """Base class for factories that inject pieces into Troposphere templates.

    Subclasses must populate ``self.parameters``, ``self.resources`` and
    ``self.outputs`` in ``__init__()``.  The ``insert_*`` methods may also
    be overridden when the default behavior is not sufficient.
    """
    def __init__(self):
        self.parameters = []
        self.resources = []
        self.outputs = []

    def insert_parameters(self, template):
        """Add this factory's parameters to *template*.

        Parameters whose title already exists in the template are skipped,
        since several factories may rely on the same parameter.

        :param template: troposphere.Template to insert params into.
        :return: list of troposphere.Parameter objects actually inserted.
        """
        added = []
        for parameter in self.parameters:
            if parameter.title not in template.parameters:
                added.append(template.add_parameter(parameter))
        return added

    def insert_resources(self, template):
        """Add this factory's resources to *template*.

        Unlike parameters, duplicate keys raise here: two factories should
        never attempt to create the same resource.

        :param template: troposphere.Template to insert resources into.
        :return: list of inserted resource objects.
        """
        return [template.add_resource(resource) for resource in self.resources]

    def insert_outputs(self, template):
        """Add this factory's outputs to *template*.

        Duplicate output keys raise, like resources.

        :param template: troposphere.Template to insert outputs into.
        :return: list of inserted troposphere.Output objects.
        """
        return [template.add_output(output) for output in self.outputs]

    def populate_template(self, template):
        """Insert parameters, resources and outputs, then return the
        populated template (the return value allows nesting calls)."""
        self.insert_parameters(template)
        self.insert_resources(template)
        self.insert_outputs(template)
        return template
class RDSFactory(BaseFactory):
    """Can add a Cac RDS instance to a Template."""
    def __init__(self, tags=None):
        """Build the parameters, resources and outputs for a Cac RDS stack.

        :param tags: optional dict of extra tags applied to every taggable
            resource.  Defaults to no extra tags.  (A ``None`` sentinel is
            used instead of ``tags=dict()`` because a mutable default
            argument would be shared between all calls.)
        """
        super(RDSFactory, self).__init__()
        self.tags = tags if tags is not None else {}
        # Largely copied from
        # https://github.com/cloudtools/troposphere/blob/master/examples/RDS_VPC.py
        # Each parameter is followed by the resources which depend on it.
        # VPC and security groups
        vpcid = Parameter(
            'VpcId',
            Type='String',
            Description='Id of existing VPC'
        )
        private_hosted_zone_id = Parameter(
            'PrivateHostedZoneId',
            Type='String',
            Description='Private hosted zone id'
        )
        db_security_group = ec2.SecurityGroup(
            'sgDatabase',
            GroupDescription='Security group for RDS DB Instance.',
            VpcId=Ref(vpcid),
            Tags=Tags(Name='Database', **self.tags)
        )
        # Subnets
        subnets = Parameter(
            'AppServerSubnets',
            Type='CommaDelimitedList',
            Description='List of SubnetIds spanning at least two AZs in VPC'
        )
        subnet_group = rds.DBSubnetGroup(
            'CacDbSubnetGroup',
            DBSubnetGroupDescription='Subnets available for Cac RDS instance',
            SubnetIds=Ref(subnets),
            Tags=Tags(Name='RDSSubnetGroup', **self.tags)
        )
        # Database
        db_name = Parameter(
            'DbName',
            Description='Name of the database to be created',
            Type='String',
            MinLength='5',
            MaxLength='63',
            AllowedPattern='[a-zA-Z_][a-zA-Z0-9_]*',
            ConstraintDescription='Name must begin with a letter and contain only alphanumerics'
        )
        db_user = Parameter(
            'DbUser',
            NoEcho=True,
            Description='Database admin user account',
            Type='String',
            MinLength='5',
            MaxLength='16',
            AllowedPattern='[a-zA-Z][a-zA-Z0-9]*',
            ConstraintDescription='Name must begin with a letter and contain only alphanumerics'
        )
        db_password = Parameter(
            'DbPassword',
            NoEcho=True,
            Description='Database admin account password',
            Type='String',
            MinLength='8',
        )
        db_instance_class = Parameter(
            'DbInstanceClass',
            Default='db.m3.medium',
            Description='Database instance class',
            Type='String',
            AllowedValues=RDS_INSTANCE_TYPES
        )
        db_storage = Parameter(
            'DbStorage',
            Description='Available database storage (GB)',
            Default='100',
            Type='Number',
            MaxValue='1024',
            ConstraintDescription='Storage space must be less than 1024GB',
        )
        db_dns_name = Parameter(
            'DbDNSName',
            Type='String',
            Description='Private DNS name for database'
        )
        database = rds.DBInstance(
            'CacDb',
            DBName=Ref(db_name),
            AllocatedStorage=Ref(db_storage),
            DBInstanceClass=Ref(db_instance_class),
            Engine='postgres',
            EngineVersion='9.4',
            MasterUsername=Ref(db_user),
            MasterUserPassword=Ref(db_password),
            DBSubnetGroupName=Ref(subnet_group),
            VPCSecurityGroups=[Ref(db_security_group)],
            MultiAZ=True,
            Tags=Tags(Name='CacDB', **self.tags)
        )
        # Private DNS record pointing at the RDS endpoint.
        db_dns_record = route53.RecordSetType(
            'rsDatabase',
            Name=Ref(db_dns_name),
            ResourceRecords=[GetAtt('CacDb', 'Endpoint.Address')],
            TTL=600,
            Type='CNAME',
            HostedZoneId=Ref(private_hosted_zone_id),
        )
        # Outputs
        rds_endpoint = Output(
            'CacDbEndpoint',
            Description='Endpoint to which Postgres clients should connect',
            Value=GetAtt('CacDb', 'Endpoint.Address')
        )
        database_name = Output(
            'CacDbName',
            Description='Name of database created on Cac RDS instance',
            Value=Ref(db_name)
        )
        db_sg = Output(
            'DatabaseSecurityGroup',
            Description='Security Group of Database',
            Value=GetAtt('sgDatabase', 'GroupId')
        )
        self.parameters = [vpcid, private_hosted_zone_id, subnets, db_name,
                           db_user, db_password, db_instance_class,
                           db_storage, db_dns_name]
        self.resources = [db_security_group, subnet_group, database,
                          db_dns_record]
        self.outputs = [rds_endpoint, database_name, db_sg]
class DataPlaneGenerator(StackNode):
    """Create a template for the Cac data plane."""
    # Input wiring: each key lists the global/stack outputs it may be
    # satisfied from, in priority order.
    INPUTS = {'Tags': ['global:Tags'],
              'BastionSecurityGroup': ['global:BastionSecurityGroup', 'VPC:BastionSecurityGroup'],
              'VpcId': ['global:VpcId', 'VPC:VpcId'],
              'AppServerSubnets': ['global:AppServerSubnets', 'VPC:DefaultAppServerPrivateSubnets'],
              'DbName': ['global:DbName'],
              'DbUser': ['global:DbUser'],
              'DbPassword': ['global:DbPassword'],
              'DbInstanceClass': ['global:DbInstanceClass'],
              'DbStorage': ['global:DbStorage'],
              'PrivateHostedZoneId': ['global:PrivateHostedZoneId',
                                      'R53PrivateHostedZone:PrivateHostedZoneId'],
              'DbDNSName': ['global:DbDNSName'],
              'StackType': ['global:StackType']
              }
    # Fallback values used when an input above is not supplied.
    DEFAULTS = {'Tags': {},
                'DbName': 'cac',
                'DbStorage': 150,
                'DbInstanceClass': 'db.m3.medium',
                'StackType': 'Development'
                }
    NAME = 'DataPlane'
    ATTRIBUTES = {'StackType': 'StackType'}

    def set_up_stack(self):
        """Sets up the stack: populates this template via RDSFactory and
        wires every resulting parameter straight through by name."""
        super(DataPlaneGenerator, self).set_up_stack()
        self.add_description('Data Plane Stack for Cac')
        self.rds_stack = RDSFactory()
        self.rds_stack.populate_template(self)
        for key in self.parameters:
            self.input_wiring[key] = key
| 36.004 | 100 | 0.59249 | 8,733 | 0.970226 | 0 | 0 | 0 | 0 | 0 | 0 | 3,714 | 0.412621 |
9259b7ed50eae624a0ff1eb302547c63e7d7b373 | 1,113 | py | Python | tests/test_default_currencies_provider.py | pedroburon/python-monon | 670a5c5e98171838d43fd1f72d0afbf7a70b937b | [
"MIT"
] | 1 | 2017-07-10T15:32:48.000Z | 2017-07-10T15:32:48.000Z | tests/test_default_currencies_provider.py | pedroburon/python-monon | 670a5c5e98171838d43fd1f72d0afbf7a70b937b | [
"MIT"
] | 5 | 2017-07-10T16:34:28.000Z | 2017-07-10T16:41:38.000Z | tests/test_default_currencies_provider.py | pedroburon/python-monon | 670a5c5e98171838d43fd1f72d0afbf7a70b937b | [
"MIT"
] | null | null | null |
from unittest import TestCase
from decimal import Decimal, ROUND_UP
from monon.currency import DefaultCurrenciesProvider
class DefaultCurrenciesProviderTestCase(TestCase):
    """Checks the built-in USD metadata exposed by DefaultCurrenciesProvider."""

    def setUp(self):
        self.provider = DefaultCurrenciesProvider()
        self.isocode = 'USD'

    def test_decimal_places(self):
        self.assertEqual(2, self.provider.get_decimal_places(self.isocode))

    def test_symbol(self):
        self.assertEqual('$', self.provider.get_symbol(self.isocode))

    def test_validate_currency(self):
        # A known ISO code must validate silently (returning None).
        self.assertIsNone(self.provider.validate_currency(self.isocode))

    def test_format_positive_amount(self):
        formatted = self.provider.format_amount(self.isocode,
                                                Decimal('43321.123'))
        self.assertEqual('$43321.123', formatted)

    def test_format_negative_amount(self):
        # The minus sign goes before the currency symbol.
        formatted = self.provider.format_amount(self.isocode,
                                                Decimal('-1234.123'))
        self.assertEqual('-$1234.123', formatted)

    def test_rounding(self):
        self.assertEqual(ROUND_UP, self.provider.get_rounding(self.isocode))
925a517d59e52167dcdd4e1d3358f0d6bda53758 | 29,242 | py | Python | appion/bin/imageloader.py | leschzinerlab/myami-3.2-freeHand | 974b8a48245222de0d9cfb0f433533487ecce60d | [
"MIT"
] | null | null | null | appion/bin/imageloader.py | leschzinerlab/myami-3.2-freeHand | 974b8a48245222de0d9cfb0f433533487ecce60d | [
"MIT"
] | null | null | null | appion/bin/imageloader.py | leschzinerlab/myami-3.2-freeHand | 974b8a48245222de0d9cfb0f433533487ecce60d | [
"MIT"
] | 1 | 2019-09-05T20:58:37.000Z | 2019-09-05T20:58:37.000Z | #!/usr/bin/env python
#pythonlib
import os
import sys
import shutil
import time
import numpy
import math
import glob
#appion
from appionlib import appionLoop2
from appionlib import apDatabase
from appionlib import apDisplay
from appionlib import apDBImage
from appionlib import apProject
from appionlib import apEMAN
from appionlib import apFile
#leginon
import leginon.leginondata
import leginon.projectdata
import leginon.leginonconfig
import leginon.ddinfo
#pyami
from pyami import mrc
from pyami import fileutil
class ImageLoader(appionLoop2.AppionLoop):
#=====================
def __init__(self):
    """
    appionScript OVERRIDE

    Initialize upload bookkeeping before the generic AppionLoop
    machinery starts.
    """
    # number of images processed so far
    self.processcount = 0
    # cache of reference image data (presumably dark/norm corrections;
    # populated elsewhere in the class -- TODO confirm)
    self.refdata = {}
    appionLoop2.AppionLoop.__init__(self)
#=====================
def setupParserOptions(self):
    """
    standard appionScript

    Registers all command-line options for the image uploader.  Options
    fall into two modes: explicit per-image parameters given on the
    command line (mode 1) or a batch parameter file (mode 2).
    """
    ### id info
    self.parser.add_option("--userid", dest="userid", type="int",
        help="Leginon User database ID", metavar="INT")
    ### redefine preset option to default to upload
    self.parser.remove_option("--preset")
    self.parser.add_option("--preset", dest="preset", default='upload',
        help="Image preset associated with all uploaded images, e.g. --preset=upload", metavar="PRESET")
    self.parser.add_option("--dir", dest="imgdir", type="string", metavar="DIR",
        help="directory containing MRC files for upload")
    # accepted input formats; used as the choices list for --filetype
    self.filetypes = ("mrc","dm3","dm2","tif")
    self.parser.add_option("--filetype", dest="filetype", metavar="TYPE",
        help="input image filetype",
        type="choice", choices=self.filetypes, default="mrc")
    # --append-session / --no-append-session toggle the same dest flag
    self.parser.add_option("--append-session", dest="appendsession", default=True,
        action="store_true", help="Append session to image names")
    self.parser.add_option("--no-append-session", dest="appendsession", default=True,
        action="store_false", help="Do not append session to image names")
    self.parser.add_option("--invert", dest="invert", default=False,
        action="store_true", help="Invert image density")
    self.parser.add_option("--norm", dest="normimg", type="string", metavar="PATH",
        help="normalization image to apply to each upload")
    self.parser.add_option("--dark", dest="darkimg", type="string", metavar="PATH",
        help="dark image to apply to each upload frame")
    ### mode 1: command line params
    self.parser.add_option("--tiltgroup", dest="tiltgroup", type="int", default=1,
        help="Number of image per tilt series, default=1", metavar="INT")
    self.parser.add_option("--apix", dest="apix", type="float", metavar="FLOAT",
        help="angstroms per pixel")
    self.parser.add_option("--df", dest="df", type="float", metavar="DEFOCUS",
        help="nominal defocus (negative, in microns)")
    self.parser.add_option("--mag", dest="mag", type="int", metavar="MAG",
        help="nominal magnification")
    self.parser.add_option("--kv", dest="kv", type="int", metavar="INT",
        help="high tension (in kilovolts)")
    self.parser.add_option("--cs", dest="cs", type="float", metavar="#.#",
        default=2.0, help="spherical aberration constant (in mm), e.g., --cs=2.0")
    self.parser.add_option("--binx", dest="binx", type="int", metavar="INT",
        default=1, help="binning in x (default=1)")
    self.parser.add_option("--biny", dest="biny", type="int", metavar="INT",
        default=1, help="binning in y (default=1)")
    ### mode 2: batch script
    self.parser.add_option("--batch", "--batchparams", dest="batchscript", type="str",
        help="File containing image parameters", metavar="FILE")
#=====================
def checkConflicts(self):
"""
standard appionScript
"""
if self.params['batchscript'] is None:
#mode 1: command line params
if self.params['apix'] is None:
apDisplay.printError("If not specifying a parameter file, supply apix")
if self.params['df'] is None:
apDisplay.printError("If not specifying a parameter file, supply defocus of the images")
if self.params['df'] > 0:
apDisplay.printWarning("defocus is being switched to negative")
self.params['df']*=-1
if self.params['df'] > -0.1:
apDisplay.printError("defocus must be in microns")
if self.params['mag'] is None:
apDisplay.printError("If not specifying a parameter file, supply magnification")
if self.params['kv'] is None:
apDisplay.printError("If not specifying a parameter file, supply a high tension")
if self.params['kv'] > 1000:
apDisplay.printError("High tension must be in kilovolts (e.g., 120)")
if self.params['cs'] < 0.0:
apDisplay.printError("Cs value must be in mm (e.g., 2.0)")
if self.params['imgdir'] is None:
apDisplay.printError("If not specifying a parameter file, specify directory containing images")
if not os.path.exists(self.params['imgdir']):
apDisplay.printError("specified path '%s' does not exist\n"%self.params['imgdir'])
elif not os.path.isfile(self.params["batchscript"]):
#mode 2: batch script
apDisplay.printError("Could not find Batch parameter file: %s"%(self.params["batchscript"]))
if self.params['sessionname'] is None:
apDisplay.printError("Please provide a Session name, e.g., --session=09feb12b")
if self.params['projectid'] is None:
apDisplay.printError("Please provide a Project database ID, e.g., --projectid=42")
if self.params['description'] is None:
apDisplay.printError("Please provide a Description, e.g., --description='awesome data'")
if self.params['userid'] is None:
self.params['userid'] = self.getLeginonUserId()
if self.params['normimg'] is not None:
if not os.path.exists(self.params['normimg']):
apDisplay.printError("specified image path for normalization '%s' does not exist\n"%self.params['normimg'])
if self.params['normimg'] is None and self.params['darkimg'] is not None:
apDisplay.printError("Only dark but not normalization image is not enough forcorrection")
#This really is not conflict checking but to set up new session.
#There is no place in Appion script for this special case
sessionq = leginon.leginondata.SessionData(name=self.params['sessionname'])
sessiondatas = sessionq.query()
if len(sessiondatas) > 0:
### METHOD 1 : session already exists
apDisplay.printColor("Add images to an existing session", "cyan")
sessiondata = sessiondatas[0]
### what about linking an existing session with project id to a new project id
oldprojectid = apProject.getProjectIdFromSessionName(self.params['sessionname'])
if oldprojectid != self.params['projectid']:
apDisplay.printError("You cannot assign an existing session (PID %d) to a different project (PID %d)"%
(oldprojectid, self.params['projectid']))
if self.params['rundir'] is not None and self.params['rundir'] != sessiondata['image path']:
apDisplay.printError("Specified Rundir is different from current session path\n%s\n%s"
%( self.params['rundir'], sessiondata['image path']))
### only allows uploading more images if all images are uploaded through appion host.
instrumentq = leginon.leginondata.InstrumentData(hostname='appion',name='AppionTEM')
appiontems = instrumentq.query()
allappionscopeems = []
for appiontem in appiontems:
scopeemq = leginon.leginondata.ScopeEMData(session=sessiondatas[0],tem=appiontem)
appionscopeems = scopeemq.query()
if appionscopeems:
allappionscopeems.extend(appionscopeems)
scopeemq = leginon.leginondata.ScopeEMData(session=sessiondatas[0])
allscopeems = scopeemq.query()
if len(allscopeems) > len(allappionscopeems):
apDisplay.printError("You can only add more images to an existing session that contains only appion uploads")
else:
### METHOD 2 : create new session
apDisplay.printColor("Creating a new session", "cyan")
try:
directory = leginon.leginonconfig.mapPath(leginon.leginonconfig.IMAGE_PATH)
except AttributeError:
apDisplay.printWarning("Could not set directory")
directory = ''
if self.params['userid'] is not None:
userdata = leginon.leginondata.UserData.direct_query(self.params['userid'])
else:
userdata = None
sessiondata = self.createSession(userdata, self.params['sessionname'], self.params['description'], directory)
self.linkSessionProject(sessiondata, self.params['projectid'])
self.session = sessiondata
return
#=====================
def commitToDatabase(self, imagedata):
    """
    standard appionScript hook: per-image database inserts are handled
    elsewhere in this uploader, so this override does nothing.
    """
    return None
#=====================
def setRunDir(self):
    """
    standard appionScript: the run directory is the session's raw image path.
    """
    image_path = self.session['image path']
    self.params['rundir'] = image_path
#=====================
#===================== Appion Loop Hacks
#=====================
#=====================
def getLeginonUserId(self):
    """
    standard appionScript: map the current unix login name to a leginon
    UserData dbid.  Returns None when the login name is unavailable or no
    matching leginon user exists.
    """
    try:
        username = os.getlogin()
    except OSError:
        # BUG FIX: was a bare `except:`, which also swallowed
        # KeyboardInterrupt/SystemExit; os.getlogin() raises OSError when
        # there is no controlling terminal (e.g. cron/daemon runs)
        return None
    userq = leginon.leginondata.UserData()
    userq['username'] = username
    userdatas = userq.query(results=1)
    if not userdatas:
        return None
    return userdatas[0].dbid
#=====================
def preLoopFunctions(self):
    """
    standard appionLoop hook: set up the corrector client, register the
    fake Appion instruments, and upload reference images when available.
    """
    self.c_client = apDBImage.ApCorrectorClient(self.session,True)
    self.getAppionInstruments()
    if self.params['normimg']:
        # need at least normimg to upload reference. darkimg can be faked
        # (uploadRefImage synthesizes a flat dark frame when darkimg is None)
        self.uploadRefImage('norm', self.params['normimg'])
        self.uploadRefImage('dark', self.params['darkimg'])
#=====================
def run(self):
    """
    appionLoop OVERRIDE: process every entry in self.batchinfo — read/convert
    the image, optionally commit it to the database, and record it as done.
    """
    self.pixelsizes = {}
    ### get images from upload image parameters file
    self.getAllImages()
    os.chdir(self.params['rundir'])
    self.preLoopFunctions()
    ### start the loop
    self.badprocess = False
    self.stats['startloop'] = time.time()
    apDisplay.printColor("\nBeginning Main Loop", "green")
    imgnum = 0
    while imgnum < len(self.batchinfo):
        self.stats['startimage'] = time.time()
        info = self.readUploadInfo(self.batchinfo[imgnum])
        imgnum += 1
        ### CHECK IF IT IS OKAY TO START PROCESSING IMAGE
        if not self._startLoop(info):
            continue
        ### START any custom functions HERE:
        imgdata, results = self.loopProcessImage(info)
        ### WRITE db data
        if self.badprocess is False:
            if self.params['commit'] is True:
                apDisplay.printColor(" ==== Committing data to database ==== ", "blue")
                self.loopCommitToDatabase(imgdata)
                self.commitResultsToDatabase(imgdata, results)
            else:
                apDisplay.printWarning("not committing results to database, all data will be lost")
                apDisplay.printMsg("to preserve data start script over and add 'commit' flag")
                self.writeResultsToFiles(imgdata, results)
        else:
            # a processing step flagged failure; reset the flag and move on
            apDisplay.printWarning("IMAGE FAILED; nothing inserted into database")
            self.badprocess = False
        ### FINISH with custom functions
        self._writeDoneDict(imgdata['filename'])
        # throttle when the host is busy so uploads do not starve other jobs
        load = os.getloadavg()[0]
        if load > 2.0:
            apDisplay.printMsg("Load average is high %.2f"%(load))
            sleeptime = min(load, 60)
            apDisplay.printMsg("Sleeping %.1f seconds"%(sleeptime))
            # BUG FIX: previously slept for `load` seconds (uncapped) instead
            # of the capped `sleeptime` that was computed and reported above
            time.sleep(sleeptime)
    self._printSummary()
    self.postLoopFunctions()
    self.close()
#=====================
def getAllImages(self):
    """
    appionLoop OVERRIDE: populate self.batchinfo from the batch script when
    one was given, otherwise from the command-line parameters.
    """
    if self.params['batchscript']:
        rows = self.readBatchUploadInfo()
    else:
        rows = self.setBatchUploadInfo()
    self.batchinfo = rows
    self.stats['imagecount'] = len(rows)
#=====================
def _startLoop(self, info):
    """
    appionLoop OVERRIDE: set up per-image bookkeeping for a new image and
    decide whether it should be processed; returns False to skip it.
    """
    if info is None:
        # unreadable batch entry -> count as skipped
        self.stats['lastimageskipped'] = True
        self.stats['skipcount'] += 1
        return False
    name = info['filename']
    # skip files whose upload already exists in the leginon database
    imgq = leginon.leginondata.AcquisitionImageData(session=self.session, filename=name)
    if imgq.query(readimages=False):
        apDisplay.printWarning("File %s.mrc exists at the destination" % name)
        apDisplay.printWarning("Skip Uploading")
        self.stats['lastimageskipped'] = True
        self.stats['skipcount'] += 1
        return False
    # number of images still to process
    self.stats['imagesleft'] = self.stats['imagecount'] - self.stats['count']
    # announce the image only when the previous iteration actually processed one
    if self.stats['lastcount'] != self.stats['count']:
        apDisplay.printColor(
            "\nStarting image %s ( skip:%s, remain:%s ) file: %s"
            % (self.stats['count'], self.stats['skipcount'],
               self.stats['imagesleft'], apDisplay.short(name)),
            "green")
        self.stats['lastcount'] = self.stats['count']
        if apDisplay.isDebugOn():
            self._checkMemLeak()
    # check to see if image has already been processed
    if self._alreadyProcessed(info):
        return False
    self.stats['waittime'] = 0
    return True
def newImagePath(self, rootname):
    '''
    Return the (image path, frame path) pair for an uploaded image; when the
    session has no frame path, the frame path falls back to the image path.
    '''
    extension = '.mrc'
    image_name = rootname + extension
    frame_name = rootname + '.frames' + extension
    image_path = os.path.join(self.session['image path'], image_name)
    frame_dir = self.session['frame path']
    frame_path = os.path.join(frame_dir, frame_name) if frame_dir else image_path
    return image_path, frame_path
def getImageDimensions(self, mrcfile):
    '''
    Returns dictionary of x,y dimension for an mrc image/image stack.
    '''
    mrcheader = mrc.readHeaderFromFile(mrcfile)
    # BUG FIX: the header values were cast through numpy.uint16 before int(),
    # which silently wraps any dimension >= 65536; convert directly instead
    return {'x': int(mrcheader['nx']), 'y': int(mrcheader['ny'])}
def getNumberOfFrames(self, mrcfile):
    '''
    Returns number of frames of an mrc image/image stack (at least 1).
    '''
    mrcheader = mrc.readHeaderFromFile(mrcfile)
    # BUG FIX: avoid the lossy numpy.uint16 narrowing cast on nz; a plain
    # int() conversion preserves large frame counts
    return max(1, int(mrcheader['nz']))
def makeFrameDir(self,newdir):
    '''Create the raw-frame directory via the appion fileutil helper.'''
    fileutil.mkdirs(newdir)
def copyFrames(self,source,destination):
    '''Copy the raw frame stack to the session frame path using the appion
    safe-copy helper (exact overwrite semantics live in apFile.safeCopy).'''
    apFile.safeCopy(source, destination)
def prepareImageForUpload(self, origfilepath, newframepath=None, nframes=1):
    '''
    Read the original image into memory (summing the frames of a stack) and,
    for stacks, copy the raw frames to *newframepath*.  Reading instead of
    copying obeys the save-image-then-insert-record rule without holding more
    than one image in memory.
    '''
    apDisplay.printMsg("Reading original image: "+origfilepath)
    if nframes <= 1:
        return mrc.read(origfilepath)
    apDisplay.printMsg('Summing %d frames for image upload' % nframes)
    summed = mrc.sumStack(origfilepath)
    apDisplay.printMsg('Copying frame stack %s to %s' % (origfilepath, newframepath))
    self.copyFrames(origfilepath, newframepath)
    return summed
def correctImage(self, rawarray, nframes):
    '''
    Gain/dark correct *rawarray* when a norm reference was uploaded; the dark
    reference is scaled to the frame count (or faked as zeros when absent).
    '''
    norm_ref = self.refdata.get('norm')
    if not norm_ref:
        # no norm/dark to correct
        return rawarray
    normarray = norm_ref['image']
    dark_ref = self.refdata.get('dark')
    if dark_ref:
        darkarray = dark_ref['image'] * nframes / dark_ref['camera']['nframes']
    else:
        darkarray = numpy.zeros(rawarray.shape)
    apDisplay.printMsg('Normalizing image before upload')
    return self.c_client.normalizeImageArray(rawarray, darkarray, normarray, is_counting=False)
#=====================
def processImage(self, imginfo):
    """
    standard appionLoop: upload one image described by *imginfo* and return
    (imgdata, pixeldata); pixeldata is always None in this uploader.
    """
    self.updatePixelSizeCalibration(imginfo)
    source_path = imginfo['original filepath']
    dest_path, frame_dest = self.newImagePath(imginfo['filename'])
    nframes = self.getNumberOfFrames(source_path)
    if nframes > 1:
        # frame movies require a frame path configured for the session
        if not self.session['frame path']:
            apDisplay.printError('Can not upload frame movies: Frame path for this session not defined. Please start a new session')
        self.makeFrameDir(self.session['frame path'])
    ## read the image/summed file into memory and copy frames if available
    pixels = self.prepareImageForUpload(source_path, frame_dest, nframes)
    pixels = self.correctImage(pixels, nframes)
    imgdata = self.makeImageData(pixels, imginfo, nframes)
    if self.isTomoTilts():
        self.makeTomographyPredictionData(imgdata)
    return imgdata, None
#=====================
#===================== custom functions
#=====================
#=====================
def publish(self, data, dbforce=False):
    """
    Insert *data* unless an identical record already exists.  sinedon already
    does this check, but the existing record is wanted back whether or not
    --commit is set, so the query is repeated here.
    """
    existing = data.query(readimages=False)
    if existing and not dbforce:
        return existing[0]
    if self.params['commit'] is True:
        data.insert(force=dbforce)
    return data
#=====================
def createSession(self, user, name, description, directory):
    """Insert a new leginon session record (plus its reservation) and return it."""
    # rawdata path under the leginon image root, normalized to forward slashes
    imagedirectory = os.path.join(
        leginon.leginonconfig.unmapPath(directory), name, 'rawdata').replace('\\', '/')
    sessionq = leginon.leginondata.SessionData(initializer={
        'name': name,
        'comment': description,
        'user': user,
        'hidden': False,
        'image path': imagedirectory,
        'frame path': leginon.ddinfo.getRawFrameSessionPathFromSessionPath(imagedirectory),
    })
    sessiondata = self.publish(sessionq)
    # session become unreserved if it is committed
    reservationq = leginon.leginondata.SessionReservationData(
        name=sessiondata['name'], reserved=False)
    self.publish(reservationq, True)
    return sessiondata
#=====================
def linkSessionProject(self, sessiondata, projectid):
    """Associate the session with the given project (insert only on --commit)."""
    link = leginon.projectdata.projectexperiments()
    link['session'] = sessiondata
    link['project'] = leginon.projectdata.projects.direct_query(projectid)
    if self.params['commit'] is True:
        link.insert()
#=====================
def readBatchUploadInfo(self):
    """
    Parse the tab-separated batch script (one image per line; see
    readUploadInfo for the column format) into a list of column lists.
    """
    batchfilename = self.params['batchscript']
    if not os.path.exists(batchfilename):
        apDisplay.printError('Batch file %s not exist' % batchfilename)
        return []
    with open(batchfilename, 'r') as handle:
        lines = handle.readlines()
    batchinfo = []
    for lineno, line in enumerate(lines, start=1):
        # remove white space at ends
        entry = line.strip()
        if ' ' in entry:
            apDisplay.printWarning("There is a space in the batch file on line %d" % (lineno))
        cols = entry.split('\t')
        if len(cols) > 1:
            batchinfo.append(cols)
        else:
            apDisplay.printWarning("Skipping line %d" % (lineno))
    return batchinfo
#=====================
def setBatchUploadInfo(self):
    """
    Build the per-image upload rows when no batch script file was given: the
    same acquisition parameters are applied to every image in imgdir.
    Returns rows of [filepath, apix(m), binx, biny, mag, defocus(m), ht(V)].
    """
    batchinfo = []
    imgdir = os.path.join(self.params['imgdir'], "*." + self.params['filetype'])
    upfiles = glob.glob(imgdir)
    if not upfiles:
        apDisplay.printError("No images for upload in '%s'" % self.params['imgdir'])
    if self.donedict and len(self.donedict) > 1:
        # drop files whose (renamed) upload is already recorded as done
        apDisplay.printMsg("Cleaning up alreadly uploaded images")
        newupfiles = []
        count = 0
        for imgfile in upfiles:
            count += 1
            if count % 10 == 0:
                sys.stderr.write("..%d " % (len(newupfiles)))
            basename = os.path.basename(imgfile)
            justbase = os.path.splitext(basename)[0]
            newfile = self.params['sessionname'] + "_" + justbase
            try:
                self.donedict[newfile]
            except KeyError:
                # BUG FIX: was a bare `except:`, which also swallowed
                # KeyboardInterrupt/SystemExit; only a missing key means
                # the file still needs to be uploaded
                newupfiles.append(imgfile)
        sys.stderr.write("\n")
        if len(newupfiles) > 0:
            apDisplay.printMsg("Removed %d of %d files :: %d remain to process" %
                (len(upfiles) - len(newupfiles), len(upfiles), len(newupfiles)))
            upfiles = newupfiles
        # NOTE(review): when every file is already done, newupfiles is empty
        # and upfiles is left unchanged, so all files are re-examined; the
        # done-dict check in the main loop skips them again later
    upfiles.sort()
    for upfile in upfiles:
        fname = os.path.abspath(upfile)
        apix = "%.4e" % (self.params['apix'] * 1e-10)  # Angstroms -> meters
        binx = "%d" % (self.params['binx'])
        biny = "%d" % (self.params['biny'])
        mag = "%d" % (self.params['mag'])
        df = "%.4e" % (self.params['df'] * 1e-6)       # microns -> meters
        ht = "%d" % (self.params['kv'] * 1000)         # kV -> volts
        cols = [fname, apix, binx, biny, mag, df, ht]
        batchinfo.append(cols)
    return batchinfo
#=====================
def readUploadInfo(self, info=None):
    """
    Turn one batch row into the upload-info dictionary used downstream.
    Columns: filepath, pixelsize(m), binx, biny, mag, defocus(m), ht(V),
    optional stage alpha (degrees), optional dose (e/m^2 * 1e-20).
    Returns None (after warnings) when the source file does not exist.
    """
    if info is None:
        # example
        info = ['test.mrc','2e-10','1','1','50000','-2e-6','120000','0.0','20.0']
    apDisplay.printMsg('reading image info for %s'%(os.path.abspath(info[0])))
    try:
        uploadedInfo = {}
        uploadedInfo['original filepath'] = os.path.abspath(info[0])
        uploadedInfo['unbinned pixelsize'] = float(info[1])
        if uploadedInfo['unbinned pixelsize'] > 1e-6:
            apDisplay.printError("pixel size is bigger than a micron, that is ridiculous")
        uploadedInfo['binning'] = {'x':int(info[2]),'y':int(info[3])}
        uploadedInfo['magnification'] = int(info[4])
        uploadedInfo['defocus'] = float(info[5])
        uploadedInfo['high tension'] = int(info[6])
        if len(info) > 7:
            # stage alpha: degrees in the batch file, radians internally
            uploadedInfo['stage a'] = float(info[7])*math.pi/180.0
        else:
            uploadedInfo['stage a'] = 0.0
        if len(info) > 8:
            uploadedInfo['dose'] = float(info[8])*1e+20
        else:
            uploadedInfo['dose'] = None
        # add other items in the dictionary and set to instrument in the function
        # setInfoToInstrument if needed
    except (IndexError, ValueError, TypeError):
        # BUG FIX: was a bare `except:`, which also swallowed
        # KeyboardInterrupt/SystemExit; only malformed columns are expected
        apDisplay.printError("Bad batch file parameters")
    if not os.path.isfile(uploadedInfo['original filepath']):
        apDisplay.printWarning("Original File %s does not exist" % uploadedInfo['original filepath'])
        apDisplay.printWarning("Skip Uploading")
        return None
    uploadedInfo['filename'] = self.setNewFilename(uploadedInfo['original filepath'])
    newimgfilepath = os.path.join(self.params['rundir'],uploadedInfo['filename']+".tmp.mrc")
    ### convert to mrc in new session directory if not mrc:
    if self.params['filetype'] != "mrc":
        if not os.path.isfile(newimgfilepath):
            emancmd = "proc2d %s %s edgenorm flip mrc"%(uploadedInfo['original filepath'], newimgfilepath)
            apEMAN.executeEmanCmd(emancmd)
        if not os.path.exists(newimgfilepath):
            apDisplay.printError("image conversion to mrc did not execute properly")
        uploadedInfo['original filepath'] = newimgfilepath
    dims = self.getImageDimensions(uploadedInfo['original filepath'])
    nframes = self.getNumberOfFrames(uploadedInfo['original filepath'])
    if self.params['invert'] is True:
        if nframes == 1:
            tmpimage = mrc.read(uploadedInfo['original filepath'])
            # invert image density
            tmpimage *= -1.0
            mrc.write(tmpimage,uploadedInfo['original filepath'])
        else:
            apDisplay.printError('Inverting a stack is not implemented')
    # works for both single and stack
    uploadedInfo['dimension'] = dims
    uploadedInfo['session'] = self.session
    uploadedInfo['pixel size'] = uploadedInfo['unbinned pixelsize']*uploadedInfo['binning']['x']
    return uploadedInfo
#=====================
def setNewFilename(self, original_filepath):
    """
    Derive the destination image name: by default keep the original basename
    (minus the filetype extension); the disabled alternative numbers uploads
    sequentially per session.  Optionally prefix the session name.
    """
    keep_old_name = True
    if keep_old_name:
        fullname = os.path.basename(original_filepath)
        suffix_at = fullname.rfind('.' + self.params['filetype'])
        name = fullname[:suffix_at] if suffix_at > 0 else fullname
    else:
        previous = leginon.leginondata.AcquisitionImageData(
            session=self.session).query(readimages=False)
        imgcount = len(previous) if previous else 0
        name = self.params['sessionname'] + '_%05dupload' % (imgcount + 1)
    if self.params['appendsession'] is True:
        name = self.params['sessionname'] + "_" + name
    return name
#=====================
def getTiltSeries(self):
    """
    Return the tilt-series record for the current image, or None when images
    are not grouped (tiltgroup unset or 1).  A new series record is started
    whenever processcount crosses a tiltgroup boundary.
    """
    group = self.params['tiltgroup']
    if group is None or group == 1:
        return None
    divide = float(self.processcount) / group
    self.processcount += 1
    residual = divide - math.floor(divide)
    tiltq = leginon.leginondata.TiltSeriesData(session=self.session)
    results = tiltq.query(results=1, readimages=False)
    if residual > 0:
        # mid-group: reuse the most recent series
        return results[0]
    series = results[0]['number'] + 1 if results else 1
    return self.makeTiltSeries(series)
def isTomoTilts(self):
    """Truthy when uploads are grouped into tomographic tilt series (>2 per group)."""
    group = self.params['tiltgroup']
    return group and group > 2
def makeTiltSeries(self, series):
    """
    Publish a TiltSeriesData record numbered *series*; for tomography uploads
    the tilt min/max/start/step is derived from the next tiltgroup batch rows
    (column 7 = stage alpha, see readUploadInfo).
    """
    tiltq = leginon.leginondata.TiltSeriesData(session=self.session, number=series)
    if self.isTomoTilts():
        # go back one since processcount is already advanced
        this_index = self.processcount - 1
        end_index = self.params['tiltgroup'] + this_index
        if end_index > len(self.batchinfo):
            apDisplay.printError('Not enough images to determine tilt series parameter')
        this_tilt_group_info = self.batchinfo[this_index:end_index]
        # BUG FIX: materialize the tilt angles as a list; under Python 3
        # map() returns a one-shot iterator, so min() would exhaust it and
        # the subsequent max()/indexing would fail
        tilts = [float(row[7]) for row in this_tilt_group_info]
        tiltq['tilt min'] = min(tilts)
        tiltq['tilt max'] = max(tilts)
        tiltq['tilt start'] = tilts[0]
        tiltq['tilt step'] = tilts[1] - tilts[0]
    return self.publish(tiltq)
def getTiltGroupInfo(self, tilts):
    """Return (min, max, start, step) for an indexable sequence of tilt angles."""
    start = tilts[0]
    step = tilts[1] - start
    return min(tilts), max(tilts), start, step
def makeTomographyPredictionData(self, imgdata):
    """Publish a default tomography-prediction record for an uploaded tilt image."""
    prediction = leginon.leginondata.TomographyPredictionData(
        session=self.session, image=imgdata)
    # pixel size (meters) comes from column 1 of the first batch row
    prediction['pixel size'] = float(self.batchinfo[0][1])
    prediction['correlation'] = {'x': 0.0, 'y': 0.0}
    # Only need phi. Maybe need user to input this
    prediction['predicted position'] = {
        'x': 0.0, 'y': 0.0, 'z': 0.0, 'z0': 0.0, 'phi': 0.0, 'optical axis': 0.0}
    return self.publish(prediction)
#=====================
def getAppionInstruments(self):
    """
    Publish (or fetch) the fake 'appion' TEM and camera instrument records
    and cache them on self.temdata / self.camdata.
    """
    tem = leginon.leginondata.InstrumentData()
    tem['hostname'] = "appion"
    tem['name'] = "AppionTEM"
    tem['cs'] = self.params['cs'] * 1e-3  # mm -> meters
    self.temdata = self.publish(tem)
    camera = leginon.leginondata.InstrumentData()
    camera['hostname'] = "appion"
    camera['name'] = "AppionCamera"
    self.camdata = self.publish(camera)
    return
def uploadRefImage(self, reftype, refpath):
    """
    Store a corrector reference image ('norm' or 'dark') for this session.
    When *refpath* is None a flat reference is synthesized: zeros for dark,
    ones for norm.
    """
    info = self.readUploadInfo(self.batchinfo[0])
    if refpath is None:
        nframes = 1
        shape = (info['dimension']['y'], info['dimension']['x'])
        if reftype == 'dark':
            imagearray = numpy.zeros(shape)
        else:
            imagearray = numpy.ones(shape)
    else:
        nframes = self.getNumberOfFrames(refpath)
        imagearray = self.prepareImageForUpload(refpath, None, nframes)
    imagedata = {
        'image': imagearray,
        'scope': self.makeScopeEMData(info),
        'camera': self.makeCameraEMData(info, nframes),
    }
    self.refdata[reftype] = self.c_client.storeCorrectorImageData(imagedata, reftype, 0)
def makeScopeEMData(self, info):
    """
    Build a ScopeEMData record for the fake Appion TEM from the upload info;
    only the stage alpha tilt is taken from the batch row, the rest of the
    stage position defaults to zero.
    """
    scopedata = leginon.leginondata.ScopeEMData(session=self.session, tem=self.temdata)
    scopedata['defocus'] = info['defocus']
    scopedata['magnification'] = info['magnification']
    scopedata['high tension'] = info['high tension']
    # BUG FIX: removed an unused TiltSeriesData object that was constructed
    # here and immediately discarded
    if 'stage a' in info:
        scopedata['stage position'] = {'x': 0.0, 'y': 0.0, 'z': 0.0, 'a': info['stage a']}
    else:
        scopedata['stage position'] = {'x': 0.0, 'y': 0.0, 'z': 0.0, 'a': 0.0}
    return scopedata
def makeCameraEMData(self, info, nframes):
    """Build a CameraEMData record for the fake Appion camera."""
    frame_time = 100
    cam = leginon.leginondata.CameraEMData(session=self.session, ccdcamera=self.camdata)
    cam['dimension'] = info['dimension']
    cam['binning'] = info['binning']
    cam['offset'] = {'x': 0, 'y': 0}
    cam['save frames'] = (nframes > 1)
    cam['nframes'] = nframes
    cam['frame time'] = frame_time
    cam['exposure time'] = frame_time * nframes
    return cam
#=====================
def makeImageData(self, imagearray, info, nframes):
    """
    Assemble and publish the AcquisitionImageData record (scope, camera,
    preset, tilt series, pixels, and any reference images).
    """
    scope = self.makeScopeEMData(info)
    camera = self.makeCameraEMData(info, nframes)
    preset = leginon.leginondata.PresetData(
        session=self.session, tem=self.temdata, ccdcamera=self.camdata)
    preset['name'] = self.params['preset']
    preset['magnification'] = info['magnification']
    preset['dose'] = info['dose']
    imgdata = leginon.leginondata.AcquisitionImageData(
        session=self.session, scope=scope, camera=camera, preset=preset)
    imgdata['tilt series'] = self.getTiltSeries()
    imgdata['filename'] = info['filename']
    imgdata['label'] = 'upload'
    # single image should not overload memory
    imgdata['image'] = imagearray
    # attach uploaded references (norm/dark) to the image record
    for key, refimage in self.refdata.items():
        imgdata[key] = refimage
    self.publish(imgdata)
    return imgdata
#=====================
def updatePixelSizeCalibration(self, info):
    """
    Record the pixel-size calibration for this magnification before the image
    is published; later queries pick the calibration closest to (and before)
    the published image.
    """
    mag = info['magnification']
    pixelsize = info['unbinned pixelsize']
    caldata = leginon.leginondata.PixelSizeCalibrationData()
    caldata['magnification'] = mag
    caldata['pixelsize'] = pixelsize
    caldata['comment'] = 'based on uploaded pixel size'
    caldata['session'] = self.session
    caldata['tem'] = self.temdata
    caldata['ccdcamera'] = self.camdata
    # nothing to do when this (mag, pixelsize) pair was the last one recorded
    if mag in self.pixelsizes and self.pixelsizes[mag] == pixelsize:
        return
    # force the insert even if an identical row exists: someone may have
    # changed the calibration since, and it must be re-asserted with a
    # newer timestamp
    self.publish(caldata, dbforce=True)
    self.pixelsizes[mag] = pixelsize
    apDisplay.printMsg("Sleeping 1 second")
    time.sleep(1.0)
#=====================
#=====================
#=====================
if __name__ == '__main__':
    # script entry point: ImageLoader is defined earlier in this module
    imgLoop = ImageLoader()
    imgLoop.run()
| 37.203562 | 126 | 0.692839 | 28,585 | 0.977532 | 0 | 0 | 0 | 0 | 0 | 0 | 10,103 | 0.345496 |
925b2a1e34a13b512d3f03e220fa0f35f79533d6 | 3,926 | py | Python | modules/exporter.py | MarzioMonticelli/python-cryptonet | 598c6e61fd7165f3a03ffce3f1de9f69d8645681 | [
"MIT"
] | 19 | 2020-04-09T15:01:43.000Z | 2022-03-28T07:39:40.000Z | modules/exporter.py | MarzioMonticelli/python-cryptonet | 598c6e61fd7165f3a03ffce3f1de9f69d8645681 | [
"MIT"
] | 4 | 2020-04-22T14:19:53.000Z | 2021-05-25T13:12:57.000Z | modules/exporter.py | MarzioMonticelli/python-cryptonet | 598c6e61fd7165f3a03ffce3f1de9f69d8645681 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
@author: Marzio Monticelli (1459333)
"""
from os import path
from . import model as md
from tensorflow import keras
class Exporter:
    """Load keras models from disk and export (save) them, optionally gated
    on a minimum test-set accuracy."""

    def __init__(self, verbosity=False):
        # when True, progress messages are printed to stdout
        self.verbosity = verbosity

    def load(self, base_dir='storage/models/', model_name='model'):
        """Load ``<base_dir><model_name>.hdf5``; return the keras model, or
        False when no such file exists."""
        model_path = base_dir + model_name + ".hdf5"
        if not path.exists(model_path):
            return False
        if self.verbosity:
            print("Loading saved model " + model_path + "...")
        model = keras.models.load_model(model_path)
        if self.verbosity:
            print("Loaded with success")
        return model

    def export(self, model, accuracy_filter=98,
               test=None, test_labels=None,
               base_dir='storage/models/', model_name='model'):
        """Save *model* to disk.  When accuracy_filter > 0, save only if the
        accuracy on (test, test_labels) reaches that threshold (percent)."""
        model_path = base_dir + model_name + ".hdf5"
        if accuracy_filter > 0:
            m = md.Model()
            score_acc_val = m.getAccuracy(model, test, test_labels)
            if self.verbosity:
                print("Model accuracy: " + str(score_acc_val))
            if score_acc_val >= accuracy_filter:
                model.save(model_path)
                if self.verbosity:
                    print("Model " + model_path + " exported with success.")
            elif self.verbosity:
                print("Model not exported. Accuracy (" + str(score_acc_val) + "%) lower than " +
                      str(accuracy_filter) + "%.")
        else:
            # accuracy_filter <= 0 means "save unconditionally"
            model.save(model_path)
            if self.verbosity:
                print("Model " + model_path + " exported with success.")

    def exportBestOf(self, train, train_labels, test, test_labels, params,
                     base_dir='storage/models/', model_name='model',
                     accuracy_filter=98,
                     num_test=10):
        """Train *num_test* models and keep on disk only the most accurate
        one (each run exports only if it beats the currently saved model)."""
        if num_test > 1:
            print("")
            print("================================================================")
            print("Saving the best model in " + str(num_test) + " runs...")
            print("================================================================")
            m = md.Model()
            model = self.load(base_dir, model_name)
            # BUG FIX: `model == False` relied on the model object's __eq__;
            # an identity check is the correct test for the False sentinel
            # returned by load()
            if model is False:
                (h, model) = m.fit(train, train_labels, test, test_labels, params)
                self.export(model, -1, None, None, base_dir, model_name)
            score_acc_val = m.getAccuracy(model, test, test_labels)
            print("Model accuracy: " + str(score_acc_val))
            for i in range(num_test):
                step = i + 1
                if self.verbosity:
                    print("")
                    print("Step " + str(step) + "/" + str(num_test) +
                          " (" + str((step * 100) // num_test) + "%" + ")")
                    print("")
                (h, model) = m.fit(train, train_labels, test, test_labels, params)
                saved_model = self.load(base_dir, model_name)
                saved_score_acc_val = m.getAccuracy(saved_model, test, test_labels)
                # export only if the new model beats the saved model's score
                self.export(model, saved_score_acc_val, test, test_labels, base_dir, model_name)
            print("")
            print("================================================================")
            print("Process completed !")
            print("================================================================")
        else:
            print("Error. Use Exporter.export instead.")
| 35.369369 | 96 | 0.439379 | 3,750 | 0.955171 | 0 | 0 | 0 | 0 | 0 | 0 | 764 | 0.1946 |
925cbff7b4fe629ab439be60df8604f24ff66bc7 | 130 | py | Python | kbc_pul/experiments_utils/file_utils.py | ML-KULeuven/KBC-as-PU-Learning | a00f606bd40ca06af0a5627e65a4582859976918 | [
"Apache-2.0"
] | 4 | 2021-12-14T16:13:47.000Z | 2022-01-21T13:14:14.000Z | kbc_pul/experiments_utils/file_utils.py | ML-KULeuven/KBC-as-PU-Learning | a00f606bd40ca06af0a5627e65a4582859976918 | [
"Apache-2.0"
] | null | null | null | kbc_pul/experiments_utils/file_utils.py | ML-KULeuven/KBC-as-PU-Learning | a00f606bd40ca06af0a5627e65a4582859976918 | [
"Apache-2.0"
] | null | null | null | import os
def print_file_exists(filename: str) -> None:
    """Print whether *filename* exists on disk."""
    exists = os.path.exists(filename)
    print(f"? file exists: (unknown)\n-> {exists}")
| 18.571429 | 70 | 0.676923 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 59 | 0.453846 |
925d10b9338d7e7d834dd03a2a709b1fb7059381 | 5,913 | py | Python | combinatorial_gwas/phenotypes/__init__.py | hoangthienan95/combinatorial_GWAS | c6d51f6214f96773b2a271c706ed7026152e0fbb | [
"Apache-2.0"
] | null | null | null | combinatorial_gwas/phenotypes/__init__.py | hoangthienan95/combinatorial_GWAS | c6d51f6214f96773b2a271c706ed7026152e0fbb | [
"Apache-2.0"
] | null | null | null | combinatorial_gwas/phenotypes/__init__.py | hoangthienan95/combinatorial_GWAS | c6d51f6214f96773b2a271c706ed7026152e0fbb | [
"Apache-2.0"
] | null | null | null | # AUTOGENERATED! DO NOT EDIT! File to edit: notebooks/package/phenotypes.ipynb (unless otherwise specified).
__all__ = ['QueryDataframe', 'parameters', 'catalog_all', 'catalog_all', 'read_csv_compressed', 'get_GWAS_result_link',
'heritability_Neale', 'display_cols', 'quality_heritability_phenos', 'quality_heritability_phenos',
'icd10_pheno_matrix', 'icd10_primary_cols', 'icd10_pheno_matrix', 'upsample_pheno', 'get_phenotype',
'get_GWAS_snps_for_trait']
# Cell
from ..data_catalog import get_catalog, get_parameters
import combinatorial_gwas
from pathlib import Path
import pandas as pd
from dataclasses import dataclass
from functools import partial
import numpy as np
from typing import List, Union
from fastcore.utils import partialler
import logging
# Cell
@pd.api.extensions.register_dataframe_accessor("pheno")
@dataclass
class QueryDataframe():
df: pd.DataFrame
def query(self, **column_dict:dict):
query_str = " and ".join([f"({col} {cond})" for col, cond in column_dict.items()])
return self.df.query(query_str)
# Cell
parameters = get_parameters()
parameters
# Cell
catalog_all = get_catalog()
catalog_all = catalog_all.reload()
catalog_all.list()
# Cell
read_csv_compressed= partialler(pd.read_csv, sep="\t", compression= "gzip")
get_GWAS_result_link = partialler(parameters['template_gwas_result_file_link'].format)
# Cell
heritability_Neale = catalog_all.load("heritability_trait_level_summary")
heritability_Neale.head()
# Cell
display_cols = ['description', 'h2_liability', 'h2_sig', 'confidence', 'n_cases', 'n_controls', 'prevalence']
# Cell
quality_heritability_phenos = heritability_Neale.pheno.query(h2_sig = "in ['z7', 'z4']", source= " == 'icd10'", confidence= "in ['medium', 'high']").sort_values("h2_liability", ascending = False)
quality_heritability_phenos = quality_heritability_phenos.set_index("phenotype")
quality_heritability_phenos.head()[display_cols]
# Cell
logging.warning("Loading ICD phenotype matrix, this might take a while")
icd10_pheno_matrix = catalog_all.load("ICD10_pheno_matrix")
#get the first 3 character of ICD code
icd10_primary_cols = icd10_pheno_matrix.columns[icd10_pheno_matrix.columns.str.contains("primary")]
icd10_pheno_matrix = icd10_pheno_matrix.astype(str).apply(lambda x: x.str.slice(0,3))
logging.warning("Finished loading ICD10 matrix")
# Cell
def upsample_pheno(pheno_df, balance_pheno, max_samples, random_state):
weights = pheno_df[balance_pheno].replace(pheno_df[balance_pheno].value_counts(normalize=True).to_dict())
pheno_df_upsampled = pheno_df.sample(max_samples, replace=True, weights = 1/weights, random_state=random_state)
return pheno_df_upsampled
def get_phenotype(icd10_codes: Union[str, List[str]] ="I84", samples:np.array=None, max_samples:int = None, balance_pheno: str = None, random_state=42):
"""
if samples argument is provided from genetic file, then find common set of samples and output ordered phenotype
if `max_samples` is provided, then over-sample the data so that we have `max_samples/2` for both cases and controls
"""
icd10_codes = [icd10_codes] if not isinstance(icd10_codes, list) else icd10_codes
pheno_df_list = [icd10_pheno_matrix[icd10_primary_cols].isin([icd10_code]).any(axis=1).astype(int) for icd10_code in icd10_codes]
pheno_df = pd.concat(pheno_df_list, axis=1)
pheno_df.columns = icd10_codes
if samples is not None:
geno_pheno_sample_index_mask = np.isin(samples.astype(int), pheno_df.index)
pheno_geno_samples_common_set = samples[geno_pheno_sample_index_mask].astype(int)
pheno_df_ordered = pheno_df.loc[list(pheno_geno_samples_common_set), :]
pheno_df_ordered = pheno_df_ordered.loc[~pheno_df_ordered.index.duplicated(keep="first"),:]
sample_index = np.argwhere(geno_pheno_sample_index_mask).reshape(-1)
if max_samples is not None:
if balance_pheno is None:
raise ValueError("Need to specify `balance_pheno` param: phenotype to balance during subsampling")
pheno_df_ordered = upsample_pheno(pheno_df_ordered, balance_pheno, max_samples, random_state)
sorter = np.argsort(samples.astype(int))
sample_index = sorter[np.searchsorted(samples.astype(int), pheno_df_ordered.index, sorter=sorter)]
assert np.allclose(samples[sample_index].astype(int), pheno_df_ordered.index), "sample mismatch between genotype and phenotype, something wrong with the `get_phenotype` function!"
pheno_df_ordered.index = pheno_df_ordered.index.astype(str)
return pheno_df_ordered
return pheno_df
# Cell
def get_GWAS_snps_for_trait(phenotype_code="I84", chromosome: Union[int, List[int]] = 21, sort_val_cols_list: List[str] = None, ascending_bool_list: List[bool] = None, id_only=True):
    """
    Return GWAS variants for `phenotype_code`, restricted to `chromosome`
    and sorted by `sort_val_cols_list` (default: p-value, ascending).

    Returns only the "chr:pos_ref_alt" variant ids when `id_only` is True,
    otherwise the full result DataFrame.
    """
    # bug fix: the defaults were mutable lists shared across calls;
    # use None sentinels instead (behaviour is unchanged for callers).
    if sort_val_cols_list is None:
        sort_val_cols_list = ["pval"]
    if ascending_bool_list is None:
        ascending_bool_list = [True]
    chromosome = [chromosome] if not isinstance(chromosome, list) else chromosome
    chromosome_str = [f"{single_chromosome}:" for single_chromosome in chromosome]
    # read_csv_compressed / get_GWAS_result_link are defined elsewhere in this module.
    gwas_result_df = read_csv_compressed(get_GWAS_result_link(phenotype_code=phenotype_code))
    gwas_result_df = gwas_result_df.loc[gwas_result_df["variant"].str.startswith(tuple(chromosome_str)), :]
    # Remember the genomic-position order before re-sorting.
    gwas_result_df = gwas_result_df.reset_index(drop=True).reset_index().rename(columns={"index": "position_rank"})
    gwas_result_df = gwas_result_df.sort_values(sort_val_cols_list, ascending=ascending_bool_list)
    # Variant ids look like "chr:pos:ref:alt".
    variant_id_df = gwas_result_df["variant"].str.split(":", expand=True)
    variant_id_df["chr1_4"] = variant_id_df[[1, 2, 3]].apply("_".join, axis=1)
    variant_id_df[1] = variant_id_df[1].astype(int)
    gwas_result_df[["chr", "position", "major_allele"]] = variant_id_df[[0, 1, 2]]
    gwas_result_df["full_id"] = variant_id_df[[0, "chr1_4"]].apply(":".join, axis=1)
    if id_only:
        return gwas_result_df["full_id"].values
    else:
        return gwas_result_df
| 46.559055 | 195 | 0.753425 | 217 | 0.036699 | 0 | 0 | 284 | 0.04803 | 0 | 0 | 1,483 | 0.250803 |
925dbb41d905b1329c3789b6eec66daa27db7a18 | 15,553 | py | Python | eprime2events/cimaq_convert_eprime_to_bids_event.py | MarieStLaurent/cimaq_memory | d7e65cb5ca767b35b7cf35d0aa3209f58338854d | [
"MIT"
] | 1 | 2020-11-27T16:02:57.000Z | 2020-11-27T16:02:57.000Z | eprime2events/cimaq_convert_eprime_to_bids_event.py | FrancoisNadeau/cimaq_memory | 128e34f67b6f550185b78d76dc78e034cb382c35 | [
"MIT"
] | 10 | 2019-10-22T08:49:00.000Z | 2022-03-12T00:02:05.000Z | eprime2events/cimaq_convert_eprime_to_bids_event.py | FrancoisNadeau/cimaq_memory | 128e34f67b6f550185b78d76dc78e034cb382c35 | [
"MIT"
] | 5 | 2019-08-02T14:56:11.000Z | 2021-09-01T02:57:27.000Z | #!/usr/bin/env python
# encoding: utf-8
import os
import re
import sys
import argparse
import glob
import logging
from numpy import nan as NaN
import pandas as pd
import shutil
import zipfile
def get_arguments():
    """Parse and return the command-line options for the converter.

    Prints the help text and exits when the script is invoked with no
    arguments at all.
    """
    arg_parser = argparse.ArgumentParser(
        formatter_class=argparse.RawDescriptionHelpFormatter,
        description="",
        epilog="""
    Convert behavioural data from cimaq to bids format
    Input: Folder with zip files
    """)
    arg_parser.add_argument(
        "-d", "--idir",
        required=True, nargs="+",
        help="Folder to be sorted")
    arg_parser.add_argument(
        "-o", "--odir",
        required=True, nargs="+",
        help="Output folder - if doesn\'t exist it will be created.")
    arg_parser.add_argument(
        '--log_level', default='INFO',
        choices=['DEBUG', 'INFO', 'WARNING', 'ERROR'],
        help='Log level of the logging class.')
    parsed = arg_parser.parse_args()
    # With no arguments at all, show usage instead of failing silently.
    if len(sys.argv) == 1:
        arg_parser.print_help()
        sys.exit()
    return parsed
def get_all_ids(iFolder):
    """ List all ZipFile and get all IDs

    Parameters:
    ----------
    iFolder: string (input folder)

    Return:
    ----------
    ids: list of tuple (behavioral ID, IRM ID)
    """
    if not os.path.exists(iFolder):
        # bug fix: sys.exit raises SystemExit, so the `return` that
        # followed it was unreachable dead code and has been removed.
        sys.exit('This folder doesn\'t exist: {}'.format(iFolder))
    ids = []
    # Archive names look like '<behavID>_<mriID>_...zip'.
    for currZipFile in glob.glob(os.path.join(iFolder, '*.zip')):
        basename = os.path.basename(currZipFile)
        ids.append((basename.split('_')[0], basename.split('_')[1]))
    if not ids:
        sys.exit('This folder doesn\'t contain any zip files')
    return ids
def set_subject_data(bID, iFolder, oFolder):
    """
    Locate and unzip one subject's archive, then return its task files.

    Parameters:
    ----------
    bID: string (PSCID used to identify participants during data collection)
    iFolder: string (input folder containing '<PSCID>*IRM.zip' archives)
    oFolder: string (output folder where the archive is extracted)

    Return:
    ----------
    sub_files: list (the three task files: encoding responses, encoding
               onsets, retrieval output; shorter when files are missing)
    """
    logging.debug('Subject PSCID": {}'.format(bID))
    # Expected task-file name patterns inside the extracted folder.
    prefix = ['Output-Responses-Encoding_CIMAQ_*',
              'Onset-Event-Encoding_CIMAQ_*',
              'Output_Retrieval_CIMAQ_*']
    sub_files = []
    s_dir = glob.glob(os.path.join(iFolder, bID+'*IRM.zip'))
    if len(s_dir) != 1:
        logging.error('Multiple directories match '
                      'this subject PSCID: {}'.format(bID))
    else:
        s_path = os.path.join(oFolder, bID+'*')
        s_out = glob.glob(s_path)
        # Only extract the archive when it has not been unzipped already.
        if not s_out:
            z_ref = zipfile.ZipFile(s_dir[0], 'r')
            z_ref.extractall(oFolder)
            z_ref.close()
            s_out = glob.glob(s_path)
        if len(s_out) == 1:
            s_out = s_out[0]
            for nPrefix in prefix:
                file = glob.glob(os.path.join(s_out, nPrefix))
                if len(file) == 1:
                    sub_files.append(file[0])
                else:
                    # bug fix: message had no '{}' placeholder, so
                    # .format(bID) silently dropped the subject id.
                    logging.error('Multiple files found for {}'.format(bID))
        else:
            # bug fix: same missing-placeholder problem as above.
            logging.error('Multiple folders found for {}'.format(bID))
    return sub_files
def cleanMain(mainFile):
    """
    Clean up the in-scan encoding output file.

    Parameters:
    ----------
    mainFile: pandas object (raw 'Output-Responses-Encoding' data)

    Return:
    ----------
    mainFile: pandas object (relabelled, RTs in seconds, with placeholder
              columns added for onset times and post-scan data)
    """
    # remove first three junk rows (blank trials): CTL0, Enc00 and ENc000
    mainFile.drop([0, 1, 2], axis=0, inplace=True)
    # re-label columns
    mainFile.rename(columns={'TrialNumber': 'trial_number',
                             'Category': 'trial_type',
                             'OldNumber': 'stim_id',
                             'CorrectSource': 'position_correct',
                             'Stim_RESP': 'response',
                             'Stim_RT': 'response_time'}, inplace=True)
    # remove redundant columns
    mainFile.drop(['TrialCode', 'Stim_ACC'], axis=1, inplace=True)
    # re-order columns
    cols = ['trial_number', 'trial_type', 'response', 'response_time',
            'stim_id', 'position_correct']
    mainFile = mainFile[cols]
    # change in-scan reaction time from ms to s
    mainFile['response_time'] = mainFile['response_time'].astype('float64',
                                                                 copy=False)
    mainFile['response_time'] = mainFile['response_time'].div(1000)
    # insert new placeholder columns (filled later by addOnsets/addPostScan);
    # NaN / 'None' / -1 mark values that are not yet known
    colNames = ['onset', 'duration', 'offset', 'stim_file', 'stim_category',
                'stim_name', 'recognition_accuracy',
                'recognition_responsetime', 'position_response',
                'position_accuracy', 'position_responsetime']
    dtype = [NaN, NaN, NaN, 'None', 'None', 'None', -1, NaN, -1, -1, NaN]
    # colIndex positions account for the columns inserted earlier in the loop
    colIndex = [0, 1, 2, 8, 9, 10, 11, 12, 14, 15, 16]
    for i in range(0, 11):
        mainFile.insert(loc=colIndex[i],
                        column=colNames[i],
                        value=dtype[i],
                        allow_duplicates=True)
    return mainFile  # modified in-place
def cleanOnsets(onsets):
    """
    Label the onset-file columns and drop the six junk rows
    (3 junk trials; 2 rows per trial).

    Parameters:
    ----------
    onsets: pandas object

    Return:
    ----------
    onsets: pandas object (modified in place)
    """
    header = ["TrialNum", "Condition", "TrialNum_perCondi",
              "ImageID", "Trial_part", "onsetSec", "durationSec"]
    onsets.columns = header
    # The first three trials are blanks, each spanning two rows.
    onsets.drop(list(range(6)), axis=0, inplace=True)
    return onsets
def cleanRetriev(ret):
    """
    Clean up the post-scan retrieval output file.

    Parameters:
    ----------
    ret: pandas object (raw 'Output_Retrieval' data)

    Return:
    ----------
    ret: pandas object (relabelled, RTs in seconds, trial/stimulus info
         extracted and recognition performance categorised)
    """
    # Change column headers
    ret.rename(columns={'category': 'old_new',
                        'Stim': 'stim_file',
                        'OldNumber': 'stim_id',
                        'Recognition_ACC': 'recognition_accuracy',
                        'Recognition_RESP': 'recognition_response',
                        'Recognition_RT': 'recognition_responsetime',
                        'Spatial_RESP': 'position_response',
                        'Spatial_RT': 'position_responsetime',
                        'Spatial_ACC(à corriger voir output-encodage)': 'position_accuracy'},
               inplace=True)
    # re-order columns
    cols = ['old_new', 'stim_file', 'stim_id', 'recognition_response',
            'recognition_accuracy', 'recognition_responsetime',
            'position_response', 'position_accuracy', 'position_responsetime']
    ret = ret[cols]
    # Transform reaction time columns from ms to s
    ret[['recognition_responsetime']] = ret[['recognition_responsetime']].astype('float64', copy=False)  # string is object in pandas, str in Python
    ret[['position_responsetime']] = ret[['position_responsetime']].astype('float64', copy=False)
    ret['recognition_responsetime'] = ret['recognition_responsetime'].div(1000)
    ret['position_responsetime'] = ret['position_responsetime'].div(1000)
    # Clean up eprime programming mistake: replace position_response and position_responsetime values
    # with NaN if subject perceived image as 'new' (the image was not probed for position).
    # There should be no response or RT value there, values were carried over from previous trial (not reset in eprime)
    # CONFIRMED w Isabel: subject must give a position answer when probed (image considered OLD) before eprime moves to the next trial.
    i = ret[ret['recognition_response'] == 2].index
    ret.loc[i, 'position_responsetime'] = NaN
    ret.loc[i, 'position_response'] = -1
    # clean up eprime mistake (change Old67 condition ('old_new') from New to OLD)
    q = ret[ret['stim_id'] == 'Old67'].index
    ret.loc[q, 'old_new'] = 'OLD'
    # insert placeholder columns (trial info filled below; position columns
    # filled later by addPostScan)
    colNames = ['trial_number', 'stim_category', 'stim_name',
                'recognition_performance', 'position_correct']
    dtype = [-1, 'None', 'None', 'None', -1]
    colIndex = [0, 4, 5, 9, 10]
    for j in range(0, 5):
        ret.insert(loc=colIndex[j], column=colNames[j], value=dtype[j],
                   allow_duplicates=True)
    # Extract info and fill trial_number, stim_category and stim_name columns
    k = ret.index
    ret.loc[k, 'trial_number'] = k+1
    # format: category_imageName.bmp w some space, _ and - in image names
    stimInfo = ret.loc[k, 'stim_file']
    for s in k:
        ret.loc[s, 'stim_category'] = re.findall('(.+?)_', stimInfo[s])[0]
        ret.loc[s, 'stim_name'] = re.findall('_(.+?)[.]', stimInfo[s])[0]
    # Fill recognition_performance column based on actual and perceived novelty:
    # Hit = old judged old; Miss = old judged new;
    # CR = new judged new; FA = new judged old
    m = ret[ret['old_new'] == 'OLD'].index.intersection(ret[ret['recognition_accuracy'] == 1].index)
    ret.loc[m, 'recognition_performance'] = 'Hit'
    n = ret[ret['old_new'] == 'OLD'].index.intersection(ret[ret['recognition_accuracy'] == 0].index)
    ret.loc[n, 'recognition_performance'] = 'Miss'
    o = ret[ret['old_new'] == 'New'].index.intersection(ret[ret['recognition_accuracy'] == 1].index)
    ret.loc[o, 'recognition_performance'] = 'CR'
    p = ret[ret['old_new'] == 'New'].index.intersection(ret[ret['recognition_accuracy'] == 0].index)
    ret.loc[p, 'recognition_performance'] = 'FA'
    # return cleaned up input Dataframe
    return ret
def addOnsets(main, enc):
    """
    Copy trial onset/offset times from the onset file into the main file.

    Parameters:
    ----------
    main: pandas object (output of cleanMain)
    enc: pandas object (output of cleanOnsets)

    Return:
    ----------
    main: pandas object (with onset, offset and duration filled in)
    """
    # make main file indexable by trial number:
    main.set_index('trial_number', inplace=True)
    # copy trial onset and offset times from enc into main
    # note: fixation's onset time is the trial task's offset time
    for i in enc.index:
        trialNum = enc.loc[i, 'TrialNum']
        if enc.loc[i, 'Trial_part'] == 'Fixation':
            main.loc[trialNum, 'offset'] = enc.loc[i, 'onsetSec']
        else:
            main.loc[trialNum, 'onset'] = enc.loc[i, 'onsetSec']
    # Calculate trial duration time from onset and offset times
    main['duration'] = main['offset']-main['onset']
    # reset main's searchable index to default
    main.reset_index(level=None, drop=False, inplace=True)
    return main
def addPostScan(main, ret):
    """
    Merge post-scan (retrieval) performance into the encoding trial table.

    Parameters:
    ----------
    main: pandas object (in-scan trials, output of addOnsets)
    ret: pandas object (post-scan data, output of cleanRetriev); modified
         in place with position_correct / position_accuracy info

    Return:
    ----------
    mainMerged: pandas object (main with post-scan columns filled in,
                re-ordered by trial number)
    """
    # split main's rows (trials) into sublist based on Condition
    mainEnc = main[main['trial_type'] == 'Enc'].copy()
    mainCTL = main[main['trial_type'] == 'CTL'].copy()
    # make mainEnc indexable by picture id
    mainEnc.set_index('stim_id', inplace=True)
    # import post-scan data from ret into mainEnc
    for i in ret[ret['old_new'] == 'OLD'].index:
        stimID = ret.loc[i, 'stim_id']
        mainEnc.loc[stimID, 'stim_category'] = ret.loc[i, 'stim_category']
        mainEnc.loc[stimID, 'stim_name'] = ret.loc[i, 'stim_name']
        mainEnc.loc[stimID, 'recognition_accuracy'] = ret.loc[i, 'recognition_accuracy']
        mainEnc.loc[stimID, 'recognition_responsetime'] = ret.loc[i, 'recognition_responsetime']
        mainEnc.loc[stimID, 'position_response'] = ret.loc[i, 'position_response']
        mainEnc.loc[stimID, 'position_responsetime'] = ret.loc[i, 'position_responsetime']
    # calculate post-scan source (position) accuracy;
    # -1 = control task; 0 = missed trial; 1 = wrong source (image recognized but wrong quadrant remembered);
    # 2 = image recognized with correct source
    mainEnc['position_accuracy'] = 0
    for j in mainEnc[mainEnc['recognition_accuracy'] == 1].index:
        if mainEnc.loc[j, 'position_correct'] == mainEnc.loc[j, 'position_response']:
            mainEnc.loc[j, 'position_accuracy'] = 2
        else:
            mainEnc.loc[j, 'position_accuracy'] = 1
    # import source accuracy info from mainEnc into ret (in-place)
    for i in ret[ret['old_new'] == 'OLD'].index:
        picID = ret.loc[i, 'stim_id']
        ret.loc[i, 'position_correct'] = mainEnc.loc[picID, 'position_correct']
        ret.loc[i, 'position_accuracy'] = mainEnc.loc[picID,
                                                      'position_accuracy']
    # reset mainEnc searchable index to default
    # and re-order columns to match order in mainCTL
    mainEnc.reset_index(level=None, drop=False, inplace=True)
    cols = ['trial_number', 'onset', 'duration', 'offset', 'trial_type',
            'response', 'response_time', 'stim_id', 'stim_file',
            'stim_category', 'stim_name', 'recognition_accuracy',
            'recognition_responsetime', 'position_correct',
            'position_response', 'position_accuracy', 'position_responsetime']
    mainEnc = mainEnc[cols]
    # Re-merge mainEnc and mainCTL and re-order by trial number.
    # bug fix: DataFrame.append was removed in pandas 2.0; pd.concat with
    # ignore_index=True is the drop-in equivalent.
    mainMerged = pd.concat([mainEnc, mainCTL], ignore_index=True)
    mainMerged.sort_values('trial_number', axis=0, ascending=True,
                           inplace=True)
    return mainMerged
def extract_taskFile(bID, sID, file_list, output):
    """
    Convert one subject's three task files into two BIDS-style tsv files.

    Parameters:
    ----------
    bID: string (subject PSCID, id used during data collection)
    sID: string (subject DCCID, id used in Loris)
    file_list: list (three input files, in the order produced by
               set_subject_data: encoding responses, encoding onsets,
               retrieval output)
    output: string (output Folder)

    Return:
    ----------
    None (writes two .tsv files into `output`)
    """
    # import data from three text files into pandas DataFrames
    encMain = pd.read_csv(file_list[0], sep='\t')
    # Subjects whose onset file was hand-edited into tab-separated form;
    # all other onset files are fixed-width.
    manualEdits = ['3303819', '5477234', '6417837', '7674650']
    if bID in manualEdits:
        encOnsets = pd.read_csv(file_list[1], sep='\t', header=None)
    else:
        encOnsets = pd.read_fwf(file_list[1], infer_nrows=210,
                                delim_whitespace=True,
                                header=None)
    retriev = pd.read_csv(file_list[2], sep='\t', encoding='ISO-8859-1')
    # clean up each file
    encMain = cleanMain(encMain)
    encOnsets = cleanOnsets(encOnsets)
    retriev = cleanRetriev(retriev)
    # import onset times from encOnset into encMain
    encMain = addOnsets(encMain, encOnsets)
    # import post-scan performance data from retriev into encMain
    encMain = addPostScan(encMain, retriev)
    # export encMain and retriev into tsv files (output directorty)
    encMain.to_csv(output+'/sub-'+sID+'_ses-4_task-memory_events.tsv',
                   sep='\t', header=True, index=False)
    retriev.to_csv(output+'/PostScanBehav_pscid'+bID+'_dccid'+sID+'.tsv',
                   sep='\t', header=True, index=False)
def main():
    """Entry point: unzip each subject archive and convert its task files."""
    args = get_arguments()
    logging.basicConfig(level=args.log_level)
    oFolder = args.odir[0]
    iFolder = args.idir[0]
    # Create oFolder if not exists
    if not os.path.exists(oFolder):
        os.mkdir(oFolder)
    all_ids = get_all_ids(iFolder)
    # Create tmp folder to temporaly store unziped files
    tmpFolder = os.path.join(oFolder, 'tmp')
    if not os.path.exists(tmpFolder):
        os.mkdir(tmpFolder)
    # Create taskFiles folder where all output files will be saved
    fileFolder = os.path.join(oFolder, 'taskfiles')
    if not os.path.exists(fileFolder):
        os.mkdir(fileFolder)
    # loop over zip files
    for (idBEH, idMRI) in all_ids:
        s_files = set_subject_data(idBEH, iFolder, tmpFolder)
        if(len(s_files) == 3):
            extract_taskFile(idBEH, idMRI, s_files, fileFolder)
            # discard the extracted files once converted
            shutil.rmtree(tmpFolder, ignore_errors=True)
        else:
            logging.info('missing files for subject ({},{})'.format(idBEH,
                                                                    idMRI))
#
# Script entry point: exit status is main()'s return value (None -> 0).
if __name__ == '__main__':
    sys.exit(main())
| 37.387019 | 148 | 0.604642 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7,392 | 0.475248 |
925ee23c45a2c6df1775b5f4dad4041e0eaa6777 | 5,631 | py | Python | noise_filtering.py | tahoangthang/Wikidata2Text | 722b528451e92b3072bf57260fdb8f0962a89929 | [
"Apache-2.0"
] | 2 | 2021-08-16T21:52:49.000Z | 2021-08-18T12:08:41.000Z | noise_filtering.py | thangth1102/Wikidata2Text | 722b528451e92b3072bf57260fdb8f0962a89929 | [
"Apache-2.0"
] | null | null | null | noise_filtering.py | thangth1102/Wikidata2Text | 722b528451e92b3072bf57260fdb8f0962a89929 | [
"Apache-2.0"
] | 2 | 2021-11-15T14:41:13.000Z | 2021-12-23T10:04:31.000Z | #........................................................................................................
# Title: Wikidata claims (statements) to natural language (a part of Triple2Text/Ontology2Text task)
# Author: Ta, Hoang Thang
# Email: tahoangthang@gmail.com
# Lab: https://www.cic.ipn.mx
# Date: 12/2019
#........................................................................................................
from collections import Counter
import spacy
from spacy import displacy
nlp = spacy.load('en_core_web_lg')
# average of page length
def page_length_average(page_list):
    """Return a Counter of page lengths in words over `page_list`.

    Note: despite the name, this returns the frequency of each word
    count, not an average.
    """
    # Build the Counter directly instead of accumulating an intermediate
    # [count, page] list and then discarding the pages.
    return Counter(len(page.split()) for page in page_list)
#get a distance list of tokens by sentences from different 2 items in tuples (s, o, i) or (i, s, o)
def list_distance_frequency(item1, item2, page_list):
    """Return [distance, page] pairs for each page whose tokenized
    sentence (column 14) yields a non-zero token distance between the
    last occurrences of item1 and item2 (missing items count as index -1).
    """
    pairs = []
    for page in page_list:
        tokens = page[14].split()
        last1, last2 = -1, -1
        for pos, token in enumerate(tokens):
            if token == item1:
                last1 = pos
            if token == item2:
                last2 = pos
        # Original semantics: distance stays -1 (skipped) for empty
        # sentences, otherwise |last1 - last2| using -1 for missing items.
        if tokens and abs(last1 - last2) > 0:
            pairs.append([abs(last1 - last2), page])
    return pairs
#filter page list by an increase percent with threshold = 0.1 (or 10%)
def filter_by_threshold(list_rev, total):
    """Keep frequency values while each one still increases the
    cumulative share of pages by more than 10%; the first value is
    always kept. Prints a progress table as a side effect.
    """
    prev_share = list_rev[0] / total
    cumulative = 0
    records = []
    print("No.", "Total\t" , "Percent\t\t", "Increase percent\t\t", "Previous increase percent")
    for freq in list_rev:
        cumulative += freq
        share = cumulative / total
        increase = (share - prev_share) / prev_share
        print(freq, total, "\t\t", share, "\t\t", increase, "\t\t", prev_share)
        records.append([freq, share, increase, prev_share])
        prev_share = share
    kept = []
    for idx, record in enumerate(records):
        # First entry always kept; others only with >10% relative increase.
        if idx == 0 or record[2] > 0.1:
            kept.append(record[0])
    return kept
#calculate the mean of distances
def calculate_mean(list_r, counter):
    """Return the mean of the distances whose frequency appears in list_r.

    Parameters:
    ----------
    list_r: list (kept frequencies, output of filter_by_threshold)
    counter: collections.Counter (distance -> frequency)
    """
    # bug fix: `mean` was never imported anywhere in this module, so the
    # original raised NameError at runtime; use statistics.mean.
    from statistics import mean
    list_m = [key for key, value in counter.items() if value in list_r]
    print("list_m: ", list_m)
    return mean(list_m)
#get the largest distance in group
def calculate_largest_distance(list_r, counter):
    """Return the largest distance whose frequency appears in list_r."""
    candidates = [dist for dist, freq in counter.items() if freq in list_r]
    return max(candidates)
#remove noises based on distances and an increase percent with threshold = 0.1 (or 10%) (future)
def remove_noise(page_list):
    """
    Remove pages whose [s]->[o0] token distance falls outside the main
    distance groups (threshold = 0.1, i.e. 10% relative increase).

    Returns (largest kept distance, list of kept pages).
    """
    list1 = list_distance_frequency("[s]", "[o0]", page_list)
    total = len(list1)
    print("total: ", total)
    # Frequency of each distance value
    ct1 = Counter([x[0] for x in list1])
    print(ct1)
    # Invert to frequency -> distance; NOTE(review): distances sharing the
    # same frequency collapse to one entry here - presumably acceptable,
    # but worth confirming.
    list_rev = {v:k for k, v in ct1.items()}
    list_rev = sorted(list_rev.keys(), reverse=True)
    print("list_rev:", list_rev)
    list_r = filter_by_threshold(list_rev, total)
    largest_dist = calculate_largest_distance(list_r, ct1)
    # Keep every page whose distance is within the accepted range
    list_l = []
    for x in list1:
        if (x[0] <= largest_dist):
            #list_l.append(x[1][14])
            list_l.append(x[1])
    return largest_dist, list_l
# remove redundant words based on dependency parsing
def remove_redundant_words(page_list):
    """
    Drop adjective/adverb/number modifiers from each page's sentence,
    using spaCy dependency parses of the raw sentence (column 2) applied
    to the tagged token string (column 13).

    Returns the list of cleaned sentences (one string per page).
    """
    # Placeholder tokens that must never be duplicated consecutively
    stop_words = ["[s]", "[o0]", "[o1]", "[o2]", "[o3]", "[i:"]  # error here (translated original author note)
    results = []
    for p in page_list:
        list_words = []
        delete_words = []
        doc = nlp(p[2])  # raw sentence
        # NOTE(review): rendering the dependency parse for every page looks
        # like leftover debug output - consider removing.
        options = {"compact":True}
        displacy.render(doc, style="dep", options = options)
        # check for redundant words
        for token in doc:
            list_words.append(token.text)
            if (token.dep_ == "amod" and token.i != 0):  # adjective modifier
                if (token.pos_ == "PROPN"):  # pass if pronoun
                    continue
                if (token.text[0].isupper() == True):  # pass if pronoun, case of Japanese
                    continue
                delete_words.append([token.text, token.i])
            if (token.dep_ == "advmod" and token.i != 0):  # adverb modifier
                delete_words.append([token.text, token.i])
            if (token.dep_ == "nummod" and token.i != 0):  # number modifier
                delete_words.append([token.text, token.i])
        result_words = p[13].split()  # p13 has ~ tokens with p2
        # Blank out the tokens flagged for deletion (by position)
        for x in delete_words:
            for i2, val2 in enumerate(list_words):
                try:  # pass error with ; in a sentence
                    if (x[1] == i2):
                        result_words[i2] = ""
                except:
                    continue
        # Rebuild the sentence, collapsing consecutive duplicate stop words
        result_words2 = []
        for i, val in enumerate(result_words):
            temp = result_words[i]
            if (len(result_words2)==0):
                result_words2.append(temp)
            elif (temp != result_words2[len(result_words2)-1] and temp in stop_words):
                result_words2.append(temp)
            elif (temp not in stop_words):
                result_words2.append(temp)
        s = ' '.join(str(e.strip()) for e in result_words2 if e!='')
        results.append(s)
    return results
| 35.19375 | 126 | 0.520156 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,535 | 0.272356 |
926055753876c29c748828d168304c3ef6c8a5b4 | 578 | py | Python | Unit 8 Libraries/shapes.py | ItsMrTurtle/PythonChris | 4513dea336e68f48fabf480ad87bc538a323c2cd | [
"MIT"
] | null | null | null | Unit 8 Libraries/shapes.py | ItsMrTurtle/PythonChris | 4513dea336e68f48fabf480ad87bc538a323c2cd | [
"MIT"
] | null | null | null | Unit 8 Libraries/shapes.py | ItsMrTurtle/PythonChris | 4513dea336e68f48fabf480ad87bc538a323c2cd | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
"""
Created on Thu May 28 15:30:04 2020
@author: Christopher Cheng
"""
class Circle(object):
    """A circle whose radius starts at zero and can be updated."""

    def __init__(self):
        # Every new circle begins with a zero radius.
        self.radius = 0

    def change_radius(self, radius):
        """Replace the stored radius with `radius`."""
        self.radius = radius

    def get_radius(self):
        """Return the currently stored radius."""
        return self.radius
class Rectangle(object):
    """A rectangle object with a mutable length and width."""

    def __init__(self, length, width):
        self.length = length
        self.width = width

    def set_length(self, length):
        """Replace the stored length."""
        self.length = length

    def set_width(self, width):
        """Replace the stored width."""
        self.width = width
| 23.12 | 48 | 0.626298 | 477 | 0.82526 | 0 | 0 | 0 | 0 | 0 | 0 | 138 | 0.238754 |
9262f09ddc9af9e13ef91b7856117a5ac6fea12c | 456 | py | Python | locale/pot/api/core/_autosummary/pyvista-StructuredGrid-reconstruct_surface-1.py | tkoyama010/pyvista-doc-translations | 23bb813387b7f8bfe17e86c2244d5dd2243990db | [
"MIT"
] | 4 | 2020-08-07T08:19:19.000Z | 2020-12-04T09:51:11.000Z | locale/pot/api/core/_autosummary/pyvista-UnstructuredGrid-reconstruct_surface-1.py | tkoyama010/pyvista-doc-translations | 23bb813387b7f8bfe17e86c2244d5dd2243990db | [
"MIT"
] | 19 | 2020-08-06T00:24:30.000Z | 2022-03-30T19:22:24.000Z | locale/pot/api/core/_autosummary/pyvista-RectilinearGrid-reconstruct_surface-1.py | tkoyama010/pyvista-doc-translations | 23bb813387b7f8bfe17e86c2244d5dd2243990db | [
"MIT"
] | 1 | 2021-03-09T07:50:40.000Z | 2021-03-09T07:50:40.000Z | # Create a point cloud out of a sphere and reconstruct a surface
# from it.
#
import pyvista as pv
points = pv.wrap(pv.Sphere().points)
surf = points.reconstruct_surface()
#
pl = pv.Plotter(shape=(1,2))
_ = pl.add_mesh(points)
_ = pl.add_title('Point Cloud of 3D Surface')
pl.subplot(0,1)
_ = pl.add_mesh(surf, color=True, show_edges=True)
_ = pl.add_title('Reconstructed Surface')
pl.show()
#
# See :ref:`surface_reconstruction_example` for more examples
| 26.823529 | 64 | 0.734649 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 188 | 0.412281 |
92630e4c8ee42e9ce01f451373ba14d583adb08d | 3,296 | py | Python | src/tests/test_cli.py | GatorQue/cogit | fe5dc4c73901bf28dceb23d4808d831dec30a1c5 | [
"MIT"
] | null | null | null | src/tests/test_cli.py | GatorQue/cogit | fe5dc4c73901bf28dceb23d4808d831dec30a1c5 | [
"MIT"
] | null | null | null | src/tests/test_cli.py | GatorQue/cogit | fe5dc4c73901bf28dceb23d4808d831dec30a1c5 | [
"MIT"
] | null | null | null | # *- coding: utf-8 -*-
# pylint: disable=wildcard-import, unused-wildcard-import, missing-docstring
# pylint: disable=redefined-outer-name, no-self-use, bad-continuation
""" Test '__main__' CLI stub.
See http://click.pocoo.org/3/testing/
"""
# Copyright © 2017 Ryan Lindeman <ryanlindeman+cogit@gmail.com>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from __future__ import absolute_import, print_function
#import os
import sys
import sh
import pytest
from click.testing import CliRunner
from markers import *
from cogit import __version__ as version
from cogit import __main__ as main
#from cogit import commands
# sh raises ErrorReturnCode_<N> for exit status N; CLI usage errors exit 2.
UsageError = sh.ErrorReturnCode_2  # pylint: disable=no-member
@pytest.fixture
def cmd():
    """Command fixture: an `sh` wrapper for the installed CLI executable."""
    return sh.Command(main.__app_name__)
@cli
@integration
def test_cli_help(cmd):
    """`--help` output's first (usage) line must name the command."""
    result = cmd('--help')
    lines = result.stdout.decode('ascii').splitlines()
    assert main.__app_name__ in lines[0].split(), "Command name is reported"
@cli
@integration
def test_cli_version(cmd):
    """`--version` reports the package version and the Python version."""
    result = cmd('--version')
    stdout = result.stdout.decode('ascii')
    # Expected shape: "<name> <version> ... <python version> ..."
    reported_version = stdout.split()[1]
    py_version = sys.version.split()[0]
    assert version in stdout, "Version string contains version"
    assert reported_version[:len(version)] == version, "Version is 2nd field"
    assert py_version in stdout, "Python version is reported"
@cli
@integration
def test_cli_invalid_option(cmd):
    """An unknown option must exit with a usage error (status 2)."""
    with pytest.raises(UsageError):
        cmd('--this-is-certainly-not-a-supported-option')
@cli
@integration
def test_cli_invalid_sub_command(cmd):
    """An unknown sub-command must exit with a usage error (status 2)."""
    with pytest.raises(UsageError):
        cmd.sub_command_that_does_not_exist()
@cli
def test_cmd_missing():
    """Invoking the click CLI with no sub-command must still exit 0."""
    runner = CliRunner()
    result = runner.invoke(main.cli)
    assert result.exit_code == 0
@cli
def test_cmd_help():
    """The `help` sub-command must exit 0; dump diagnostics otherwise."""
    runner = CliRunner()
    result = runner.invoke(main.cli, args=('help',))
    # On failure, print the full invocation result to ease debugging.
    if result.exit_code:
        print(vars(result))
        print('~' * 78)
        print(result.output_bytes)
        print('~' * 78)
    #words = result.output.split()
    assert result.exit_code == 0
    #assert 'configuration' in words
    #assert any(i.endswith(os.sep + 'cli.conf') for i in words), \
    #    "Some '.conf' files listed in " + repr(words)
| 29.963636 | 80 | 0.724515 | 0 | 0 | 0 | 0 | 1,575 | 0.477707 | 0 | 0 | 1,808 | 0.548377 |
9263aacfdcc819b5a43c66114bde3dce544b96d4 | 367 | py | Python | tests/opytimizer/math/test_hypercomplex.py | macoldibelli/opytimizer | ca0574d520ecc17b1ac875bc6271d466c88d18ac | [
"MIT"
] | null | null | null | tests/opytimizer/math/test_hypercomplex.py | macoldibelli/opytimizer | ca0574d520ecc17b1ac875bc6271d466c88d18ac | [
"MIT"
] | null | null | null | tests/opytimizer/math/test_hypercomplex.py | macoldibelli/opytimizer | ca0574d520ecc17b1ac875bc6271d466c88d18ac | [
"MIT"
] | null | null | null | import numpy as np
import pytest
from opytimizer.math import hypercomplex
def test_norm():
array = np.array([[1, 1]])
norm_array = hypercomplex.norm(array)
assert norm_array > 0
def test_span():
array = np.array([[0.5, 0.75, 0.5, 0.9]])
lb = [0]
ub = [10]
span_array = hypercomplex.span(array, lb, ub)
assert span_array > 0
| 14.68 | 49 | 0.623978 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
926441ea90cbc6c22b98d9270a3adf3f083c5bbf | 889 | py | Python | card/models.py | tyhunt99/card-collector-db | 932bd829eb46f9492e6a25326140823629161bab | [
"MIT"
] | null | null | null | card/models.py | tyhunt99/card-collector-db | 932bd829eb46f9492e6a25326140823629161bab | [
"MIT"
] | 4 | 2020-06-05T20:53:52.000Z | 2022-02-10T08:32:51.000Z | card/models.py | tyhunt99/card-collector-db | 932bd829eb46f9492e6a25326140823629161bab | [
"MIT"
] | null | null | null | import datetime
from django.core.validators import MaxValueValidator, MinValueValidator
from django.db import models
class Collection(models.Model):
    '''
    A collection of cards.
    '''
    # Human-readable name of the collection
    name = models.CharField(max_length=250)
class Card(models.Model):
    '''
    A single sports card, which may belong to several collections.
    '''
    # Player identity and team
    first_name = models.CharField(max_length=150)
    last_name = models.CharField(max_length=150)
    team = models.CharField(max_length=100)
    year = models.PositiveIntegerField(
        validators=[
            MinValueValidator(1887),  # first produced baseball card (source?)
            # NOTE(review): now().year is evaluated once at class-definition /
            # migration time, so this upper bound will go stale - confirm
            # whether a per-save validation is wanted instead.
            MaxValueValidator(datetime.datetime.now().year),
        ],
        help_text='Use the following format: <YYYY>',
    )
    company = models.CharField(max_length=250)
    # Monetary value of the card (currency not specified in source)
    value = models.DecimalField(
        decimal_places=2,
        max_digits=50,
        default=0,
    )
    # A card can appear in many collections and vice versa
    collection = models.ManyToManyField(Collection)
92649b09690dc30270898659d76b1423039bb414 | 1,870 | py | Python | nns_based_approach/tv_recon_network/cg.py | schote/P1-Temp-Reg | 2e19099f969e31ec4e2c503ae3cf7bc983f3559b | [
"Apache-2.0"
] | null | null | null | nns_based_approach/tv_recon_network/cg.py | schote/P1-Temp-Reg | 2e19099f969e31ec4e2c503ae3cf7bc983f3559b | [
"Apache-2.0"
] | null | null | null | nns_based_approach/tv_recon_network/cg.py | schote/P1-Temp-Reg | 2e19099f969e31ec4e2c503ae3cf7bc983f3559b | [
"Apache-2.0"
] | null | null | null | import torch
from scipy.sparse.linalg import LinearOperator, cg
from typing import Callable, Optional
from torch import Tensor
import numpy as np
import time
class CG(torch.autograd.Function):
    """Differentiable conjugate-gradient solve of the regularised normal
    equations (A^H A + beta * G^H G) x = A^H y + beta * G^H z, where A is
    the acquisition model and G the regularisation operator; backward()
    implements the implicit gradient via another CG solve with the same
    system operator.
    """

    @staticmethod
    def forward(ctx, z: Tensor, AcquisitionModel, beta: Tensor, y, G: Callable, GH: Callable, GHG: Optional[Callable]=None, x0: Optional[Tensor]=None) -> Tensor:
        # A^H y; also reused below as a scratch object for array<->object
        # conversions (it is clobbered by AHA).
        tmp = AcquisitionModel.adjoint(y)
        if GHG is None:
            # Default composite operator G^H G when not supplied explicitly.
            GHG = lambda x: GH(G(x))
        # Right-hand side A^H y + beta * G^H z, flattened to 1-D.
        b = tmp.as_array().ravel() + (beta * GH(z)).numpy().ravel()
        if x0 is not None:
            x0 = x0.numpy().ravel()

        def AHA(x):
            # Apply A^H A by round-tripping through the acquisition model.
            tmp.fill(x)
            return AcquisitionModel.adjoint(AcquisitionModel.direct(tmp)).as_array().ravel()

        # System matrix H = A^H A + beta * G^H G as a scipy LinearOperator.
        H = LinearOperator(
            shape=(np.prod(b.shape), np.prod(b.shape)),
            dtype=np.complex64,
            matvec=lambda x: AHA(x)+(beta * GHG(torch.from_numpy(x).reshape(tmp.shape).unsqueeze(0))).numpy().ravel()
        )
        sol = cg(H, b, tol=1e-3, x0=x0)
        xprime = sol[0].reshape(tmp.shape)
        # Stash what backward() needs: the operator and regulariser callables
        # on ctx, and the differentiable tensors via save_for_backward.
        ctx.H = H
        ctx.G = G
        ctx.GH = GH
        xprime_tensor = torch.from_numpy(xprime)
        ctx.save_for_backward(beta, xprime_tensor, z)
        return xprime_tensor

    @staticmethod
    def backward(ctx, grad_output):
        beta, xprime, z = ctx.saved_tensors
        b = grad_output.unsqueeze(0).numpy().ravel()
        old = time.time()
        # Gradient through the linear solve needs another CG solve with H
        # (seeded with the incoming gradient as initial guess).
        grad = torch.from_numpy(cg(ctx.H, b, tol=1e-3, x0=b)[0]).reshape(grad_output.shape)
        # NOTE(review): leftover debug timing print - consider removing or
        # routing through logging.
        print('backward cg', time.time()-old)
        gz = gbeta = None
        # Gradient w.r.t. z (input 0) and beta (input 2) only when needed.
        if ctx.needs_input_grad[0]:
            gz = beta * ctx.G(grad.unsqueeze(0))
        if ctx.needs_input_grad[2]:
            gbeta = (-ctx.GH(ctx.G(xprime.unsqueeze(0)) - z.unsqueeze(0)) * grad).sum().real
        # Remaining forward() inputs are non-differentiable -> None.
        return gz, None, gbeta, None, None, None, None, None
| 39.787234 | 159 | 0.602674 | 1,710 | 0.914439 | 0 | 0 | 1,665 | 0.890374 | 0 | 0 | 13 | 0.006952 |
9266e4c4574268ab0b44736b2d4aa4d3367192ba | 897 | py | Python | AFLW/fddb_symbol_gen.py | kli-nlpr/FaceDetection-ConvNet-3D | f9251c48eb40c5aec8fba7455115c355466555be | [
"Apache-2.0"
] | 159 | 2016-08-23T22:13:26.000Z | 2021-10-24T01:31:35.000Z | AFLW/fddb_symbol_gen.py | mrgloom/FaceDetection-ConvNet-3D | f9251c48eb40c5aec8fba7455115c355466555be | [
"Apache-2.0"
] | 10 | 2016-08-23T05:59:07.000Z | 2018-05-24T02:31:41.000Z | AFLW/fddb_symbol_gen.py | mrgloom/FaceDetection-ConvNet-3D | f9251c48eb40c5aec8fba7455115c355466555be | [
"Apache-2.0"
] | 77 | 2016-08-21T00:35:00.000Z | 2021-06-01T05:03:34.000Z | import mxnet as mx
def get_vgg16_gen():
    """Build the mxnet symbol graph for the ROI-warping / ellipse-label
    generation stage; inputs are declared here as mxnet Variables and
    bound at execution time.
    """
    relu_feature = mx.symbol.Variable(name="relu_feature")
    box_predict = mx.symbol.Variable(name="box_predict")
    ground_truth = mx.symbol.Variable(name="ground_truth")
    bbox_label = mx.symbol.Variable(name="bbox_label")
    # Ellipse labels from predicted boxes + ground truth (custom operator;
    # spatial_scale=0.5 maps image coordinates to feature-map coordinates)
    ell_label = mx.symbol.GenEllLabel(*[box_predict, bbox_label, ground_truth], spatial_scale=0.5, name="ell_label")
    # roi warping
    roi_warping = mx.symbol.ROIWarping(*[relu_feature, box_predict, ground_truth], warped_shape=(28, 28),
                                       spatial_scale=0.5, name="roi_warping")
    # 28x28 warped ROI max-pooled down to 7x7
    roi_warping_pool = mx.symbol.Pooling(
        data=roi_warping, pool_type="max", kernel=(4, 4), stride=(4, 4), name="roi_warping_pool"
    )
    roi_warping_flatten = mx.symbol.Flatten(data=roi_warping_pool)
    # Group both outputs into one symbol for the executor
    loss_all = mx.symbol.Group([roi_warping_flatten, ell_label])
    return loss_all
| 37.375 | 116 | 0.692308 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 113 | 0.125975 |
926717c790dcf9ea71f2c1067ee097894058f6ba | 7,904 | py | Python | pythutils/mediautils.py | JolleJolles/pyutilspack | cbc6aa7f4f2a6b51c4cb689671fbdd53ab5f58c8 | [
"Apache-2.0"
] | 2 | 2019-03-07T19:00:44.000Z | 2019-07-27T05:20:59.000Z | pythutils/mediautils.py | JolleJolles/pyutilspack | cbc6aa7f4f2a6b51c4cb689671fbdd53ab5f58c8 | [
"Apache-2.0"
] | 1 | 2019-12-24T12:49:09.000Z | 2019-12-24T12:49:09.000Z | pythutils/mediautils.py | JolleJolles/pyutilspack | cbc6aa7f4f2a6b51c4cb689671fbdd53ab5f58c8 | [
"Apache-2.0"
] | null | null | null | #! /usr/bin/env python
# Copyright (c) 2018 - 2019 Jolle Jolles <j.w.jolles@gmail.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import division
from __future__ import print_function
import os
import cv2
import numpy as np
from pythutils.fileutils import get_ext
from pythutils.mathutils import closenr, sort_points
def check_media(source, internal=False):
    """Run basic sanity checks on a media file path or capture-stream id.

    source -- filename (str) of a video/image, or an int camera index.
    internal -- when True, suppress the final "okay" message.
    Returns True when the source looks usable, False otherwise.
    """
    ext = get_ext(str(source))
    # Classify the source by extension; an int is treated as a camera stream.
    ftype = None
    if ext in [".mov",".mp4",".avi"]:
        ftype = "vid"
    if ext in [".jpg", ".png", ".jpeg", ".bmp"]:
        ftype = "img"
    if type(source) == int:
        ftype = "stream"
    if ftype == None:
        print("File neither video or image file..")
        return False
    # For files on disk, verify the directory (if any) and the file exist.
    if ftype == "img" or ftype == "vid":
        filedir = os.path.dirname(source)
        if filedir != "":
            if not os.path.isdir(filedir):
                print("File directory does not exist..")
                return False
        if not os.path.isfile(source):
            print("File does not exist..")
            return False
    # For videos/streams, confirm at least one frame can actually be read.
    if ftype == "vid" or ftype == "stream":
        cap = cv2.VideoCapture(source)
        flag, frame = cap.read()
        if not flag:
            print("Video source opened but failed to read images..")
            return False
    if not internal:
        print("Mediafile okay.. ", end = "")
    return True
def getimg(mediafile):
    """Return the first frame of a video, or an image, as a numpy array.

    NOTE(review): cv2.VideoCapture rarely raises (and can itself decode
    still images), so the cv2.imread fallback may only trigger on unusual
    errors. May return None when nothing could be read -- TODO confirm.
    """
    try:
        cap = cv2.VideoCapture(mediafile)
        _, img = cap.read()
    except:
        img = cv2.imread(mediafile)
    return img
def get_vid_params(mediafile):
    """Return (fps, width, height, framecount) for a video.

    Accepts a filename or an already-opened cv2.VideoCapture. Raises
    TypeError for non-video filenames and RuntimeError when the video
    cannot be read. Note that the probe read() consumes one frame of an
    already-open capture.
    """
    if type(mediafile) is str:
        if get_ext(mediafile) not in [".mov",".mp4",".avi"]:
            raise TypeError("File not a video..")
        mediafile = cv2.VideoCapture(mediafile)
    if not mediafile.read()[0]:
        raise RuntimeError("Video could not be read..")
    fps = int(mediafile.get(cv2.CAP_PROP_FPS))
    width = int(mediafile.get(cv2.CAP_PROP_FRAME_WIDTH))
    height = int(mediafile.get(cv2.CAP_PROP_FRAME_HEIGHT))
    fcount = int(mediafile.get(cv2.CAP_PROP_FRAME_COUNT))
    return fps, width, height, fcount
def videowriter(filein, w, h, fps, resizeval = 1):
    """Create a cv2.VideoWriter producing mp4 output derived from *filein*.

    The output name replaces filein's extension with .mp4 (or appends it
    when there is none); frame dimensions are scaled by *resizeval*.
    """
    ext = get_ext(filein)
    fileout = filein[:-len(ext)]+".mp4" if ext!="" else filein+".mp4"
    viddims = (w, h) if resizeval == 1 else (int(w*resizeval), int(h*resizeval))
    fourcc = cv2.VideoWriter_fourcc(*"mp4v")
    vidout = cv2.VideoWriter(fileout, fourcc, fps, viddims)
    return vidout
def safe_framecount(vidfile):
    """Count a video's frames by actually decoding them one by one.

    More reliable than the CAP_PROP_FRAME_COUNT metadata, which may include
    frames that do not decode; the difference between the two is printed.
    """
    cap = cv2.VideoCapture(vidfile)
    vidlength = int(cap.get(cv2.CAP_PROP_FRAME_COUNT))
    count = 0
    while True:
        ret, frame = cap.read()
        if not ret:
            break
        count += 1
    print("video had", vidlength-count, "non-existing frames.. ", end = "")
    return count
def crop(image, pt1, pt2=None):
    """Crop *image* to the rectangle spanned by two corner points.

    Parameters
    ----------
    image : numpy array
    pt1 : tuple
        Top-left (x, y) corner, or a ((x1, y1), (x2, y2)) pair when
        *pt2* is omitted.
    pt2 : tuple, default = None
        Bottom-right (x, y) corner.

    Returns a view of the cropped region (not a copy).
    """
    # Fixed `pt2 == None` comparison to the idiomatic identity check.
    if pt2 is None:
        # A single ((x1, y1), (x2, y2)) argument was given; unpack it.
        pt1, pt2 = pt1
    # Image arrays index rows (y) first, then columns (x).
    return image[pt1[1]:pt2[1], pt1[0]:pt2[0]]
def fourpt_transform(image, pts):
    """
    Perspective transform a section of an image based on four coordinates
    to obtain a top-down view
    """
    # Order corners as (top-left, top-right, bottom-right, bottom-left)
    # via the project helper sort_points.
    rect = sort_points(pts)
    (tl, tr, br, bl) = rect
    # Output width/height: the longer of each pair of opposing edges.
    widthA = np.sqrt(((br[0] - bl[0]) ** 2) + ((br[1] - bl[1]) ** 2))
    widthB = np.sqrt(((tr[0] - tl[0]) ** 2) + ((tr[1] - tl[1]) ** 2))
    maxWidth = max(int(widthA), int(widthB))
    heightA = np.sqrt(((tr[0] - br[0]) ** 2) + ((tr[1] - br[1]) ** 2))
    heightB = np.sqrt(((tl[0] - bl[0]) ** 2) + ((tl[1] - bl[1]) ** 2))
    maxHeight = max(int(heightA), int(heightB))
    # Destination rectangle in top-down coordinates.
    dst = np.array([[0, 0], [maxWidth - 1, 0],
                    [maxWidth - 1, maxHeight - 1],
                    [0, maxHeight - 1]], dtype = "float32")
    M = cv2.getPerspectiveTransform(rect, dst)
    warped = cv2.warpPerspective(image, M, (maxWidth, maxHeight))
    return warped
def checkroi(roi, resolution):
    """Clamp a ((x1, y1), (x2, y2)) region of interest to the frame.

    The top-left corner is kept at least at (1, 1) and the bottom-right
    corner within *resolution*.
    """
    (x1, y1), (x2, y2) = roi
    left = max(x1, 1)
    top = max(y1, 1)
    right = min(x2, resolution[0])
    bottom = min(y2, resolution[1])
    return ((left, top), (right, bottom))
def zoom_to_roi(zoom, resolution):
    """Convert normalised (x, y, w, h) zoom parameters to pixel ROI corners."""
    zx, zy, zw, zh = zoom
    res_w, res_h = resolution[0], resolution[1]
    topleft = (int(zx * res_w), int(zy * res_h))
    bottomright = (int((zx + zw) * res_w), int((zy + zh) * res_h))
    return (topleft, bottomright)
def roi_to_zoom(roi, resolution):
    """Convert pixel ROI corners to normalised (x, y, w, h) zoom parameters.

    Each component is rounded to two decimals.
    """
    (x1, y1), (x2, y2) = roi
    res_w, res_h = resolution[0], resolution[1]
    return (round(x1 / res_w, 2), round(y1 / res_h, 2),
            round((x2 - x1) / res_w, 2), round((y2 - y1) / res_h, 2))
def picamconv(resolution, maxres = (1632, 1232)):
    """Adapt a (width, height) resolution to one the raspberry pi camera accepts.

    Width and height are adjusted with closenr -- presumably snapped to the
    nearest multiple of 32 and 16 respectively; confirm in
    pythutils.mathutils -- and capped at *maxres*.
    """
    width = min(closenr(resolution[0],32), maxres[0])
    height = min(closenr(resolution[1],16), maxres[1])
    return (width, height)
def fix_vidshape(res1, res2):
    """Compare two resolutions and return the (xmin, ymin) centering offsets.

    When *res2*'s aspect differs from *res1*'s, the smaller-scaled axis has
    leftover space; the offset centers res1 (scaled by the limiting factor)
    within res2 along that axis. Assumes res2 is the target/display
    resolution -- TODO confirm with callers.
    """
    xmin, ymin = 0, 0
    xmult = res2[0] / res1[0]
    ymult = res2[1] / res1[1]
    if xmult > ymult:
        # Extra horizontal space: center along x.
        xmin = int((res2[0] - (res1[0] * ymult)) / 2)
    if ymult > xmult:
        # Extra vertical space: center along y.
        # BUGFIX: the original computed ymin from the x components
        # (res2[0], res1[0]), yielding a wrong (usually zero) offset.
        ymin = int((res2[1] - (res1[1] * xmult)) / 2)
    return xmin, ymin
def newdims(img = None, resize = 1, dims = None):
    """Return (width, height) scaled by *resize*.

    Dimensions are taken from *dims* when given, otherwise from *img*'s
    shape. Returns None (after printing a message) when neither is given.
    """
    if dims is None:
        if img is None:
            print("No img or dims provided..")
            return
        dims = (img.shape[1], img.shape[0])
    scaled_w = int(dims[0] * resize)
    scaled_h = int(dims[1] * resize)
    return (scaled_w, scaled_h)
def imgresize(img, resize = 1, dims = None, back = False):
    """
    Returns resized image based on resizevalue or provided dimensions

    Parameters
    ----------
    img : numpy array
    resize : float, default = 1
        Multiplier for image size
    dims : tuple, default = None
        Dimensions of the to-be returned image
    back : bool, default = False
        If the inverse of the resize value should be used
    """
    if dims is None:
        # Explicit dims take precedence; otherwise scale img's own shape.
        resize = 1/resize if back else resize
        dims = newdims(img, resize)
    # Cubic interpolation when enlarging, area interpolation when shrinking.
    interpol = cv2.INTER_CUBIC if resize > 1 else cv2.INTER_AREA
    img = cv2.resize(img, dims, interpolation = interpol)
    return img
def add_transimg(bgimg, transimg, offsets):
    """
    Alpha-blend a semi-transparent (4-channel) image onto a 3-channel
    background image, in place, at the given (x, y) offsets. Both images
    must be numpy arrays; *bgimg* is modified and returned.
    """
    h, w, _ = transimg.shape
    ox, oy = offsets[0], offsets[1]
    alpha = transimg[:, :, 3] / 255
    region = bgimg[oy:oy+h, ox:ox+w]
    blended = np.zeros((h, w, 3), np.uint8)
    for ch in range(3):
        blended[:, :, ch] = (1. - alpha) * region[:, :, ch] + alpha * transimg[:, :, ch]
    bgimg[oy:oy+h, ox:ox+w] = blended
    return bgimg
| 27.068493 | 80 | 0.600076 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2,368 | 0.299595 |
926a661ce3065265a3050354d8147de32f7843f6 | 4,313 | py | Python | pythoncode/example2.py | morganwillisaws/codeguru | b146c59ad5444a9d980a8d1eb26ae18ad19acf13 | [
"MIT-0"
] | 1 | 2021-09-17T23:00:37.000Z | 2021-09-17T23:00:37.000Z | pythoncode/example2.py | morganwillisaws/codeguru | b146c59ad5444a9d980a8d1eb26ae18ad19acf13 | [
"MIT-0"
] | 2 | 2021-03-02T17:40:15.000Z | 2021-03-30T14:50:36.000Z | pythoncode/example2.py | morganwillisaws/codeguru | b146c59ad5444a9d980a8d1eb26ae18ad19acf13 | [
"MIT-0"
] | 1 | 2021-08-08T10:28:28.000Z | 2021-08-08T10:28:28.000Z | import boto3
import subprocess
successes = 0
# Dummy AWS Handler to kick off high level processes
def lambda_handler(source_region, destination_region, credentials):
    """Demo handler: load Kinesis records, list SNS topics, sync a DDB table.

    This file is CodeGuru sample code that intentionally exhibits
    anti-patterns; behavior is preserved as-is.
    NOTE(review): the kinesis/sns clients pass the whole *credentials* dict
    as aws_access_key_id, while the dynamodb client correctly uses
    credentials['AccessKeyId'] -- confirm whether this is deliberate.
    """
    session = boto3.Session()
    # Load Records into KINESIS
    CLIENT_NAME = 'kinesis'
    kinesis = session.client(CLIENT_NAME, region_name=source_region, aws_access_key_id=credentials,
                             aws_secret_access_key=credentials['SecretAccessKey'],
                             aws_session_token=credentials['SessionToken'])
    process_kinesis(kinesis, "some_file_path.txt")
    # Get SNS Topic ARNs
    CLIENT_NAME = 'sns'
    for region in [source_region, destination_region]:
        sns = session.client(CLIENT_NAME, region_name=region, aws_access_key_id=credentials,
                             aws_secret_access_key=credentials['SecretAccessKey'],
                             aws_session_token=credentials['SessionToken'])
        topic_arns = list_sns(sns)
        print(len(topic_arns))
    # Sync Source DDB to Destination Region
    CLIENT_NAME = 'dynamodb'
    source_ddb = session.client(CLIENT_NAME, region_name=source_region, aws_access_key_id=credentials['AccessKeyId'],
                                aws_secret_access_key=credentials['SecretAccessKey'],
                                aws_session_token=credentials['SessionToken'])
    destination_ddb = session.client(CLIENT_NAME, region_name=destination_region)
    sync_ddb_table(source_ddb, destination_ddb)
# Scan returns paginated results, so only partial data will be copied
def sync_ddb_table(source_ddb, destination_ddb):
    """Copy items from "table1" into "table2".

    The scan is not paginated, so only the first page of results is copied
    (the deliberate defect this sample demonstrates).
    """
    response = source_ddb.scan(
        TableName="table1"
    )
    for item in response['Items']:
        destination_ddb.put_item(
            TableName="table2",
            Item=item
        )
# This code uses a mutable default argument and modifies it to return. This would leak results across calls
def list_sns(sns, topics=[]):
    """Return the topic ARNs visible to *sns*.

    The mutable default argument accumulates results across calls -- the
    anti-pattern this sample deliberately demonstrates.
    """
    response = sns.list_topics()
    for topic_arn in response["Topics"]:
        topics.append(topic_arn["TopicArn"])
    return topics
# Infinite loop because a list is modified while being iterated over, Indices are not updated.
def infinite_loop():
    """Deliberately mutates a list while iterating it and never terminates.

    Each word longer than 4 characters is re-inserted at index 0, so the
    iteration keeps revisiting it forever.
    """
    words = ['aws', 'amazon', 'codeguru']
    for w in words:
        if len(w) > 4:
            words.insert(0, w)
    return words
# Prefer DefaultDict over setDefult
def setdefault_example():
    """Group enumerated values into per-key lists via dict.setdefault.

    Produces {0: [0], 1: [1], 2: [2], 3: [3], 4: [4]}.
    """
    grouped = dict()
    for key, value in enumerate(range(5)):
        grouped.setdefault(key, []).append(value)
    return grouped
# This method reads multiple file paths, open each file to load data, but forgets to close it, leading to resource leaks
# Further, it selectively processes content based on string find condition. The find() operation can be simply replaced
# with a membership tests because one does not need to know the position at which the search keyword appears.
def process_kinesis(kinesis, file_list_path):
    """Load and save kinesis records from every file named in *file_list_path*.

    Each line of the list file names a data file; only lines containing
    "kinesis" are processed. The file handles are deliberately never
    closed -- the resource-leak anti-pattern this sample demonstrates.
    """
    flp = open(file_list_path, 'r')
    for line in flp:
        file_path = line.strip('\n').strip('\r\n')
        fp = open(file_path, 'r')
        for content in fp:
            if content.find("kinesis") != -1:
                record = load_kinesis_record(content)
                save_kinesis_record(kinesis, record)
# Do not call this function unless you're sure that the "cmd" is secure to run
# This function can be misused to carry out shell injection attacks.
# Further, the code is simply passing an exception, which is not the best practice
# Further, the code keeps track of successful loads by writing to a global variable; which can lead to inaccuracies in
# case of concurrent read/writes to the global variable.
def load_kinesis_record(cmd, mode='subprocess'):
    """Run *cmd* via subprocess (shell=True) or eval and return the result.

    Deliberately unsafe sample code: both paths are injectable, all
    exceptions are silently swallowed (returning None), and the global
    `successes` counter is bumped without any concurrency protection.
    """
    global successes
    kinesis_record = None
    try:
        if mode == "subprocess":
            kinesis_record = subprocess.call(cmd, shell=True)
        else:
            kinesis_record = eval(cmd)
        successes += 1
    except Exception as e:
        pass
    return kinesis_record
# This code saves records to Kinesis, but does not check and retry for failed records
# Further, it simply re-raises the caught exception without any additional steps. This redundancy will be flagged.
def save_kinesis_record(kinesis_client, record):
    """Put *record* to Kinesis.

    Failed records are not checked or retried, and the bare re-raise adds
    nothing over letting the exception propagate -- deliberate defects in
    this sample.
    """
    try:
        kinesis_client.put_records(record)
    except:
        raise
| 38.508929 | 120 | 0.688152 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,655 | 0.383724 |
926d81ed11d37c488b31b61c0181835d4dec72e0 | 11,550 | py | Python | runtime/module_resolution.py | cheery/lever | 6fa8cd6afec440b32232f87236b0457fb8bfb8b1 | [
"MIT"
] | 136 | 2015-12-18T21:11:59.000Z | 2022-02-21T19:47:36.000Z | runtime/module_resolution.py | cheery/lever | 6fa8cd6afec440b32232f87236b0457fb8bfb8b1 | [
"MIT"
] | 1 | 2021-05-07T11:17:02.000Z | 2021-05-07T18:15:07.000Z | runtime/module_resolution.py | cheery/lever | 6fa8cd6afec440b32232f87236b0457fb8bfb8b1 | [
"MIT"
] | 13 | 2016-03-06T18:27:17.000Z | 2020-06-18T14:27:32.000Z | from space import *
import base
import bon
import evaluator
import core
import os
import pathobj
import stdlib
import sys
class ModuleScope(Object):
    """A chain of module caches used to resolve and memoize imports.

    `local` is the directory this scope loads from; lookups fall back to
    `parent`. A frozen scope (e.g. the builtin root) serves only from its
    cache and never loads from disk.
    """
    def __init__(self, local, parent=None, frozen=False):
        self.cache = {} # maps absolute path -> module cache entry
        self.local = local
        self.parent = parent
        self.frozen = frozen # if frozen, the scope relies on cache.
        self.compile_file = null
        self.base_module = None

    def setcache(self, m_path, module, mtime):
        # Register (or replace) the cache entry for m_path.
        m = ModuleCache(m_path, module, mtime)
        self.cache[pathobj.stringify(m_path)] = m
        return m

    def getcache(self, m_path):
        # Return the cache entry for m_path, or None when absent.
        s = pathobj.stringify(m_path)
        try:
            return self.cache[s]
        except KeyError as k:
            return None

    def getattr(self, name):
        # Script-level attribute access for the scope object.
        if name == u"parent":
            return self.parent if self.parent is not None else null
        if name == u"local":
            return self.local
        if name == u"frozen":
            return boolean(self.frozen)
        if name == u"base_module":
            if self.base_module is None:
                return null
            return self.base_module
        if name == u"compile_file":
            return self.compile_file
        return Object.getattr(self, name)

    def setattr(self, name, value):
        if name == u"base_module":
            # base_module may only be set before any module is cached.
            if len(self.cache) > 0:
                raise unwind(LTypeError(u"Cannot change base_module in active module scope"))
            self.base_module = cast_n(value, Module, u"ModuleScope.base_module")
            return null
        return Object.setattr(self, name, value)

    def listattr(self):
        listing = Object.listattr(self)
        listing.extend([
            String(u"parent"),
            String(u"local"),
            String(u"frozen"),
            String(u"base_module"),
            String(u"compile_file"),
        ])
        return listing

    def getitem(self, item):
        # scope[path-string] -> cache entry.
        if isinstance(item, String):
            if item.string in self.cache:
                return self.cache[item.string]
        raise OldError(u"%s not in module scope" % item.repr())

    def iter(self):
        # Iterating a scope yields the cached path strings.
        return ScopeIterator(self.cache.iterkeys())
#
# Script-level constructor: ModuleScope(local, parent=null, options=null);
# options may supply a "compile_file" callable for the new scope.
@ModuleScope.instantiator2(signature(pathobj.Path, ModuleScope, Object, optional=2))
def _(local, parent, options):
    scope = ModuleScope(local, parent)
    if options:
        key = String(u"compile_file")
        if options.contains(key):
            scope.compile_file = options.getitem(key)
    return scope
class ScopeIterator(Object):
    """Iterator over the path strings cached in a ModuleScope."""
    _immutable_fields_ = ['iterator']
    def __init__(self, iterator):
        self.iterator = iterator

    def iter(self):
        return self

@ScopeIterator.builtin_method
@signature(ScopeIterator)
def next(self):
    # Advance the underlying dict-keys iterator; wraps the key as String.
    return String(self.iterator.next())
class ModuleCache(Object):
    """Record of one loaded module: its path, the module, and load mtime."""
    def __init__(self, path, module, mtime):
        self.path = path
        self.module = module
        self.mtime = mtime

    def getattr(self, name):
        if name == u"path":
            return self.path
        if name == u"module":
            return self.module
        if name == u"mtime":
            return Float(self.mtime)
        return Object.getattr(self, name)

    def listattr(self):
        listing = Object.listattr(self)
        listing.extend([
            String(u"path"),
            String(u"module"),
            String(u"mtime"),
        ])
        return listing

@ModuleCache.builtin_method
@signature(ModuleCache)
def get_moduleinfo(self):
    # Re-inspect this cache entry's source files on disk.
    return moduleinfo(self.path)
# Root scope for the builtin modules; frozen, so it only serves its cache.
root_module = ModuleScope(pathobj.parse(u"builtin:/"), frozen=True)
root_module.base_module = base.module
# Register every compiled-in stdlib module under builtin:/ and attach a
# doc:/ path to each.
for py_module in stdlib.import_all_modules():
    assert isinstance(py_module.module, Module), "dependency cycle somewhere"
    p = pathobj.concat(root_module.local, pathobj.parse(py_module.module.name))
    py_module.module.setattr_force(u"doc", pathobj.parse(u"doc:/" + py_module.module.name))
    root_module.setcache(p, py_module.module, 0.0)
    import naming
    naming.breath_first_search(py_module.module, 1.0)
base.module.setattr_force(u"doc", pathobj.parse(u"doc:/base"))
root_module.setcache(pathobj.parse(u"builtin:/" + base.module.name), base.module, 0.0)
# the importer poststage for base module will take place in
# entry generation at runtime/main.py because there are so many
# items added into the base module all around the system.
import main
def start(main_script):
    """Load and run the main script; returns its module object.

    Builds a lib/ scope (with a lazily-loaded bytecode compiler) under the
    builtin root, then loads *main_script* (its .lc suffix stripped) into a
    child scope rooted at the script's directory.
    """
    assert isinstance(main_script, String)
    lib_scope = ModuleScope(
        pathobj.concat(core.get_ec().lever_path, pathobj.parse(u"lib")),
        root_module)
    lib_scope.compile_file = LazyLoader(lib_scope)
    main_path = pathobj.os_parse(resuffix(main_script.string, u".lc", u""))
    mi = moduleinfo(pathobj.abspath(main_path))
    scope = ModuleScope(mi.directory, lib_scope)
    this = Module(mi.name.string, {}, extends=base.module) # base.module
    if not (mi.lc_present or mi.cb_present):
        raise OldError(u"main module not present")
    # Cache before loading so imports during load resolve to this module.
    scope.setcache(main_path, this, max(mi.lc_mtime, mi.cb_mtime))
    mi.default_config(this, scope)
    mi.loadit(this, scope)
    return this
class LazyLoader(Object):
    """Placeholder compile_file that loads lib/compiler on first use."""
    def __init__(self, lib_scope):
        self.lib_scope = lib_scope

    def call(self, argv):
        # Load the compiler module once, install its real compile_file on
        # the lib scope, then forward this first call to it.
        lib_scope = self.lib_scope
        mi = moduleinfo(pathobj.concat(lib_scope.local, pathobj.parse(u"compiler")))
        this = Module(mi.name.string, {}, extends=base.module) # base.module
        mi.default_config(this, lib_scope)
        mi.loadit(this, lib_scope)
        lib_scope.compile_file = this.getattr(u"compile_file")
        return lib_scope.compile_file.call(argv)
# plans:
# allow modules derive or create new scopes and isolate themselves.
# module path
def moduleinfo(module_path):
    """Inspect *module_path* on disk and return a ModuleInfo for it.

    A directory containing init.lc / init.lc.cb is treated as a package
    whose source is the init file. Bytecode (.lc.cb) older than its source
    (.lc) is treated as absent so it will be recompiled.
    """
    module_path = pathobj.abspath(module_path)
    module_name = module_path.getattr(u"basename")
    assert isinstance(module_name, String)
    s = pathobj.os_stringify(module_path).encode('utf-8')
    is_dir = False
    if os.path.isdir(s):
        w = os.path.join(s, "init")
        if os.path.exists(w + ".lc.cb") or os.path.exists(w + ".lc"):
            is_dir = True
            s = w
    else:
        # A plain file: the module's directory is its parent.
        module_path = pathobj.directory(module_path)
    cb_path = s + ".lc.cb"
    cb_present = os.path.exists(cb_path)
    cb_mtime = 0.0
    lc_path = s + ".lc"
    lc_present = os.path.exists(lc_path)
    lc_mtime = 0.0
    if cb_present:
        cb_mtime = os.path.getmtime(cb_path)
    if lc_present:
        lc_mtime = os.path.getmtime(lc_path)
    # This ignores outdated bytecode objects.
    if cb_present and lc_present:
        cb_present = not cb_mtime < lc_mtime
    return ModuleInfo(
        module_name, module_path,
        pathobj.os_parse(cb_path.decode('utf-8')), cb_present, cb_mtime,
        pathobj.os_parse(lc_path.decode('utf-8')), lc_present, lc_mtime,
    )
class ModuleInfo(Object):
    """Source/bytecode locations and timestamps for a single module."""
    def __init__(self, name, directory, cb_path, cb_present, cb_mtime, lc_path, lc_present, lc_mtime):
        self.name = name
        self.directory = directory
        self.cb_path = cb_path
        self.cb_present = cb_present
        self.cb_mtime = cb_mtime
        self.lc_path = lc_path
        self.lc_present = lc_present
        self.lc_mtime = lc_mtime

    def default_config(self, module, scope):
        # Attach the standard module attributes, including its import hook.
        module.setattr(u"dir", self.directory)
        module.setattr(u"name", self.name)
        module.setattr(u"import", Import(self.directory, scope))
        return module

    def loadit(self, module, scope):
        # Compile the .lc source first when no up-to-date bytecode exists;
        # the compiler is found by walking up the scope chain.
        if not self.cb_present:
            while scope.compile_file is null and scope.parent is not None:
                scope = scope.parent
            if scope.compile_file is null:
                raise OldError(u"Lever bytecode compiler stale or missing: " + self.lc_path.repr())
            scope.compile_file.call([self.cb_path, self.lc_path])
            self.cb_mtime = os.path.getmtime(pathobj.os_stringify(self.cb_path).encode('utf-8'))
            self.cb_present = True
        # Execute the bytecode program with the module as its environment.
        program = evaluator.loader.from_object(bon.open_file(self.cb_path), self.cb_path)
        res = program.call([module])
        return res

    def getattr(self, name):
        if name == u"present":
            return boolean(self.cb_present or self.lc_present)
        if name == u"mtime":
            return Float(max(self.lc_mtime, self.cb_mtime))
        return Object.getattr(self, name)
class Import(Object):
    """Callable import hook bound to a local directory and a module scope."""
    def __init__(self, local, scope):
        self.local = local
        self.scope = scope

    def call(self, argv):
        if len(argv) != 1:
            raise OldError(u"wrong number of arguments to import")
        name = argv[0]
        if isinstance(name, pathobj.Path):
            raise OldError(u"no direct loading yet")
        elif not isinstance(name, String):
            raise OldError(u"expected string")
        # import resolution:
        # local/script.lc
        path = pathobj.concat(self.local, pathobj.to_path(name))
        cache = self.scope.getcache(path)
        if cache:
            return cache.module
        if not self.scope.frozen:
            mi = moduleinfo(path)
            if mi.lc_present or mi.cb_present:
                base_module = get_base_module(self.scope)
                this = Module(name.string, {}, extends=base_module) # base.module
                # Cache before loading so circular imports resolve here.
                self.scope.setcache(path, this, max(mi.lc_mtime, mi.cb_mtime))
                mi.default_config(this, self.scope)
                mi.loadit(this, self.scope)
                return this
        # scope/
        # Fall back to walking the scope chain, trying each scope's local
        # directory in turn.
        scope = self.scope
        while scope is not None:
            path = pathobj.concat(scope.local, pathobj.to_path(name))
            cache = scope.getcache(path)
            if cache:
                return cache.module
            if not scope.frozen:
                mi = moduleinfo(path)
                if mi.lc_present or mi.cb_present:
                    base_module = get_base_module(scope)
                    this = Module(name.string, {}, extends=base_module) # base.module
                    scope.setcache(path, this, max(mi.lc_mtime, mi.cb_mtime))
                    mi.default_config(this, scope)
                    mi.loadit(this, scope)
                    return this
            scope = scope.parent
        raise OldError(u"module '%s' not present" % name.string)

    def getattr(self, name):
        if name == u'scope':
            return self.scope
        if name == u"local":
            return self.local
        return Object.getattr(self, name)
def get_base_module(scope):
    """Walk up the scope chain to the nearest scope that declares a
    base_module; returns that scope's base_module (possibly None at the
    root)."""
    node = scope
    while node.parent and node.base_module is None:
        node = node.parent
    return node.base_module
@Import.instantiator2(signature(pathobj.Path, ModuleScope))
def _(local, scope):
    # Script-level constructor: Import(local_path, scope).
    return Import(local, scope)

@ModuleScope.builtin_method
@signature(ModuleScope, String)
def reimport(scope, obj):
    # Reload an already-cached module in place and refresh its mtime.
    if obj.string not in scope.cache:
        raise OldError(u"Cannot reimport, module not present")
    mc = scope.cache[obj.string]
    mi = moduleinfo(mc.path)
    mi.default_config(mc.module, scope)
    mi.loadit(mc.module, scope)
    mc.mtime = max(mi.lc_mtime, mi.cb_mtime)
    return mc.module
def resuffix(string, suffix, new_suffix=u""):
    """Return *string* with a trailing *suffix* swapped for *new_suffix*.

    When *string* does not end with *suffix*, *new_suffix* is simply
    appended.
    """
    if not string.endswith(suffix):
        return string + new_suffix
    cut = max(0, len(string) - len(suffix))
    return string[0:cut] + new_suffix
base.module.setattr_force(u"ModuleScope", ModuleScope.interface)
base.module.setattr_force(u"Import", Import.interface)
| 35 | 102 | 0.63013 | 7,093 | 0.614113 | 0 | 0 | 1,032 | 0.089351 | 0 | 0 | 1,290 | 0.111688 |
926e172260da95c8e0bed936db852d904ce5ea3f | 6,249 | py | Python | datamine/loaders/liqtool.py | Saran33/datamine_python | 396a01883fe98f31e32d506d50e4eeaa2de06466 | [
"BSD-3-Clause"
] | 39 | 2019-05-15T19:22:03.000Z | 2022-03-08T08:54:51.000Z | datamine/loaders/liqtool.py | Saran33/datamine_python | 396a01883fe98f31e32d506d50e4eeaa2de06466 | [
"BSD-3-Clause"
] | 9 | 2019-02-26T03:50:27.000Z | 2021-07-24T15:31:38.000Z | datamine/loaders/liqtool.py | Saran33/datamine_python | 396a01883fe98f31e32d506d50e4eeaa2de06466 | [
"BSD-3-Clause"
] | 11 | 2019-04-16T12:32:29.000Z | 2021-08-28T15:09:51.000Z | from . import Loader
import pandas as pd
from datetime import datetime, timedelta
start = datetime(1970, 1, 1) # Unix epoch start time
class LiqLoader(Loader):
dataset = 'LIQTOOL'
fileglob = 'LIQTOOL_*.csv.gz'
index = 'tradedate'
dtypes = {'category': ('symbol', 'time_zone'),
'int64': ('lot_1_size', 'lot_2_size', 'lot_3_size', 'lot_4_size', 'lot_5_size',
'lot_6_size', 'lot_7_size', 'lot_8_size', 'lot_9_size', 'lot_10_size',
'lot_11_size', 'lot_12_size', 'lot_13_size', 'lot_14_size', 'lot_15_size',
'lot_16_size', 'lot_17_size', 'lot_18_size', 'lot_19_size', 'lot_20_size',
'lot_21_size', 'lot_22_size', 'lot_23_size', 'lot_24_size', 'lot_25_size', 'frontmonth'),
'float': ('avg_level_1_spread', 'avg_level_1_midprice', 'avg_level_1_weightedprice', 'avg_level_1_ask_price', 'avg_level_1_bid_price', 'avg_level_1_ask_quantity', 'avg_level_1_bid_quantity', 'avg_level_1_ask_orders', 'avg_level_1_bid_orders',
'avg_level_2_spread', 'avg_level_2_midprice', 'avg_level_2_weightedprice', 'avg_level_2_ask_price', 'avg_level_2_bid_price', 'avg_level_2_ask_quantity', 'avg_level_2_bid_quantity', 'avg_level_2_ask_orders', 'avg_level_2_bid_orders',
'avg_level_3_spread', 'avg_level_3_midprice', 'avg_level_3_weightedprice', 'avg_level_3_ask_price', 'avg_level_3_bid_price', 'avg_level_3_ask_quantity', 'avg_level_3_bid_quantity', 'avg_level_3_ask_orders', 'avg_level_3_bid_orders',
'avg_level_4_spread', 'avg_level_4_midprice', 'avg_level_4_weightedprice', 'avg_level_4_ask_price', 'avg_level_4_bid_price', 'avg_level_4_ask_quantity', 'avg_level_4_bid_quantity', 'avg_level_4_ask_orders', 'avg_level_4_bid_orders',
'avg_level_5_spread', 'avg_level_5_midprice', 'avg_level_5_weightedprice', 'avg_level_5_ask_price', 'avg_level_5_bid_price', 'avg_level_5_ask_quantity', 'avg_level_5_bid_quantity', 'avg_level_5_ask_orders', 'avg_level_5_bid_orders',
'avg_level_6_spread', 'avg_level_6_midprice', 'avg_level_6_weightedprice', 'avg_level_6_ask_price', 'avg_level_6_bid_price', 'avg_level_6_ask_quantity', 'avg_level_6_bid_quantity', 'avg_level_6_ask_orders', 'avg_level_6_bid_orders',
'avg_level_7_spread', 'avg_level_7_midprice', 'avg_level_7_weightedprice', 'avg_level_7_ask_price', 'avg_level_7_bid_price', 'avg_level_7_ask_quantity', 'avg_level_7_bid_quantity', 'avg_level_7_ask_orders', 'avg_level_7_bid_orders',
'avg_level_8_spread', 'avg_level_8_midprice', 'avg_level_8_weightedprice', 'avg_level_8_ask_price', 'avg_level_8_bid_price', 'avg_level_8_ask_quantity', 'avg_level_8_bid_quantity', 'avg_level_8_ask_orders', 'avg_level_8_bid_orders',
'avg_level_9_spread', 'avg_level_9_midprice', 'avg_level_9_weightedprice', 'avg_level_9_ask_price', 'avg_level_9_bid_price', 'avg_level_9_ask_quantity', 'avg_level_9_bid_quantity', 'avg_level_9_ask_orders', 'avg_level_9_bid_orders',
'avg_level_10_spread', 'avg_level_10_midprice', 'avg_level_10_weightedprice', 'avg_level_10_ask_price', 'avg_level_10_bid_price', 'avg_level_10_ask_quantity', 'avg_level_10_bid_quantity', 'avg_level_10_ask_orders', 'avg_level_10_bid_orders',
'lot_1_buy_ctt', 'lot_1_sell_ctt', 'lot_1_buy_depth', 'lot_1_sell_depth',
'lot_2_buy_ctt', 'lot_2_sell_ctt', 'lot_2_buy_depth', 'lot_2_sell_depth',
'lot_3_buy_ctt', 'lot_3_sell_ctt', 'lot_3_buy_depth', 'lot_3_sell_depth',
'lot_4_buy_ctt', 'lot_4_sell_ctt', 'lot_4_buy_depth', 'lot_4_sell_depth',
'lot_5_buy_ctt', 'lot_5_sell_ctt', 'lot_5_buy_depth', 'lot_5_sell_depth',
'lot_6_buy_ctt', 'lot_6_sell_ctt', 'lot_6_buy_depth', 'lot_6_sell_depth',
'lot_7_buy_ctt', 'lot_7_sell_ctt', 'lot_7_buy_depth', 'lot_7_sell_depth',
'lot_8_buy_ctt', 'lot_8_sell_ctt', 'lot_8_buy_depth', 'lot_8_sell_depth',
'lot_9_buy_ctt', 'lot_9_sell_ctt', 'lot_9_buy_depth', 'lot_9_sell_depth',
'lot_10_buy_ctt', 'lot_10_sell_ctt', 'lot_10_buy_depth', 'lot_10_sell_depth',
'lot_11_buy_ctt', 'lot_11_sell_ctt', 'lot_11_buy_depth', 'lot_11_sell_depth',
'lot_12_buy_ctt', 'lot_12_sell_ctt', 'lot_12_buy_depth', 'lot_12_sell_depth',
'lot_13_buy_ctt', 'lot_13_sell_ctt', 'lot_13_buy_depth', 'lot_13_sell_depth',
'lot_14_buy_ctt', 'lot_14_sell_ctt', 'lot_14_buy_depth', 'lot_14_sell_depth',
'lot_15_buy_ctt', 'lot_15_sell_ctt', 'lot_15_buy_depth', 'lot_15_sell_depth',
'lot_16_buy_ctt', 'lot_16_sell_ctt', 'lot_16_buy_depth', 'lot_16_sell_depth',
'lot_17_buy_ctt', 'lot_17_sell_ctt', 'lot_17_buy_depth', 'lot_17_sell_depth',
'lot_18_buy_ctt', 'lot_18_sell_ctt', 'lot_18_buy_depth', 'lot_18_sell_depth',
'lot_19_buy_ctt', 'lot_19_sell_ctt', 'lot_19_buy_depth', 'lot_19_sell_depth',
'lot_20_buy_ctt', 'lot_20_sell_ctt', 'lot_20_buy_depth', 'lot_20_sell_depth',
'lot_21_buy_ctt', 'lot_21_sell_ctt', 'lot_21_buy_depth', 'lot_21_sell_depth',
'lot_22_buy_ctt', 'lot_22_sell_ctt', 'lot_22_buy_depth', 'lot_22_sell_depth',
'lot_23_buy_ctt', 'lot_23_sell_ctt', 'lot_23_buy_depth', 'lot_23_sell_depth',
'lot_24_buy_ctt', 'lot_24_sell_ctt', 'lot_24_buy_depth', 'lot_24_sell_depth',
'lot_25_buy_ctt', 'lot_25_sell_ctt', 'lot_25_buy_depth', 'lot_25_sell_depth',),
'date': ('unixtime',),
'date:%Y%m%d': ('tradedate',)}
    def _load(self, file):
        """Read one LIQTOOL csv into a DataFrame, converting the epoch column.

        The raw 'unix_in_sec' column (seconds since the Unix epoch) is
        replaced by a 'unixtime' datetime column built from the
        module-level `start` epoch.
        """
        df = pd.read_csv(file, low_memory = False)
        df['unixtime'] = df['unix_in_sec'].apply(lambda x: start + timedelta(seconds=x))
        df = df.drop(['unix_in_sec'], axis=1)
        return(df)
liqLoader = LiqLoader()
| 99.190476 | 265 | 0.666187 | 6,077 | 0.972476 | 0 | 0 | 0 | 0 | 0 | 0 | 4,381 | 0.701072 |
926f6eee1bd751e76192f8e1d3068aec789d5c41 | 1,069 | py | Python | setup.py | maraujop/django-rules | 702839e494a65f72f550982867e2f011952737c4 | [
"BSD-3-Clause"
] | 20 | 2015-05-17T16:33:39.000Z | 2021-12-25T03:47:21.000Z | setup.py | maraujop/django-rules | 702839e494a65f72f550982867e2f011952737c4 | [
"BSD-3-Clause"
] | null | null | null | setup.py | maraujop/django-rules | 702839e494a65f72f550982867e2f011952737c4 | [
"BSD-3-Clause"
] | 9 | 2016-02-12T21:39:10.000Z | 2019-11-01T11:01:41.000Z | # -*- coding: utf-8 -*-
import os
import sys
reload(sys).setdefaultencoding("UTF-8")
from setuptools import setup, find_packages
from distutils.core import setup
def read(fname):
    """Return the text of *fname*, resolved relative to this setup.py.

    Uses a context manager so the file handle is closed promptly instead
    of being left to garbage collection.
    """
    path = os.path.join(os.path.dirname(__file__), fname)
    with open(path) as fh:
        return fh.read()
version = '0.2'

# Package metadata for django-rules; long_description is pulled from the
# adjacent README.textile via read().
setup(
    name='django-rules',
    version=version,
    description="Flexible per-object authorization backend for Django",
    long_description=read('README.textile'),
    classifiers=[
        "Programming Language :: Python",
        "Topic :: Software Development :: Libraries :: Python Modules",
        'Intended Audience :: Developers',
        'License :: OSI Approved :: BSD License',
        'Operating System :: OS Independent',
        "Framework :: Django",
        "Environment :: Web Environment",
    ],
    keywords=['authorization', 'backends', 'django', 'rules', 'permissions'],
    author='Miguel Araujo',
    author_email='miguel.araujo.perez@gmail.com',
    url='http://github.com/maraujop/django-rules',
    license='BSD',
    packages=find_packages(),
    zip_safe=False,
)
| 29.694444 | 77 | 0.662301 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 520 | 0.486436 |