hexsha stringlengths 40 40 | size int64 5 2.06M | ext stringclasses 10 values | lang stringclasses 1 value | max_stars_repo_path stringlengths 3 248 | max_stars_repo_name stringlengths 5 125 | max_stars_repo_head_hexsha stringlengths 40 78 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 3 248 | max_issues_repo_name stringlengths 5 125 | max_issues_repo_head_hexsha stringlengths 40 78 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 67k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 3 248 | max_forks_repo_name stringlengths 5 125 | max_forks_repo_head_hexsha stringlengths 40 78 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 5 2.06M | avg_line_length float64 1 1.02M | max_line_length int64 3 1.03M | alphanum_fraction float64 0 1 | count_classes int64 0 1.6M | score_classes float64 0 1 | count_generators int64 0 651k | score_generators float64 0 1 | count_decorators int64 0 990k | score_decorators float64 0 1 | count_async_functions int64 0 235k | score_async_functions float64 0 1 | count_documentation int64 0 1.04M | score_documentation float64 0 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
b2d17e05c973d4afec2889fae68a1be4d13ef5c7 | 2,440 | py | Python | src/lib/template.py | emil-jacero/powerdns-auth-docker | 922f08d6c2182cd8497fc869e42a6218ecc1b105 | [
"MIT"
] | null | null | null | src/lib/template.py | emil-jacero/powerdns-auth-docker | 922f08d6c2182cd8497fc869e42a6218ecc1b105 | [
"MIT"
] | 2 | 2021-05-08T13:30:42.000Z | 2022-02-06T22:28:54.000Z | src/lib/template.py | emil-jacero/powerdns-auth-docker | 922f08d6c2182cd8497fc869e42a6218ecc1b105 | [
"MIT"
] | null | null | null | import os
import jinja2
import logging
from lib.config import Config
class Template:
    """Renders Jinja2 templates using variables collected from environment variables."""

    def __init__(self, env_search_term="ENV"):
        """
        :param env_search_term: prefix marking environment variables that become
            template variables (e.g. ``ENV_FOO_BAR`` -> ``foo-bar``).
        """
        self.log_name = f'{Config.logger_name}.{self.__class__.__name__}'
        self.log = logging.getLogger(self.log_name)
        self.path = None
        self.name = None
        self.env_search_term = env_search_term
        self.variables = self.get_variables()

    def get_variables(self):
        """Collect template variables from the process environment.

        Returns a tuple ``(variables, autosecondary)``: ``variables`` maps
        lower-cased, dash-separated names (prefix stripped) to values, while
        ``autosecondary`` keeps raw ``AUTOSECONDARY*`` entries untouched.
        """
        variables = {}
        autosecondary = {}
        for key, value in os.environ.items():
            if "AUTOSECONDARY" in key:
                autosecondary[key] = value
            elif f"{self.env_search_term}_" in key:
                normalized = key.replace(f"{self.env_search_term}_", "").replace("_", "-").lower()
                variables[normalized] = value
        return variables, autosecondary

    def render_template(self, template, output_file):
        """
        Takes template, output file and dictionary of variables.
        Renders template with variables to the specified output file.
        """
        self.path = os.path.dirname(template)
        self.name = os.path.basename(template)
        # BUG FIX: `self.path is ''` relied on string interning (identity, not
        # equality) and raises a SyntaxWarning on CPython >= 3.8; use truthiness.
        self.log.debug(f"Template path: {self.path if self.path else 'Path_not_provided'}")
        self.log.debug(f"Template name: {self.name}")
        # Remove the previous output file so we always write a fresh render.
        if os.path.exists(output_file):
            self.log.info(f"Removing old file [{output_file}]")
            os.remove(output_file)
        # Write rendered template into file
        self.log.info(f"Rendering template {template} to {output_file}")
        data, autosecondary = self.variables
        with open(output_file, 'w') as f:
            f.write(self._load_template(self.name, self.path).render(data=data, autosecondary=autosecondary))

    def _load_template(self, name, path=None):
        """
        Takes template name and a path to the template directory.
        Falls back to the package-local ``templates`` directory when no path is given.
        """
        if path is None or path == "":
            path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'templates')
            self.log.info("Missing path to templates. Using default...")
        self.log.info(f"Template path: {path}")
        env = jinja2.Environment(loader=jinja2.FileSystemLoader(path))
        return env.get_template(name)
| 38.730159 | 109 | 0.607787 | 2,367 | 0.970082 | 0 | 0 | 0 | 0 | 0 | 0 | 756 | 0.309836 |
b2d18501d17a461813526ffc739b3386958593ec | 2,829 | py | Python | backend/fief/schemas/workspace.py | fief-dev/fief | cbfeec11da7a03aa345cb7ceb088b5d8ec9d6ab1 | [
"MIT"
] | 1 | 2022-02-13T17:39:42.000Z | 2022-02-13T17:39:42.000Z | backend/fief/schemas/workspace.py | fief-dev/fief | cbfeec11da7a03aa345cb7ceb088b5d8ec9d6ab1 | [
"MIT"
] | 1 | 2022-02-13T14:46:24.000Z | 2022-02-13T14:46:24.000Z | backend/fief/schemas/workspace.py | fief-dev/fief | cbfeec11da7a03aa345cb7ceb088b5d8ec9d6ab1 | [
"MIT"
] | null | null | null | from typing import Optional
from pydantic import BaseModel, root_validator, validator
from fief.crypto.encryption import decrypt
from fief.db.types import DatabaseType
from fief.errors import APIErrorCode
from fief.schemas.generics import UUIDSchema
from fief.settings import settings
def validate_all_database_settings(cls, values):
    """Root validator: database settings must be absent as a group or complete.

    * No type and no settings -> pass through unchanged.
    * Settings without a type (or vice versa) -> error.
    * SQLite only requires a database name; every other type requires all fields.
    """
    database_type = values.get("database_type")
    setting_fields = (
        "database_host",
        "database_port",
        "database_username",
        "database_password",
        "database_name",
    )
    provided = [values.get(field) for field in setting_fields]
    if database_type is None:
        if any(provided):
            raise ValueError(APIErrorCode.WORKSPACE_CREATE_MISSING_DATABASE_SETTINGS)
        return values
    if database_type == DatabaseType.SQLITE:
        if values.get("database_name") is None:
            raise ValueError(APIErrorCode.WORKSPACE_CREATE_MISSING_DATABASE_SETTINGS)
    elif not all(provided):
        raise ValueError(APIErrorCode.WORKSPACE_CREATE_MISSING_DATABASE_SETTINGS)
    return values
class WorkspaceCheckConnection(BaseModel):
    """Payload for testing a database connection; every setting is mandatory here."""
    database_type: DatabaseType
    database_host: str
    database_port: int
    database_username: str
    database_password: str
    database_name: str
    # Reuse the module-level root validator to enforce settings consistency.
    _validate_all_database_settings = root_validator(allow_reuse=True)(
        validate_all_database_settings
    )
class WorkspaceCreate(BaseModel):
    """Payload for creating a workspace.

    Database settings are optional as a group: either none are provided or a
    complete, consistent set is provided (enforced by the root validator).
    """
    name: str
    database_type: Optional[DatabaseType]
    database_host: Optional[str]
    database_port: Optional[int]
    database_username: Optional[str]
    database_password: Optional[str]
    database_name: Optional[str]
    # Shared all-or-nothing database settings rule.
    _validate_all_database_settings = root_validator(allow_reuse=True)(
        validate_all_database_settings
    )
class BaseWorkspace(UUIDSchema):
    """Common public attributes shared by all workspace schemas."""
    name: str
    domain: str
class Workspace(BaseWorkspace):
    """Full workspace schema.

    Database settings are stored encrypted; the `pre=True` validators below
    decrypt them transparently when the model is populated from storage.
    """
    database_type: Optional[DatabaseType]
    database_host: Optional[str]
    database_port: Optional[int]
    database_username: Optional[str]
    database_password: Optional[str]
    database_name: Optional[str]
    @validator(
        "database_host",
        "database_username",
        "database_password",
        "database_name",
        pre=True,
    )
    def decrypt_database_setting(cls, value: Optional[str]) -> Optional[str]:
        """Decrypt a stored string setting; None passes through unchanged."""
        if value is None:
            return value
        return decrypt(value, settings.encryption_key)
    @validator("database_port", pre=True)
    def decrypt_database_port(cls, value: Optional[str]) -> Optional[int]:
        """Decrypt the stored port and convert it to int; None passes through."""
        if value is None:
            return value
        return int(decrypt(value, settings.encryption_key))
class WorkspacePublic(BaseWorkspace):
    """Public (non-sensitive) view of a workspace: only the base fields."""
    pass
| 28.29 | 85 | 0.721456 | 1,611 | 0.569459 | 0 | 0 | 550 | 0.194415 | 0 | 0 | 196 | 0.069282 |
b2d1a8016c0b95e209c421ed0aa8314cc552c1ba | 491 | py | Python | art/migrations/0007_alter_artimage_project.py | rrozander/Art-Website | 2cedba90f2adc30d9e83e957903e890af7863eac | [
"MIT"
] | null | null | null | art/migrations/0007_alter_artimage_project.py | rrozander/Art-Website | 2cedba90f2adc30d9e83e957903e890af7863eac | [
"MIT"
] | null | null | null | art/migrations/0007_alter_artimage_project.py | rrozander/Art-Website | 2cedba90f2adc30d9e83e957903e890af7863eac | [
"MIT"
] | null | null | null | # Generated by Django 3.2.12 on 2022-03-01 23:06
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated: redefine ArtImage.project as a CASCADE FK with related_name='images'."""
    dependencies = [
        ('art', '0006_auto_20220301_1452'),
    ]
    operations = [
        migrations.AlterField(
            model_name='artimage',
            name='project',
            # Deleting a project removes its images; reverse accessor is `project.images`.
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='images', to='art.project'),
        ),
    ]
| 24.55 | 122 | 0.643585 | 364 | 0.741344 | 0 | 0 | 0 | 0 | 0 | 0 | 118 | 0.240326 |
b2d2f0394cea895eb88a51c785769332faca9031 | 844 | py | Python | blog/tasks.py | iloveyougit/ylink2 | a87d8fde79ab259012cd6486299fcf86e1afc740 | [
"MIT"
] | null | null | null | blog/tasks.py | iloveyougit/ylink2 | a87d8fde79ab259012cd6486299fcf86e1afc740 | [
"MIT"
] | null | null | null | blog/tasks.py | iloveyougit/ylink2 | a87d8fde79ab259012cd6486299fcf86e1afc740 | [
"MIT"
] | null | null | null |
from __future__ import absolute_import, unicode_literals
import string
from django.contrib.auth.models import User
from django.utils.crypto import get_random_string
from celery import shared_task, current_task
@shared_task
def create_random_user_accounts(total_user):
    """Create `total_user` random Django users, reporting Celery progress after each.

    Returns a final progress dict with percent fixed at 100.
    """
    for index in range(total_user):
        username = 'user_%s' % get_random_string(20, string.ascii_letters)
        email = '%s@example.com' % username
        password = get_random_string(50)
        User.objects.create_user(username=username, email=email, password=password)
        progress = {
            'current': index,
            'total': total_user,
            'percent': int((float(index) / total_user) * 100),
        }
        current_task.update_state(state='PROGRESS', meta=progress)
    return {'current': total_user, 'total': total_user, 'percent': 100}
| 29.103448 | 87 | 0.667062 | 0 | 0 | 0 | 0 | 620 | 0.734597 | 0 | 0 | 85 | 0.100711 |
b2d418afad092a7839f43f08bf37f5d322277d2e | 392 | py | Python | fehler_auth/migrations/0003_auto_20220416_1626.py | dhavall13/fehler_core | dd27802d5b227a32aebcc8bfde68e78a69a36d66 | [
"MIT"
] | null | null | null | fehler_auth/migrations/0003_auto_20220416_1626.py | dhavall13/fehler_core | dd27802d5b227a32aebcc8bfde68e78a69a36d66 | [
"MIT"
] | null | null | null | fehler_auth/migrations/0003_auto_20220416_1626.py | dhavall13/fehler_core | dd27802d5b227a32aebcc8bfde68e78a69a36d66 | [
"MIT"
] | null | null | null | # Generated by Django 2.2.27 on 2022-04-16 16:26
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated: cap Invite.email at 255 characters."""
    dependencies = [
        ('fehler_auth', '0002_auto_20211002_1511'),
    ]
    operations = [
        migrations.AlterField(
            model_name='invite',
            name='email',
            field=models.EmailField(max_length=255),
        ),
    ]
| 20.631579 | 52 | 0.604592 | 298 | 0.760204 | 0 | 0 | 0 | 0 | 0 | 0 | 101 | 0.257653 |
b2d5339e5e531cf1c00d606b9884958e7a82d30b | 5,637 | py | Python | models/gene.py | rogamba/neuropy | e5ee36126537c75e041d3413c45c6cc20d58a58e | [
"MIT"
] | null | null | null | models/gene.py | rogamba/neuropy | e5ee36126537c75e041d3413c45c6cc20d58a58e | [
"MIT"
] | null | null | null | models/gene.py | rogamba/neuropy | e5ee36126537c75e041d3413c45c6cc20d58a58e | [
"MIT"
] | null | null | null | from random import choice, gauss, random
class Gene(object):
    """Base class for genome genes.

    Subclasses declare ``__gene_attributes__`` (the mutable attribute names)
    and receive random init, crossover and copy behaviour for free. ``config``
    is a dict of hyper-parameters keyed by ``<attr>_<setting>``.
    """

    def __init__(self, config, key):
        self.config = config
        self.key = key

    def __str__(self):
        attrib = ['key'] + [a for a in self.__gene_attributes__]
        attrib = ['{0}={1}'.format(a, getattr(self, a)) for a in attrib]
        return '{0}({1})'.format(self.__class__.__name__, ", ".join(attrib))

    def clamp(self, name, value):
        '''Limit `value` to the configured [<name>_min_val, <name>_max_val] range.'''
        min_value = self.config[name + "_min_val"]
        max_value = self.config[name + "_max_val"]
        return max(min(value, max_value), min_value)

    def init_attribute(self, name):
        '''Draw an initial value for the named attribute from the config.

        Float attributes are sampled from a clamped gaussian; booleans use
        the configured default (or a coin flip when the default is None);
        activation/aggregation use their configured defaults directly.
        '''
        if name in ['weight', 'bias', 'response']:
            mean = self.config[name + "_init_mean"]
            stdev = self.config[name + "_init_stdev"]
            return self.clamp(name, gauss(mean, stdev))
        if name in ['enabled']:
            default = self.config[name + "_default"]
            # `is not None` so an explicit False default is honoured.
            return default if default is not None else (random() < 0.5)
        if name in ['activation', 'aggregation']:
            return self.config[name + "_default"]

    def init_attributes(self):
        '''Initialise every declared gene attribute with a fresh random value.'''
        for attr in self.__gene_attributes__:
            setattr(self, attr, self.init_attribute(attr))

    def crossover(self, couple):
        '''Create a child gene inheriting each attribute from self or `couple` at random.'''
        child = self.__class__(self.config, self.key)
        for attr in self.__gene_attributes__:
            v1 = getattr(self, attr)
            # BUG FIX: previously read the attribute from `self` twice, so the
            # partner's genes were never inherited during crossover.
            v2 = getattr(couple, attr)
            setattr(child, attr, v1 if random() > 0.5 else v2)
        return child

    def copy(self):
        '''Return a new gene with the same key and attribute values.'''
        new_gene = self.__class__(self.config, self.key)
        for attr in self.__gene_attributes__:
            setattr(new_gene, attr, getattr(self, attr))
        return new_gene
class NodeGene(Gene):
    """Gene describing a network node (neuron): bias, activation, aggregation, response."""
    __gene_attributes__ = [ 'bias','activation','aggregation','response' ]
    def __init__(self, config, key=None, bias=None, activation=None, aggregation=None, response=None):
        self.config = config
        self.key = key
        self.bias = bias
        self.activation = activation
        self.aggregation = aggregation
        self.response = response
        # Randomly initialise ALL attributes when bias or activation is missing.
        if self.bias == None or self.activation == None:
            self.init_attributes()
    def mutate(self):
        ''' Mutate the bias (replace with a fresh value and/or perturb with gaussian noise).
            TODO: add mutation for activation, aggregation & response
        '''
        # Single roll decides both replacement and perturbation of the bias.
        r = random()
        replace_rate = float(self.config['bias_replace_rate'])
        mutate_rate = float(self.config['bias_mutate_rate'])
        if r < replace_rate:
            # Draw a brand new bias from the init distribution.
            self.bias = self.init_attribute('bias')
        # NOTE(review): when r < replace_rate BOTH branches fire (the replaced
        # bias is then also perturbed) — confirm this overlap is intentional.
        if r < replace_rate + mutate_rate:
            # Add gaussian noise, then clamp to the configured range.
            self.bias = self.bias + gauss(0.0, self.config['bias_mutate_power'])
            self.bias = self.clamp('bias',self.bias)
    def distance(self, other):
        # Compatibility distance: attribute deltas plus 1.0 per mismatched
        # categorical attribute, scaled by the configured coefficient.
        d = abs(self.bias - other.bias) + abs(self.response - other.response)
        if self.activation != other.activation:
            d += 1.0
        if self.aggregation != other.aggregation:
            d += 1.0
        return d * self.config['compatibility_weight_coefficient']
class EdgeGene(Gene):
    """Gene describing a connection (edge): a weight plus an enabled flag."""
    __gene_attributes__ = ['weight', 'enabled']

    def __init__(self, config, key=None, weight=None, enabled=None):
        self.key = key
        self.config = config
        self.weight = weight
        self.enabled = enabled
        # Draw random initial values unless both attributes were supplied.
        if self.weight == None or self.enabled == None:
            self.init_attributes()

    def __lt__(self, other):
        # Order edge genes by key so collections of genes can be sorted.
        return self.key < other.key

    def mutate(self):
        '''Randomly replace and/or perturb the weight, then maybe flip `enabled`.'''
        roll = random()
        replace_rate = float(self.config['weight_replace_rate'])
        perturb_rate = float(self.config['weight_mutate_rate'])
        if roll < replace_rate:
            # Draw a brand new weight from the init distribution.
            self.weight = self.init_attribute('weight')
        if roll < replace_rate + perturb_rate:
            # Add gaussian noise and keep the result inside the allowed range.
            noisy = self.weight + gauss(0.0, self.config['weight_mutate_power'])
            self.weight = self.clamp('weight', noisy)
        if random() < self.config['enabled_mutate_rate']:
            self.enabled = random() < 0.5

    def distance(self, other):
        '''Compatibility distance between two edge genes.'''
        delta = abs(self.weight - other.weight)
        if self.enabled != other.enabled:
            delta += 1.0
        return delta * self.config['compatibility_weight_coefficient']
| 35.012422 | 102 | 0.583999 | 5,585 | 0.990775 | 0 | 0 | 0 | 0 | 0 | 0 | 1,609 | 0.285436 |
b2d6aec912e54487b7271a6bbbbeac36be760ac4 | 552 | py | Python | tapis_cli/commands/taccapis/v2/apps/mixins.py | bpachev/tapis-cli | c3128fb5b63ef74e06b737bbd95ef28fb24f0d32 | [
"BSD-3-Clause"
] | 8 | 2020-10-18T22:48:23.000Z | 2022-01-10T09:16:14.000Z | tapis_cli/commands/taccapis/v2/apps/mixins.py | bpachev/tapis-cli | c3128fb5b63ef74e06b737bbd95ef28fb24f0d32 | [
"BSD-3-Clause"
] | 238 | 2019-09-04T14:37:54.000Z | 2020-04-15T16:24:24.000Z | tapis_cli/commands/taccapis/v2/apps/mixins.py | bpachev/tapis-cli | c3128fb5b63ef74e06b737bbd95ef28fb24f0d32 | [
"BSD-3-Clause"
] | 5 | 2019-09-20T04:23:49.000Z | 2020-01-16T17:45:14.000Z | from tapis_cli.clients.services.mixins import ServiceIdentifier
__all__ = ['AppIdentifier']
class AppIdentifier(ServiceIdentifier):
    """ServiceIdentifier specialised for Tapis application IDs."""
    service_id_type = 'App'
    dest = 'app_id'

    def validate_identifier(self, identifier, permissive=False):
        """Return True when `identifier` names an existing app.

        With permissive=True a missing app yields False instead of raising
        ValueError.
        """
        try:
            self.tapis_client.apps.get(appId=identifier)
        except Exception:
            if not permissive:
                raise ValueError(
                    'No application exists with ID {}'.format(identifier))
            return False
        return True
| 27.6 | 74 | 0.615942 | 456 | 0.826087 | 0 | 0 | 0 | 0 | 0 | 0 | 62 | 0.112319 |
b2d6f5f40b8910601f5ded38d8738f9d70e406e6 | 835 | py | Python | agents/antifa.py | fan-weiwei/mercury-unicorn | 6c36d6baeaaee990a622caa0d7790dbd9982962c | [
"Apache-2.0"
] | null | null | null | agents/antifa.py | fan-weiwei/mercury-unicorn | 6c36d6baeaaee990a622caa0d7790dbd9982962c | [
"Apache-2.0"
] | null | null | null | agents/antifa.py | fan-weiwei/mercury-unicorn | 6c36d6baeaaee990a622caa0d7790dbd9982962c | [
"Apache-2.0"
] | null | null | null | from agents.agent import Agent
from random import randint
class Antifa(Agent):
    """Baseline resistance (good-team) agent acting mostly at random."""

    def __init__(self):
        super().__init__()
        self.is_spy = False

    def __str__(self):
        return 'Basic Antifa'

    def assign_mission(self, board):
        """Put self on the mission, then fill the remaining slots at random."""
        team_size = board.number_to_assign()
        board.add_to_mission(self.seating_position)
        while len(board.players_on_mission) < team_size:
            candidate = randint(0, board.number_of_players - 1)
            if candidate not in board.players_on_mission:
                board.add_to_mission(candidate)

    def play_mission(self, board):
        """ No other option but pass for the good guys """
        return 'Pass'

    def vote(self, board):
        # Must approve the fifth proposal, otherwise the mission auto-fails.
        return 1 if board.stall_counter == 4 else randint(0, 1)
| 24.558824 | 65 | 0.635928 | 774 | 0.926946 | 0 | 0 | 0 | 0 | 0 | 0 | 70 | 0.083832 |
b2d75b157f57c7832de3185889e5c4f8fbd90377 | 234 | py | Python | faketranslate/metadata.py | HeywoodKing/faketranslate | 683821eccd0004305c9f1bbfa0aae16f5fbcd829 | [
"MIT"
] | null | null | null | faketranslate/metadata.py | HeywoodKing/faketranslate | 683821eccd0004305c9f1bbfa0aae16f5fbcd829 | [
"MIT"
] | null | null | null | faketranslate/metadata.py | HeywoodKing/faketranslate | 683821eccd0004305c9f1bbfa0aae16f5fbcd829 | [
"MIT"
] | null | null | null | # -*- encoding: utf-8 -*-
"""
@File : metadata.py
@Time : 2020/1/1
@Author : flack
@Email : opencoding@hotmail.com
@ide : PyCharm
@project : faketranslate
@description : 描述
""" | 23.4 | 40 | 0.491453 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 237 | 0.995798 |
b2d8b14f8188a2112f4bdf4db0fef92891d9717a | 6,062 | py | Python | Scaffold_Code/test scripts/test10_vm.py | tzortzispanagiotis/nbc-blockchain-python | 4e59bfd3f8aa6fb72ce89f430909a1d5c90629e2 | [
"MIT"
] | null | null | null | Scaffold_Code/test scripts/test10_vm.py | tzortzispanagiotis/nbc-blockchain-python | 4e59bfd3f8aa6fb72ce89f430909a1d5c90629e2 | [
"MIT"
] | null | null | null | Scaffold_Code/test scripts/test10_vm.py | tzortzispanagiotis/nbc-blockchain-python | 4e59bfd3f8aa6fb72ce89f430909a1d5c90629e2 | [
"MIT"
] | 1 | 2021-03-20T20:18:40.000Z | 2021-03-20T20:18:40.000Z | import requests, json, time
from multiprocessing.dummy import Pool
pool = Pool(100)
# One accumulator per node: parsed rows from transactionsN.txt,
# each row being [target_node_token, amount_string].
transactions0 = []
transactions1 = []
transactions2 = []
transactions3 = []
transactions4 = []
transactions5 = []
transactions6 = []
transactions7 = []
transactions8 = []
transactions9 = []
# Maps the node tokens used in the transaction files to integer node ids.
nodeid = {
    'id0': 0,
    'id1': 1,
    'id2': 2,
    'id3': 3,
    'id4': 4,
    'id5': 5,
    'id6': 6,
    'id7': 7,
    'id8': 8,
    'id9': 9,
}
# REST base URLs of the ten running node instances (5 hosts x 2 ports each).
node = {
    '0': 'http://192.168.0.1:5000',
    '1': 'http://192.168.0.2:5001',
    '2': 'http://192.168.0.3:5002',
    '3': 'http://192.168.0.4:5003',
    '4': 'http://192.168.0.5:5004',
    '5': 'http://192.168.0.1:5005',
    '6': 'http://192.168.0.2:5006',
    '7': 'http://192.168.0.3:5007',
    '8': 'http://192.168.0.4:5008',
    '9': 'http://192.168.0.5:5009'
}
def trans(transactions, src_id):
    """POST each (target token, amount) row to the source node's /createtransaction."""
    endpoint = node[src_id] + '/createtransaction'
    for row in transactions:
        payload = {
            "id": nodeid[row[0]],
            "amount": int(row[1]),
        }
        requests.post(endpoint, data=json.dumps(payload))
if __name__ == '__main__':
    # Load the per-node transaction lists (each line: "<target token> <amount>").
    transaction_sets = [
        transactions0, transactions1, transactions2, transactions3, transactions4,
        transactions5, transactions6, transactions7, transactions8, transactions9,
    ]
    for idx, txs in enumerate(transaction_sets):
        path = '../assignment_docs/transactions/10nodes/transactions%d.txt' % idx
        with open(path) as f:
            for line in f:
                txs.append(line.rstrip('\n').split(' '))

    # Register every node except node 0 (the bootstrap node), then start all timers.
    for i in range(1, 10):
        requests.get(node[str(i)] + '/selfregister')
    for i in range(10):
        requests.get(node[str(i)] + '/timerstart')

    # Kick off work on every node asynchronously. These futures are intentionally
    # not awaited (fire-and-forget, matching the original script); the sleep
    # below gives the nodes time to start.
    futures = [pool.apply_async(requests.get, [node[str(i)] + '/startwork'])
               for i in range(10)]
    time.sleep(5)

    # Dispatch every node's transaction list concurrently and wait for completion.
    futures = [pool.apply_async(trans, [txs, str(i)])
               for i, txs in enumerate(transaction_sets)]
    for future in futures:
        future.get()
b2d93ba833cf1e0c7961b1add21037470175c381 | 1,550 | py | Python | api/standup/utils/email.py | adoval4/standup | 307200b46952c8129a36931103920d3200640b83 | [
"BSD-2-Clause"
] | null | null | null | api/standup/utils/email.py | adoval4/standup | 307200b46952c8129a36931103920d3200640b83 | [
"BSD-2-Clause"
] | 11 | 2020-02-12T02:27:29.000Z | 2022-03-12T00:08:22.000Z | api/standup/utils/email.py | adoval4/standup | 307200b46952c8129a36931103920d3200640b83 | [
"BSD-2-Clause"
] | null | null | null | # django
from django.core.mail import EmailMessage
from django.conf import settings
from django.template.loader import render_to_string
# utiltities
import threading
class EmailThread(threading.Thread):
    """
    Class uses a thread to send email.

    The Django EmailMessage is built and sent inside run(), so callers that
    start() the thread are not blocked by SMTP delivery.
    """
    def __init__(self, subject, content, recipient_list, is_html):
        # subject/content/recipient_list map directly onto EmailMessage fields;
        # is_html switches the message content subtype to "html".
        self.subject = subject
        self.recipient_list = recipient_list
        self.content = content
        self.is_html = is_html
        threading.Thread.__init__(self)
    def run (self):
        # Build the message with the project-wide default sender address.
        msg = EmailMessage(
            self.subject,
            self.content,
            settings.DEFAULT_FROM_EMAIL,
            self.recipient_list
        )
        if self.is_html:
            msg.content_subtype = "html"
        msg.send()
def send_mail(subject, content, recipients, is_html=False):
    """Send an email asynchronously on a background EmailThread."""
    worker = EmailThread(subject, content, recipients, is_html)
    worker.start()
def send_template_mail(subject, template_name, context, recipients, is_html):
    """Render `template_name` with `context` and email it to `recipients`.

    Does nothing (returns None) when the recipient list is empty.
    """
    if not recipients:
        return None
    rendered = render_to_string(template_name, context)
    send_mail(subject, rendered, recipients, is_html)
def send_html_template_mail(subject, template_name, context, recipients):
    """Render the template and send it as an HTML email."""
    send_template_mail(subject, template_name, context, recipients, is_html=True)
def send_text_template_mail(subject, template_name, context, recipients):
    """Render the template and send it as a plain-text email."""
    send_template_mail(subject, template_name, context, recipients, is_html=False)
| 24.21875 | 77 | 0.762581 | 490 | 0.316129 | 0 | 0 | 0 | 0 | 0 | 0 | 310 | 0.2 |
b2d93cfb63dcf1ebc579a1abfad61711545c68bf | 628 | py | Python | app/main/controller/sample_controller.py | Eliotdoesprogramming/python.flask.sqlalchemy.Rest_Api_Template | 3f0a98ae4676aef9ecdf0df70eb9d1990fee6182 | [
"MIT"
] | null | null | null | app/main/controller/sample_controller.py | Eliotdoesprogramming/python.flask.sqlalchemy.Rest_Api_Template | 3f0a98ae4676aef9ecdf0df70eb9d1990fee6182 | [
"MIT"
] | null | null | null | app/main/controller/sample_controller.py | Eliotdoesprogramming/python.flask.sqlalchemy.Rest_Api_Template | 3f0a98ae4676aef9ecdf0df70eb9d1990fee6182 | [
"MIT"
] | null | null | null | from flask import Flask
from flask_sqlalchemy import SQLAlchemy
from service.api_service import Service
class SampleController(object):
    """Registers example HTTP routes on a Flask app, delegating handling to a Service."""

    def __init__(self, app: Flask, db: SQLAlchemy, service: Service) -> None:
        self.app = app
        self.db = db
        self.service = service
        # Routes are registered immediately on construction.
        self.add_routes(app)

    def add_routes(self, app: Flask):
        """Register the /example GET and POST endpoints."""
        app.add_url_rule('/example', methods=['GET'], view_func=self.example)
        app.add_url_rule('/example', methods=['POST'], view_func=self.add_example)

    def example(self):
        # GET /example -> delegate to the service layer.
        return self.service.example()

    def add_example(self):
        # POST /example -> delegate to the service layer.
        return self.service.add_example()
b2d981ecabea84bee53271f8bc9c6bdef3c97cef | 3,533 | py | Python | supervised/preprocessing/datetime_transformer.py | sourcery-ai-bot/mljar-supervised | f60f4ac65516ac759e4b84a198205480a56ada64 | [
"MIT"
] | null | null | null | supervised/preprocessing/datetime_transformer.py | sourcery-ai-bot/mljar-supervised | f60f4ac65516ac759e4b84a198205480a56ada64 | [
"MIT"
] | null | null | null | supervised/preprocessing/datetime_transformer.py | sourcery-ai-bot/mljar-supervised | f60f4ac65516ac759e4b84a198205480a56ada64 | [
"MIT"
] | null | null | null | import numpy as np
import pandas as pd
import datetime
import json
class DateTimeTransformer(object):
    """Expands a single datetime column into numeric feature columns.

    fit() decides which components (year, month, ...) actually vary in the
    training data; transform() materialises those components as new columns
    and drops the original datetime column. State round-trips via
    to_json()/from_json().
    """

    # (transform key, column suffix, extractor(series, min_datetime) -> values).
    # Order matters: it fixes the order of generated columns and serialized keys.
    _COMPONENTS = [
        ("year", "_Year", lambda s, _m: s.dt.year),
        ("month", "_Month", lambda s, _m: s.dt.month),
        ("day", "_Day", lambda s, _m: s.dt.day),
        ("weekday", "_WeekDay", lambda s, _m: s.dt.weekday),
        ("dayofyear", "_DayOfYear", lambda s, _m: s.dt.dayofyear),
        ("hour", "_Hour", lambda s, _m: s.dt.hour),
        ("days_diff", "_Days_Diff_To_Min", lambda s, m: (s - m).dt.days),
    ]

    def __init__(self):
        self._new_columns = []      # names of the generated feature columns
        self._old_column = None     # the source datetime column
        self._min_datetime = None   # reference point for the days-diff feature
        self._transforms = []       # keys of the components selected by fit()

    def fit(self, X, column):
        """Select the datetime components of `column` that vary in `X`.

        :param X: DataFrame containing a datetime64 column named `column`.
        :param column: name of the datetime column to expand.
        """
        self._old_column = column
        self._min_datetime = np.min(X[column])
        # Reset state so that refitting does not accumulate duplicates
        # (the previous implementation appended on every fit call).
        self._transforms = []
        self._new_columns = []
        for key, suffix, extract in self._COMPONENTS:
            values = extract(X[column], self._min_datetime)
            # Keep only components that actually vary in the training data.
            if len(np.unique(values)) > 1:
                self._transforms += [key]
                self._new_columns += [column + suffix]

    def transform(self, X):
        """Add the selected feature columns to `X` in place, drop the source column, return `X`."""
        column = self._old_column
        for key, suffix, extract in self._COMPONENTS:
            if key in self._transforms:
                X[column + suffix] = extract(X[column], self._min_datetime)
        X.drop(column, axis=1, inplace=True)
        return X

    def to_json(self):
        """Serialize the fitted state to a JSON-compatible dict."""
        return {
            "new_columns": list(self._new_columns),
            "old_column": self._old_column,
            "min_datetime": str(self._min_datetime),
            "transforms": list(self._transforms),
        }

    def from_json(self, data_json):
        """Restore fitted state from a dict produced by to_json()."""
        self._new_columns = data_json.get("new_columns", None)
        self._old_column = data_json.get("old_column", None)
        d = data_json.get("min_datetime", None)
        self._min_datetime = None if d is None else pd.to_datetime(d)
        self._transforms = data_json.get("transforms", [])
| 32.712963 | 69 | 0.565242 | 3,463 | 0.980187 | 0 | 0 | 0 | 0 | 0 | 0 | 350 | 0.099066 |
b2da249453d5ecc88736b8a335fffd1d8b76e78e | 10,592 | py | Python | tac/gui/dashboards/controller.py | fetchai/agents-tac | 9e7de7cf6a43fff789972f6d7a3ed906858009e0 | [
"Apache-2.0"
] | 29 | 2019-07-17T08:58:19.000Z | 2021-12-08T19:25:22.000Z | tac/gui/dashboards/controller.py | fetchai/agents-tac | 9e7de7cf6a43fff789972f6d7a3ed906858009e0 | [
"Apache-2.0"
] | 90 | 2019-07-03T09:19:15.000Z | 2022-01-20T10:37:48.000Z | tac/gui/dashboards/controller.py | fetchai/agents-tac | 9e7de7cf6a43fff789972f6d7a3ed906858009e0 | [
"Apache-2.0"
] | 8 | 2019-07-12T11:06:54.000Z | 2020-05-29T18:54:51.000Z | # -*- coding: utf-8 -*-
# ------------------------------------------------------------------------------
#
# Copyright 2018-2019 Fetch.AI Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# ------------------------------------------------------------------------------
"""Module containing the controller dashboard and related classes."""
import argparse
import json
import os
from typing import Optional, Dict
import numpy as np
from tac.gui.dashboards.base import start_visdom_server, Dashboard
from tac.agents.controller.base.states import Game
from tac.platform.game.stats import GameStats
DEFAULT_ENV_NAME = "tac_simulation_env_main"
class ControllerDashboard(Dashboard):
"""
Class to manage a Visdom dashboard for the controller agent.
It assumes that a Visdom server is running at the address and port provided in input
(default: http://localhost:8097)
"""
def __init__(
self,
game_stats: Optional[GameStats] = None,
visdom_addr: str = "localhost",
visdom_port: int = 8097,
env_name: Optional[str] = "tac_controller",
):
"""Instantiate a ControllerDashboard."""
super().__init__(visdom_addr, visdom_port, env_name)
self.game_stats = game_stats
self.agent_pbk_to_name = {} # type: Dict[str, str]
def update(self):
"""Update the dashboard."""
if not self._is_running():
raise Exception("Dashboard not running, update not allowed.")
self._update_registered_agents()
if self.game_stats is not None:
self._update_info()
self._update_utility_params()
self._update_current_holdings()
self._update_initial_holdings()
self._update_plot_scores()
self._update_plot_balance_history()
self._update_plot_price_history()
self._update_plot_eq_vs_mean_price()
self._update_plot_eq_vs_current_score()
self._update_adjusted_score()
@staticmethod
def from_datadir(datadir: str, env_name: str) -> "ControllerDashboard":
"""
Return a ControllerDashboard from a data directory.
:param datadir: the data directory
:param env_name: the environment name
:return: controller dashboard
"""
game_data_json_filepath = os.path.join(datadir, "game.json")
print("Loading data from {}".format(game_data_json_filepath))
game_data = json.load(open(game_data_json_filepath))
game = Game.from_dict(game_data)
game_stats = GameStats(game)
return ControllerDashboard(game_stats, env_name=env_name)
def _update_info(self):
window_name = "configuration_details"
self.viz.properties(
[
{
"type": "number",
"name": "# agents",
"value": self.game_stats.game.configuration.nb_agents,
},
{
"type": "number",
"name": "# goods",
"value": self.game_stats.game.configuration.nb_goods,
},
{
"type": "number",
"name": "tx fee",
"value": self.game_stats.game.configuration.tx_fee,
},
{
"type": "number",
"name": "# transactions",
"value": len(self.game_stats.game.transactions),
},
],
env=self.env_name,
win=window_name,
opts=dict(title="Configuration"),
)
def _update_registered_agents(self):
window_name = "registered_agents"
self.viz.properties(
[
{"type": "string", "name": "{}".format(agent_name), "value": ""}
for agent_name in self.agent_pbk_to_name.values()
],
env=self.env_name,
win=window_name,
opts=dict(title="Registered Agents"),
)
def _update_utility_params(self):
utility_params = self.game_stats.game.initialization.utility_params
utility_params = np.asarray(utility_params)
window_name = "utility_params"
self.viz.heatmap(
utility_params,
env=self.env_name,
win=window_name,
opts=dict(title="Utility Parameters", xlabel="Goods", ylabel="Agents"),
)
def _update_initial_holdings(self):
initial_holdings = self.game_stats.holdings_history()[0]
window_name = "initial_holdings"
self.viz.heatmap(
initial_holdings,
env=self.env_name,
win=window_name,
opts=dict(
title="Initial Holdings", xlabel="Goods", ylabel="Agents", stacked=True,
),
)
def _update_current_holdings(self):
initial_holdings = self.game_stats.holdings_history()[-1]
window_name = "final_holdings"
self.viz.heatmap(
initial_holdings,
env=self.env_name,
win=window_name,
opts=dict(
title="Current Holdings", xlabel="Goods", ylabel="Agents", stacked=True,
),
)
def _update_plot_scores(self):
keys, score_history = self.game_stats.score_history()
agent_names = [
self.game_stats.game.configuration.agent_pbk_to_name[agent_pbk]
for agent_pbk in keys
]
window_name = "score_history"
self.viz.line(
X=np.arange(score_history.shape[0]),
Y=score_history,
env=self.env_name,
win=window_name,
opts=dict(
legend=agent_names,
title="Scores",
xlabel="Transactions",
ylabel="Score",
),
)
def _update_plot_balance_history(self):
keys, balance_history = self.game_stats.balance_history()
agent_names = [
self.game_stats.game.configuration.agent_pbk_to_name[agent_pbk]
for agent_pbk in keys
]
window_name = "balance_history"
self.viz.line(
X=np.arange(balance_history.shape[0]),
Y=balance_history,
env=self.env_name,
win=window_name,
opts=dict(
legend=agent_names,
title="Balance history",
xlabel="Transactions",
ylabel="Money",
),
)
def _update_plot_price_history(self):
price_history = self.game_stats.price_history()
window_name = "price_history"
self.viz.line(
X=np.arange(price_history.shape[0]),
Y=price_history,
env=self.env_name,
win=window_name,
opts=dict(
legend=list(
self.game_stats.game.configuration.good_pbk_to_name.values()
),
title="Price history",
xlabel="Transactions",
ylabel="Price",
),
)
def _update_plot_eq_vs_mean_price(self):
good_names, eq_vs_mean_price = self.game_stats.eq_vs_mean_price()
window_name = "eq_vs_mean_price"
self.viz.bar(
X=eq_vs_mean_price,
env=self.env_name,
win=window_name,
opts=dict(
legend=["eq_price", "mean_price"],
title="Equilibrium vs Mean Prices",
xlabel="Goods",
ylabel="Price",
rownames=good_names,
),
)
def _update_plot_eq_vs_current_score(self):
keys, eq_vs_current_score = self.game_stats.eq_vs_current_score()
agent_names = [
self.game_stats.game.configuration.agent_pbk_to_name[agent_pbk]
for agent_pbk in keys
]
window_name = "eq_vs_current_score"
self.viz.bar(
X=eq_vs_current_score,
env=self.env_name,
win=window_name,
opts=dict(
legend=["eq_score", "current_score"],
title="Equilibrium vs Current Score",
xlabel="Agents",
ylabel="Score",
rownames=agent_names,
),
)
def _update_adjusted_score(self):
keys, adjusted_score = self.game_stats.adjusted_score()
window_name = "adjusted_score"
self.viz.bar(
X=adjusted_score,
env=self.env_name,
win=window_name,
opts=dict(
title="Adjusted Score",
xlabel="Agents",
ylabel="Score",
rownames=[
self.game_stats.game.configuration.agent_pbk_to_name[agent_pbk]
for agent_pbk in keys
],
),
)
def __enter__(self):
"""Enter the dashboard."""
self.start()
self.update()
def __exit__(self, exc_type, exc_val, exc_tb):
"""Exit the dashboard."""
self.stop()
def parse_args():
"""Parse the arguments."""
parser = argparse.ArgumentParser(
"dashboard", description="Data Visualization for the simulation outcome"
)
parser.add_argument(
"--datadir",
type=str,
required=True,
help="The path to the simulation data folder.",
)
parser.add_argument(
"--env_name",
type=str,
default=None,
help="The name of the environment to create.",
)
arguments = parser.parse_args()
return arguments
if __name__ == "__main__":
arguments = parse_args()
process = start_visdom_server()
d = ControllerDashboard.from_datadir(arguments.datadir, arguments.env_name)
d.start()
d.update()
while True:
try:
input()
except KeyboardInterrupt:
break
finally:
d.stop()
process.terminate()
| 31.244838 | 88 | 0.558912 | 8,510 | 0.803437 | 0 | 0 | 645 | 0.060895 | 0 | 0 | 2,521 | 0.23801 |
b2da2f7f294d67b0e66ebfb594c13ddc9e71fc29 | 13,568 | py | Python | timecat/apps/users/views.py | LinXueyuanStdio/memp | c6f6609cec7c54ec23881838dacb5f4ffba2e68c | [
"Apache-2.0"
] | null | null | null | timecat/apps/users/views.py | LinXueyuanStdio/memp | c6f6609cec7c54ec23881838dacb5f4ffba2e68c | [
"Apache-2.0"
] | null | null | null | timecat/apps/users/views.py | LinXueyuanStdio/memp | c6f6609cec7c54ec23881838dacb5f4ffba2e68c | [
"Apache-2.0"
] | null | null | null | import json
from django.urls import reverse
from django.shortcuts import render
from django.db.models import Q
from django.views.generic.base import View
from django.contrib.auth import authenticate,login,logout
from django.contrib.auth.backends import ModelBackend
from django.contrib.auth.hashers import make_password
from django.http import HttpResponse,HttpResponseRedirect
from rest_framework import viewsets
from rest_framework.permissions import IsAuthenticated
from pure_pagination import Paginator, EmptyPage, PageNotAnInteger
from apps.operation.models import UserCourse,UserFavorite,UserMessage
from apps.organization.models import CourseOrg,Teacher
from apps.course.models import Course
from apps.utils.email_send import send_register_eamil
from apps.utils.mixin_utils import LoginRequiredMixin
from .models import CustomUser,EmailVerifyRecord
from .models import Banner
from .forms import LoginForm,RegisterForm,ForgetPwdForm,ModifyPwdForm
from .forms import UploadImageForm,UserInfoForm
from .serializers import UserSerializer
class UsersViewSet(viewsets.ModelViewSet):
queryset = CustomUser.objects.all()
serializer_class = UserSerializer
permission_classes = (IsAuthenticated,)
# 邮箱和用户名都可以登录
# 基与ModelBackend类,因为它有authenticate方法
class CustomBackend(ModelBackend):
def authenticate(self, request, username=None, password=None, **kwargs):
try:
# 不希望用户存在两个,get只能有一个。两个是get失败的一种原因 Q为使用并集查询
user = CustomUser.objects.get(Q(username=username)|Q(email=username))
# django的后台中密码加密:所以不能password==password
# CustomUser继承的AbstractUser中有def check_password(self, raw_password):
if user.check_password(password):
return user
except Exception as e:
return None
class IndexView(View):
'''首页'''
def get(self,request):
#轮播图
all_banners = Banner.objects.all().order_by('index')
#课程
courses = Course.objects.filter(is_banner=False)[:6]
#轮播课程
banner_courses = Course.objects.filter(is_banner=True)[:3]
#课程机构
course_orgs = Course.objects.all()[:15]
return render(request,'index.html',{
'all_banners':all_banners,
'courses':courses,
'banner_courses':banner_courses,
'course_orgs':course_orgs,
})
# 登录
class LoginView(View):
'''用户登录'''
def get(self,request):
return render(request, 'login.html')
def post(self,request):
# 实例化
login_form = LoginForm(request.POST)
if login_form.is_valid():
# 获取用户提交的用户名和密码
user_name = request.POST.get('username', None)
pass_word = request.POST.get('password', None)
# 成功返回user对象,失败None
user = authenticate(username=user_name, password=pass_word)
# 如果不是null说明验证成功
if user is not None:
if user.is_active:
# 只有注册激活才能登录
login(request, user)
return HttpResponseRedirect(reverse('index'))
else:
return render(request, 'login.html', {'msg': '用户名或密码错误', 'login_form': login_form})
# 只有当用户名或密码不存在时,才返回错误信息到前端
else:
return render(request, 'login.html', {'msg': '用户名或密码错误','login_form':login_form})
# form.is_valid()已经判断不合法了,所以这里不需要再返回错误信息到前端了
else:
return render(request,'login.html',{'login_form':login_form})
# 激活用户
class ActiveUserView(View):
def get(self, request, active_code):
# 查询邮箱验证记录是否存在
all_record = EmailVerifyRecord.objects.filter(code = active_code)
if all_record:
for record in all_record:
# 获取到对应的邮箱
email = record.email
# 查找到邮箱对应的user
user = CustomUser.objects.get(email=email)
user.is_active = True
user.save()
# 验证码不对的时候跳转到激活失败页面
else:
return render(request,'active_fail.html')
# 激活成功跳转到登录页面
return render(request, "login.html", )
class LogoutView(View):
'''用户登出'''
def get(self,request):
logout(request)
return HttpResponseRedirect(reverse('index'))
# 注册
class RegisterView(View):
'''用户注册'''
def get(self,request):
register_form = RegisterForm()
return render(request,'register.html',{'register_form':register_form})
def post(self,request):
register_form = RegisterForm(request.POST)
if register_form.is_valid():
user_name = request.POST.get('email', None)
# 如果用户已存在,则提示错误信息
if CustomUser.objects.filter(email = user_name):
return render(request, 'register.html', {'register_form':register_form,'msg': '用户已存在'})
pass_word = request.POST.get('password', None)
# 实例化一个user_profile对象
user_profile = CustomUser()
user_profile.username = user_name
user_profile.email = user_name
user_profile.is_active = False
# 对保存到数据库的密码加密
user_profile.password = make_password(pass_word)
user_profile.save()
send_register_eamil(user_name,'register')
return render(request,'login.html')
else:
return render(request,'register.html',{'register_form':register_form})
class ForgetPwdView(View):
'''找回密码'''
def get(self,request):
forget_form = ForgetPwdForm()
return render(request,'forgetpwd.html',{'forget_form':forget_form})
def post(self,request):
forget_form = ForgetPwdForm(request.POST)
if forget_form.is_valid():
email = request.POST.get('email',None)
send_register_eamil(email,'forget')
return render(request, 'send_success.html')
else:
return render(request,'forgetpwd.html',{'forget_form':forget_form})
class ResetView(View):
def get(self, request, active_code):
all_records = EmailVerifyRecord.objects.filter(code=active_code)
if all_records:
for record in all_records:
email = record.email
return render(request, "password_reset.html", {"email":email})
else:
return render(request, "active_fail.html")
return render(request, "login.html")
class ModifyPwdView(View):
'''修改用户密码'''
def post(self, request):
modify_form = ModifyPwdForm(request.POST)
if modify_form.is_valid():
pwd1 = request.POST.get("password1", "")
pwd2 = request.POST.get("password2", "")
email = request.POST.get("email", "")
if pwd1 != pwd2:
return render(request, "password_reset.html", {"email":email, "msg":"密码不一致!"})
user = CustomUser.objects.get(email=email)
user.password = make_password(pwd2)
user.save()
return render(request, "login.html")
else:
email = request.POST.get("email", "")
return render(request, "password_reset.html", {"email":email, "modify_form":modify_form })
class UserinfoView(LoginRequiredMixin, View):
"""
用户个人信息
"""
def get(self, request):
return render(request, 'usercenter-info.html', {})
def post(self, request):
user_info_form = UserInfoForm(request.POST, instance=request.user)
if user_info_form.is_valid():
user_info_form.save()
return HttpResponse('{"status":"success"}', content_type='application/json')
else:
return HttpResponse(json.dumps(user_info_form.errors), content_type='application/json')
# def post(self, request):
# user_info_form = UserInfoForm(request.POST)
# if user_info_form.is_valid():
# nick_name = request.POST.get('nick_name',None)
# gender = request.POST.get('gender',None)
# birthday = request.POST.get('birthday',None)
# adress = request.POST.get('address',None)
# mobile = request.POST.get('mobile',None)
# user = request.user
# user.nick_name = nick_name
# user.gender = gender
# user.birthday = birthday
# user.adress = adress
# user.mobile = mobile
# user.save()
# return HttpResponse('{"status":"success"}', content_type='application/json')
# else:
# return HttpResponse(json.dumps(user_info_form.errors), content_type='application/json')
class UploadImageView(LoginRequiredMixin,View):
'''用户图像修改'''
def post(self,request):
#上传的文件都在request.FILES里面获取,所以这里要多传一个这个参数
image_form = UploadImageForm(request.POST,request.FILES)
if image_form.is_valid():
image = image_form.cleaned_data['image']
request.user.image = image
request.user.save()
return HttpResponse('{"status":"success"}', content_type='application/json')
else:
return HttpResponse('{"status":"fail"}', content_type='application/json')
class UpdatePwdView(View):
"""
个人中心修改用户密码
"""
def post(self, request):
modify_form = ModifyPwdForm(request.POST)
if modify_form.is_valid():
pwd1 = request.POST.get("password1", "")
pwd2 = request.POST.get("password2", "")
if pwd1 != pwd2:
return HttpResponse('{"status":"fail","msg":"密码不一致"}', content_type='application/json')
user = request.user
user.password = make_password(pwd2)
user.save()
return HttpResponse('{"status":"success"}', content_type='application/json')
else:
return HttpResponse(json.dumps(modify_form.errors), content_type='application/json')
class SendEmailCodeView(LoginRequiredMixin, View):
'''发送邮箱修改验证码'''
def get(self,request):
email = request.GET.get('email','')
if CustomUser.objects.filter(email=email):
return HttpResponse('{"email":"邮箱已存在"}', content_type='application/json')
send_register_eamil(email,'update_email')
return HttpResponse('{"status":"success"}', content_type='application/json')
class UpdateEmailView(LoginRequiredMixin, View):
'''修改邮箱'''
def post(self, request):
email = request.POST.get("email", "")
code = request.POST.get("code", "")
existed_records = EmailVerifyRecord.objects.filter(email=email, code=code, send_type='update_email')
if existed_records:
user = request.user
user.email = email
user.save()
return HttpResponse('{"status":"success"}', content_type='application/json')
else:
return HttpResponse('{"email":"验证码无效"}', content_type='application/json')
class MyCourseView(LoginRequiredMixin, View):
'''我的课程'''
def get(self, request):
user_courses = UserCourse.objects.filter(user=request.user)
return render(request, "usercenter-mycourse.html", {
"user_courses":user_courses,
})
class MyFavOrgView(LoginRequiredMixin,View):
'''我收藏的课程机构'''
def get(self, request):
org_list = []
fav_orgs = UserFavorite.objects.filter(user=request.user, fav_type=2)
# 上面的fav_orgs只是存放了id。我们还需要通过id找到机构对象
for fav_org in fav_orgs:
# 取出fav_id也就是机构的id。
org_id = fav_org.fav_id
# 获取这个机构对象
org = CourseOrg.objects.get(id=org_id)
org_list.append(org)
return render(request, "usercenter-fav-org.html", {
"org_list": org_list,
})
class MyFavTeacherView(LoginRequiredMixin, View):
'''我收藏的授课讲师'''
def get(self, request):
teacher_list = []
fav_teachers = UserFavorite.objects.filter(user=request.user, fav_type=3)
for fav_teacher in fav_teachers:
teacher_id = fav_teacher.fav_id
teacher = Teacher.objects.get(id=teacher_id)
teacher_list.append(teacher)
return render(request, "usercenter-fav-teacher.html", {
"teacher_list": teacher_list,
})
class MyFavCourseView(LoginRequiredMixin,View):
"""
我收藏的课程
"""
def get(self, request):
course_list = []
fav_courses = UserFavorite.objects.filter(user=request.user, fav_type=1)
for fav_course in fav_courses:
course_id = fav_course.fav_id
course = Course.objects.get(id=course_id)
course_list.append(course)
return render(request, 'usercenter-fav-course.html', {
"course_list":course_list,
})
class MyMessageView(LoginRequiredMixin, View):
'''我的消息'''
def get(self, request):
all_message = UserMessage.objects.filter(user= request.user.id)
try:
page = request.GET.get('page', 1)
except PageNotAnInteger:
page = 1
p = Paginator(all_message, 4,request=request)
messages = p.page(page)
return render(request, "usercenter-message.html", {
"messages":messages,
})
from django.shortcuts import render_to_response
def pag_not_found(request):
# 全局404处理函数
response = render_to_response('404.html', {})
response.status_code = 404
return response
def page_error(request):
# 全局500处理函数
from django.shortcuts import render_to_response
response = render_to_response('500.html', {})
response.status_code = 500
return response
| 34.176322 | 108 | 0.627874 | 12,895 | 0.886742 | 0 | 0 | 0 | 0 | 0 | 0 | 3,983 | 0.273896 |
b2dc609945782154da069152ac5405f35cce2d2b | 25 | py | Python | SWSIdentity/Controllers/__init__.py | vanzhiganov/identity | 90936482cc23251ba06121658e6a0a9251e30b3b | [
"Apache-2.0"
] | 1 | 2018-03-26T21:18:52.000Z | 2018-03-26T21:18:52.000Z | SWSIdentity/Controllers/__init__.py | vanzhiganov/identity | 90936482cc23251ba06121658e6a0a9251e30b3b | [
"Apache-2.0"
] | null | null | null | SWSIdentity/Controllers/__init__.py | vanzhiganov/identity | 90936482cc23251ba06121658e6a0a9251e30b3b | [
"Apache-2.0"
] | null | null | null | __all__ = [
'Users'
] | 8.333333 | 11 | 0.48 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 | 0.28 |
b2dc6929b15ae89ed88d8e3ef2cd52728dbd110e | 5,794 | py | Python | gui/Ui_sales_transaction.py | kim-song/kimsong-apriori | 0f2a4a2b749989ad1305da3836e7404c09482534 | [
"MIT"
] | 2 | 2020-07-17T09:36:56.000Z | 2020-12-11T11:36:11.000Z | gui/Ui_sales_transaction.py | kimsongsao/kimsong-apriori | 0f2a4a2b749989ad1305da3836e7404c09482534 | [
"MIT"
] | null | null | null | gui/Ui_sales_transaction.py | kimsongsao/kimsong-apriori | 0f2a4a2b749989ad1305da3836e7404c09482534 | [
"MIT"
] | 1 | 2020-07-17T09:23:15.000Z | 2020-07-17T09:23:15.000Z | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'd:\MITE12\ksapriori\gui\sales_transaction.ui'
#
# Created by: PyQt5 UI code generator 5.12.3
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_FormSalesTransaction(object):
def setupUi(self, FormSalesTransaction):
FormSalesTransaction.setObjectName("FormSalesTransaction")
FormSalesTransaction.resize(989, 466)
self.groupBox = QtWidgets.QGroupBox(FormSalesTransaction)
self.groupBox.setGeometry(QtCore.QRect(10, 10, 181, 451))
font = QtGui.QFont()
font.setFamily("Segoe UI")
font.setPointSize(9)
self.groupBox.setFont(font)
self.groupBox.setObjectName("groupBox")
self.pushButton = QtWidgets.QPushButton(self.groupBox)
self.pushButton.setGeometry(QtCore.QRect(80, 190, 75, 23))
self.pushButton.setObjectName("pushButton")
self.widget = QtWidgets.QWidget(self.groupBox)
self.widget.setGeometry(QtCore.QRect(10, 31, 151, 140))
self.widget.setObjectName("widget")
self.verticalLayout = QtWidgets.QVBoxLayout(self.widget)
self.verticalLayout.setContentsMargins(0, 0, 0, 0)
self.verticalLayout.setObjectName("verticalLayout")
self.label = QtWidgets.QLabel(self.widget)
font = QtGui.QFont()
font.setFamily("Segoe UI")
font.setPointSize(9)
self.label.setFont(font)
self.label.setObjectName("label")
self.verticalLayout.addWidget(self.label)
self.fromDate = QtWidgets.QDateEdit(self.widget)
font = QtGui.QFont()
font.setFamily("Segoe UI")
font.setPointSize(9)
self.fromDate.setFont(font)
self.fromDate.setObjectName("fromDate")
self.verticalLayout.addWidget(self.fromDate)
self.label_2 = QtWidgets.QLabel(self.widget)
font = QtGui.QFont()
font.setFamily("Segoe UI")
font.setPointSize(9)
self.label_2.setFont(font)
self.label_2.setObjectName("label_2")
self.verticalLayout.addWidget(self.label_2)
self.toDate = QtWidgets.QDateEdit(self.widget)
font = QtGui.QFont()
font.setFamily("Segoe UI")
font.setPointSize(9)
self.toDate.setFont(font)
self.toDate.setObjectName("toDate")
self.verticalLayout.addWidget(self.toDate)
self.label_3 = QtWidgets.QLabel(self.widget)
font = QtGui.QFont()
font.setFamily("Segoe UI")
font.setPointSize(9)
self.label_3.setFont(font)
self.label_3.setObjectName("label_3")
self.verticalLayout.addWidget(self.label_3)
self.lineEdit = QtWidgets.QLineEdit(self.widget)
self.lineEdit.setObjectName("lineEdit")
self.verticalLayout.addWidget(self.lineEdit)
self.tableWidget = QtWidgets.QTableWidget(FormSalesTransaction)
self.tableWidget.setGeometry(QtCore.QRect(200, 20, 781, 441))
self.tableWidget.setObjectName("tableWidget")
self.tableWidget.setColumnCount(8)
self.tableWidget.setRowCount(0)
item = QtWidgets.QTableWidgetItem()
self.tableWidget.setHorizontalHeaderItem(0, item)
item = QtWidgets.QTableWidgetItem()
self.tableWidget.setHorizontalHeaderItem(1, item)
item = QtWidgets.QTableWidgetItem()
self.tableWidget.setHorizontalHeaderItem(2, item)
item = QtWidgets.QTableWidgetItem()
self.tableWidget.setHorizontalHeaderItem(3, item)
item = QtWidgets.QTableWidgetItem()
self.tableWidget.setHorizontalHeaderItem(4, item)
item = QtWidgets.QTableWidgetItem()
self.tableWidget.setHorizontalHeaderItem(5, item)
item = QtWidgets.QTableWidgetItem()
self.tableWidget.setHorizontalHeaderItem(6, item)
item = QtWidgets.QTableWidgetItem()
self.tableWidget.setHorizontalHeaderItem(7, item)
self.retranslateUi(FormSalesTransaction)
QtCore.QMetaObject.connectSlotsByName(FormSalesTransaction)
def retranslateUi(self, FormSalesTransaction):
_translate = QtCore.QCoreApplication.translate
FormSalesTransaction.setWindowTitle(_translate("FormSalesTransaction", "Sales Transaction"))
self.groupBox.setTitle(_translate("FormSalesTransaction", "Filters"))
self.pushButton.setText(_translate("FormSalesTransaction", "Search"))
self.label.setText(_translate("FormSalesTransaction", "From Date"))
self.label_2.setText(_translate("FormSalesTransaction", "To Date"))
self.label_3.setText(_translate("FormSalesTransaction", "Item Code"))
item = self.tableWidget.horizontalHeaderItem(0)
item.setText(_translate("FormSalesTransaction", "Document No"))
item = self.tableWidget.horizontalHeaderItem(1)
item.setText(_translate("FormSalesTransaction", "Posting Date"))
item = self.tableWidget.horizontalHeaderItem(2)
item.setText(_translate("FormSalesTransaction", "Item Code"))
item = self.tableWidget.horizontalHeaderItem(3)
item.setText(_translate("FormSalesTransaction", "Item Label"))
item = self.tableWidget.horizontalHeaderItem(4)
item.setText(_translate("FormSalesTransaction", "Description"))
item = self.tableWidget.horizontalHeaderItem(5)
item.setText(_translate("FormSalesTransaction", "Quantity"))
item = self.tableWidget.horizontalHeaderItem(6)
item.setText(_translate("FormSalesTransaction", "Price"))
item = self.tableWidget.horizontalHeaderItem(7)
item.setText(_translate("FormSalesTransaction", "Amount"))
| 48.283333 | 101 | 0.681049 | 5,504 | 0.949948 | 0 | 0 | 0 | 0 | 0 | 0 | 885 | 0.152744 |
b2dddf20dfadcb66e5efbea5b82b5fce448e57cf | 727 | py | Python | setup.py | dboddie/Beeware-Hello-VOC | a22ffc58121ead7acac850c6edb60576bdb66993 | [
"MIT"
] | 35 | 2017-09-21T03:45:33.000Z | 2021-11-18T01:18:13.000Z | setup.py | dboddie/Beeware-Hello-VOC | a22ffc58121ead7acac850c6edb60576bdb66993 | [
"MIT"
] | 6 | 2017-09-25T12:34:31.000Z | 2021-07-05T03:40:19.000Z | setup.py | dboddie/Beeware-Hello-VOC | a22ffc58121ead7acac850c6edb60576bdb66993 | [
"MIT"
] | 10 | 2018-02-03T12:51:31.000Z | 2022-02-08T18:54:48.000Z | #!/usr/bin/env python
from setuptools import setup, find_packages
setup(
name='{{ cookiecutter.app_name }}',
version='0.0.1',
description='{{ cookiecutter.description }}',
author='{{ cookiecutter.author }}',
author_email='{{ cookiecutter.author_email }}',
license='{{ cookiecutter.license }}',
packages=find_packages(
exclude=['docs', 'tests', 'android']
),
classifiers=[
'Development Status :: 1 - Planning',
'License :: OSI Approved :: {{ cookiecutter.license }}',
],
install_requires=[
],
options={
'app': {
'formal_name': '{{ cookiecutter.formal_name }}',
'bundle': '{{ cookiecutter.bundle }}'
},
}
)
| 25.964286 | 64 | 0.569464 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 375 | 0.515818 |
b2de089e75f188f3482c29fc33bcbb7a91997599 | 27,975 | py | Python | src/app.py | chunyuyuan/NEWS_2019_network-master | 0eec84b383156c82fbd64d900dce578700575d99 | [
"MIT"
] | null | null | null | src/app.py | chunyuyuan/NEWS_2019_network-master | 0eec84b383156c82fbd64d900dce578700575d99 | [
"MIT"
] | null | null | null | src/app.py | chunyuyuan/NEWS_2019_network-master | 0eec84b383156c82fbd64d900dce578700575d99 | [
"MIT"
] | null | null | null | from flask import Flask, request, render_template, send_file, Response
import io
import base64
import csv
import json
import time
from collections import OrderedDict
import numpy
import pandas as pd
from numpy import genfromtxt
from flask import jsonify
from flask_cors import CORS
from LoadingNetwork import EchoWebSocket
import shutil
import gc
from tornado.wsgi import WSGIContainer
from tornado.web import Application, FallbackHandler
from tornado.websocket import WebSocketHandler
from tornado.ioloop import IOLoop
app = Flask('flasknado')
#app = Flask(__name__)
app.debug = True
CORS(app)
##initial netwrok csv data############################
rawdata = open('NetworkWithDistance.txt')
with open('NetworkWithDistance.txt') as f:
rawdata = f.readlines()
# you may also want to remove whitespace characters like `\n` at the end
# of each line
rawdata = [x.strip() for x in rawdata]
my_data = genfromtxt('networkwithdist.csv', delimiter=',')
# my_data=numpy.delete(my_data,(0),axis=0)
header = ['id', 'id_to', 'lon', 'lat', 'basinid']
frame = pd.DataFrame(my_data, columns=header)
data = []
MY_GLOBAL = []
with open('tempcsv.csv') as f:
for line in f:
temp = line.strip().split(',')
data.append(temp)
#############################
data1 = []
with open('MyFile1.txt') as f:
r = 0
for line in f:
if(r > 0):
data2 = []
# print(line)
temp = line.split("\",")
data2.append(temp[0][1:])
temp1 = temp[1].split(",[")
data2.append(temp1[0])
data2.append(temp1[1][:-2])
data1.append(data2)
r += 1
header = ['celllist', 'cellid', 'cellto']
frame_celllist = pd.DataFrame(data1, columns=header)
frame_celllist = frame_celllist.drop_duplicates()
del data1[:]
##################
data_c = []
with open('powerplant_cell_loc.csv') as f:
r = 0
for line in f:
if(r > 0):
data_cc = line.split(",")
data_c.append(data_cc)
# print(line)
r += 1
header = ['cellid', 'loc']
frame_cell = pd.DataFrame(data_c, columns=header)
frame_cell = frame_cell.drop_duplicates()
del data_c[:]
########################################################
import os
import sys
from SimpleHTTPServer import SimpleHTTPRequestHandler
import BaseHTTPServer
# class MyHTTPRequestHandler(SimpleHTTPRequestHandler):
# def translate_path(self,path):
# path = SimpleHTTPRequestHandler.translate_path(self,path)
# if os.path.isdir(path):
# for base in "index", "default":
# for ext in ".html", ".htm", ".txt":
# index = path + "/" + base + ext
# if os.path.exists(index):
# return index
# return path
# def test(HandlerClass = MyHTTPRequestHandler,
# ServerClass = BaseHTTPServer.HTTPServer):
# BaseHTTPServer.test(HandlerClass, ServerClass)
##################travesal network upstream############
'''def find_upstream(value):
gc.collect()
ii=0
li = []
temp=[]
a=frame.ix[int(value)]
temp.append(a)
#print(MY_GLOBAL)
MY_GLOBAL[:]=[]
#x=data[int(value)]
#x=frame[frame['id']==a['id_to']]
#print x
i=0
z=0
zz=0
while zz<len(temp):
item=temp[zz]
zz+=1
##print(z,len(temp))
## item=temp.pop()
## print item
#x=frame[frame['id_to']==item['id']]
x=data[int(float(item['id']))]
#print x
i=1
while i<len(x) :
# d = OrderedDict()
# xx=x.loc[x.index[i]]
xx=frame.ix[int(float(x[i]))]
# d['type'] = 'Feature'
# d['geometry'] = {
# 'type': 'MultiLineString',
# 'coordinates': [[[float(xx['lon']),float(xx['lat'])],[float(item['lon']), float(item['lat'])]]]
# }
# d['properties'] = { "id":int(xx['id']),"id_to":int(xx['id_to']),"lon": float(xx['lon']),"lat": float(xx['lat'])
# }
# li.append(d)
i+=1
# ii+=1
##if ii%1000==0:
## print ii
temp.append(xx)
print(len(temp))
while z<len(temp):
item=temp[z]
z+=1
##print(z,len(temp))
## item=temp.pop()
## print item
#x=frame[frame['id_to']==item['id']]
x=data[int(float(item['id']))]
#print x
i=1
while i<len(x) :
d = OrderedDict()
#xx=x.loc[x.index[i]]
xx=frame.ix[int(float(x[i]))]
d['type'] = 'Feature'
d['geometry'] = {
'type': 'MultiLineString',
'coordinates': [[[float(xx['lon']),float(xx['lat'])],[float(item['lon']), float(item['lat'])]]]
}
d['properties'] = { "id":int(xx['id']),"id_to":int(xx['id_to']),"lon": float(xx['lon']),"lat": float(xx['lat'])
}
li.append(d)
d = OrderedDict()
#xx=x.loc[x.index[i]]
# xx=frame.ix[int(float(x[i]))]
i+=1
ii+=1
if ii%1000==0 or (ii+1)/len(temp)==1:
MY_GLOBAL.append((int)((ii+1)/(len(temp)* 1.0)*100))
## print(checkInt,ii,len(temp))
## print ii
# temp.append(xx)
#d = OrderedDict()
#d['type'] = 'FeatureCollection'
#d['features'] = li
#print li
print(ii)
return li,200'''
def find_upstream(value):
gc.collect()
ii = 0
li = []
temp = []
a = frame.ix[int(value)]
temp.append(int(value))
MY_GLOBAL[:] = []
i = 0
z = 0
zz = 0
jstring = ''
while z < len(temp):
item = frame.ix[temp[z]]
z += 1
x = data[int(float(item['id']))]
#print x
i = 1
while i < len(x):
xx = frame.ix[int(float(x[i]))]
jstring += '{"type": "Feature","geometry": { "type": "MultiLineString", "coordinates": [[[' + str(float(xx['lon'])) + ',' + str(float(xx['lat'])) + '],[' + str(float(item['lon'])) + ',' + str(
float(item['lat'])) + ']]]},"properties": {"id_to": ' + str(int(xx['id_to'])) + ',"id":' + str(int(xx['id'])) + ',"lat":' + str(float(xx['lat'])) + ',"lon": ' + str(float(xx['lon'])) + '}},'
ii += 1
temp.append(int(float(x[i])))
i += 1
if ii % 1000 == 0:
# print(ii)
MY_GLOBAL.append((int)((ii + 1) / (200000 * 1.0) * 100))
# print(checkInt,ii,len(temp))
## print ii
# temp.append(xx)
#d = OrderedDict()
#d['type'] = 'FeatureCollection'
#d['features'] = li
#print li
# print(jstring)
MY_GLOBAL.append(100)
return jstring[:-1], 200
##################travesal network downstream############
def find_downstream(value, sourceid):
    """Traverse the network downstream from node id *value*.

    Follows ``id_to`` links through the module-level ``frame`` table.
    The walk halts after dequeuing a row whose ``id_to`` equals
    *sourceid* (that row's own children are still serialised, matching
    the original control flow).  Each traversed edge becomes a GeoJSON
    ``Feature`` fragment.

    Returns ``(json_fragment, 200)``; the fragment is a comma separated
    run of Feature objects with the trailing comma removed.
    """

    def _edge(src, dst):
        # One GeoJSON MultiLineString Feature for the edge src -> dst.
        return ('{"type": "Feature","geometry": { "type": "MultiLineString", '
                '"coordinates": [[[%s,%s],[%s,%s]]]},"properties": {"id_to": %s,'
                '"id":%s,"lat":%s,"lon": %s}},' % (
                    float(src['lon']), float(src['lat']),
                    float(dst['lon']), float(dst['lat']),
                    int(src['id_to']), int(src['id']),
                    float(src['lat']), float(src['lon'])))

    queue = [frame.ix[int(value)]]
    keep_going = True
    pos = 0
    pieces = []
    while pos < len(queue) and keep_going:
        row = queue[pos]
        pos += 1
        if row['id_to'] == sourceid:
            # Stop node reached: finish this row, then exit the loop.
            keep_going = False
        matches = frame.ix[frame['id'] == row['id_to']]
        for k in range(len(matches)):
            child = matches.ix[matches.index[k]]
            pieces.append(_edge(child, row))
            queue.append(child)
    return ''.join(pieces)[:-1], 200
##################travesal network downstream############
def find_downstream1(value):
    """Traverse the network downstream from node id *value*, unbounded.

    Identical to :func:`find_downstream` except there is no stop node:
    the walk continues until no further ``id_to`` links are found.
    (The original kept a ``check`` flag that was never cleared, so it
    had no effect; it is omitted here.)

    Returns ``(json_fragment, 200)``; the fragment is a comma separated
    run of GeoJSON Feature objects with the trailing comma removed.
    """

    def _edge(src, dst):
        # One GeoJSON MultiLineString Feature for the edge src -> dst.
        return ('{"type": "Feature","geometry": { "type": "MultiLineString", '
                '"coordinates": [[[%s,%s],[%s,%s]]]},"properties": {"id_to": %s,'
                '"id":%s,"lat":%s,"lon": %s}},' % (
                    float(src['lon']), float(src['lat']),
                    float(dst['lon']), float(dst['lat']),
                    int(src['id_to']), int(src['id']),
                    float(src['lat']), float(src['lon'])))

    queue = [frame.ix[int(value)]]
    pos = 0
    pieces = []
    while pos < len(queue):
        row = queue[pos]
        pos += 1
        matches = frame.ix[frame['id'] == row['id_to']]
        for k in range(len(matches)):
            child = matches.ix[matches.index[k]]
            pieces.append(_edge(child, row))
            queue.append(child)
    return ''.join(pieces)[:-1], 200
#######################pp upstream#######################
def find_upstream_pp(cellid):
    """pp-mode upstream walk: serialise the cell path for *cellid*.

    Fetches the pre-computed cell list for *cellid* from the
    module-level ``frame_celllist`` table and, for each cell on the
    path, emits one GeoJSON ``Feature`` per ``cellto`` link, using
    coordinates looked up in the module-level ``frame_cell`` table.

    ``loc`` values are stored as ``"<lon>_<lat...>"``; the second part
    carries one trailing character that is stripped before use
    (presumably a newline -- TODO confirm).

    Returns ``(json_fragment, 200)`` with the trailing comma removed.
    """
    gc.collect()
    path_rows = frame_celllist[frame_celllist['cellid']
                               == cellid]['celllist'].tolist()
    # The list is stored as one bracketed, comma separated string.
    cell_path = path_rows[0][1:-1].split(",")
    pieces = []
    for raw_id in cell_path:
        cur = raw_id.strip()
        cur_loc = frame_cell[frame_cell['cellid'] == cur]['loc'].tolist()
        cur_parts = cur_loc[0].split("_")
        targets = frame_celllist[frame_celllist['cellid']
                                 == cur]['cellto'].tolist()
        print(targets)  # original debug output, kept for parity
        targets = targets[0].split(",")
        # A single "none" entry means this cell has no outgoing links.
        if len(targets) == 1 and targets[0][:-1] == "none":
            continue
        for pos, raw_target in enumerate(targets):
            # Only the final entry carries the trailing character that
            # must be stripped before the lookup.
            if pos == len(targets) - 1:
                key = raw_target[:-1]
            else:
                key = raw_target
            next_loc = frame_cell[frame_cell['cellid'] == key]['loc'].tolist()
            next_parts = next_loc[0].split("_")
            pieces.append(
                '{"type": "Feature","geometry": { "type": "MultiLineString", '
                '"coordinates": [[[%s,%s],[%s,%s]]]},"properties": {"lat":%s,'
                '"lon": %s}},' % (
                    cur_parts[0], cur_parts[1][:-1],
                    next_parts[0], next_parts[1][:-1],
                    cur_parts[1][:-1], cur_parts[0]))
    return ''.join(pieces)[:-1], 200
#######################pp downstream#######################
def find_downstream_pp(cellid, dcellid):
    """pp-mode downstream walk along *cellid*'s stored path.

    Looks up the pre-computed cell path (``celllist``) for *cellid* in
    the module-level ``frame_celllist`` table, seeks backwards from the
    tail until *dcellid* is found, then walks towards the head of the
    list, emitting GeoJSON ``Feature`` fragments for ``cellto`` links
    (coordinates come from the module-level ``frame_cell`` table).

    Returns ``(json_fragment, 200)``; the trailing comma is stripped
    only when the fragment is non-empty.
    """
    gc.collect()
    # header=['celllist','cellid','cellto']
    # header=['cellid','loc']
    print(cellid, dcellid)
    templi = frame_celllist[frame_celllist['cellid']
                            == cellid]['celllist'].tolist()
    # celllist is stored as one bracketed, comma separated string.
    templist = templi[0][1:-1].split(",")
    z = len(templist) - 1
    jstring = ''
    # Phase 1: scan backwards until the requested downstream cell is found
    # (or z reaches 0 without a match).
    while z > 0:
        print(templist[z].strip())
        curid = templist[z].strip()
        if curid != str(dcellid):
            z -= 1
        else:
            print(z)
            break
    # Phase 2: walk from that position towards the head of the list.
    while z > 0:
        curid = templist[z].strip()
        # print(curid,templist)
        curidloc = frame_cell[frame_cell['cellid'] == curid]['loc'].tolist()
        # loc is stored as "<lon>_<lat...>"; the lat half carries one
        # trailing character that is stripped with [:-1] below.
        curidloc1 = curidloc[0].split("_")
        # print(curidloc1[0],curidloc1[1][:-1],curidloc[0])
        temp = frame_celllist[frame_celllist['cellid']
                              == templist[z].strip()]['cellto'].tolist()
        z -= 1
        print(temp)
        temp = temp[0].split(",")
        if len(temp) == 1 and temp[0][:-1] == "none":
            # print(temp[0])
            # NOTE(review): z was already decremented above, so this second
            # decrement skips one extra cell on the "none" path -- confirm
            # whether that is intentional.
            z -= 1
            continue
        else:
            zz = 0
            aaaa = 'false'
            while zz < len(temp):
                # print(temp[zz],temp)
                x = temp[zz]
                zz += 1
                if zz == len(temp):
                    # Last entry: strip its trailing character before lookup.
                    if x[:-1] == curid:
                        aaaa = 'true'
                    nextloc = frame_cell[frame_cell['cellid']
                                         == x[:-1]]['loc'].tolist()
                else:
                    if x == curid:
                        aaaa = 'true'
                    nextloc = frame_cell[frame_cell['cellid']
                                         == x]['loc'].tolist()
                # NOTE(review): once aaaa flips to 'true' it is never reset,
                # so every later entry of temp is serialised as well --
                # confirm whether only the matching link should be emitted.
                if aaaa == 'true':
                    nextloc1 = nextloc[0].split("_")
                    # print(nextloc1[0],nextloc1[1][:-1],nextloc1)
                    jstring += '{"type": "Feature","geometry": { "type": "MultiLineString", "coordinates": [[[' + str(curidloc1[0]) + ',' + str(curidloc1[1][:-1]) + '],[' + str(
                        nextloc1[0]) + ',' + str(nextloc1[1][:-1]) + ']]]},"properties": {"lat":' + str(curidloc1[1][:-1]) + ',"lon": ' + str(curidloc1[0]) + '}},'
                    # jstring+='{"type": "Feature","geometry": { "type": "MultiLineString", "coordinates": [[['+str(float(xx['lon']))+','+str(float(xx['lat']))+'],['+str(float(item['lon']))+','+str(float(item['lat']))+']]]},"properties": {"id_to": '+str(int(xx['id_to']))+',"id":'+str(int(xx['id']))+',"lat":'+str(float(xx['lat']))+',"lon": '+str(float(xx['lon']))+'}},';
    print(jstring)
    if len(jstring) > 0:
        return jstring[:-1], 200
    else:
        return jstring, 200
@app.route("/", methods=['GET', 'POST'])
def index():
    """Serve the single-page front end (templates/test1.html)."""
    # Keep the original request logging so stdout output is unchanged.
    print(request)
    page = render_template('test1.html')
    return page
@app.route("/api/", methods=['GET', 'POST'])
def update():
    """Main HTTP API endpoint: dispatch a traversal or picture request.

    Dispatches on flags posted in the form:

    * ``pp == 'yes'`` and ``upstream == 'yes'``: pp-mode upstream walk
      via :func:`find_upstream_pp`.
    * ``downfirst == 'no'``: plain upstream (:func:`find_upstream`) when
      ``source == 'yes'``, or source-bounded downstream
      (:func:`find_downstream`) when ``dist == 'yes'``.
    * ``downfirst == 'yes'`` and ``dist == 'yes'``: unbounded downstream
      via :func:`find_downstream1`.
    * ``pic == 'yes'``: picture generation via ``LoadingNetwork.main``.

    Returns a ``(body, status)`` tuple for Flask.
    """
    print(request.method)
    if request.method == "POST":
        source = request.form["source"]
        dist = request.form["dist"]
        pic = request.form["pic"]
        downfirst = request.form["downfirst"]
        pp = request.form["pp"]
        print(pp, source, dist, downfirst, pic)
        if(pp == 'yes'):
            upstream = request.form["upstream"]
            if(upstream == 'yes'):
                ucellid = request.form["ucellid"]
                # NOTE: 're' here shadows the regex module name locally.
                re, ii = find_upstream_pp(ucellid)
                # print(re)
                return json.dumps(re), ii
            # if(upstream=='no'):
            ###    ucellid = request.form["ucellid"]
            # dcellid = request.form["dcellid"]
            # re,ii=find_downstream_pp(ucellid,dcellid)
            # print(re)
        # if(pp=='no'):
        # These four fields were already read above; the re-read is
        # redundant but harmless (kept unchanged).
        source = request.form["source"]
        dist = request.form["dist"]
        pic = request.form["pic"]
        downfirst = request.form["downfirst"]
        #print dist
        if(downfirst == 'no'):
            if(source == 'yes'):
                sourceid = request.form["sourceid"]
                #print sourceid
                import time
                start = time. time()
                re, ii = find_upstream(sourceid)
                end = time. time()
                #print ii,(end-start)
                # print(re)
                # print(MY_GLOBAL)
                return json.dumps(re), ii
            if(dist == 'yes'):
                distid = request.form["distid"]
                sourceid = request.form["sourceid"]
                # Reset the shared progress list before the walk starts.
                MY_GLOBAL[:] = []
                #print distid,sourceid
                re, ii = find_downstream(int(distid), int(sourceid))
                print (re)
                gc.collect()
                MY_GLOBAL.append(100)
                return json.dumps(re, sort_keys=False, indent=4), ii
        if(downfirst == 'yes'):
            if(dist == 'yes'):
                distid = request.form["distid"]
                sourceid = request.form["sourceid"]
                MY_GLOBAL[:] = []
                #print distid,sourceid
                re, ii = find_downstream1(int(distid))
                print (re)
                gc.collect()
                MY_GLOBAL.append(100)
                return json.dumps(re, sort_keys=False, indent=4), ii
        if(pic == 'yes'):
            #print request.form
            MY_GLOBAL[:] = []
            start1 = request.form["dist_lat"]
            start2 = request.form["dist_lon"]
            goal1 = request.form["source_lat"]
            goal2 = request.form["source_lon"]
            fromdate = request.form["from"]
            todate = request.form["to"]
            import time
            before = time.time()
            output, str1, str2, str3 = LoadingNetwork.main(
                [start1, start2], [goal1, goal2], fromdate, todate, rawdata)
            #print str1,str2,str3
            after = time.time()
            print ("time,", after - before)
            if(isinstance(output, str)):
                # A plain string result is an error/status message.
                return output, 201
            else:
                # gc.collect()
                #print base64.b64encode(output.getvalue())
                # NOTE(review): on Python 3 b64encode returns bytes, so
                # bytes + str below would raise TypeError; this path looks
                # Python-2 only -- confirm.
                return base64.b64encode(
                    output.getvalue()) + "***" + str1 + "***" + str2 + "***" + str3, 200
class WebSocket(WebSocketHandler):
    """Tornado websocket endpoint that streams traversal results.

    The client sends one '&'-separated query string.  Depending on the
    flags inside it, the handler walks the network upstream and/or
    downstream over the module-level ``frame``/``data`` tables and
    streams GeoJSON Feature fragments back in chunks.  Each chunk is
    followed by a '~<fraction>' progress message; '~1' marks completion.
    """

    def on_message(self, message):
        """Parse a traversal request and stream the results back.

        *message* looks like ``"source=...&value=...&dist=...&..."``;
        the first and last characters are stripped before splitting
        (presumably surrounding quote characters -- confirm against the
        client code).
        """
        # BUG FIX: the original code printed m[0]..m[6] *before* m was
        # assigned (the split statement only appeared afterwards), which
        # raised NameError on every incoming message.  Parse first.
        m = message[1:-1].split("&")
        print("Received message: " + m[0])
        print("Received message: " + m[1])
        print("Received message: " + m[2])
        print("Received message: " + m[3])
        print("Received message: " + m[4])
        print("Received message: " + m[5])
        print("Received message: " + m[6])
        source = m[0].split("=")[1]
        value = m[1].split("=")[1]
        dist = m[2].split("=")[1]
        value1 = m[3].split("=")[1]
        pic = m[4].split("=")[1]
        downfirst = m[5].split("=")[1]
        # NOTE(review): unlike the fields above this keeps the whole
        # ['key', 'value'] pair (no trailing [1]); it is only printed
        # below, so the original behaviour is preserved -- confirm intent.
        pp = m[6].split("=")
        print(pp, source, dist, downfirst, pic, value, value1)
        # ---------------------- upstream traversal ----------------------
        if(downfirst == 'no'):
            if(source == 'yes'):
                gc.collect()
                ii = 0          # edges serialised so far
                temp = []       # BFS worklist of node ids
                a = frame.ix[int(value)]  # fail fast if start id unknown
                temp.append(int(value))
                z = 0           # read cursor into temp
                zz = 0          # progress percentage, in 5% steps
                jstring = ''
                while z < len(temp):
                    item = frame.ix[temp[z]]
                    z += 1
                    x = data[int(float(item['id']))]
                    # Entry 0 of the adjacency record is skipped.
                    i = 1
                    while i < len(x):
                        xx = frame.ix[int(float(x[i]))]
                        jstring += '{"type": "Feature","geometry": { "type": "MultiLineString", "coordinates": [[[' + str(float(xx['lon'])) + ',' + str(float(xx['lat'])) + '],[' + str(float(item['lon'])) + ',' + str(
                            float(item['lat'])) + ']]]},"properties": {"id_to": ' + str(int(xx['id_to'])) + ',"id":' + str(int(xx['id'])) + ',"lat":' + str(float(xx['lat'])) + ',"lon": ' + str(float(xx['lon'])) + '}},'
                        ii += 1
                        temp.append(int(float(x[i])))
                        i += 1
                    # Flush in roughly 1.5 MB chunks so the browser can
                    # render progressively instead of waiting for the end.
                    if(len(jstring) > 1500000):
                        zz += 5
                        self.write_message(jstring[:-1])
                        self.write_message('~' + str(zz * 1.0 / 100))
                        jstring = ''
                self.write_message(jstring[:-1])
                self.write_message('~1')
            # --------------------- downstream traversal ---------------------
            if(dist == 'yes'):
                ii = 0
                temp = []
                jstring = ''
                a = frame.ix[int(value1)]
                temp.append(a)
                check = True
                z = 0
                zz = 0
            # NOTE(review): in the original code this loop and the two
            # writes after it sit *outside* the if(dist == 'yes') block,
            # so they also run after the upstream branch (there the loop
            # is a no-op because z == len(temp), but the leftover jstring
            # fragment is sent a second time).  Preserved unchanged --
            # confirm whether they should be indented into the if block.
            while z < len(temp) and check:
                item = temp[z]
                z += 1
                if(item['id_to'] == int(value)):
                    # Source node reached: finish this row, then stop.
                    check = False
                x = frame.ix[frame['id'] == item['id_to']]
                i = 0
                while i < len(x):
                    xx = x.ix[x.index[i]]
                    jstring += '{"type": "Feature","geometry": { "type": "MultiLineString", "coordinates": [[[' + str(float(xx['lon'])) + ',' + str(float(xx['lat'])) + '],[' + str(float(item['lon'])) + ',' + str(
                        float(item['lat'])) + ']]]},"properties": {"id_to": ' + str(int(xx['id_to'])) + ',"id":' + str(int(xx['id'])) + ',"lat":' + str(float(xx['lat'])) + ',"lon": ' + str(float(xx['lon'])) + '}},'
                    i += 1
                    ii += 1
                    temp.append(xx)
                # NOTE(review): this branch flushes at ~150 KB while the
                # upstream branch uses ~1.5 MB -- confirm the difference
                # is intentional.
                if(len(jstring) > 150000):
                    zz += 5
                    self.write_message(jstring[:-1])
                    self.write_message('~' + str(zz * 1.0 / 100))
                    jstring = ''
            self.write_message(jstring[:-1])
            self.write_message('~1')
        # --------------------- downstream-first mode ---------------------
        if(downfirst == 'yes'):
            if(dist == 'yes'):
                ii = 0
                temp = []
                jstring = ''
                a = frame.ix[int(value1)]
                temp.append(a)
                z = 0
                zz = 0
                # Unbounded downstream walk (no stop node).
                while z < len(temp):
                    item = temp[z]
                    z += 1
                    x = frame.ix[frame['id'] == item['id_to']]
                    i = 0
                    while i < len(x):
                        xx = x.ix[x.index[i]]
                        jstring += '{"type": "Feature","geometry": { "type": "MultiLineString", "coordinates": [[[' + str(float(xx['lon'])) + ',' + str(float(xx['lat'])) + '],[' + str(float(item['lon'])) + ',' + str(
                            float(item['lat'])) + ']]]},"properties": {"id_to": ' + str(int(xx['id_to'])) + ',"id":' + str(int(xx['id'])) + ',"lat":' + str(float(xx['lat'])) + ',"lon": ' + str(float(xx['lon'])) + '}},'
                        i += 1
                        ii += 1
                        temp.append(xx)
                    if(len(jstring) > 150000):
                        zz += 5
                        self.write_message(jstring[:-1])
                        self.write_message('~' + str(zz * 1.0 / 100))
                        jstring = ''
                self.write_message(jstring[:-1])
                self.write_message('~1')
        # ------------------------ picture mode ------------------------
        if(pic == 'yes'):
            # Coordinates and the date range ride along in fields 7..12.
            start1 = m[7].split("=")[1]
            start2 = m[8].split("=")[1]
            goal1 = m[9].split("=")[1]
            goal2 = m[10].split("=")[1]
            fromdate = m[11].split("=")[1]
            todate = m[12].split("=")[1]
            print(start1, start2, goal1, goal2, fromdate, todate)
            import time
            before = time.time()
            output, str1, str2, str3 = LoadingNetwork.main(
                [start1, start2], [goal1, goal2], fromdate, todate, rawdata)
            after = time.time()
            print("time,", after - before)
            # NOTE(review): the original never sends the rendered picture
            # back over the websocket (the reply code was commented out);
            # that behaviour is preserved unchanged.
#
if __name__ == "__main__":
    # Bridge the Flask app into Tornado so plain HTTP and websockets can
    # be served from the same process and port.
    container = WSGIContainer(app)
    server = Application([
        (r'/websocket/', WebSocket),
        (r'/we/', EchoWebSocket),
        # Anything that is not a websocket route falls through to Flask.
        (r'.*', FallbackHandler, dict(fallback=container))
    ])
    server.listen(5000)
    # Blocks forever, serving requests on port 5000.
    IOLoop.instance().start()
    # test()
| 16.913543 | 360 | 0.43378 | 8,450 | 0.302055 | 0 | 0 | 3,544 | 0.126685 | 0 | 0 | 10,870 | 0.388561 |
b2df22a6002e061c7249b53a9c05a87dc4e272cf | 697 | py | Python | src/oca/models/__init__.py | goubertbrent/oca-backend | b9f59cc02568aecb55d4b54aec05245790ea25fd | [
"Apache-2.0"
] | null | null | null | src/oca/models/__init__.py | goubertbrent/oca-backend | b9f59cc02568aecb55d4b54aec05245790ea25fd | [
"Apache-2.0"
] | null | null | null | src/oca/models/__init__.py | goubertbrent/oca-backend | b9f59cc02568aecb55d4b54aec05245790ea25fd | [
"Apache-2.0"
] | null | null | null | # coding: utf-8
# flake8: noqa
"""
Our City App
Our City App internal apis # noqa: E501
The version of the OpenAPI document: 0.0.1
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
# import models into model package
from oca.models.home_screen import HomeScreen
from oca.models.home_screen_bottom_navigation import HomeScreenBottomNavigation
from oca.models.home_screen_bottom_sheet import HomeScreenBottomSheet
from oca.models.home_screen_bottom_sheet_header import HomeScreenBottomSheetHeader
from oca.models.home_screen_content import HomeScreenContent
from oca.models.home_screen_navigation_button import HomeScreenNavigationButton
| 30.304348 | 82 | 0.826399 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 230 | 0.329986 |
b2df334cefd6c85d1832ef9e9b56545aefa460f8 | 428 | py | Python | calibrate.py | jamesbowman/k40-python | 02054fe7f0bf727910c58d634aae7ba4799a2e2c | [
"BSD-3-Clause"
] | 2 | 2018-07-11T00:36:34.000Z | 2018-09-03T06:58:29.000Z | calibrate.py | jamesbowman/k40-python | 02054fe7f0bf727910c58d634aae7ba4799a2e2c | [
"BSD-3-Clause"
] | null | null | null | calibrate.py | jamesbowman/k40-python | 02054fe7f0bf727910c58d634aae7ba4799a2e2c | [
"BSD-3-Clause"
] | null | null | null | import svgwrite
def cross(dwg, x, y, r = 5):
dwg.add(dwg.line((x - r, y), (x + r, y), stroke='red', stroke_width=.1))
dwg.add(dwg.line((x, y - r), (x, y + r), stroke='red', stroke_width=.1))
if __name__ == '__main__':
dwg = svgwrite.Drawing('test.svg', size=('150mm', '150mm'), viewBox=('0 0 150 150'))
cross(dwg, 5, 5)
cross(dwg, 145, 5)
cross(dwg, 145, 145)
cross(dwg, 5, 145)
dwg.save()
| 23.777778 | 88 | 0.556075 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 57 | 0.133178 |
b2df89e53c0696d724b6e84ea61023d0f33bec67 | 597 | py | Python | Pokemon Identifier/app.py | sethuiyer/mlhub | 6be271c0070a0c0bb90dd92aceb344e7415bb1db | [
"MIT"
] | 22 | 2016-12-28T16:14:18.000Z | 2019-09-22T16:39:29.000Z | Pokemon Identifier/app.py | sethuiyer/mlhub | 6be271c0070a0c0bb90dd92aceb344e7415bb1db | [
"MIT"
] | 6 | 2020-03-24T17:48:55.000Z | 2022-03-12T00:04:58.000Z | Pokemon Identifier/app.py | sethuiyer/mlhub | 6be271c0070a0c0bb90dd92aceb344e7415bb1db | [
"MIT"
] | 17 | 2017-01-17T09:45:14.000Z | 2020-04-21T07:19:39.000Z | from poketype import PokemonTypeIdentifier
from flask import Flask, request, make_response,jsonify
import os
id = PokemonTypeIdentifier()
app = Flask(__name__,static_url_path='/static')
@app.route('/findtype',methods=['GET'])
def classify():
poke_name=request.args.get('pokename')
results = id.predict_type(poke_name)
return jsonify({'results':results})
@app.route('/',methods=['GET'])
def root():
return app.send_static_file('index.html')
if __name__ == '__main__':
port = int(os.environ.get('PORT', 8001))
app.run(debug=True,host='0.0.0.0',port=port,use_reloader=False)
| 33.166667 | 67 | 0.721943 | 0 | 0 | 0 | 0 | 268 | 0.448911 | 0 | 0 | 89 | 0.149079 |
b2e0de9c060c5c18b08f58d054b60642948a19ab | 13,975 | py | Python | SViTE/backup/sparselearning/snip.py | VITA-Group/SViTE | b0c62fd153c8b0b99917ab935ee76925c9de1149 | [
"MIT"
] | 50 | 2021-05-29T00:52:45.000Z | 2022-03-17T11:39:47.000Z | SViTE/backup/sparselearning/snip.py | VITA-Group/SViTE | b0c62fd153c8b0b99917ab935ee76925c9de1149 | [
"MIT"
] | 2 | 2022-01-16T07:24:52.000Z | 2022-03-29T01:56:24.000Z | SViTE/backup/sparselearning/snip.py | VITA-Group/SViTE | b0c62fd153c8b0b99917ab935ee76925c9de1149 | [
"MIT"
] | 6 | 2021-06-27T22:24:16.000Z | 2022-01-17T02:45:32.000Z | import torch
import torch.autograd as autograd
import torch.nn as nn
import torch.nn.functional as F
import math
import copy
import types
def fgsm_attack(image, epsilon, data_grad):
print('Attacking...')
# Collect the element-wise sign of the data gradient
sign_data_grad = data_grad.sign()
# Create the perturbed image by adjusting each pixel of the input image
perturbed_image = image + epsilon*sign_data_grad
# Adding clipping to maintain [0,1] range
perturbed_image = torch.clamp(perturbed_image, 0, 1)
# Return the perturbed image
return perturbed_image
def snip_forward_conv2d(self, x):
return F.conv2d(x, self.weight * self.weight_mask, self.bias,
self.stride, self.padding, self.dilation, self.groups)
def snip_forward_linear(self, x):
return F.linear(x, self.weight * self.weight_mask, self.bias)
def SNIP(net, keep_ratio, train_dataloader, device):
# TODO: shuffle?
# Grab a single batch from the training dataset
inputs, targets = next(iter(train_dataloader))
inputs = inputs.to(device)
targets = targets.to(device)
inputs.requires_grad = True
# Let's create a fresh copy of the network so that we're not worried about
# affecting the actual training-phase
net = copy.deepcopy(net)
# Monkey-patch the Linear and Conv2d layer to learn the multiplicative mask
# instead of the weights
for layer in net.modules():
if isinstance(layer, nn.Conv2d) or isinstance(layer, nn.Linear):
layer.weight_mask = nn.Parameter(torch.ones_like(layer.weight))
nn.init.xavier_normal_(layer.weight)
layer.weight.requires_grad = False
# Override the forward methods:
if isinstance(layer, nn.Conv2d):
layer.forward = types.MethodType(snip_forward_conv2d, layer)
if isinstance(layer, nn.Linear):
layer.forward = types.MethodType(snip_forward_linear, layer)
# Compute gradients (but don't apply them)
net.zero_grad()
outputs = net.forward(inputs)
loss = F.nll_loss(outputs, targets)
loss.backward()
grads_abs = []
for layer in net.modules():
if isinstance(layer, nn.Conv2d) or isinstance(layer, nn.Linear):
grads_abs.append(torch.abs(layer.weight_mask.grad))
# Gather all scores in a single vector and normalise
all_scores = torch.cat([torch.flatten(x) for x in grads_abs])
norm_factor = torch.sum(all_scores)
all_scores.div_(norm_factor)
num_params_to_keep = int(len(all_scores) * keep_ratio)
threshold, _ = torch.topk(all_scores, num_params_to_keep, sorted=True)
acceptable_score = threshold[-1]
keep_masks = []
for g in grads_abs:
keep_masks.append(((g / norm_factor) >= acceptable_score).float())
print(torch.sum(torch.cat([torch.flatten(x == 1) for x in keep_masks])))
return keep_masks
def SNIP_training(net, keep_ratio, train_dataloader, device, masks, death_rate):
# TODO: shuffle?
# Grab a single batch from the training dataset
inputs, targets = next(iter(train_dataloader))
inputs = inputs.to(device)
targets = targets.to(device)
print('Pruning rate:', death_rate)
# Let's create a fresh copy of the network so that we're not worried about
# affecting the actual training-phase
net = copy.deepcopy(net)
# Monkey-patch the Linear and Conv2d layer to learn the multiplicative mask
# instead of the weights
# for layer in net.modules():
# if isinstance(layer, nn.Conv2d) or isinstance(layer, nn.Linear):
# layer.weight_mask = nn.Parameter(torch.ones_like(layer.weight))
# # nn.init.xavier_normal_(layer.weight)
# # layer.weight.requires_grad = False
#
# # Override the forward methods:
# if isinstance(layer, nn.Conv2d):
# layer.forward = types.MethodType(snip_forward_conv2d, layer)
#
# if isinstance(layer, nn.Linear):
# layer.forward = types.MethodType(snip_forward_linear, layer)
# Compute gradients (but don't apply them)
net.zero_grad()
outputs = net.forward(inputs)
loss = F.nll_loss(outputs, targets)
loss.backward()
grads_abs = []
masks_copy = []
new_masks = []
for name in masks:
masks_copy.append(masks[name])
index = 0
for layer in net.modules():
if isinstance(layer, nn.Conv2d) or isinstance(layer, nn.Linear):
# clone mask
mask = masks_copy[index].clone()
num_nonzero = (masks_copy[index] != 0).sum().item()
num_zero = (masks_copy[index] == 0).sum().item()
# calculate score
scores = torch.abs(layer.weight.grad * layer.weight * masks_copy[index]) # weight * grad
norm_factor = torch.sum(scores)
scores.div_(norm_factor)
x, idx = torch.sort(scores.data.view(-1))
num_remove = math.ceil(death_rate * num_nonzero)
k = math.ceil(num_zero + num_remove)
if num_remove == 0.0: return masks_copy[index] != 0.0
mask.data.view(-1)[idx[:k]] = 0.0
new_masks.append(mask)
index += 1
return new_masks
def GraSP_fetch_data(dataloader, num_classes, samples_per_class):
datas = [[] for _ in range(num_classes)]
labels = [[] for _ in range(num_classes)]
mark = dict()
dataloader_iter = iter(dataloader)
while True:
inputs, targets = next(dataloader_iter)
for idx in range(inputs.shape[0]):
x, y = inputs[idx:idx+1], targets[idx:idx+1]
category = y.item()
if len(datas[category]) == samples_per_class:
mark[category] = True
continue
datas[category].append(x)
labels[category].append(y)
if len(mark) == num_classes:
break
X, y = torch.cat([torch.cat(_, 0) for _ in datas]), torch.cat([torch.cat(_) for _ in labels]).view(-1)
return X, y
def count_total_parameters(net):
total = 0
for m in net.modules():
if isinstance(m, (nn.Linear, nn.Conv2d)):
total += m.weight.numel()
return total
def count_fc_parameters(net):
total = 0
for m in net.modules():
if isinstance(m, (nn.Linear)):
total += m.weight.numel()
return total
def GraSP(net, ratio, train_dataloader, device, num_classes=10, samples_per_class=25, num_iters=1, T=200, reinit=True):
eps = 1e-10
keep_ratio = ratio
old_net = net
net = copy.deepcopy(net) # .eval()
net.zero_grad()
weights = []
total_parameters = count_total_parameters(net)
fc_parameters = count_fc_parameters(net)
# rescale_weights(net)
for layer in net.modules():
if isinstance(layer, nn.Conv2d) or isinstance(layer, nn.Linear):
if isinstance(layer, nn.Linear) and reinit:
nn.init.xavier_normal(layer.weight)
weights.append(layer.weight)
inputs_one = []
targets_one = []
grad_w = None
for w in weights:
w.requires_grad_(True)
print_once = False
for it in range(num_iters):
print("(1): Iterations %d/%d." % (it, num_iters))
inputs, targets = GraSP_fetch_data(train_dataloader, num_classes, samples_per_class)
N = inputs.shape[0]
din = copy.deepcopy(inputs)
dtarget = copy.deepcopy(targets)
inputs_one.append(din[:N//2])
targets_one.append(dtarget[:N//2])
inputs_one.append(din[N // 2:])
targets_one.append(dtarget[N // 2:])
inputs = inputs.to(device)
targets = targets.to(device)
outputs = net.forward(inputs[:N//2])/T
if print_once:
# import pdb; pdb.set_trace()
x = F.softmax(outputs)
print(x)
print(x.max(), x.min())
print_once = False
loss = F.cross_entropy(outputs, targets[:N//2])
# ===== debug ================
grad_w_p = autograd.grad(loss, weights)
if grad_w is None:
grad_w = list(grad_w_p)
else:
for idx in range(len(grad_w)):
grad_w[idx] += grad_w_p[idx]
outputs = net.forward(inputs[N // 2:])/T
loss = F.cross_entropy(outputs, targets[N // 2:])
grad_w_p = autograd.grad(loss, weights, create_graph=False)
if grad_w is None:
grad_w = list(grad_w_p)
else:
for idx in range(len(grad_w)):
grad_w[idx] += grad_w_p[idx]
ret_inputs = []
ret_targets = []
for it in range(len(inputs_one)):
print("(2): Iterations %d/%d." % (it, num_iters))
inputs = inputs_one.pop(0).to(device)
targets = targets_one.pop(0).to(device)
ret_inputs.append(inputs)
ret_targets.append(targets)
outputs = net.forward(inputs)/T
loss = F.cross_entropy(outputs, targets)
# ===== debug ==============
grad_f = autograd.grad(loss, weights, create_graph=True)
z = 0
count = 0
for layer in net.modules():
if isinstance(layer, nn.Conv2d) or isinstance(layer, nn.Linear):
z += (grad_w[count].data * grad_f[count]).sum()
count += 1
z.backward()
grads = dict()
old_modules = list(old_net.modules())
for idx, layer in enumerate(net.modules()):
if isinstance(layer, nn.Conv2d) or isinstance(layer, nn.Linear):
grads[old_modules[idx]] = -layer.weight.data * layer.weight.grad # -theta_q Hg
# Gather all scores in a single vector and normalise
all_scores = torch.cat([torch.flatten(x) for x in grads.values()])
norm_factor = torch.abs(torch.sum(all_scores)) + eps
print("** norm factor:", norm_factor)
all_scores.div_(norm_factor)
num_params_to_rm = int(len(all_scores) * (1-keep_ratio))
threshold, _ = torch.topk(all_scores, num_params_to_rm, sorted=True)
# import pdb; pdb.set_trace()
acceptable_score = threshold[-1]
print('** accept: ', acceptable_score)
keep_masks = []
for m, g in grads.items():
keep_masks.append(((g / norm_factor) <= acceptable_score).float())
# print(torch.sum(torch.cat([torch.flatten(x == 1) for x in keep_masks.values()])))
return keep_masks
def GraSP_Training(net, ratio, train_dataloader, device, num_classes=10, samples_per_class=25, num_iters=1, T=200, reinit=False):
eps = 1e-10
death_rate = ratio
net = copy.deepcopy(net) # .eval()
net.zero_grad()
weights = []
# rescale_weights(net)
for layer in net.modules():
if isinstance(layer, nn.Conv2d) or isinstance(layer, nn.Linear):
if isinstance(layer, nn.Linear) and reinit:
nn.init.xavier_normal(layer.weight)
weights.append(layer.weight)
inputs_one = []
targets_one = []
grad_w = None
for w in weights:
w.requires_grad_(True)
print_once = False
for it in range(num_iters):
print("(1): Iterations %d/%d." % (it, num_iters))
inputs, targets = GraSP_fetch_data(train_dataloader, num_classes, samples_per_class)
N = inputs.shape[0]
din = copy.deepcopy(inputs)
dtarget = copy.deepcopy(targets)
inputs_one.append(din[:N//2])
targets_one.append(dtarget[:N//2])
inputs_one.append(din[N // 2:])
targets_one.append(dtarget[N // 2:])
inputs = inputs.to(device)
targets = targets.to(device)
outputs = net.forward(inputs[:N//2])/T
if print_once:
# import pdb; pdb.set_trace()
x = F.softmax(outputs)
print(x)
print(x.max(), x.min())
print_once = False
loss = F.cross_entropy(outputs, targets[:N//2])
# ===== debug ================
grad_w_p = autograd.grad(loss, weights)
if grad_w is None:
grad_w = list(grad_w_p)
else:
for idx in range(len(grad_w)):
grad_w[idx] += grad_w_p[idx]
outputs = net.forward(inputs[N // 2:])/T
loss = F.cross_entropy(outputs, targets[N // 2:])
grad_w_p = autograd.grad(loss, weights, create_graph=False)
if grad_w is None:
grad_w = list(grad_w_p)
else:
for idx in range(len(grad_w)):
grad_w[idx] += grad_w_p[idx]
ret_inputs = []
ret_targets = []
for it in range(len(inputs_one)):
print("(2): Iterations %d/%d." % (it, num_iters))
inputs = inputs_one.pop(0).to(device)
targets = targets_one.pop(0).to(device)
ret_inputs.append(inputs)
ret_targets.append(targets)
outputs = net.forward(inputs)/T
loss = F.cross_entropy(outputs, targets)
# ===== debug ==============
grad_f = autograd.grad(loss, weights, create_graph=True)
z = 0
count = 0
for layer in net.modules():
if isinstance(layer, nn.Conv2d) or isinstance(layer, nn.Linear):
z += (grad_w[count].data * grad_f[count]).sum()
count += 1
z.backward()
keep_masks = []
for idx, layer in enumerate(net.modules()):
if isinstance(layer, nn.Conv2d) or isinstance(layer, nn.Linear):
grad = -layer.weight.data * layer.weight.grad
# scores = torch.flatten(grad) # -theta_q Hg
mask = grad != 0
num_nonzero = (grad != 0).sum().item()
num_positive = (grad > 0).sum().item()
num_zero = (grad == 0).sum().item()
num_remove = math.ceil(num_nonzero*death_rate)
if num_remove > num_positive:
k = num_remove + num_zero
else:
k = num_remove
threshold, idx = torch.topk(grad.data.view(-1), k, sorted=True)
mask.data.view(-1)[idx[:k]] = 0.0
keep_masks.append(mask)
return keep_masks | 34.506173 | 129 | 0.606082 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2,083 | 0.149052 |
b2e1c343a37dcd72cd8cbdfb5d12721802108339 | 296 | py | Python | digin/leads/views.py | yhung119/cs411-digin | 55e884ff0c26ca06056219e6cb641dc1ceae1f56 | [
"Apache-2.0"
] | null | null | null | digin/leads/views.py | yhung119/cs411-digin | 55e884ff0c26ca06056219e6cb641dc1ceae1f56 | [
"Apache-2.0"
] | null | null | null | digin/leads/views.py | yhung119/cs411-digin | 55e884ff0c26ca06056219e6cb641dc1ceae1f56 | [
"Apache-2.0"
] | null | null | null | from django.shortcuts import render
from leads.models import Lead
from leads.serializers import LeadSerializer
from rest_framework import generics
# Create your views here.
class LeadListCreate(generics.ListCreateAPIView):
queryset = Lead.objects.all()
serializer_class = LeadSerializer
| 29.6 | 49 | 0.820946 | 121 | 0.408784 | 0 | 0 | 0 | 0 | 0 | 0 | 25 | 0.084459 |
b2e290a686a1065fd69b5d2e7d362f288caf266f | 117 | py | Python | Module01/OOP/FirstClassDef.py | fenglihanxiao/Python | 872baf3a3a5ee42740161152605ca2b1ddf4cd30 | [
"MIT"
] | null | null | null | Module01/OOP/FirstClassDef.py | fenglihanxiao/Python | 872baf3a3a5ee42740161152605ca2b1ddf4cd30 | [
"MIT"
] | null | null | null | Module01/OOP/FirstClassDef.py | fenglihanxiao/Python | 872baf3a3a5ee42740161152605ca2b1ddf4cd30 | [
"MIT"
] | null | null | null | """ First class definition"""
class Cat:
pass
class RaceCar:
pass
cat1 = Cat()
cat2 = Cat()
cat3 = Cat() | 9 | 29 | 0.589744 | 42 | 0.358974 | 0 | 0 | 0 | 0 | 0 | 0 | 29 | 0.247863 |
b2e37635f83fbc719c3828b77b744bc8d962608e | 870 | py | Python | ddtrace/contrib/aiobotocore/__init__.py | tophatmonocle/dd-trace-py | 7db12f1c398c07cd5baf91c571aed672dbb6496d | [
"BSD-3-Clause"
] | null | null | null | ddtrace/contrib/aiobotocore/__init__.py | tophatmonocle/dd-trace-py | 7db12f1c398c07cd5baf91c571aed672dbb6496d | [
"BSD-3-Clause"
] | null | null | null | ddtrace/contrib/aiobotocore/__init__.py | tophatmonocle/dd-trace-py | 7db12f1c398c07cd5baf91c571aed672dbb6496d | [
"BSD-3-Clause"
] | null | null | null | """
The aiobotocore integration will trace all AWS calls made with the ``aiobotocore``
library. This integration isn't enabled when applying the default patching.
To enable it, you must run ``patch_all(botocore=True)``
::
import aiobotocore.session
from ddtrace import patch
# If not patched yet, you can patch botocore specifically
patch(aiobotocore=True)
# This will report spans with the default instrumentation
aiobotocore.session.get_session()
lambda_client = session.create_client('lambda', region_name='us-east-1')
# This query generates a trace
lambda_client.list_functions()
"""
from ...utils.importlib import require_modules
# Only export the patch entry point when aiobotocore is actually importable;
# otherwise this package exposes nothing.
required_modules = ['aiobotocore.client']
with require_modules(required_modules) as missing_modules:
    if not missing_modules:
        from .patch import patch
        __all__ = ['patch']
| 28.064516 | 82 | 0.74023 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 655 | 0.752874 |
b2e5f383923565aa615eda28c9950c8812f8c749 | 3,161 | py | Python | luna/__init__.py | ktemkin/luna | 661dc89f7f60ba8a51165f7f8037ad2d5854cf34 | [
"BSD-3-Clause"
] | 4 | 2020-02-11T18:40:02.000Z | 2020-04-03T13:07:38.000Z | luna/__init__.py | ktemkin/luna | 661dc89f7f60ba8a51165f7f8037ad2d5854cf34 | [
"BSD-3-Clause"
] | null | null | null | luna/__init__.py | ktemkin/luna | 661dc89f7f60ba8a51165f7f8037ad2d5854cf34 | [
"BSD-3-Clause"
] | null | null | null | #
# This file is part of LUNA.
#
import shutil
import tempfile
import argparse
from nmigen import Elaboratable
from .gateware.platform import get_appropriate_platform
def top_level_cli(fragment, *pos_args, **kwargs):
    """ Runs a default CLI that assists in building and running gateware.
    `fragment` may be an elaboratable, or a callable that returns one; any
    extra positional/keyword arguments are forwarded to that callable.
    If the user's options resulted in the board being programmed, this returns the fragment
    that was programmed onto the board. Otherwise, it returns None.
    """
    parser = argparse.ArgumentParser(description="Gateware generation/upload script for '{}' gateware.".format(fragment.__class__.__name__))
    parser.add_argument('--output', '-o', metavar='filename', help="Build and output a bitstream to the given file.")
    parser.add_argument('--erase', '-E', action='store_true',
                        help="Clears the relevant FPGA's flash before performing other options.")
    parser.add_argument('--upload', '-U', action='store_true',
                        help="Uploads the relevant design to the target hardware. Default if no options are provided.")
    parser.add_argument('--flash', '-F', action='store_true',
                        help="Flashes the relevant design to the target hardware's configuration flash.")
    parser.add_argument('--dry-run', '-D', action='store_true',
                        help="When provided as the only option; builds the relevant bitstream without uploading or flashing it.")
    parser.add_argument('--keep-files', action='store_true',
                        help="Keeps the local files in the default `build` folder.")
    args = parser.parse_args()
    # Selects the platform implementation matching the connected hardware.
    platform = get_appropriate_platform()
    # If this isn't a fragment directly, interpret it as an object that will build one.
    if callable(fragment):
        fragment = fragment(*pos_args, **kwargs)
    # If we have no other options set, build and upload the relevant file.
    if (args.output is None and not args.flash and not args.erase and not args.dry_run):
        args.upload = True
    # Once the device is flashed, it will self-reconfigure, so we
    # don't need an explicitly upload step; and it implicitly erases
    # the flash, so we don't need an erase step.
    if args.flash:
        args.erase = False
        args.upload = False
    # Build the relevant gateware, uploading if requested.
    # A temporary directory is used unless --keep-files asked for ./build;
    # temporaries are removed in the `finally` below.
    build_dir = "build" if args.keep_files else tempfile.mkdtemp()
    # Build the relevant files.
    try:
        if args.erase:
            platform.toolchain_erase()
        products = platform.build(fragment,
            do_program=args.upload,
            build_dir=build_dir
        )
        # If we're flashing the FPGA's flash, do so.
        if args.flash:
            platform.toolchain_flash(products)
        # If we're outputting a file, write it.
        if args.output:
            bitstream = products.get("top.bit")
            with open(args.output, "wb") as f:
                f.write(bitstream)
        # Return the fragment we're working with, for convenience.
        if args.upload or args.flash:
            return fragment
    # Clean up any directories we've created.
    finally:
        if not args.keep_files:
            shutil.rmtree(build_dir)
    # Nothing was programmed (e.g. --dry-run or --output only).
    return None
| 37.188235 | 140 | 0.669092 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,521 | 0.481177 |
b2e71e54f3ede13551ca6c960041e280c9f907b3 | 761 | py | Python | htdocs/geojson/hsearch.py | akrherz/depbackend | d43053319227a3aaaf7553c823e8e2e748fbe95d | [
"Apache-2.0"
] | null | null | null | htdocs/geojson/hsearch.py | akrherz/depbackend | d43053319227a3aaaf7553c823e8e2e748fbe95d | [
"Apache-2.0"
] | 1 | 2022-02-17T17:43:52.000Z | 2022-02-17T17:43:52.000Z | htdocs/geojson/hsearch.py | akrherz/depbackend | d43053319227a3aaaf7553c823e8e2e748fbe95d | [
"Apache-2.0"
] | 2 | 2021-11-28T11:41:32.000Z | 2022-01-26T17:12:03.000Z | """search for HUC12 by name."""
import json
from paste.request import parse_formvars
from pyiem.util import get_dbconn
def search(q):
    """Return up to ten HUC12 rows whose name matches regex *q* (case-insensitive)."""
    cursor = get_dbconn("idep").cursor()
    cursor.execute(
        """SELECT huc_12, hu_12_name from huc12
        WHERE hu_12_name ~* %s and scenario = 0 LIMIT 10""",
        (q,),
    )
    matches = [dict(huc_12=huc, name=label) for huc, label in cursor]
    return {"results": matches}
def application(environ, start_response):
    """WSGI entry point: serve the JSON search results for the ``q`` parameter."""
    fields = parse_formvars(environ)
    query = fields.get("q", "")
    start_response("200 OK", [("Content-type", "application/json")])
    return [json.dumps(search(query)).encode("ascii")]
| 23.78125 | 61 | 0.622865 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 229 | 0.30092 |
b2e84727d200add756532d87eca711fb92b61dde | 1,570 | py | Python | setup.py | Peque/mmsim | b3a78ad0119db6ee8df349a89559ea8006c85db1 | [
"BSD-3-Clause"
] | null | null | null | setup.py | Peque/mmsim | b3a78ad0119db6ee8df349a89559ea8006c85db1 | [
"BSD-3-Clause"
] | null | null | null | setup.py | Peque/mmsim | b3a78ad0119db6ee8df349a89559ea8006c85db1 | [
"BSD-3-Clause"
] | null | null | null | """
Setup module.
"""
from setuptools import setup
from mmsim import __version__
setup(
name='mmsim',
version=__version__,
description='A simple Micromouse Maze Simulator server',
long_description="""The server can load different mazes and any client
can connect to it to ask for the current position walls, move from
one cell to another and visualize the simulated micromouse state.""",
url='https://github.com/Theseus/mmsim',
author='Miguel Sánchez de León Peque',
author_email='peque@neosit.es',
license='BSD License',
classifiers=[
'Development Status :: 3 - Alpha',
'Topic :: Utilities',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: Implementation :: CPython',
],
keywords='micromouse maze server simulator',
entry_points={
'console_scripts': [
'mmsim = mmsim.commands:launch',
],
},
packages=['mmsim'],
install_requires=[
'click',
'numpy',
'pyqtgraph',
'pyqt5',
'pyzmq'],
extras_require={
'docs': [
'doc8',
'sphinx',
'sphinx_rtd_theme',
],
'lint': [
'flake8',
'flake8-bugbear',
'flake8-per-file-ignores',
'flake8-quotes',
'pep8-naming',
],
'test': [
'pytest',
'pytest-cov',
],
},
)
| 26.610169 | 77 | 0.54586 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 877 | 0.557888 |
b2e911b19926607cd6e241f7b09f34ddcf0231cd | 2,556 | py | Python | fastinference.py | wkcw/VariousDiscriminator-CycleGan | de9c033aeed1c429f37c531056c1f74cb51a771c | [
"MIT"
] | null | null | null | fastinference.py | wkcw/VariousDiscriminator-CycleGan | de9c033aeed1c429f37c531056c1f74cb51a771c | [
"MIT"
] | null | null | null | fastinference.py | wkcw/VariousDiscriminator-CycleGan | de9c033aeed1c429f37c531056c1f74cb51a771c | [
"MIT"
] | null | null | null | """
A fast version of the original inference.
Constructing one graph to infer all the samples.
Originally, one graph was constructed for each sample.
"""
import tensorflow as tf
import os
from model import CycleGAN
import utils
import scipy.misc
import numpy as np
try:
from os import scandir
except ImportError:
# Python 2 polyfill module
from scandir import scandir
# Command-line flags: frozen model path, input/output image directories, and
# the square size images are resized to before being fed to the graph.
FLAGS = tf.flags.FLAGS
tf.flags.DEFINE_string('model', '', 'model path (.pb)')
tf.flags.DEFINE_string('input', 'data/apple', 'input image path')
tf.flags.DEFINE_string('output', 'samples/apple', 'output image path')
tf.flags.DEFINE_integer('image_size', 128, 'image size, default: 128')
def data_reader(input_dir):
    """Collect the paths of all regular ``.jpg`` files directly inside *input_dir*."""
    return [
        entry.path
        for entry in scandir(input_dir)
        if entry.name.endswith('.jpg') and entry.is_file()
    ]
def inference():
    """Run the frozen graph over every .jpg in FLAGS.input and save
    input/prediction side-by-side images into FLAGS.output.
    NOTE: Python 2 only (print statements, ``except os.error, e`` syntax).
    """
    graph = tf.Graph()
    with graph.as_default():
        # Load the frozen GraphDef once and wire a placeholder to its input,
        # so a single graph serves every sample.
        with tf.gfile.FastGFile(FLAGS.model, 'rb') as model_file:
            graph_def = tf.GraphDef()
            graph_def.ParseFromString(model_file.read())
        input_image = tf.placeholder(tf.float32,shape=[FLAGS.image_size, FLAGS.image_size, 3])
        [output_image] = tf.import_graph_def(graph_def,
                          input_map={'input_image': input_image},
                          return_elements=['output_image:0'],
                          name='output')
        #print type(output_image), output_image
    file_list = data_reader(FLAGS.input)
    whole = len(file_list)
    cnt = 0
    with tf.Session(graph=graph) as sess:
        for file in file_list:
            tmp_image = scipy.misc.imread(file)
            tmp_image = scipy.misc.imresize(tmp_image, (FLAGS.image_size, FLAGS.image_size, 3))
            # Scale pixels from [0, 255] to [-1, 1] before feeding the model.
            processed_image = tmp_image / 127.5 - 1
            processed_image = np.asarray(processed_image, dtype=np.float32)
            predicted_image = sess.run(output_image, feed_dict={input_image: processed_image})
            predicted_image = np.squeeze(predicted_image)
            #print tmp_image.shape, predicted_image.shape
            # Original and prediction concatenated side by side (along width).
            save_image = np.concatenate((tmp_image, predicted_image), axis=1)
            print cnt
            output_file_name = file.split('/')[-1]
            try:
                os.makedirs(FLAGS.output)
            # NOTE(review): retried every iteration; silently ignores
            # "directory already exists" (and any other os.error).
            except os.error, e:
                pass
            scipy.misc.imsave(FLAGS.output + '/{}'.format(output_file_name), save_image)
            cnt += 1
            # NOTE(review): integer division -- cnt//whole stays 0 until
            # cnt == whole, so this "progress" branch can only fire at the very
            # end; a float ratio (float(cnt)/whole) was probably intended.
            if cnt//whole > 0.05:
                print cnt//whole, 'done'
def main(unused_argv):
    """Entry point invoked by tf.app.run(); argv is unused beyond FLAGS parsing."""
    inference()
if __name__ == '__main__':
    tf.app.run()
| 31.555556 | 91 | 0.658842 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 460 | 0.179969 |
b2eaad3cf6ba61735dc17fc8cd249e0de4bd056b | 8,879 | py | Python | examples/json_serializer.py | trisongz/lazycls | 701bad1a358ed3bb136347d0c5eb81de3201f6a3 | [
"MIT"
] | 2 | 2021-12-02T00:13:16.000Z | 2022-02-26T11:18:33.000Z | examples/json_serializer.py | trisongz/lazycls | 701bad1a358ed3bb136347d0c5eb81de3201f6a3 | [
"MIT"
] | null | null | null | examples/json_serializer.py | trisongz/lazycls | 701bad1a358ed3bb136347d0c5eb81de3201f6a3 | [
"MIT"
] | null | null | null | """
This test will show how to use the various Json serializer(s)
"""
from lazy.io.pathz import get_path
from lazy.serialize import SimdJson, OrJson, Json, Serializer
from lazy.utils import get_logger, timer
logger = get_logger('lazytest')
test_file = './files/naics_codes.json'
test_path = get_path(test_file, resolve=True)
test_keys = [
'112930', '11299', '112990', '1131', '11311', '113110', '1132', '11321', '113210', '1133', '11331', '113310', '1141', '11411', '114111', '114112', '114119', '1142', '11421', '114210', '1151', '11511', '115111', '115112', '115113', '115114', '115115', '115116', '1152', '11521', '115210', '1153', '11531', '115310', '2111', '21112', '211120', '21113', '211130', '2121', '21211', '212111', '212112', '212113', '2122', '21221', '212210', '21222', '212221', '212222', '21223', '212230', '21229', '212291', '212299', '2123', '21231', '212311', '212312', '212313', '212319', '21232', '212321', '212322', '212324', '212325', '21239', '212391', '212392', '212393', '212399', '2131', '21311', '213111', '213112', '213113', '213114', '213115', '2211', '22111', '221111', '221112', '221113', '221114', '221115', '221116', '221117', '221118', '22112', '221121', '221122', '2212', '22121', '221210', '2213', '22131', '221310', '22132', '221320', '22133', '221330', '2361', '23611', '236115', '236116', '236117', '236118', '2362', '23621', '236210', '23622', '236220', '2371', '23711', '237110', '23712', '237120', '23713', '237130', '2372', '23721', '237210', '2373', '23731', '237310', '2379', '23799', '237990', '2381', '23811', '238110',
'32412', '324121', '324122', '32419', '324191', '324199', '3251', '32511', '325110', '32512', '325120', '32513', '325130', '32518', '325180', '32519', '325193', '325194', '325199', '3252', '32521', '325211', '325212', '32522', '325220', '3253', '32531', '325311', '325312', '325314', '32532', '325320', '3254', '32541', '325411', '325412', '325413', '325414', '3255', '32551', '325510', '32552', '325520', '3256', '32561', '325611', '325612', '325613', '32562', '325620', '3259', '32591', '325910', '32592', '325920', '32599', '325991', '325992', '325998', '3261', '32611', '326111', '326112', '326113', '32612', '326121', '326122', '32613', '326130', '32614', '326140', '32615', '326150', '32616', '326160', '32619', '326191', '326199', '3262', '32621', '326211', '326212', '32622', '326220', '32629', '326291', '326299', '3271', '32711', '327110', '32712', '327120', '3272', '32721', '327211', '327212', '327213', '327215', '3273', '32731', '327310', '32732', '327320', '32733', '327331', '327332', '32739', '327390', '3274', '32741', '327410', '32742', '327420', '3279', '32791', '327910', '32799', '327991', '327992', '327993', '327999', '3311', '33111', '331110', '3312', '33121', '331210', '33122', '3312',
'333413', '333414', '333415', '3335', '33351', '333511', '333514', '333515', '333517', '333519', '3336', '33361', '333611', '333612', '333613', '333618', '3339', '33391', '333912', '333914', '33392', '333921', '333922', '333923', '333924', '33399', '333991', '333992', '333993', '333994', '333995', '333996', '333997', '333999', '3341', '33411', '334111', '334112', '334118', '3342', '33421', '334210', '33422', '334220', '33429', '334290', '3343', '33431', '334310', '3344', '33441', '334412', '334413', '334416', '334417', '334418', '334419', '3345', '33451', '334510', '334511', '334512', '334513', '334514', '334515', '334516', '334517', '334519', '3346', '33461', '334613', '334614', '3351', '33511', '335110', '33512', '335121', '335122', '335129', '3352', '33521', '335210', '33522', '335220', '3353', '33531', '335311', '335312', '335313', '335314', '3359', '33591', '335911', '335912', '33592', '335921', '335929', '33593', '335931', '335932', '33599', '335991', '335999', '3361', '33611', '336111',
'519190', '5211', '52111', '521110', '5221', '52211', '522110', '52212', '522120', '52213', '522130', '52219', '522190', '5222', '52221', '522210', '52222', '522220', '52229', '522291', '522292', '522293', '522294', '522298', '5223', '52231', '522310', '52232', '522320', '52239', '522390', '5231', '52311', '523110', '52312', '523120', '52313', '523130', '52314', '523140', '5232', '52321', '523210', '5239', '52391',
'7113', '71131', '711310', '71132', '711320', '7114', '71141', '711410', '7115', '71151', '711510', '7121', '71211', '712110', '71212', '712120', '71213', '712130', '71219', '712190', '7131', '71311', '713110', '71312', '713120', '7132', '71321', '713210', '71329', '713290', '7139', '71391', '713910', '71392', '713920', '71393', '713930', '71394', '713940', '71395', '713950', '71399', '713990', '7211', '72111', '721110', '72112', '721120', '72119', '721191', '721199', '7212', '72121', '721211', '721214', '7213', '72131', '721310', '7223', '72231', '722310', '72232', '722320', '72233', '722330', '7224', '72241', '722410', '7225', '72251', '722511', '722513', '722514', '722515', '8111', '81111', '811111', '811112', '811113', '811118', '81112', '811121', '811122', '81119', '811191', '811192', '811198', '8112', '81121', '811211', '811212', '811213', '811219', '8113', '81131', '811310', '8114', '81141', '811411', '811412', '81142', '811420', '81143', '811430', '81149', '811490', '8121', '81211', '812111',
]
def get_text():
    """Read the benchmark JSON file from disk, logging the elapsed read time."""
    started = timer()
    raw = test_path.read_text()
    logger(f'Time to Read Text: {timer(started, as_string=True, short=True)}')
    return raw
def test_simdjson():
    """Benchmark SimdJson: parse, key enumeration, random reads and dumps."""
    js = timer()
    text = get_text()
    t = SimdJson.parse(text)
    logger(f'[SimdJson] Time to Parse: {timer(js, as_string=True, short=True)}')
    # NOTE(review): `t.keys` is accessed without parentheses here (vs .keys()
    # in test_orjson) -- presumably a property on the parsed proxy; confirm.
    num_keys = len(t.keys)
    logger(f'[SimdJson] Time to Load {num_keys} Keys: {timer(js, as_string=True, short=True)}')
    lt = timer()
    for i in test_keys:
        _ = t[i]
    logger(f'[SimdJson] Time to Read {len(test_keys)} Items: {timer(lt, as_string=True, short=True)}')
    lt = timer()
    _ = SimdJson.dumps(t)
    logger(f'[SimdJson] Time to Dump {len(test_keys)} Items: {timer(lt, as_string=True, short=True)}')
    logger(f'[SimdJson] Completed Test in: {timer(js, as_string=True, short=True)}')
    logger('----------------------------------------------------------------')
def test_orjson():
    """Benchmark OrJson: load, random reads and dumps."""
    js = timer()
    text = get_text()
    t = OrJson.loads(text)
    num_keys = len(t.keys())
    logger(f'[OrJson] Time to Load with {num_keys} Total Items: {timer(js, as_string=True, short=True)}')
    lt = timer()
    for i in test_keys:
        _ = t[i]
    logger(f'[OrJson] Time to Read {len(test_keys)} Items: {timer(lt, as_string=True, short=True)}')
    lt = timer()
    _ = OrJson.dumps(t)
    logger(f'[OrJson] Time to Dump {len(test_keys)} Items: {timer(lt, as_string=True, short=True)}')
    logger(f'[OrJson] Completed Test in: {timer(js, as_string=True, short=True)}')
    logger('----------------------------------------------------------------')
def test_json():
    """Benchmark Json twice: via loads() with the parser disabled, then via parse()."""
    js = timer()
    text = get_text()
    ## Explicitly disable Parser
    Json.parser_enabled = False
    t = Json.loads(text)
    logger(f'[Json] Time to Load: {timer(js, as_string=True, short=True)}')
    lt = timer()
    for i in test_keys:
        _ = t[i]
    logger(f'[Json] Time to [Loads]Read {len(test_keys)} Items: {timer(lt, as_string=True, short=True)}')
    t = Json.parse(text)
    # NOTE(review): this interval is measured from `js` (function start), so it
    # includes the loads() phase above, not just the parse() call itself.
    logger(f'[Json] Time to Parse: {timer(js, as_string=True, short=True)}')
    lt = timer()
    for i in test_keys:
        _ = t[i]
    logger(f'[Json] Time to [Parse]Read {len(test_keys)} Items: {timer(lt, as_string=True, short=True)}')
    lt = timer()
    _ = Json.dumps(t)
    logger(f'[Json] Time to Dump {len(test_keys)} Items: {timer(lt, as_string=True, short=True)}')
    logger(f'[Json] Completed Test in: {timer(js, as_string=True, short=True)}')
    logger('----------------------------------------------------------------')
"""
Expected Results
Time to Read Text: 0.00095 secs
[OrJson] Time to Load with 2077 Total Items: 0.00378 secs
[OrJson] Time to Read 520 Items: 2e-05 secs
[OrJson] Time to Dump 520 Items: 0.00097 secs
[OrJson] Completed Test in: 0.00497 secs
----------------------------------------------------------------
Time to Read Text: 0.00022 secs
[Json] Time to Load: 0.00234 secs
[Json] Time to [Loads]Read 520 Items: 2e-05 secs
[Json] Time to Parse: 0.0032 secs
[Json] Time to [Parse]Read 520 Items: 0.00237 secs
[Json] Time to Dump 520 Items: 0.00238 secs
[Json] Completed Test in: 0.00814 secs
----------------------------------------------------------------
Time to Read Text: 0.00023 secs
[SimdJson] Time to Parse: 0.00051 secs
[SimdJson] Time to Load 2077 Keys: 0.00214 secs
[SimdJson] Time to Read 520 Items: 0.00011 secs
[SimdJson] Time to Dump 520 Items: 0.00365 secs
[SimdJson] Completed Test in: 0.00611 secs
----------------------------------------------------------------
"""
if __name__ == '__main__':
    # Run order matches the "Expected Results" transcript recorded above.
    test_orjson()
    test_json()
    test_simdjson()
| 75.245763 | 1,231 | 0.582273 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6,437 | 0.724969 |
b2ec6ef197e2fb9ff51a3b958346b6692ead91be | 1,018 | py | Python | donatello/utils.py | adrianchifor/donatello | 5a384b3203965b16324e9d322e83a8f1f1b27fd1 | [
"Apache-2.0"
] | 7 | 2018-12-01T10:41:16.000Z | 2021-04-08T19:04:46.000Z | donatello/utils.py | adrianchifor/donatello | 5a384b3203965b16324e9d322e83a8f1f1b27fd1 | [
"Apache-2.0"
] | 4 | 2018-12-01T15:31:58.000Z | 2018-12-01T23:59:52.000Z | donatello/utils.py | adrianchifor/donatello | 5a384b3203965b16324e9d322e83a8f1f1b27fd1 | [
"Apache-2.0"
] | 2 | 2018-12-01T10:41:29.000Z | 2018-12-02T15:56:30.000Z | import http.client
def getFunctionPublicIP():
    """Return this machine's public IP as the JSON string served by api.ipify.org.

    Makes an HTTPS request to api.ipify.org and returns the response body
    decoded to text (e.g. '{"ip":"1.2.3.4"}').
    """
    conn = http.client.HTTPSConnection('api.ipify.org', 443)
    # try/finally so the connection is released even if the request fails.
    try:
        conn.request('GET', '/?format=json')
        ip = conn.getresponse().read()
    finally:
        conn.close()
    print(ip)
    # Fix: the response body is bytes; str(bytes) would return the repr
    # ("b'{...}'"), not the JSON text. Decode it instead.
    return ip.decode('utf-8')
def non_zero_balance(balance):
    """Return a copy of *balance* containing only coins with a positive amount."""
    return {coin: amount for coin, amount in balance.items() if amount > 0}
def supported_coins_balance(balance, tickers):
    """Return the balance restricted to supported coins.

    A coin is supported when it is BTC itself or when a ``<coin>/BTC``
    trading pair exists in *tickers*.

    Note: the original wrapped the BTC branch in ``try/except KeyError``
    with a "BTC not in balance" message, but ``balance[coin]`` cannot raise
    there -- ``coin`` comes from iterating ``balance`` itself -- so that
    handler was unreachable, misleading dead code and has been removed.
    """
    return {
        coin: amount
        for coin, amount in balance.items()
        if coin == "BTC" or f"{coin}/BTC" in tickers
    }
| 24.829268 | 62 | 0.609037 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 209 | 0.205305 |
b2ee1fd8d6c226332f2055e2d1c443475db998ec | 2,442 | py | Python | omop_cdm/utility_programs/load_concept_files_into_db.py | jhajagos/CommonDataModelMapper | 65d2251713e5581b76cb16e36424d61fb194c901 | [
"Apache-2.0"
] | 1 | 2019-06-14T02:26:35.000Z | 2019-06-14T02:26:35.000Z | omop_cdm/utility_programs/load_concept_files_into_db.py | jhajagos/CommonDataModelMapper | 65d2251713e5581b76cb16e36424d61fb194c901 | [
"Apache-2.0"
] | null | null | null | omop_cdm/utility_programs/load_concept_files_into_db.py | jhajagos/CommonDataModelMapper | 65d2251713e5581b76cb16e36424d61fb194c901 | [
"Apache-2.0"
] | 1 | 2019-08-12T20:19:28.000Z | 2019-08-12T20:19:28.000Z | import argparse
import json
import sys
import os
try:
from utility_functions import load_csv_files_into_db, generate_vocabulary_load
except(ImportError):
sys.path.insert(0, os.path.abspath(os.path.join(os.path.split(__file__)[0], os.path.pardir, os.path.pardir, "src")))
from utility_functions import load_csv_files_into_db, generate_vocabulary_load
def main(vocab_directory, connection_string, schema, vocabularies=("CONCEPT",)):
    """Load the given OMOP vocabulary files into the target database schema.

    :param vocab_directory: directory containing the vocabulary CSV files
    :param connection_string: database connection string
    :param schema: schema the tables are loaded into
    :param vocabularies: names of the vocabulary tables to load. The default
        now uses an immutable tuple instead of the original mutable list
        default (shared-mutable-default-argument pitfall); any iterable of
        names is still accepted, so callers are unaffected.
    """
    vocab_list = generate_vocabulary_load(vocab_directory, vocabularies)
    # Index pair[0] by pair[1] (table name -> file, judging by the original
    # loop). NOTE(review): confirm pair layout against generate_vocabulary_load.
    vocab_data_dict = {pair[1]: pair[0] for pair in vocab_list}
    load_csv_files_into_db(connection_string, vocab_data_dict, schema_ddl=None, indices_ddl=None,
                           i_print_update=1000, truncate=True, schema=schema, delimiter="\t")
if __name__ == "__main__":
    arg_parse_obj = argparse.ArgumentParser(description="Load concept/vocabulary files into database")
    arg_parse_obj.add_argument("-c", "--config-file-name", dest="config_file_name", help="JSON config file", default="../hi_config.json")
    arg_parse_obj.add_argument("--connection-uri", dest="connection_uri", default=None)
    arg_parse_obj.add_argument("--schema", dest="schema", default=None)
    arg_parse_obj.add_argument("--load-concept_ancestor", default=False, action="store_true", dest="load_concept_ancestor")
    arg_parse_obj.add_argument("--full-concept-files", default=False, action="store_true", dest="load_full_concept_files")
    arg_obj = arg_parse_obj.parse_args()
    print("Reading config file '%s'" % arg_obj.config_file_name)
    with open(arg_obj.config_file_name) as f:
        config = json.load(f)
    # Command-line values override the config file when provided.
    if arg_obj.connection_uri is None:
        connection_uri = config["connection_uri"]
    else:
        connection_uri = arg_obj.connection_uri
    if arg_obj.schema is None:
        schema = config["schema"]
    else:
        schema = arg_obj.schema
    # Select the table set: everything, CONCEPT + ancestors, or CONCEPT only.
    if arg_obj.load_full_concept_files:
        vocabularies_to_load = ["CONCEPT", "CONCEPT_ANCESTOR", "CONCEPT_CLASS", "CONCEPT_RELATIONSHIP",
                                "CONCEPT_SYNONYM", "DOMAIN", "DRUG_STRENGTH", "RELATIONSHIP", "VOCABULARY"]
    elif arg_obj.load_concept_ancestor:
        vocabularies_to_load = ["CONCEPT", "CONCEPT_ANCESTOR"]
    else:
        vocabularies_to_load = ["CONCEPT"]
    main(config["json_map_directory"], connection_uri, schema, vocabularies=vocabularies_to_load)
| 37 | 137 | 0.72154 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 559 | 0.228911 |
b2ef1e91c18ddeb4d7361450a7de67ebdb4b2e6e | 1,588 | py | Python | triplinker/tests/test_views/test_non_dynamic_urls/test_accounts_views/tests.py | GonnaFlyMethod/triplinker | f4189e499ad48fd9102dd2211a8884078136eae9 | [
"MIT"
] | null | null | null | triplinker/tests/test_views/test_non_dynamic_urls/test_accounts_views/tests.py | GonnaFlyMethod/triplinker | f4189e499ad48fd9102dd2211a8884078136eae9 | [
"MIT"
] | null | null | null | triplinker/tests/test_views/test_non_dynamic_urls/test_accounts_views/tests.py | GonnaFlyMethod/triplinker | f4189e499ad48fd9102dd2211a8884078136eae9 | [
"MIT"
] | null | null | null | # Python modules.
import pytest
# Django modules.
from django.urls import reverse
from django.test import TestCase
# !Triplinker modules:
from tests.helpers.create_user import new_user
@pytest.mark.django_db
def test_signup_view(client):
    """The signup page is reachable by an anonymous visitor."""
    signup_url = reverse('accounts:signup')
    assert client.get(signup_url).status_code == 200
@pytest.mark.django_db
def test_signin_view(client):
    """The login page is reachable by an anonymous visitor."""
    login_url = reverse('accounts:login')
    assert client.get(login_url).status_code == 200
@pytest.mark.django_db
def test_profile_view(client):
    """The profile page returns HTTP 200 for a freshly created, logged-in user."""
    logged_in = new_user()['client']
    assert logged_in.get(reverse('accounts:profile')).status_code == 200
@pytest.mark.django_db
def test_profile_edit_view(client):
    """The profile-edit page returns HTTP 200 for a logged-in user."""
    logged_in = new_user()['client']
    assert logged_in.get(reverse('accounts:profile_edit')).status_code == 200
@pytest.mark.django_db
def test_feed_view(client):
    """The feed page returns HTTP 200 for a logged-in user."""
    logged_in = new_user()['client']
    assert logged_in.get(reverse('accounts:feed')).status_code == 200
@pytest.mark.django_db
def test_all_users_list_view(client):
    """The all-users list page returns HTTP 200 for a logged-in user.

    Renamed from ``test_feed_view``: the module defined two functions with
    that name, so this definition silently shadowed the real feed test and
    pytest only ever collected one of them.
    """
    response = new_user()['client']
    url = reverse('accounts:all_users_list')
    response = response.get(url)
    assert response.status_code == 200
@pytest.mark.django_db
def test_logout_view(client):
    """The logout page returns HTTP 200 for a logged-in user."""
    logged_in = new_user()['client']
    assert logged_in.get(reverse('accounts:logout')).status_code == 200
| 25.206349 | 47 | 0.691436 | 0 | 0 | 0 | 0 | 1,352 | 0.851385 | 0 | 0 | 230 | 0.144836 |
b2efa45aafbdbf20dc2a3beb6bb3e66667896bb3 | 414 | py | Python | DNN_Experiments/MaskRCNN/convert.py | wmjpillow/FlameDetectionAPP | c3761c9e15adccbd084b17cd6b6f63c561c7f856 | [
"MIT"
] | 2 | 2019-12-28T21:46:18.000Z | 2020-01-10T03:41:03.000Z | DNN_Experiments/MaskRCNN/convert.py | wmjpillow/FlameDetectionAPP | c3761c9e15adccbd084b17cd6b6f63c561c7f856 | [
"MIT"
] | 10 | 2019-12-28T21:31:19.000Z | 2020-04-12T20:01:58.000Z | DNN_Experiments/MaskRCNN/convert.py | wmjpillow/FlameDetectionAPP | c3761c9e15adccbd084b17cd6b6f63c561c7f856 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# convert jpg tp png
from glob import glob
import cv2
pngs = glob('./*.jpg')
for j in pngs:
img = cv2.imread(j)
cv2.imwrite(j[:-3] + 'png', img)
# delete jpg files
import glob
import os
dir = "/Users/wangmeijie/ALLImportantProjects/FlameDetectionAPP/Models/MaskRCNN/02_26_2020/Mask_RCNN/dataset/train"
for jpgpath in glob.iglob(os.path.join(dir, '*.jpg')):
os.remove(jpgpath) | 21.789474 | 115 | 0.707729 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 189 | 0.456522 |
b2effe0f8a82275a9712cccbfe5467301c5502b1 | 4,544 | py | Python | src/title2Id_redirect_parser.py | mjstrobl/WEXEA | 0af0be1cdb93fc00cd81f885aa15ef8d6579b304 | [
"Apache-2.0"
] | 10 | 2020-06-14T15:46:53.000Z | 2021-04-29T15:02:23.000Z | src/title2Id_redirect_parser.py | mjstrobl/WEXEA | 0af0be1cdb93fc00cd81f885aa15ef8d6579b304 | [
"Apache-2.0"
] | 3 | 2021-08-25T16:16:45.000Z | 2022-02-10T04:29:10.000Z | src/title2Id_redirect_parser.py | mjstrobl/WEXEA | 0af0be1cdb93fc00cd81f885aa15ef8d6579b304 | [
"Apache-2.0"
] | 1 | 2021-02-17T17:44:06.000Z | 2021-02-17T17:44:06.000Z | import xml.sax
import re
import os
import json
import time
def current_milli_time():
    """Return the current wall-clock time in whole milliseconds.

    Converted from the original lambda assignment (PEP 8 E731: use a def
    statement instead of binding a lambda to a name); call sites are
    unchanged.
    """
    return int(round(time.time() * 1000))
# Matches the inner text of a wiki link: '[[target|label]]' -> 'target|label'.
RE_LINKS = re.compile(r'\[{2}(.*?)\]{2}', re.DOTALL | re.UNICODE)
# Page-title prefixes (compared lowercased, followed by ':') to skip.
IGNORED_NAMESPACES = [
    'wikipedia', 'category', 'file', 'portal', 'template',
    'mediaWiki', 'user', 'help', 'book', 'draft', 'wikiProject',
    'special', 'talk', 'image', 'module'
]
"""MediaWiki namespaces that ought to be ignored."""
class WikiHandler(xml.sax.ContentHandler):
    """SAX handler harvesting title<->id mappings and redirects from a MediaWiki XML dump."""
    def __init__(self,title2Id,id2Title,redirects):
        self.tag = ""
        self.content = ''
        self.title = ''
        self.id = -1
        # Shared output dictionaries, filled in-place while parsing.
        self.title2Id = title2Id
        self.id2Title = id2Title
        self.redirects = redirects
        self.counter_all = 0
        self.attributes = {}
        self.n = 0
        # Start timestamp (ms) for the progress / average-time report.
        self.start = current_milli_time()
    # Call when an element starts
    def startElement(self, tag, attributes):
        self.tag = tag
        self.attributes = attributes
    # Call when an elements ends
    def endElement(self, tag):
        # <title> closes before <id>, so self.title is set when the id arrives.
        if tag == 'title':
            self.title = self.content.strip()
        elif tag == 'id':
            # NOTE(review): dumps carry several <id> tags per page (page and
            # revision ids); the "not in title2Id" guard keeps only the first
            # id seen for each title.
            self.id = int(self.content)
            if self.title not in self.title2Id:
                self.title2Id[self.title] = self.id
                self.id2Title[self.id] = self.title
                self.counter_all += 1
                if self.counter_all % 1000 == 0:
                    diff = current_milli_time() - self.start
                    print('Pages processed: ' + str(self.counter_all) + ', avg t: ' + str(diff / self.counter_all), end='\r')
        elif tag == 'text':
            self.n += 1
            # Skip non-article namespaces and "List of ..." pages.
            if not any(self.title.lower().startswith(ignore + ':') for ignore in IGNORED_NAMESPACES) and not self.title.lower().startswith('list of'):
                self.processArticle()
        elif tag == 'redirect' and 'title' in self.attributes:
            # <redirect title="..."/> is self-closing, so the attributes stored
            # in startElement are still current here.
            redirect = self.attributes['title']
            if not any(self.title.lower().startswith(ignore + ':') for ignore in IGNORED_NAMESPACES) \
                    and not any(redirect.lower().startswith(ignore + ':') for ignore in IGNORED_NAMESPACES) \
                    and not redirect.lower().startswith('list of') \
                    and not self.title.lower().startswith('list of'):
                self.redirects[self.title] = redirect
        # Character data is buffered per element: reset after every close tag.
        self.content = ""
    # Call when a character is read
    def characters(self, content):
        # SAX may deliver an element's text in multiple chunks; accumulate.
        self.content += content
    def processArticle(self):
        """Extract redirect information from the page wikitext buffered in self.content."""
        text = self.content.strip()
        #self.title2Id[self.title] = self.id
        if text.lower().startswith('#redirect'):
            # "#REDIRECT [[Target|label]]": map this title to Target.
            match = re.search(RE_LINKS,text)
            if match:
                redirect = match.group(1).strip()
                pos_bar = redirect.find('|')
                if pos_bar > -1:
                    redirect = redirect[:pos_bar]
                redirect = redirect.replace('_',' ')
                if not any(redirect.lower().startswith(ignore + ':') for ignore in IGNORED_NAMESPACES) and not redirect.lower().startswith('list of'):
                    self.redirects[self.title] = redirect
        else:
            # Leading {{redirect|X}} templates mean "X redirects here": map X
            # back to this article's title. Stop at the first non-template line.
            lines = text.split('\n')
            for line in lines:
                if not line.startswith('{{redirect|'):
                    break
                else:
                    line = line[11:]
                    line = line[:line.find('|')]
                    if len(line) > 0:
                        if not any(line.lower().startswith(ignore + ':') for ignore in IGNORED_NAMESPACES) and not line.lower().startswith('list of'):
                            self.redirects[line] = self.title
if (__name__ == "__main__"):
    # Accumulators filled in-place by the SAX handler.
    title2Id = {}
    id2Title = {}
    redirects = {}
    config = json.load(open('config/config.json'))
    wikipath = config['wikipath']
    outputpath = config['outputpath']
    dictionarypath = outputpath + 'dictionaries/'
    mode = 0o755
    # NOTE(review): os.mkdir raises if the directories already exist.
    os.mkdir(outputpath, mode)
    os.mkdir(dictionarypath, mode)
    parser = xml.sax.make_parser()
    # Disable namespace processing; the handler matches plain tag names.
    parser.setFeature(xml.sax.handler.feature_namespaces, 0)
    Handler = WikiHandler(title2Id,id2Title,redirects)
    parser.setContentHandler(Handler)
    parser.parse(wikipath)
    print('done')
    # Persist the three dictionaries as JSON next to each other.
    with open(dictionarypath + 'title2Id.json', 'w') as f:
        json.dump(title2Id, f)
    with open(dictionarypath + 'id2Title.json', 'w') as f:
        json.dump(id2Title, f)
    with open(dictionarypath + 'redirects.json', 'w') as f:
        json.dump(redirects, f)
| 36.352 | 150 | 0.568442 | 3,258 | 0.716989 | 0 | 0 | 0 | 0 | 0 | 0 | 629 | 0.138424 |
b2f00de04b4a4965219cd8965a1067b02342ac09 | 1,571 | bzl | Python | rules/starlark_configurations/cc_test/defs.bzl | CyberFlameGO/examples | 87a4812cb23f7e7969d74cc073579fb82540c0f6 | [
"Apache-2.0"
] | 572 | 2015-09-02T20:26:41.000Z | 2022-03-30T07:43:22.000Z | rules/starlark_configurations/cc_test/defs.bzl | CyberFlameGO/examples | 87a4812cb23f7e7969d74cc073579fb82540c0f6 | [
"Apache-2.0"
] | 158 | 2015-08-31T20:21:50.000Z | 2022-03-20T20:13:14.000Z | rules/starlark_configurations/cc_test/defs.bzl | CyberFlameGO/examples | 87a4812cb23f7e7969d74cc073579fb82540c0f6 | [
"Apache-2.0"
] | 408 | 2015-08-31T20:05:14.000Z | 2022-03-28T02:36:44.000Z | # We can transition on native options using this
# //command_line_option:<option-name> syntax
_BUILD_SETTING = "//command_line_option:test_arg"
def _test_arg_transition_impl(settings, attr):
_ignore = (settings, attr)
return {_BUILD_SETTING: ["new arg"]}
_test_arg_transition = transition(
implementation = _test_arg_transition_impl,
inputs = [],
outputs = [_BUILD_SETTING],
)
def _test_transition_rule_impl(ctx):
    """Expose the transitioned test binary as this rule's own executable.

    Starlark does not allow returning an executable that the rule did not
    create itself, so the underlying binary is copied into a file declared
    by this rule.
    """
    src = ctx.executable.actual_test
    dst = ctx.actions.declare_file(ctx.label.name)
    ctx.actions.run_shell(
        tools = [src],
        outputs = [dst],
        command = "cp %s %s" % (src.path, dst.path),
    )
    # actual_test is a list because a transition is attached to the attribute.
    return [DefaultInfo(
        runfiles = ctx.attr.actual_test[0][DefaultInfo].default_runfiles,
        executable = dst,
    )]
# Test rule that runs `actual_test` under _test_arg_transition. The
# _allowlist_function_transition attribute is required by Bazel for any
# rule that applies a Starlark transition.
transition_rule_test = rule(
    implementation = _test_transition_rule_impl,
    attrs = {
        "actual_test": attr.label(cfg = _test_arg_transition, executable = True),
        "_allowlist_function_transition": attr.label(
            default = "@bazel_tools//tools/allowlists/function_transition_allowlist",
        ),
    },
    test = True,
)
def test_arg_cc_test(name, **kwargs):
    """Macro: declare a cc_test plus a transitioned wrapper test named `name`.

    All keyword arguments are forwarded to the underlying native cc_test.
    """
    native_test_name = name + "_native_test"
    transition_rule_test(
        name = name,
        actual_test = ":%s" % native_test_name,
    )
    native.cc_test(name = native_test_name, **kwargs)
| 33.425532 | 85 | 0.700827 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 381 | 0.242521 |
b2f00fc324ac567aafda05e99a1cb8336f8b4e7a | 7,116 | py | Python | test_cases/apache_avro_adapter.py | QratorLabs/ritfest2016 | cddaaa9e827f5315d2e426c083029124649d6f50 | [
"MIT"
] | null | null | null | test_cases/apache_avro_adapter.py | QratorLabs/ritfest2016 | cddaaa9e827f5315d2e426c083029124649d6f50 | [
"MIT"
] | null | null | null | test_cases/apache_avro_adapter.py | QratorLabs/ritfest2016 | cddaaa9e827f5315d2e426c083029124649d6f50 | [
"MIT"
] | null | null | null | import io
import avro.io
try:
from avro.schema import parse
except ImportError:
from avro.schema import Parse as parse
class ApacheAvroAdapter(object):
    """Serialization adapter backed by Apache Avro binary encoding.

    One (DatumReader, DatumWriter) pair is built per schema file in
    ``specs/``; each pair is still exposed through the original
    ``<stem>_reader`` / ``<stem>_writer`` attributes. The public
    ``encoder_*`` / ``decoder_*`` methods keep their signatures and
    delegate to two shared helpers, so the encode/decode boilerplate
    (previously duplicated 24 times) lives in one place.
    """

    NAME = 'avro'

    # Schema file stems: ``specs/<stem>.avsc`` must exist for each entry.
    _SCHEMA_STEMS = (
        'str', 'bin', 'int', 'float', 'null', 'bool',
        'array', 'map', 'struct10', 'struct_map',
        'simple_list', 'points_list',
    )

    def __init__(self):
        # Build one reader/writer pair per schema and publish them under
        # the historical attribute names (str_reader, str_writer, ...).
        for stem in self._SCHEMA_STEMS:
            with open('specs/%s.avsc' % stem, 'r') as f:
                schema = parse(f.read())
            setattr(self, stem + '_reader', avro.io.DatumReader(schema))
            setattr(self, stem + '_writer', avro.io.DatumWriter(schema))

    def _encode(self, writer, data):
        # Serialize ``data`` with ``writer`` into a fresh byte buffer.
        io_stream = io.BytesIO()
        writer.write(data, avro.io.BinaryEncoder(io_stream))
        return io_stream.getvalue()

    def _decode(self, reader, data):
        # Deserialize ``data`` (bytes) with ``reader``.
        return reader.read(avro.io.BinaryDecoder(io.BytesIO(data)))

    def encoder_string(self, data):
        return self._encode(self.str_writer, data)

    def decoder_string(self, data):
        return self._decode(self.str_reader, data)

    def encoder_bytes(self, data):
        return self._encode(self.bin_writer, data)

    def decoder_bytes(self, data):
        return self._decode(self.bin_reader, data)

    def encoder_integer(self, data):
        return self._encode(self.int_writer, data)

    def decoder_integer(self, data):
        return self._decode(self.int_reader, data)

    def encoder_float(self, data):
        return self._encode(self.float_writer, data)

    def decoder_float(self, data):
        return self._decode(self.float_reader, data)

    def encoder_null(self, data):
        return self._encode(self.null_writer, data)

    def decoder_null(self, data):
        return self._decode(self.null_reader, data)

    def encoder_boolean(self, data):
        return self._encode(self.bool_writer, data)

    def decoder_boolean(self, data):
        return self._decode(self.bool_reader, data)

    def encoder_array(self, data):
        return self._encode(self.array_writer, data)

    def decoder_array(self, data):
        return self._decode(self.array_reader, data)

    def encoder_map(self, data):
        return self._encode(self.map_writer, data)

    def decoder_map(self, data):
        return self._decode(self.map_reader, data)

    def encoder_struct_10(self, data):
        return self._encode(self.struct10_writer, data)

    def decoder_struct_10(self, data):
        return self._decode(self.struct10_reader, data)

    def encoder_struct_map(self, data):
        return self._encode(self.struct_map_writer, data)

    def decoder_struct_map(self, data):
        return self._decode(self.struct_map_reader, data)

    def encoder_simple_list(self, data):
        return self._encode(self.simple_list_writer, data)

    def decoder_simple_list(self, data):
        return self._decode(self.simple_list_reader, data)

    def encoder_points_list(self, data):
        return self._encode(self.points_list_writer, data)

    def decoder_points_list(self, data):
        return self._decode(self.points_list_reader, data)
| 32.054054 | 76 | 0.604272 | 6,986 | 0.981731 | 0 | 0 | 0 | 0 | 0 | 0 | 268 | 0.037662 |
b2f17c3de89d94e2aba8cc14a42ef09cd569851a | 41 | py | Python | tests/test_vec/__init__.py | karin0018/EduNLP | 172c36a2cd2c41a1f1c5833d7b1abcbc5e3bbd5f | [
"Apache-2.0"
] | 18 | 2021-02-15T13:10:42.000Z | 2022-03-17T12:57:34.000Z | tests/test_vec/__init__.py | karin0018/EduNLP | 172c36a2cd2c41a1f1c5833d7b1abcbc5e3bbd5f | [
"Apache-2.0"
] | 81 | 2021-06-02T07:45:20.000Z | 2022-03-29T15:21:32.000Z | tests/test_vec/__init__.py | karin0018/EduNLP | 172c36a2cd2c41a1f1c5833d7b1abcbc5e3bbd5f | [
"Apache-2.0"
] | 29 | 2021-05-18T08:34:58.000Z | 2022-03-12T00:19:09.000Z | # coding: utf-8
# 2021/5/30 @ tongshiwei
| 13.666667 | 24 | 0.658537 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 39 | 0.95122 |
b2f1d9ab07a35f78efc77316abd28bebe9c01e76 | 4,004 | py | Python | tests/test_nameko_prometheus.py | alfaro28/nameko-prometheus | 0f50006b1510eef375712a1b7c4bd00d5f08eb1b | [
"Apache-2.0"
] | null | null | null | tests/test_nameko_prometheus.py | alfaro28/nameko-prometheus | 0f50006b1510eef375712a1b7c4bd00d5f08eb1b | [
"Apache-2.0"
] | null | null | null | tests/test_nameko_prometheus.py | alfaro28/nameko-prometheus | 0f50006b1510eef375712a1b7c4bd00d5f08eb1b | [
"Apache-2.0"
] | null | null | null | import pytest
from nameko.events import EventDispatcher, event_handler
from nameko.rpc import rpc
from nameko.testing.services import entrypoint_hook, entrypoint_waiter
from nameko.web.handlers import http
from prometheus_client import REGISTRY, Counter
from nameko_prometheus import PrometheusMetrics
@pytest.fixture
def config(rabbit_config, web_config):
    """Merge the nameko-provided fixtures into one config for container_factory."""
    merged = rabbit_config.copy()
    merged.update(web_config)
    return merged
@pytest.fixture(autouse=True)
def reset_prometheus_registry():
    """Drop my_service collectors so each test starts from a clean registry."""
    stale = [
        collector
        for collector, names in REGISTRY._collector_to_names.items()
        if any(name.startswith("my_service") for name in names)
    ]
    for collector in stale:
        REGISTRY.unregister(collector)
# Module-level custom metric bumped by MyService.update_counter; exposed
# through the same /metrics endpoint as the default metrics.
my_counter = Counter("my_counter", "My counter")
class MyService:
    """Minimal nameko service used as the fixture for nameko_prometheus tests."""

    name = "my_service"
    metrics = PrometheusMetrics()
    dispatcher = EventDispatcher()
    @rpc
    def update_counter(self):
        """RPC entrypoint that bumps the module-level custom counter."""
        my_counter.inc()
    @http("GET", "/metrics")
    def expose_metrics(self, request):
        """Serve the Prometheus text exposition for this service."""
        return self.metrics.expose_metrics(request)
    @http("GET", "/error")
    def raise_error(self, request):
        """HTTP entrypoint that always fails (exercises 500 status metrics)."""
        raise ValueError("poof")
    @rpc
    def emit_event(self):
        """Dispatch a my_event event (exercises event handler metrics)."""
        self.dispatcher("my_event", {"foo": "bar"})
    @event_handler("my_service", "my_event")
    def handle_event(self, payload):
        """Handle our own my_event so handler metrics get recorded."""
        return f"handled: {payload}"
def test_expose_default_metrics(config, container_factory, web_session):
    """The built-in RPC counter shows up in the Prometheus text format."""
    container = container_factory(MyService, config)
    container.start()
    with entrypoint_hook(container, "update_counter") as update_counter:
        for _ in range(2):
            update_counter()
    body = web_session.get("/metrics").text
    assert f"TYPE {MyService.name}_rpc_requests_total counter" in body
    sample = f'{MyService.name}_rpc_requests_total{{method_name="update_counter"}} 2.0'
    assert sample in body
def test_expose_custom_metrics(config, container_factory, web_session):
    """Metrics registered outside the DependencyProvider are exposed too."""
    container = container_factory(MyService, config)
    container.start()
    with entrypoint_hook(container, "update_counter") as update_counter:
        for _ in range(2):
            update_counter()
    assert "my_counter_total" in web_session.get("/metrics").text
def test_expose_event_handler_metrics(config, container_factory, web_session):
    """Handling a dispatched event records both counter and latency metrics."""
    container = container_factory(MyService, config)
    container.start()
    with entrypoint_waiter(container, "handle_event"):
        with entrypoint_hook(container, "emit_event") as emit_event:
            emit_event()
    body = web_session.get("/metrics").text
    assert f"TYPE {MyService.name}_events_total counter" in body
    assert f"TYPE {MyService.name}_events_latency_seconds histogram" in body
    sample = (
        f'{MyService.name}_events_total{{event_type="my_event",source_service="my_service"}} 1.0'
    )
    assert sample in body
def test_http_metrics_collected_on_exception(config, container_factory, web_session):
    """A failing HTTP entrypoint is still counted, tagged with status_code 500."""
    container = container_factory(MyService, config)
    container.start()
    web_session.get("/error")
    body = web_session.get("/metrics").text
    sample = (
        f'{MyService.name}_http_requests_total{{endpoint="/error",http_method="GET",status_code="500"}} 1.0'
    )
    assert sample in body
def test_override_default_metric_prefix(config, container_factory, web_session):
    """A PROMETHEUS config section can replace the service-name metric prefix."""
    prefix = "my_prefix"
    config.update({"PROMETHEUS": {MyService.name: {"prefix": prefix}}})
    container = container_factory(MyService, config)
    container.start()
    with entrypoint_hook(container, "update_counter") as update_counter:
        update_counter()
    body = web_session.get("/metrics").text
    assert f"TYPE {prefix}_rpc_requests_total counter" in body
| 33.932203 | 108 | 0.727273 | 600 | 0.14985 | 0 | 0 | 1,038 | 0.259241 | 0 | 0 | 920 | 0.22977 |
b2f264acc957c78592942a789c9b0334224d0fdd | 604 | py | Python | testing/forms/products.py | Miki761000/storage_podari_s_luibov | a82caccfd40391f6a2609538e4e629d3b113aca9 | [
"MIT"
] | null | null | null | testing/forms/products.py | Miki761000/storage_podari_s_luibov | a82caccfd40391f6a2609538e4e629d3b113aca9 | [
"MIT"
] | null | null | null | testing/forms/products.py | Miki761000/storage_podari_s_luibov | a82caccfd40391f6a2609538e4e629d3b113aca9 | [
"MIT"
] | null | null | null | from django import forms
from warehouse.models import Product, ProductAdditionalInformation
class ProductForm(forms.ModelForm):
    """Form for creating/editing a Product; the product code is read-only."""

    class Meta:
        model = Product
        fields = '__all__'
        # NOTE(review): field_order is normally a Form attribute, not a Meta
        # option — placed here it looks ignored by stock ModelForm; confirm.
        field_order = [
            'product_name',
            'product_code',
            'product_quantity',
            'product_type',
            'product_id',
        ]
    # disabled=True renders the field read-only and ignores submitted values.
    product_code = forms.CharField(disabled=True)
class ProductAdditionalInformationForm(forms.ModelForm):
class Meta:
model = ProductAdditionalInformation
exclude = ['product']
fields = '__all__' | 25.166667 | 66 | 0.622517 | 506 | 0.837748 | 0 | 0 | 0 | 0 | 0 | 0 | 99 | 0.163907 |
b2f42395c99cb32b176d6e1140d0291edc965ff1 | 1,210 | py | Python | src/amuse/community/sei/test_sei.py | joshuawall/amuse | c2034074ee76c08057c4faa96c32044ab40952e9 | [
"Apache-2.0"
] | 1 | 2019-12-28T22:47:51.000Z | 2019-12-28T22:47:51.000Z | src/amuse/community/sei/test_sei.py | joshuawall/amuse | c2034074ee76c08057c4faa96c32044ab40952e9 | [
"Apache-2.0"
] | null | null | null | src/amuse/community/sei/test_sei.py | joshuawall/amuse | c2034074ee76c08057c4faa96c32044ab40952e9 | [
"Apache-2.0"
] | 2 | 2021-11-19T04:41:37.000Z | 2021-11-20T02:11:17.000Z | from amuse.test.amusetest import TestWithMPI
from amuse.units import nbody_system
from amuse.units import units
import os
import sys
import numpy
import math
from amuse.community.sei.interface import SeiInterface
from amuse.community.sei.interface import Sei
from amuse import datamodel
class TestSeiInterface(TestWithMPI):
    """Exercises the low-level SEI code interface directly (Python 2 test)."""

    def test0(self):
        # Integrate particle 0 for ten steps, printing its state after each
        # step (visual check only; there are no assertions here).
        instance = SeiInterface()
        instance.initialization()
        instance.set_state(0,1,0,0,0,0,0)
        for i in range(0,10):
            instance.evolve(i)
            print instance.get_state(0)
        instance.stop()
class TestSei(TestWithMPI):
    """Integrates one particle on a ~1 AU orbit for a year via the OO interface."""

    def test0(self):
        # Unit converter tying the nbody units to 1 MSun / 1 AU.
        convert_nbody = nbody_system.nbody_to_si(1.0 | units.MSun, 1 | units.AU)
        particle = datamodel.Particles(1)
        particle.position = [1.0, 0.0, 0.0,]|units.AU
        # Roughly circular-orbit speed: 2*pi AU per 365 days.
        particle.velocity = [0.0, 2.0*3.1415926535*1.0/365, 0.0] | units.AUd
        sei = Sei(convert_nbody)
        sei.initialization()
        sei.particles.add_particles(particle)
        print sei.particles.position.x.value_in(units.AU)
        for i in range(365):
            sei.evolve_model(i|units.day)
        print sei.particles.position.x.value_in(units.AU)
| 29.512195 | 80 | 0.661157 | 913 | 0.754545 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
b2f4e0a17b1d50d9e628464d9c96026fcbff615c | 20,063 | py | Python | datetimeparser/enums.py | aridevelopment-de/datetimeparser | df63d6f7ed0c362f6d6b4e55d61b973b7fcf3f56 | [
"MIT"
] | 12 | 2021-11-05T21:17:21.000Z | 2022-03-30T17:53:50.000Z | datetimeparser/enums.py | aridevelopment-de/datetimeparser | df63d6f7ed0c362f6d6b4e55d61b973b7fcf3f56 | [
"MIT"
] | 45 | 2021-11-14T16:05:04.000Z | 2022-03-29T18:51:31.000Z | datetimeparser/enums.py | aridevelopment-de/datetimeparser | df63d6f7ed0c362f6d6b4e55d61b973b7fcf3f56 | [
"MIT"
] | 1 | 2021-11-14T13:44:37.000Z | 2021-11-14T13:44:37.000Z | from datetime import datetime, timedelta
from enum import Enum, auto
from dateutil.relativedelta import relativedelta
from .baseclasses import Constant, MethodEnum
from .formulars import days_feb, eastern_calc, thanksgiving_calc, year_start
class ConstantOption(Enum):
    """Flags marking which part of a Constant the parser may still vary."""
    TIME_VARIABLE = auto()
    DATE_VARIABLE = auto()
    YEAR_VARIABLE = auto()
class Constants:
    """Catalog of named date/time constants recognized by the parser.

    Where present, ``time_value`` maps a year (or, for the relative
    constants, an ignored argument) to a concrete datetime.
    """
    # --- fixed-date and computed holidays ---
    CHRISTMAS = Constant('christmas', ['xmas'], options=[ConstantOption.YEAR_VARIABLE],
                         time_value=lambda year_time: datetime(year=year_time, month=12, day=25))
    HOLY_EVE = Constant('holy eve', options=[ConstantOption.YEAR_VARIABLE],
                        time_value=lambda year_time: datetime(year=year_time, month=12, day=24))
    SILVESTER = Constant('silvester', ['new years eve'], options=[ConstantOption.YEAR_VARIABLE],
                         time_value=lambda year_time: datetime(year=year_time, month=12, day=31))
    EASTERN = Constant('eastern', ['easter'], options=[ConstantOption.YEAR_VARIABLE], time_value=eastern_calc)
    NICHOLAS = Constant('nicholas', ['nicholas day'], options=[ConstantOption.YEAR_VARIABLE],
                        time_value=lambda year_time: datetime(year=year_time, month=12, day=6))
    HALLOWEEN = Constant('halloween', options=[ConstantOption.YEAR_VARIABLE],
                         time_value=lambda year_time: datetime(year=year_time, month=10, day=31))
    APRIL_FOOLS_DAY = Constant('april fools day', ['april fool day'], options=[ConstantOption.YEAR_VARIABLE],
                               time_value=lambda year_time: datetime(year=year_time, month=4, day=1))
    THANKSGIVING = Constant('thanksgiving', options=[ConstantOption.YEAR_VARIABLE], time_value=thanksgiving_calc)
    SAINT_PATRICKS_DAY = Constant('saint patrick\'s day',
                                  ['saint patricks day', 'st. patrick\'s day', 'saint pt. day', 'st patrick\'s day', 'st patricks day'],
                                  options=[ConstantOption.YEAR_VARIABLE],
                                  time_value=lambda year_time: datetime(year=year_time, month=3, day=17))
    VALENTINES_DAY = Constant('valentines day', ['valentine', 'valentine day'], options=[ConstantOption.YEAR_VARIABLE],
                              time_value=lambda year_time: datetime(year=year_time, month=2, day=14))
    PI_DAY = Constant("pi day", ["piday", "pi-day"], options=[ConstantOption.YEAR_VARIABLE],
                      time_value=lambda year_time: datetime(year=year_time, month=3, day=14))
    TAU_DAY = Constant("tau day", ["tauday", "tau-day"], options=[ConstantOption.YEAR_VARIABLE],
                       time_value=lambda year_time: datetime(year=year_time, month=6, day=28))
    # --- meteorological season boundaries ---
    SUMMER_BEGIN = Constant('summer begin', ['summer', 'begin of summer', 'begin of the summer'], options=[ConstantOption.YEAR_VARIABLE],
                            time_value=lambda year_time: datetime(year=year_time, month=6, day=1))
    WINTER_BEGIN = Constant('winter begin', ['winter', 'begin of winter', 'begin of the winter'], options=[ConstantOption.YEAR_VARIABLE],
                            time_value=lambda year_time: datetime(year=year_time, month=12, day=1))
    SPRING_BEGIN = Constant('spring begin', ['spring', 'begin of spring', 'begin of the spring'], options=[ConstantOption.YEAR_VARIABLE],
                            time_value=lambda year_time: datetime(year=year_time, month=3, day=1))
    FALL_BEGIN = Constant('fall begin',
                          ['fall', 'begin of fall', 'begin of the fall', 'autumn begin', 'autumn', 'begin of autumn',
                           'begin of the autumn'],
                          options=[ConstantOption.YEAR_VARIABLE], time_value=lambda year_time: datetime(year=year_time, month=9, day=1))
    SUMMER_END = Constant('summer end', ['end of summer', 'end of the summer'], options=[ConstantOption.YEAR_VARIABLE],
                          time_value=lambda year_time: datetime(year=year_time, month=8, day=31, hour=23, minute=59, second=59))
    WINTER_END = Constant('winter end', ['end of winter', 'end of the winter'], options=[ConstantOption.YEAR_VARIABLE],
                          time_value=lambda year_time: datetime(year=year_time, month=2, day=days_feb(year_time), hour=23, minute=59,
                                                                second=59))
    SPRING_END = Constant('spring end', ['end of spring', 'end of the spring'], options=[ConstantOption.YEAR_VARIABLE],
                          time_value=lambda year_time: datetime(year=year_time, month=5, day=31, hour=23, minute=59, second=59))
    FALL_END = Constant('fall end', ['end of fall', 'end of the fall', 'autumn end', 'end of autumn', 'end of the autumn'],
                        options=[ConstantOption.YEAR_VARIABLE],
                        time_value=lambda year_time: datetime(year=year_time, month=11, day=30, hour=23, minute=59, second=59))
    # --- day-part names without a fixed clock time here ---
    MORNING = Constant('morning', ['at morning'],
                       options=[ConstantOption.YEAR_VARIABLE, ConstantOption.DATE_VARIABLE])
    EVENING = Constant('evening', ['at evening'],
                       options=[ConstantOption.YEAR_VARIABLE, ConstantOption.DATE_VARIABLE])
    LUNCHTIME = Constant('lunchtime', ['lunch'], options=[ConstantOption.YEAR_VARIABLE, ConstantOption.DATE_VARIABLE])
    # advent of code always starts at midnight 1st december in SET (5 hours negative UTC offset)
    BEGIN_AOC = Constant('aoc begin',
                         ['aoc', 'begin of aoc', 'begin of the aoc', 'advent of code begin', 'advent of code', 'begin of advent of code',
                          'begin of the advent of code'],
                         options=[ConstantOption.YEAR_VARIABLE],
                         time_value=lambda year_time: datetime(year=year_time, month=12, day=1, hour=0),
                         offset=-5)
    END_AOC = Constant('aoc end',
                       ['end of aoc', 'end of the aoc', 'advent of code end', 'end of advent of code', 'end of the advent of code'],
                       options=[ConstantOption.YEAR_VARIABLE],
                       time_value=lambda year_time: datetime(year=year_time, month=12, day=26, hour=0),
                       offset=-5)
    END_OF_YEAR = Constant('end of year', ['the end of year', 'the end of the year', 'end of the year'],
                           options=[ConstantOption.YEAR_VARIABLE],
                           time_value=lambda year_time: datetime(year=year_time, month=12, day=31, hour=23, minute=59, second=59))
    BEGIN_OF_YEAR = Constant('begin of year', ['the begin of year', 'the begin of the year', 'begin of the year'],
                             options=[ConstantOption.YEAR_VARIABLE], time_value=year_start)
    INFINITY = Constant('infinity', ['inf'], value=None)
    # --- constants relative to "now" (evaluated at call time) ---
    TODAY = Constant('today', options=[ConstantOption.TIME_VARIABLE],
                     time_value=lambda _: datetime(datetime.today().year, datetime.today().month, datetime.today().day))
    TOMORROW = Constant('tomorrow', options=[ConstantOption.TIME_VARIABLE],
                        time_value=lambda _: datetime(datetime.today().year, datetime.today().month, datetime.today().day) + relativedelta(
                            days=1))
    YESTERDAY = Constant('yesterday', options=[ConstantOption.TIME_VARIABLE],
                         time_value=lambda _: datetime(datetime.today().year, datetime.today().month, datetime.today().day) - relativedelta(
                             days=1))
    NOW = Constant('now', ['at the moment', 'current time', 'current time now'], time_value=lambda _: datetime.now())
    ALL = [
        CHRISTMAS, HOLY_EVE, SILVESTER, EASTERN, NICHOLAS, HALLOWEEN, APRIL_FOOLS_DAY, THANKSGIVING, SAINT_PATRICKS_DAY, VALENTINES_DAY,
        PI_DAY, TAU_DAY,
        SUMMER_END, WINTER_END, SPRING_END, FALL_END, SUMMER_BEGIN, WINTER_BEGIN, SPRING_BEGIN, FALL_BEGIN,
        MORNING, EVENING, LUNCHTIME,
        BEGIN_AOC, END_AOC,
        END_OF_YEAR, BEGIN_OF_YEAR,
        INFINITY,
        TODAY, TOMORROW, YESTERDAY, NOW
    ]
    ALL_RELATIVE_CONSTANTS = [TODAY, TOMORROW, YESTERDAY, NOW]
class DatetimeDeltaConstants:
    """Named clock times within a day (morning, lunch, sunset, ...)."""
    # time_value is a tuple containing (hours, minutes, seconds)
    MIDNIGHT = Constant('midnight', value=0, options=[ConstantOption.DATE_VARIABLE], time_value=lambda _: (0, 0, 0))
    NIGHT = Constant('night', value=0, options=[ConstantOption.DATE_VARIABLE], time_value=lambda _: (21, 0, 0))
    MORNING_NIGHT = Constant('morning night', value=0, options=[ConstantOption.DATE_VARIABLE],
                             time_value=lambda _: (3, 0, 0))
    DAYLIGHT_CHANGE = Constant('daylight change', ['daylight saving', 'daylight saving time'], value=0,
                               options=[ConstantOption.YEAR_VARIABLE, ConstantOption.DATE_VARIABLE],
                               time_value=lambda _: (6, 0, 0))
    SUNRISE = Constant('sunrise', value=0, options=[ConstantOption.DATE_VARIABLE], time_value=lambda _: (7, 0, 0))
    MORNING = Constant('morning', value=0, options=[ConstantOption.DATE_VARIABLE], time_value=lambda _: (6, 0, 0))
    BREAKFAST = Constant('breakfast', value=0, options=[ConstantOption.DATE_VARIABLE], time_value=lambda _: (8, 0, 0))
    MIDDAY = Constant('midday', value=12, options=[ConstantOption.DATE_VARIABLE], time_value=lambda _: (12, 0, 0))
    LUNCH = Constant('lunch', ['lunchtime'], value=12, options=[ConstantOption.DATE_VARIABLE],
                     time_value=lambda _: (12, 0, 0))
    AFTERNOON = Constant('afternoon', value=12, options=[ConstantOption.DATE_VARIABLE], time_value=lambda _: (15, 0, 0))
    EVENING = Constant('evening', value=12, options=[ConstantOption.DATE_VARIABLE], time_value=lambda _: (18, 0, 0))
    DINNER = Constant('dinner', ['dinnertime'], value=12, options=[ConstantOption.DATE_VARIABLE],
                      time_value=lambda _: (19, 0, 0))
    DAWN = Constant('dawn', value=12, options=[ConstantOption.DATE_VARIABLE], time_value=lambda _: (6, 0, 0))
    DUSK = Constant('dusk', value=12, options=[ConstantOption.DATE_VARIABLE], time_value=lambda _: (20, 0, 0))
    SUNSET = Constant('sunset', value=12, options=[ConstantOption.DATE_VARIABLE], time_value=lambda _: (18, 30, 0))
    ALL = [
        MORNING, AFTERNOON, EVENING, NIGHT, MORNING_NIGHT, DAYLIGHT_CHANGE, MIDNIGHT, MIDDAY, DAWN, DUSK,
        SUNRISE, SUNSET, LUNCH, DINNER, BREAKFAST
    ]
class NumberConstants:
    """Spelled-out cardinal numbers one..thirty one, mapped to their int values."""
    # Presented to you by github copilot
    ONE = Constant('one', value=1)
    TWO = Constant('two', value=2)
    THREE = Constant('three', value=3)
    FOUR = Constant('four', value=4)
    FIVE = Constant('five', value=5)
    SIX = Constant('six', value=6)
    SEVEN = Constant('seven', value=7)
    EIGHT = Constant('eight', value=8)
    NINE = Constant('nine', value=9)
    TEN = Constant('ten', value=10)
    ELEVEN = Constant('eleven', value=11)
    TWELVE = Constant('twelve', value=12)
    THIRTEEN = Constant('thirteen', value=13)
    FOURTEEN = Constant('fourteen', value=14)
    FIFTEEN = Constant('fifteen', value=15)
    SIXTEEN = Constant('sixteen', value=16)
    SEVENTEEN = Constant('seventeen', value=17)
    EIGHTEEN = Constant('eighteen', value=18)
    NINETEEN = Constant('nineteen', value=19)
    TWENTY = Constant('twenty', value=20)
    TWENTY_ONE = Constant('twenty one', alias=["twentyone", "twenty-one"], value=21)
    TWENTY_TWO = Constant('twenty two', alias=["twentytwo", "twenty-two"], value=22)
    TWENTY_THREE = Constant('twenty three', alias=["twentythree", "twenty-three"], value=23)
    TWENTY_FOUR = Constant('twenty four', alias=["twentyfour", "twenty-four"], value=24)
    TWENTY_FIVE = Constant('twenty five', alias=["twentyfive", "twenty-five"], value=25)
    TWENTY_SIX = Constant('twenty six', alias=["twentysix", "twenty-six"], value=26)
    TWENTY_SEVEN = Constant('twenty seven', alias=["twentyseven", "twenty-seven"], value=27)
    TWENTY_EIGHT = Constant('twenty eight', alias=["twentyeight", "twenty-eight"], value=28)
    TWENTY_NINE = Constant('twenty nine', alias=["twentynine", "twenty-nine"], value=29)
    THIRTY = Constant('thirty', value=30)
    THIRTY_ONE = Constant('thirty one', alias=["thirtyone", "thirty-one"], value=31)
    # Reversed to avoid conflicts with other constants (one is included in twenty one)
    ALL = [ONE, TWO, THREE, FOUR, FIVE, SIX, SEVEN, EIGHT, NINE, TEN,
           ELEVEN, TWELVE, THIRTEEN, FOURTEEN, FIFTEEN, SIXTEEN, SEVENTEEN, EIGHTEEN, NINETEEN, TWENTY,
           TWENTY_ONE, TWENTY_TWO, TWENTY_THREE, TWENTY_FOUR, TWENTY_FIVE, TWENTY_SIX, TWENTY_SEVEN, TWENTY_EIGHT, TWENTY_NINE,
           THIRTY, THIRTY_ONE][::-1]
class NumberCountConstants:
    """Ordinal numbers first..thirty first (with '1st'/'1.' style aliases)."""
    # Presented to you by github copilot
    FIRST = Constant('first', alias=['1st', '1.'], value=1)
    SECOND = Constant('second', alias=['2nd', '2.'], value=2)
    THIRD = Constant('third', alias=['3rd', '3.'], value=3)
    FOURTH = Constant('fourth', alias=['4th', '4.'], value=4)
    FIFTH = Constant('fifth', alias=['5th', '5.'], value=5)
    SIXTH = Constant('sixth', alias=['6th', '6.'], value=6)
    SEVENTH = Constant('seventh', alias=['7th', '7.'], value=7)
    EIGHTH = Constant('eighth', alias=['8th', '8.'], value=8)
    NINTH = Constant('ninth', alias=['9th', '9.'], value=9)
    TENTH = Constant('tenth', alias=['10th', '10.'], value=10)
    ELEVENTH = Constant('eleventh', alias=['11th', '11.'], value=11)
    TWELFTH = Constant('twelfth', alias=['12th', '12.'], value=12)
    THIRTEENTH = Constant('thirteenth', alias=['13th', '13.'], value=13)
    FOURTEENTH = Constant('fourteenth', alias=['14th', '14.'], value=14)
    FIFTEENTH = Constant('fifteenth', alias=['15th', '15.'], value=15)
    SIXTEENTH = Constant('sixteenth', alias=['16th', '16.'], value=16)
    SEVENTEENTH = Constant('seventeenth', alias=['17th', '17.'], value=17)
    EIGHTEENTH = Constant('eighteenth', alias=['18th', '18.'], value=18)
    NINETEENTH = Constant('nineteenth', alias=['19th', '19.'], value=19)
    TWENTIETH = Constant('twentieth', alias=['20th', '20.'], value=20)
    TWENTY_FIRST = Constant('twenty first', alias=['21st', '21.', 'twentyfirst', 'twenty-first'], value=21)
    TWENTY_SECOND = Constant('twenty second', alias=['22nd', '22.', 'twentysecond', 'twenty-second'], value=22)
    TWENTY_THIRD = Constant('twenty third', alias=['23rd', '23.', 'twentythird', 'twenty-third'], value=23)
    TWENTY_FOURTH = Constant('twenty fourth', alias=['24th', '24.', 'twentyfourth', 'twenty-fourth'], value=24)
    TWENTY_FIFTH = Constant('twenty fifth', alias=['25th', '25.', 'twentyfifth', 'twenty-fifth'], value=25)
    TWENTY_SIXTH = Constant('twenty sixth', alias=['26th', '26.', 'twentysixth', 'twenty-sixth'], value=26)
    TWENTY_SEVENTH = Constant('twenty seventh', alias=['27th', '27.', 'twentyseventh', 'twenty-seventh'], value=27)
    TWENTY_EIGHTH = Constant('twenty eighth', alias=['28th', '28.', 'twentyeighth', 'twenty-eighth'], value=28)
    TWENTY_NINTH = Constant('twenty ninth', alias=['29th', '29.', 'twentyninth', 'twenty-ninth'], value=29)
    THIRTIETH = Constant('thirtieth', alias=['30th', '30.'], value=30)
    THIRTY_FIRST = Constant('thirty first', alias=['31st', '31.', 'thirthyfirst', "thirty-first"], value=31)
    # Reversed to avoid conflicts with other constants
    ALL = [FIRST, SECOND, THIRD, FOURTH, FIFTH, SIXTH, SEVENTH, EIGHTH, NINTH, TENTH,
           ELEVENTH, TWELFTH, THIRTEENTH, FOURTEENTH, FIFTEENTH, SIXTEENTH, SEVENTEENTH, EIGHTEENTH, NINETEENTH, TWENTIETH,
           TWENTY_FIRST, TWENTY_SECOND, TWENTY_THIRD, TWENTY_FOURTH, TWENTY_FIFTH, TWENTY_SIXTH, TWENTY_SEVENTH, TWENTY_EIGHTH,
           TWENTY_NINTH,
           THIRTIETH, THIRTY_FIRST][::-1]
class DatetimeConstants:
    """Date/time unit constants plus a mapper from single-letter unit codes."""
    SECONDS = Constant('seconds', ['second', 'sec', 'secs'])
    MINUTES = Constant('minutes', ['minute', 'min', 'mins'])
    QUARTERS = Constant('quarters', ['quarter', 'qtr', 'qtrs'])
    HOURS = Constant('hours', ['hour'])
    DAYS = Constant('days', ['day'])
    WEEKS = Constant('weeks', ['week'])
    MONTHS = Constant('months', ['month'])
    YEARS = Constant('years', ['year'])
    OLYMPIADS = Constant('olympiads', ['olympiad'])  # 4 years
    DECADES = Constant('decades', ['decade'])  # 10 years
    CENTURIES = Constant('centuries', ['century'])  # 100 years
    MILLENNIUMS = Constant('millenniums', ['millennium'])  # 1,000 years
    MEGAANNUMS = Constant('megaannuums', ['megaannuum'])  # 1,000,000 years
    GIGAANNUMS = Constant('gigaannuums', ['gigaannuum'])  # 1,000,000,000 years
    TIME = [SECONDS, MINUTES, QUARTERS, HOURS]
    # NOTE(review): OLYMPIADS is absent from DATE (and therefore ALL) —
    # confirm whether that is intentional.
    DATE = [DAYS, WEEKS, MONTHS, YEARS, DECADES, CENTURIES, MILLENNIUMS, MEGAANNUMS, GIGAANNUMS]
    ALL = [*DATE, *TIME]
    @classmethod
    def convert_from_mini_date(cls, md):
        """Map a one-letter unit code (case-insensitive) to its constant.

        Supported codes: s, m, h, w, d, y. Returns None for anything else,
        matching the implicit fall-through of the original if/elif chain.
        """
        return {
            "s": cls.SECONDS,
            "m": cls.MINUTES,
            "h": cls.HOURS,
            "w": cls.WEEKS,
            "d": cls.DAYS,
            "y": cls.YEARS,
        }.get(md.lower())
class WeekdayConstants:
    """Weekdays; each time_value maps a date to the next occurrence of that weekday.

    NOTE(review): the lambdas return the resulting datetime formatted as a
    *string* (f"{...}"), not a datetime object — confirm callers expect that.
    """
    MONDAY = Constant('monday', time_value=lambda date: f"{date + timedelta((0 - date.weekday()) % 7)}")
    TUESDAY = Constant('tuesday', time_value=lambda date: f"{date + timedelta((1 - date.weekday()) % 7)}")
    WEDNESDAY = Constant('wednesday', time_value=lambda date: f"{date + timedelta((2 - date.weekday()) % 7)}")
    THURSDAY = Constant('thursday', time_value=lambda date: f"{date + timedelta((3 - date.weekday()) % 7)}")
    FRIDAY = Constant('friday', time_value=lambda date: f"{date + timedelta((4 - date.weekday()) % 7)}")
    SATURDAY = Constant('saturday', time_value=lambda date: f"{date + timedelta((5 - date.weekday()) % 7)}")
    SUNDAY = Constant('sunday', time_value=lambda date: f"{date + timedelta((6 - date.weekday()) % 7)}")
    ALL = [MONDAY, TUESDAY, WEDNESDAY, THURSDAY, FRIDAY, SATURDAY, SUNDAY]
class MonthConstants:
    """Month names; time_value maps a year to the first day of that month."""
    JANUARY = Constant('january', ['jan'], time_value=lambda year_time: datetime(year=year_time, month=1, day=1))
    FEBRUARY = Constant('february', ['feb'], time_value=lambda year_time: datetime(year=year_time, month=2, day=1))
    MARCH = Constant('march', ['mar'], time_value=lambda year_time: datetime(year=year_time, month=3, day=1))
    APRIL = Constant('april', ['apr'], time_value=lambda year_time: datetime(year=year_time, month=4, day=1))
    MAY = Constant('may', time_value=lambda year_time: datetime(year=year_time, month=5, day=1))
    JUNE = Constant('june', ['jun'], time_value=lambda year_time: datetime(year=year_time, month=6, day=1))
    JULY = Constant('july', ['jul'], time_value=lambda year_time: datetime(year=year_time, month=7, day=1))
    AUGUST = Constant('august', ['aug'], time_value=lambda year_time: datetime(year=year_time, month=8, day=1))
    SEPTEMBER = Constant('september', ['sep'], time_value=lambda year_time: datetime(year=year_time, month=9, day=1))
    OCTOBER = Constant('october', ['oct'], time_value=lambda year_time: datetime(year=year_time, month=10, day=1))
    NOVEMBER = Constant('november', ['nov'], time_value=lambda year_time: datetime(year=year_time, month=11, day=1))
    DECEMBER = Constant('december', ['dec'], time_value=lambda year_time: datetime(year=year_time, month=12, day=1))
    ALL = [JANUARY, FEBRUARY, MARCH, APRIL, MAY, JUNE, JULY, AUGUST, SEPTEMBER, OCTOBER, NOVEMBER, DECEMBER]
class Keywords:
    """Keyword constants used by the parser.

    Unlike the weekday/month tables above, these constants carry no
    ``time_value`` callback: a keyword steers parsing but does not resolve
    to a point in time by itself.
    """
    OF = Constant('of')
    AFTER = Constant('after')
    BEFORE = Constant('before')
    NEXT = Constant('next')
    IN = Constant('in')
    FOR = Constant('for')
    PAST = Constant('past')
    # Every keyword, for token scanning.
    ALL = [OF, AFTER, BEFORE, NEXT, IN, FOR, PAST]
class Method:
    """Identifiers for the distinct parsing strategies the engine can apply.

    NOTE(review): unlike the sibling constant classes, no ``ALL`` list is
    defined here -- confirm whether that is intentional or lost content.
    """
    ABSOLUTE_PREPOSITIONS = MethodEnum('absolute_prepositions')
    ABSOLUTE_DATE_FORMATS = MethodEnum('absolute_date_formats')
    CONSTANTS = MethodEnum('constants')
    CONSTANTS_RELATIVE_EXTENSIONS = MethodEnum('constants_relative_extensions')
    DATETIME_DELTA_CONSTANTS = MethodEnum('datetime_delta_constants')
    RELATIVE_DATETIMES = MethodEnum('relative_datetimes')
| 64.719355 | 140 | 0.648358 | 19,790 | 0.986393 | 0 | 0 | 420 | 0.020934 | 0 | 0 | 4,565 | 0.227533 |
b2f5acf01cacb2c9f5deb948c74813e3ef341bde | 2,295 | py | Python | ontospy/extras/hacks/server.py | michaelyryi/Ontospy | f1a18daa296285ea02a97d1331e94140e801edc4 | [
"MIT"
] | null | null | null | ontospy/extras/hacks/server.py | michaelyryi/Ontospy | f1a18daa296285ea02a97d1331e94140e801edc4 | [
"MIT"
] | null | null | null | ontospy/extras/hacks/server.py | michaelyryi/Ontospy | f1a18daa296285ea02a97d1331e94140e801edc4 | [
"MIT"
] | null | null | null | # !/usr/bin/env python
# -*- coding: UTF-8 -*-
"""
UTILITY TO START A LOCAL SERVER
Copyright (c) 2015 __Michele Pasin__ <http://www.michelepasin.org>. All rights reserved.
Shows local repo within a server
"""
MODULE_VERSION = 0.1
USAGE = "@todo"
import time, optparse, os, rdflib, sys, webbrowser
import SimpleHTTPServer, SocketServer
from .. import main
from ..core.ontospy import Ontospy
from ..core.utils import *
DEFAULT_PORT = 7899
# in order to avoid waiting for a minute after restar
class NoBrokenServer(SocketServer.TCPServer):
"""
> utility class
solve the bug with server restart
http://stackoverflow.com/questions/10613977/a-simple-python-server-using-simplehttpserver-and-socketserver-how-do-i-close-t
"""
allow_reuse_address = True
def startServer(port=DEFAULT_PORT, location=None, openbrowser=True):
    """Serve `location` (or the current directory) over HTTP on `port`.

    Optionally opens the default browser at the served address, then blocks
    forever handling requests.
    """
    if location:
        os.chdir(location)
    server = NoBrokenServer(("", port), SimpleHTTPServer.SimpleHTTPRequestHandler)
    if openbrowser:
        webbrowser.open('http://127.0.0.1:%s' % port)
    print("serving at port", port)
    server.serve_forever()
def parse_options():
    """
    parse_options() -> opts, args

    Parse any command-line options given returning both
    the parsed options and arguments.

    https://docs.python.org/2/library/optparse.html
    """
    # NOTE(review): `ontospy` is never imported in this module (only
    # `from .. import main` and `Ontospy` are); `ontospy.VERSION` below
    # looks like it would raise NameError -- confirm against the package.
    parser = optparse.OptionParser(usage=USAGE, version=ontospy.VERSION)
    parser.add_option("-p", "--port",
                      action="store", type="int", default=DEFAULT_PORT, dest="port",
                      help="A number specifying which port to use for the server.")
    opts, args = parser.parse_args()
    # if not opts.all and not opts.query:
    # 	parser.print_help()
    # 	sys.exit(0)
    return opts, args
def main():
    """Command-line entry point: print a banner, make sure the local model
    repository exists, then serve it over HTTP and report elapsed time.

    NOTE(review): this `main` shadows the `main` imported at module top via
    `from .. import main`, and `ontospy` itself is never imported here, so
    the `ontospy.*` calls look like they would raise NameError -- confirm
    against the package layout.
    """
    # boilerplate
    print("OntoSpy " + ontospy.VERSION)
    ontospy.get_or_create_home_repo()
    ONTOSPY_LOCAL_MODELS = ontospy.get_home_location()
    opts, args = parse_options()
    sTime = time.time()
    # switch dir and start server
    # Fix: honour the -p/--port command-line option; previously the parsed
    # option was ignored and DEFAULT_PORT was always used.
    startServer(port=opts.port, location=ONTOSPY_LOCAL_MODELS)
    # print some stats....
    eTime = time.time()
    tTime = eTime - sTime
    printDebug("-" * 10)
    printDebug("Time: %0.2fs" % tTime)
if __name__ == '__main__':
    try:
        main()
        sys.exit(0)
    except KeyboardInterrupt:  # Ctrl-C
        # Bare `raise` re-raises with the original traceback; the previous
        # `raise e` re-raised from this frame, losing context.
        raise
| 19.285714 | 124 | 0.712418 | 260 | 0.11329 | 0 | 0 | 0 | 0 | 0 | 0 | 1,005 | 0.437908 |
b2f71e9c0092918763ba4db64292488e285e5cbe | 14,866 | py | Python | python/dolfinx_contact/unbiased/nitsche_unbiased.py | jorgensd/asimov-contact | 08704ade6343c346bc54dfd38186983cc7ab4485 | [
"MIT"
] | null | null | null | python/dolfinx_contact/unbiased/nitsche_unbiased.py | jorgensd/asimov-contact | 08704ade6343c346bc54dfd38186983cc7ab4485 | [
"MIT"
] | null | null | null | python/dolfinx_contact/unbiased/nitsche_unbiased.py | jorgensd/asimov-contact | 08704ade6343c346bc54dfd38186983cc7ab4485 | [
"MIT"
] | null | null | null | # Copyright (C) 2021 Sarah Roggendorf
#
# SPDX-License-Identifier: MIT
from typing import Callable, Tuple, Union
import dolfinx.common as _common
import dolfinx.fem as _fem
import dolfinx.log as _log
import dolfinx.mesh as _mesh
import dolfinx_cuas
import numpy as np
import ufl
from dolfinx.cpp.graph import AdjacencyList_int32
from dolfinx.cpp.mesh import MeshTags_int32
from petsc4py import PETSc as _PETSc
import dolfinx_contact
import dolfinx_contact.cpp
from dolfinx_contact.helpers import (epsilon, lame_parameters,
rigid_motions_nullspace, sigma_func)
kt = dolfinx_contact.cpp.Kernel
__all__ = ["nitsche_unbiased"]
def nitsche_unbiased(mesh: _mesh.Mesh, mesh_tags: list[MeshTags_int32],
                     domain_marker: MeshTags_int32,
                     surfaces: AdjacencyList_int32,
                     dirichlet: list[Tuple[int, Callable[[np.ndarray], np.ndarray]]],
                     neumann: list[Tuple[int, Callable[[np.ndarray], np.ndarray]]],
                     contact_pairs: list[Tuple[int, int]],
                     body_forces: list[Tuple[int, Callable[[np.ndarray], np.ndarray]]],
                     physical_parameters: dict[str, Union[bool, np.float64, int]],
                     nitsche_parameters: dict[str, np.float64],
                     quadrature_degree: int = 5, form_compiler_params: dict = None, jit_params: dict = None,
                     petsc_options: dict = None, newton_options: dict = None, initial_guess=None,
                     outfile: str = None, order: int = 1) -> Tuple[_fem.Function, int, int, float]:
    """
    Use custom kernel to compute the contact problem with two elastic bodies coming into contact.

    Parameters
    ==========
    mesh
        The input mesh
    mesh_tags
        A list of meshtags. The first element must contain the mesh_tags for all puppet surfaces,
        Dirichlet-surfaces and Neumann-surfaces
        All further elements may contain candidate_surfaces
    domain_marker
        marker for subdomains where a body force is applied
    surfaces
        Adjacency list. Links of i are meshtag values for contact surfaces in ith mesh_tag in mesh_tags
    dirichlet
        List of Dirichlet boundary conditions as pairs of (meshtag value, function), where function
        is a function to be interpolated into the dolfinx function space
    neumann
        Same as dirichlet for Neumann boundary conditions
    contact_pairs:
        list of pairs (i, j) marking the ith surface as a puppet surface and the jth surface
        as the corresponding candidate surface
    physical_parameters
        Optional dictionary with information about the linear elasticity problem.
        Valid (key, value) tuples are: ('E': float), ('nu', float), ('strain', bool)
    nitsche_parameters
        Optional dictionary with information about the Nitsche configuration.
        Valid (key, value) tuples are: ('gamma', float), ('theta', float) where theta can be -1, 0 or 1 for
        skew-symmetric, penalty like or symmetric enforcement of Nitsche conditions
    quadrature_degree
        The quadrature degree to use for the custom contact kernels
    form_compiler_params
        Parameters used in FFCX compilation of this form. Run `ffcx --help` at
        the commandline to see all available options. Takes priority over all
        other parameter values, except for `scalar_type` which is determined by
        DOLFINX.
    jit_params
        Parameters used in CFFI JIT compilation of C code generated by FFCX.
        See https://github.com/FEniCS/dolfinx/blob/main/python/dolfinx/jit.py
        for all available parameters. Takes priority over all other parameter values.
    petsc_options
        Parameters that is passed to the linear algebra backend
        PETSc. For available choices for the 'petsc_options' kwarg,
        see the `PETSc-documentation
        <https://petsc4py.readthedocs.io/en/stable/manual/ksp/>`
    newton_options
        Dictionary with Newton-solver options. Valid (key, item) tuples are:
        ("atol", float), ("rtol", float), ("convergence_criterion", "str"),
        ("max_it", int), ("error_on_nonconvergence", bool), ("relaxation_parameter", float)
    initial_guess
        A function containing an initial guess to use for the Newton-solver
    outfile
        File to append solver summary
    order
        The order of mesh and function space

    Returns
    =======
    (u, newton_iterations, krylov_iterations, solve_time) where ``u`` is the
    computed displacement field.
    """
    # Mutable-default workaround: normalise the optional dicts.
    form_compiler_params = {} if form_compiler_params is None else form_compiler_params
    jit_params = {} if jit_params is None else jit_params
    petsc_options = {} if petsc_options is None else petsc_options
    newton_options = {} if newton_options is None else newton_options

    # Validate/extract the physical parameters (E, nu, plane strain/stress).
    strain = physical_parameters.get("strain")
    if strain is None:
        raise RuntimeError("Need to supply if problem is plane strain (True) or plane stress (False)")
    else:
        plane_strain = bool(strain)
    _E = physical_parameters.get("E")
    if _E is not None:
        E = np.float64(_E)
    else:
        raise RuntimeError("Need to supply Youngs modulus")
    if physical_parameters.get("nu") is None:
        raise RuntimeError("Need to supply Poisson's ratio")
    else:
        nu = physical_parameters.get("nu")

    # Compute lame parameters
    mu_func, lambda_func = lame_parameters(plane_strain)
    mu = mu_func(E, nu)
    lmbda = lambda_func(E, nu)
    sigma = sigma_func(mu, lmbda)

    # Nitche parameters and variables
    theta = nitsche_parameters.get("theta")
    if theta is None:
        raise RuntimeError("Need to supply theta for Nitsche imposition of boundary conditions")
    _gamma = nitsche_parameters.get("gamma")
    if _gamma is None:
        raise RuntimeError("Need to supply Coercivity/Stabilization parameter for Nitsche condition")
    else:
        # Scale the stabilisation parameter by Young's modulus.
        gamma: np.float64 = _gamma * E
    # If True, Dirichlet conditions are imposed strongly (lifting); otherwise
    # they are imposed weakly via Nitsche terms below.
    lifting = nitsche_parameters.get("lift_bc", False)

    # Functions space and FEM functions
    V = _fem.VectorFunctionSpace(mesh, ("CG", order))
    u = _fem.Function(V)
    v = ufl.TestFunction(V)
    du = ufl.TrialFunction(V)
    h = ufl.CellDiameter(mesh)
    n = ufl.FacetNormal(mesh)

    # Integration measure and ufl part of linear/bilinear form
    # metadata = {"quadrature_degree": quadrature_degree}
    dx = ufl.Measure("dx", domain=mesh, subdomain_data=domain_marker)
    ds = ufl.Measure("ds", domain=mesh,  # metadata=metadata,
                     subdomain_data=mesh_tags[0])
    J = ufl.inner(sigma(du), epsilon(v)) * dx
    F = ufl.inner(sigma(u), epsilon(v)) * dx

    # Nitsche stabilisation terms on each puppet contact surface.
    for contact_pair in contact_pairs:
        surface_value = int(surfaces.links(0)[contact_pair[0]])
        J += - 0.5 * theta * h / gamma * ufl.inner(sigma(du) * n, sigma(v) * n) * \
            ds(surface_value)
        F += - 0.5 * theta * h / gamma * ufl.inner(sigma(u) * n, sigma(v) * n) * \
            ds(surface_value)

    # Dirichlet boundary conditions
    bcs = []
    if lifting:
        # Strong imposition: interpolate each boundary function and create
        # classic DirichletBC objects on the tagged facets.
        tdim = mesh.topology.dim
        for bc in dirichlet:
            facets = mesh_tags[0].find(bc[0])
            cells = _mesh.compute_incident_entities(mesh, facets, tdim - 1, tdim)
            u_bc = _fem.Function(V)
            u_bc.interpolate(bc[1], cells)
            u_bc.x.scatter_forward()
            bcs.append(_fem.dirichletbc(u_bc, _fem.locate_dofs_topological(V, tdim - 1, facets)))
    else:
        # Weak (Nitsche) imposition of the Dirichlet data.
        for bc in dirichlet:
            f = _fem.Function(V)
            f.interpolate(bc[1])
            F += - ufl.inner(sigma(u) * n, v) * ds(bc[0])\
                - theta * ufl.inner(sigma(v) * n, u - f) * \
                ds(bc[0]) + gamma / h * ufl.inner(u - f, v) * ds(bc[0])
            J += - ufl.inner(sigma(du) * n, v) * ds(bc[0])\
                - theta * ufl.inner(sigma(v) * n, du) * \
                ds(bc[0]) + gamma / h * ufl.inner(du, v) * ds(bc[0])

    # Neumann boundary conditions
    for bc in neumann:
        g = _fem.Function(V)
        g.interpolate(bc[1])
        F -= ufl.inner(g, v) * ds(bc[0])

    # body forces
    for bf in body_forces:
        f = _fem.Function(V)
        f.interpolate(bf[1])
        F -= ufl.inner(f, v) * dx(bf[0])

    # Custom assembly
    # create contact class
    with _common.Timer("~Contact: Init"):
        contact = dolfinx_contact.cpp.Contact(mesh_tags, surfaces, contact_pairs,
                                              V._cpp_object, quadrature_degree=quadrature_degree)
    with _common.Timer("~Contact: Distance maps"):
        for i in range(len(contact_pairs)):
            contact.create_distance_map(i)

    # pack constants
    consts = np.array([gamma, theta])

    # Pack material parameters mu and lambda on each contact surface
    with _common.Timer("~Contact: Interpolate coeffs (mu, lmbda)"):
        V2 = _fem.FunctionSpace(mesh, ("DG", 0))
        lmbda2 = _fem.Function(V2)
        lmbda2.interpolate(lambda x: np.full((1, x.shape[1]), lmbda))
        mu2 = _fem.Function(V2)
        mu2.interpolate(lambda x: np.full((1, x.shape[1]), mu))
    entities = []
    with _common.Timer("~Contact: Compute active entities"):
        for pair in contact_pairs:
            entities.append(contact.active_entities(pair[0]))
    material = []
    with _common.Timer("~Contact: Pack coeffs (mu, lmbda"):
        for i in range(len(contact_pairs)):
            material.append(dolfinx_cuas.pack_coefficients([mu2, lmbda2], entities[i]))

    # Pack celldiameter on each surface
    h_packed = []
    with _common.Timer("~Contact: Compute and pack celldiameter"):
        surface_cells = np.unique(np.hstack([entities[i][:, 0] for i in range(len(contact_pairs))]))
        h_int = _fem.Function(V2)
        expr = _fem.Expression(h, V2.element.interpolation_points)
        h_int.interpolate(expr, surface_cells)
        for i in range(len(contact_pairs)):
            h_packed.append(dolfinx_cuas.pack_coefficients([h_int], entities[i]))

    # Pack gap, normals and test functions on each surface
    gaps = []
    normals = []
    test_fns = []
    with _common.Timer("~Contact: Pack gap, normals, testfunction"):
        for i in range(len(contact_pairs)):
            gaps.append(contact.pack_gap(i))
            normals.append(contact.pack_ny(i, gaps[i]))
            test_fns.append(contact.pack_test_functions(i, gaps[i]))

    # Concatenate all coeffs
    coeffs_const = []
    for i in range(len(contact_pairs)):
        coeffs_const.append(np.hstack([material[i], h_packed[i], gaps[i], normals[i], test_fns[i]]))

    # Generate Jacobian data structures
    J_custom = _fem.form(J, form_compiler_params=form_compiler_params, jit_params=jit_params)
    with _common.Timer("~Contact: Generate Jacobian kernel"):
        kernel_jac = contact.generate_kernel(kt.Jac)
    with _common.Timer("~Contact: Create matrix"):
        J = contact.create_matrix(J_custom)

    # Generate residual data structures
    F_custom = _fem.form(F, form_compiler_params=form_compiler_params, jit_params=jit_params)
    with _common.Timer("~Contact: Generate residual kernel"):
        kernel_rhs = contact.generate_kernel(kt.Rhs)
    with _common.Timer("~Contact: Create vector"):
        b = _fem.petsc.create_vector(F_custom)

    @_common.timed("~Contact: Update coefficients")
    def compute_coefficients(x, coeffs):
        # Refresh the state-dependent part of the packed coefficients (the
        # displacement on both sides of each contact pair) for the current
        # Newton iterate `x`.
        u.vector[:] = x.array
        u_candidate = []
        with _common.Timer("~~Contact: Pack u contact"):
            for i in range(len(contact_pairs)):
                u_candidate.append(contact.pack_u_contact(i, u._cpp_object, gaps[i]))
        u_puppet = []
        with _common.Timer("~~Contact: Pack u"):
            for i in range(len(contact_pairs)):
                u_puppet.append(dolfinx_cuas.pack_coefficients([u], entities[i]))
        for i in range(len(contact_pairs)):
            c_0 = np.hstack([coeffs_const[i], u_puppet[i], u_candidate[i]])
            coeffs[i][:, :] = c_0[:, :]

    @_common.timed("~Contact: Assemble residual")
    def compute_residual(x, b, coeffs):
        b.zeroEntries()
        with _common.Timer("~~Contact: Contact contributions (in assemble vector)"):
            for i in range(len(contact_pairs)):
                contact.assemble_vector(b, i, kernel_rhs, coeffs[i], consts)
        with _common.Timer("~~Contact: Standard contributions (in assemble vector)"):
            _fem.petsc.assemble_vector(b, F_custom)

        # Apply boundary condition
        if lifting:
            _fem.petsc.apply_lifting(b, [J_custom], bcs=[bcs], x0=[x], scale=-1.0)
        b.ghostUpdate(addv=_PETSc.InsertMode.ADD, mode=_PETSc.ScatterMode.REVERSE)
        _fem.petsc.set_bc(b, bcs, x, -1.0)

    @_common.timed("~Contact: Assemble matrix")
    def compute_jacobian_matrix(x, A, coeffs):
        A.zeroEntries()
        with _common.Timer("~~Contact: Contact contributions (in assemble matrix)"):
            for i in range(len(contact_pairs)):
                contact.assemble_matrix(A, [], i, kernel_jac, coeffs[i], consts)
        with _common.Timer("~~Contact: Standard contributions (in assemble matrix)"):
            _fem.petsc.assemble_matrix(A, J_custom, bcs=bcs)
        A.assemble()

    # coefficient arrays
    # NOTE(review): np.array over per-pair matrices assumes every pair has
    # the same number of active entities; otherwise this yields a ragged
    # object array -- confirm against the solver's expectations.
    num_coeffs = contact.coefficients_size()
    coeffs = np.array([np.zeros((len(entities[i]), num_coeffs)) for i in range(len(contact_pairs))])
    newton_solver = dolfinx_contact.NewtonSolver(mesh.comm, J, b, coeffs)

    # Set matrix-vector computations
    newton_solver.set_residual(compute_residual)
    newton_solver.set_jacobian(compute_jacobian_matrix)
    newton_solver.set_coefficients(compute_coefficients)

    # Set rigid motion nullspace
    null_space = rigid_motions_nullspace(V)
    newton_solver.A.setNearNullSpace(null_space)

    # Set Newton solver options
    newton_solver.set_newton_options(newton_options)

    # Set initial guess
    if initial_guess is None:
        u.x.array[:] = 0
    else:
        u.x.array[:] = initial_guess.x.array[:]

    # Set Krylov solver options
    newton_solver.set_krylov_options(petsc_options)
    dofs_global = V.dofmap.index_map_bs * V.dofmap.index_map.size_global
    _log.set_log_level(_log.LogLevel.OFF)

    # Solve non-linear problem
    timing_str = f"~Contact: {id(dofs_global)} Solve Nitsche"
    with _common.Timer(timing_str):
        n, converged = newton_solver.solve(u)
    if outfile is not None:
        viewer = _PETSc.Viewer().createASCII(outfile, "a")
        newton_solver.krylov_solver.view(viewer)
    newton_time = _common.timing(timing_str)
    if not converged:
        raise RuntimeError("Newton solver did not converge")
    u.x.scatter_forward()
    print(f"{dofs_global}\n Number of Newton iterations: {n:d}\n",
          f"Number of Krylov iterations {newton_solver.krylov_iterations}\n", flush=True)
    return u, n, newton_solver.krylov_iterations, newton_time[1]
| 42.965318 | 108 | 0.654245 | 0 | 0 | 0 | 0 | 1,921 | 0.129221 | 0 | 0 | 5,039 | 0.338961 |
b2f7f4cc70879c961d4345ed522c0b9c510c8bf6 | 5,218 | py | Python | scratch/movielens-mongodb.py | crcsmnky/movielens-data-exports | f316f1367abef80a1abce64d3adb3bd3effc6365 | [
"Apache-2.0"
] | 1 | 2022-02-01T19:44:36.000Z | 2022-02-01T19:44:36.000Z | scratch/movielens-mongodb.py | crcsmnky/movielens-data-exports | f316f1367abef80a1abce64d3adb3bd3effc6365 | [
"Apache-2.0"
] | null | null | null | scratch/movielens-mongodb.py | crcsmnky/movielens-data-exports | f316f1367abef80a1abce64d3adb3bd3effc6365 | [
"Apache-2.0"
] | null | null | null | """
usage: python movielens-mongodb.py [movies] [ratings] [links]
"""
import sys
import re
import csv
import os
# import tmdbsimple as tmdb
from pymongo import MongoClient
from pymongo import ASCENDING, DESCENDING
from datetime import datetime
from time import sleep
def import_movies(db, mfile):
    """Bulk-load MovieLens movies from the CSV stream `mfile` into `db`.

    Each row becomes a document with a numeric id, the bare title, the year
    taken from the trailing "(YYYY)" suffix, and the genre list split on
    '|'.  Documents are flushed to the `movies` collection in unordered
    insert batches of 1000.
    """
    batch = []
    for row in csv.DictReader(mfile):
        title = row['title']
        batch.append({
            'movieid': int(row['movieId']),
            'title': title.split(' (')[0],
            'year': title.split(' (')[-1][:-1],
            'genres': row['genres'].split('|'),
        })
        if len(batch) % 1000 == 0:
            db.command('insert', 'movies', documents=batch, ordered=False)
            batch = []
    if batch:
        db.command('insert', 'movies', documents=batch, ordered=False)
def import_ratings(db, rfile):
    """Bulk-load MovieLens ratings and update per-movie / per-user tallies.

    Every CSV row stages three writes: the raw rating document, an increment
    of the movie's rating count and running total, and an upsert bumping the
    user's rating count.  Staged writes are flushed as unordered commands in
    batches of 1000 rows.
    """
    n_rows = 0
    rating_docs, movie_updates, user_updates = [], [], []

    def _flush():
        # Issue the three staged command batches in the original order.
        db.command('insert', 'ratings', documents=rating_docs, ordered=False)
        db.command('update', 'movies', updates=movie_updates, ordered=False)
        db.command('update', 'users', updates=user_updates, ordered=False)

    for row in csv.DictReader(rfile):
        movie_id = int(row['movieId'])
        user_id = int(row['userId'])
        score = float(row['rating'])
        rating_docs.append({
            'movieid': movie_id,
            'userid': user_id,
            'rating': score,
            # fromtimestamp is local-time, matching the original behaviour.
            'ts': datetime.fromtimestamp(float(row['timestamp'])),
        })
        movie_updates.append({
            'q': {'movieid': movie_id},
            'u': {'$inc': {'ratings': 1, 'total_rating': score}},
        })
        user_updates.append({
            'q': {'userid': user_id},
            'u': {'$inc': {'ratings': 1}},
            'upsert': True,
        })
        n_rows += 1
        if n_rows % 1000 == 0:
            _flush()
            rating_docs, movie_updates, user_updates = [], [], []
    if n_rows > 0:
        _flush()
def import_links(db, lfile):
    """Attach IMDb/TMDb ids from the links CSV to existing movie documents.

    Each row becomes an update setting `imdb` and `tmdb` on the document
    whose `movieid` matches, flushed as unordered commands in batches of
    1000.  Malformed rows (missing columns or a non-numeric movieId) are
    skipped.
    """
    count = 0
    updates = []
    for row in csv.DictReader(lfile):
        try:
            update = {
                'q': {'movieid': int(row['movieId'])},
                'u': {'$set': {
                    'imdb': row['imdbId'],
                    'tmdb': row['tmdbId'],
                }},
            }
        except (KeyError, TypeError, ValueError):
            # Skip rows lacking a column or with a non-integer movieId; the
            # bare `except:` this replaces also silently hid real bugs.
            continue
        updates.append(update)
        count += 1
        if count % 1000 == 0:
            db.command('update', 'movies', updates=updates, ordered=False)
            updates = []
    if count > 0:
        db.command('update', 'movies', updates=updates, ordered=False)
def create_genres(db):
    """Aggregate genre frequencies and materialise a `genres` collection.

    Unwinds each movie's genre array, counts occurrences per genre, and
    inserts one document per genre with a sequential integer `_id`.
    """
    pipeline = [
        {'$unwind': '$genres'},
        {'$group': {'_id': '$genres', 'count': {'$sum': 1}}},
    ]
    genres = []
    for idx, doc in enumerate(db.movies.aggregate(pipeline, cursor={})):
        genres.append({'_id': idx, 'name': doc['_id'], 'count': doc['count']})
    db.command('insert', 'genres', documents=genres, ordered=False)
def update_avg_ratings(db):
    """Store `avg_rating` = total_rating / ratings on every movie document.

    Movies with no ratings yet (division by zero) or without the tally
    fields are skipped.  Unlike the previous bare `except:` version, errors
    raised by the database update itself are no longer silently swallowed.
    """
    for movie in db.movies.find():
        try:
            avg = float(movie['total_rating']) / movie['ratings']
        except (KeyError, TypeError, ValueError, ZeroDivisionError):
            # No ratings, or tally fields absent/malformed -- leave untouched.
            continue
        db.movies.update_one({'_id': movie['_id']}, {'$set': {'avg_rating': avg}})
def get_poster_links(db):
    """Fetch TMDb poster URLs for every movie and store them as `poster`.

    Walks the collection in pages of 40 with a 10s sleep between pages --
    presumably to respect TMDb rate limits (confirm).

    NOTE(review): this function is Python 2 only (`print i`, `xrange`) and
    the `tmdbsimple as tmdb` import at the top of the file is commented out,
    so as written the `tmdb.*` calls would raise NameError -- confirm before
    use.
    """
    tmdb.API_KEY='[YOUR API KEY HERE]'
    conf = tmdb.Configuration()
    # URL template: configured base URL + fixed 'w154' width + poster path.
    imgurl = conf.info()['images']['base_url'] + 'w154' + '{path}'
    allmovies = db.movies.find()
    for i in xrange(0, allmovies.count(), 40):
        print i
        for j in xrange(i, i+40):
            try:
                movie = tmdb.Movies(int(allmovies[j]['tmdb'])).info()
                db.movies.update_one(
                    {'_id': allmovies[j]['_id']},
                    {'$set': {'poster': imgurl.format(path=movie['poster_path'])}}
                )
            except:
                # Best-effort: missing/invalid tmdb ids, API errors and
                # running past the cursor end all just skip the movie.
                continue
        sleep(10)
def ensure_indexes(db):
    """Create the indexes the import/query patterns above rely on.

    NOTE(review): ``ensure_index`` is deprecated in modern PyMongo (removed
    in 4.x) in favour of ``create_index`` -- confirm the driver version.
    """
    db.movies.ensure_index("movieid")
    db.movies.ensure_index("ratings")
    db.movies.ensure_index("genres")
    # Compound index supporting per-user lookups of a movie's rating.
    db.ratings.ensure_index([("userid", ASCENDING),("movieid", ASCENDING)])
    db.users.ensure_index("userid")
    db.genres.ensure_index("name")
def main():
    """Entry point: import the three MovieLens CSVs named on the command
    line (movies, ratings, links -- in that order), then derive genres,
    average ratings and poster links.

    Connection settings come from MONGODB_HOST / MONGODB_PORT / MONGODB_DB
    environment variables, defaulting to a local `movieweb` database.
    """
    host = os.environ.get('MONGODB_HOST', 'localhost')
    # Fix: environment variables are strings, but MongoClient requires an
    # int port; the previous code passed the raw env value through.
    port = int(os.environ.get('MONGODB_PORT', 27017))
    database = os.environ.get('MONGODB_DB', 'movieweb')
    db = MongoClient(host, port)[database]
    with open(sys.argv[1]) as mfile:
        import_movies(db, mfile)
    with open(sys.argv[2]) as rfile:
        import_ratings(db, rfile)
    with open(sys.argv[3]) as lfile:
        import_links(db, lfile)
    create_genres(db)
    update_avg_ratings(db)
    get_poster_links(db)
    # ensure_indexes(db)
# Allow running this module directly as a script.
if __name__ == '__main__':
    main()
| 26.622449 | 116 | 0.526639 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,049 | 0.201035 |
b2f7fb8b602bfdd673abb75e7f89ca8dc32301c9 | 1,400 | py | Python | coretabs ATM py/withdraw.py | attia7/Test | c74f09816ba2e0798b0533e31ea8b72249dec598 | [
"MIT"
] | null | null | null | coretabs ATM py/withdraw.py | attia7/Test | c74f09816ba2e0798b0533e31ea8b72249dec598 | [
"MIT"
] | 11 | 2020-03-24T17:40:26.000Z | 2022-01-13T01:42:38.000Z | coretabs ATM py/withdraw.py | attia7/AttiaGit | c74f09816ba2e0798b0533e31ea8b72249dec598 | [
"MIT"
] | null | null | null | balance = 700
# Note denominations the machine can dispense, largest first.
papers = [100, 50, 10, 5, 4, 3, 2, 1]
def withdraw(balance, request):
    """Dispense `request` from `balance` using the available denominations.

    Prints one 'give <note>' line per dispensed note (largest first) and
    returns the new balance; if funds are insufficient the balance is
    returned unchanged after printing an apology.
    """
    if balance < request:
        print('Sorry, you are try withdraw: {0}, but Your balance just : {1}'.format(request, balance))
    else:
        print('your balance >>', balance)
        remaining = request
        while remaining > 0:
            for note in papers:
                while remaining >= note:
                    print('give', note)
                    remaining -= note
        balance -= request
    return balance
def withdraw1(balance, request):
    """Variant dispenser using only 100/50/10/5 notes plus an exact remainder.

    Prints each dispensed amount and returns the updated balance; an
    insufficient balance is returned unchanged after printing an apology.
    """
    if balance < request:
        print('Sorry, you are try withdraw: {0}, but Your balance just : {1}'.format(request, balance))
    else:
        print('your balance >>', balance)
        balance -= request
        while request > 0:
            # Pick the largest standard note that fits; anything under 5 is
            # handed out in a single exact amount.
            for denom in (100, 50, 10, 5):
                if request >= denom:
                    note = denom
                    break
            else:
                note = request
            print('give', note)
            request -= note
    return balance
# Demo run: each call prints the dispensed notes and rebinds `balance`
# to the value the function returns.
balance = withdraw(balance, 777)
balance = withdraw(balance, 276)
balance = withdraw1(balance, 276)
balance = withdraw(balance, 34)
balance = withdraw1(balance, 5)
balance = withdraw1(balance, 500) | 30.434783 | 103 | 0.512857 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 172 | 0.122857 |
b2f8b3e8d1857a6e5f87e42cb97fafb1e51c9432 | 1,126 | py | Python | dynamicform/widgets.py | cdgagne/django-dynamicform | f60c549d01c6c091addaf0b4121367d7a1d917f0 | [
"MIT"
] | null | null | null | dynamicform/widgets.py | cdgagne/django-dynamicform | f60c549d01c6c091addaf0b4121367d7a1d917f0 | [
"MIT"
] | null | null | null | dynamicform/widgets.py | cdgagne/django-dynamicform | f60c549d01c6c091addaf0b4121367d7a1d917f0 | [
"MIT"
] | null | null | null | from django import forms
from django.forms.utils import flatatt
from django.utils import formats
from django.utils.encoding import force_text
from django.utils.html import format_html
class AjaxValidatingTextInput(forms.TextInput):
    """Text input that renders an adjacent error placeholder for AJAX validation.

    The widget always carries the ``ajax-validate`` CSS class (so front-end
    code can locate it) and emits an empty ``<span>`` after the input where
    validation errors are injected client-side.
    """
    def __init__(self, *args, **kwargs):
        super(AjaxValidatingTextInput, self).__init__(*args, **kwargs)
        # Merge rather than replace: the previous version assigned a brand
        # new dict here, silently discarding any `attrs` the caller passed
        # to the constructor.
        classes = self.attrs.get('class', '').split()
        if 'ajax-validate' not in classes:
            classes.append('ajax-validate')
        self.attrs['class'] = ' '.join(classes)

    def render(self, name, value, attrs=None):
        """Return the <input> markup followed by the empty error container."""
        if value is None:
            value = ''
        final_attrs = self.build_attrs(attrs, type=self.input_type, name=name)
        if value != '':
            # Only add the 'value' attribute if a value is non-empty.
            final_attrs['value'] = force_text(self._format_value(value))
        input_html = format_html('<input{} />', flatatt(final_attrs))
        # The span is targeted by the AJAX validation script via this id.
        # NOTE(review): assumes final_attrs contains an 'id' (Django's
        # auto_id normally supplies one); rendering without an id would
        # raise KeyError here -- confirm.
        error_div_attrs = {
            'id': 'form_error_%s' % final_attrs['id']
        }
        error_div = '<div><span class="error-container label label-danger"{}></span></div>'
        error_div_html = format_html(error_div, flatatt(error_div_attrs))
        return '%s%s' % (input_html, error_div_html)
| 41.703704 | 91 | 0.652753 | 939 | 0.833925 | 0 | 0 | 0 | 0 | 0 | 0 | 203 | 0.180284 |
b2fa232a245de5b9da5a3e07a8eb834b4df0cb1b | 1,406 | py | Python | ocdskingfisherprocess/maindatabase/migrations/versions/8e3f80979dc9_change_unique_constraint_on_collection.py | matiasSanabria/kingfisher-process | 88cb768aaa562714c8bd53e05717639faf041501 | [
"BSD-3-Clause"
] | 1 | 2019-04-11T10:17:32.000Z | 2019-04-11T10:17:32.000Z | ocdskingfisherprocess/maindatabase/migrations/versions/8e3f80979dc9_change_unique_constraint_on_collection.py | matiasSanabria/kingfisher-process | 88cb768aaa562714c8bd53e05717639faf041501 | [
"BSD-3-Clause"
] | 282 | 2018-12-20T16:49:22.000Z | 2022-02-01T00:48:10.000Z | ocdskingfisherprocess/maindatabase/migrations/versions/8e3f80979dc9_change_unique_constraint_on_collection.py | matiasSanabria/kingfisher-process | 88cb768aaa562714c8bd53e05717639faf041501 | [
"BSD-3-Clause"
] | 7 | 2019-04-15T13:36:18.000Z | 2021-03-02T16:25:41.000Z | """Change unique constraint on collection
Revision ID: 8e3f80979dc9
Revises: 3d5fae27a215
Create Date: 2019-12-18 13:14:56.466907
"""
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = '8e3f80979dc9'
down_revision = '3d5fae27a215'
branch_labels = None
depends_on = None
def upgrade():
    """Restrict the uniqueness rule on ``collection`` to original collections.

    The old unique constraint (dropped below) is replaced by a partial
    unique index on (source_id, data_version, sample) that applies only to
    rows with an empty transform_type, and transform_type becomes NOT NULL
    (existing NULLs are rewritten to '').

    This query confirmed no existing rows would violate the new index:

        SELECT source_id, data_version, sample, COUNT(*) FROM collection
        WHERE transform_type IS NULL or transform_type = ''
        GROUP BY source_id, data_version, sample
        HAVING COUNT(*) > 1;
    """
    # 0 rows
    op.drop_constraint('unique_collection_identifiers', 'collection')
    # Partial unique index: only untransformed collections must be unique.
    op.create_index('unique_collection_identifiers', 'collection', ['source_id', 'data_version', 'sample'],
                    unique=True, postgresql_where=sa.text("transform_type = ''"))
    # Normalise NULLs before tightening the column to NOT NULL.
    op.execute("UPDATE collection SET transform_type = '' WHERE transform_type IS NULL")
    op.alter_column('collection', 'transform_type', nullable=False)
def downgrade():
    """Reverse :func:`upgrade`: restore the original five-column unique
    constraint, make transform_type nullable again and turn '' back into
    NULL."""
    op.drop_index('unique_collection_identifiers', 'collection')
    op.create_unique_constraint('unique_collection_identifiers', 'collection', [
        'source_id', 'data_version', 'sample', 'transform_from_collection_id', 'transform_type',
    ])
    op.alter_column('collection', 'transform_type', nullable=True)
    op.execute("UPDATE collection SET transform_type = NULL WHERE transform_type = ''")
| 32.697674 | 107 | 0.72404 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 921 | 0.65505 |
b2fa730dbd73b043c294390890c9c13d76abf7ce | 1,685 | py | Python | tests/stdlib_test.py | misantroop/jsonpickle | 97f4a05ccffe8593458b4b787c3fc97622f23cec | [
"BSD-3-Clause"
] | null | null | null | tests/stdlib_test.py | misantroop/jsonpickle | 97f4a05ccffe8593458b4b787c3fc97622f23cec | [
"BSD-3-Clause"
] | 1 | 2019-04-03T20:19:40.000Z | 2019-04-03T20:19:40.000Z | tests/stdlib_test.py | parsons-kyle-89/jsonpickle | 2828dd4a247bbae9d37a3d78194caaaeadeb2ed2 | [
"BSD-3-Clause"
] | null | null | null | # -*- coding: utf-8 -*-
"""Test miscellaneous objects from the standard library"""
import uuid
import unittest
import jsonpickle
class UUIDTestCase(unittest.TestCase):
    """Round-trip tests for uuid.UUID instances."""

    def test_random_uuid(self):
        """A freshly generated UUID survives encode/decode unchanged."""
        original = uuid.uuid4()
        restored = jsonpickle.decode(jsonpickle.encode(original))
        self.assertEqual(original.hex, restored.hex)

    def test_known_uuid(self):
        """A UUID built from a fixed hex string round-trips to the same hex."""
        hex_value = '28b56adbd18f44e2a5556bba2f23e6f6'
        restored = jsonpickle.decode(jsonpickle.encode(uuid.UUID(hex_value)))
        self.assertEqual(hex_value, restored.hex)
class BytesTestCase(unittest.TestCase):
    """Round-trip test for a raw binary payload."""

    def test_bytestream(self):
        """Arbitrary bytes (here, what looks like an HDF5 file signature and
        header) survive encode/decode unchanged."""
        payload = (b'\x89HDF\r\n\x1a\n\x00\x00\x00\x00\x00\x08\x08\x00'
                   b'\x04\x00\x10\x00\x00\x00\x00\x00\x00\x00\x00\x00'
                   b'\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xffh'
                   b'\x848\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff'
                   b'\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00`\x00\x00'
                   b'\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00'
                   b'\x00\x88\x00\x00\x00\x00\x00\x00\x00\xa8\x02\x00'
                   b'\x00\x00\x00\x00\x00\x01\x00\x01\x00')
        restored = jsonpickle.decode(jsonpickle.encode(payload))
        self.assertEqual(payload, restored)
def suite():
    """Build a TestSuite containing every test case in this module."""
    cases = unittest.TestSuite()
    for case_cls in (UUIDTestCase, BytesTestCase):
        cases.addTest(unittest.makeSuite(case_cls))
    return cases
# Run this module's suite when executed directly.
if __name__ == '__main__':
    unittest.main(defaultTest='suite')
| 30.089286 | 70 | 0.629674 | 1,310 | 0.777448 | 0 | 0 | 0 | 0 | 0 | 0 | 532 | 0.315727 |
b2fb2e91c6a7d4ac7cee9919699e52b9663ccb72 | 3,435 | py | Python | app/core.py | JoonyoungYi/lol-recommend | b92efff858e491b68c902abb6de31212c688b47e | [
"Apache-2.0"
] | null | null | null | app/core.py | JoonyoungYi/lol-recommend | b92efff858e491b68c902abb6de31212c688b47e | [
"Apache-2.0"
] | null | null | null | app/core.py | JoonyoungYi/lol-recommend | b92efff858e491b68c902abb6de31212c688b47e | [
"Apache-2.0"
] | null | null | null | import os
import time
import tensorflow as tf
import numpy as np
import pandas as pd
from .configs import *
from .models import init_models
# Training schedule: hard cap on epochs, with optional early stopping after
# EARLY_STOP_MAX_ITER consecutive epochs without validation improvement.
EPOCH_NUMBER = 10000
EARLY_STOP = True
EARLY_STOP_MAX_ITER = 40
def _train(session, saver, models, train_data, valid_data):
    """Run the TF1 training loop and return the path of the best checkpoint.

    Each epoch runs one training step (if a train op exists) and one
    validation pass.  With EARLY_STOP enabled, the model is checkpointed
    whenever validation RMSE improves and training stops after
    EARLY_STOP_MAX_ITER epochs without improvement; otherwise every epoch
    is checkpointed.
    """
    model_file_path = _init_model_file_path()
    prev_valid_rmse = float("Inf")
    early_stop_iters = 0
    for epoch in range(EPOCH_NUMBER):
        if models['train_op']:
            _, train_rmse = session.run(
                [models['train_op'], models['rmse']],
                feed_dict={
                    models['u']: train_data['user_id'],
                    models['i']: train_data['item_id'],
                    models['r']: train_data['rating'],
                    models['c']: train_data['confidence'],
                })
        else:
            # No optimiser configured: report NaN for the training metric.
            train_rmse = float("NaN")
        _, valid_rmse, mu = session.run(
            [models['loss'], models['rmse'], models['mu']],
            feed_dict={
                models['u']: valid_data['user_id'],
                models['i']: valid_data['item_id'],
                models['r']: valid_data['rating'],
                models['c']: valid_data['confidence'],
            })
        # print(mu)
        # if epoch % 10 == 0:
        print('>> EPOCH:', "{:3d}".format(epoch), "{:3f}, {:3f}".format(
            train_rmse, valid_rmse))
        if EARLY_STOP:
            early_stop_iters += 1
            if valid_rmse < prev_valid_rmse:
                # New best model: reset the patience counter and checkpoint.
                prev_valid_rmse = valid_rmse
                early_stop_iters = 0
                saver.save(session, model_file_path)
            elif early_stop_iters >= EARLY_STOP_MAX_ITER:
                print("Early stopping ({} vs. {})...".format(
                    prev_valid_rmse, valid_rmse))
                break
        else:
            saver.save(session, model_file_path)
    return model_file_path
def _test(session, models, valid_data, test_data):
    """Evaluate the restored model's RMSE on validation and test data.

    NOTE(review): ``session.run`` is given a single-element fetch *list*,
    so each returned value is a one-element list rather than a scalar --
    confirm that callers expect that shape.
    """
    valid_rmse = session.run(
        [models['rmse']],
        feed_dict={
            models['u']: valid_data['user_id'],
            models['i']: valid_data['item_id'],
            models['r']: valid_data['rating'],
            models['c']: valid_data['confidence'],
        })
    test_rmse = session.run(
        [models['rmse']],
        feed_dict={
            models['u']: test_data['user_id'],
            models['i']: test_data['item_id'],
            models['r']: test_data['rating'],
            models['c']: test_data['confidence'],
        })
    print("Final valid RMSE: {}, test RMSE: {}".format(valid_rmse, test_rmse))
    return valid_rmse, test_rmse
def _init_model_file_path():
folder_path = 'logs/{}'.format(int(time.time() * 1000))
if not os.path.exists(folder_path):
os.mkdir(folder_path)
return os.path.join(folder_path, 'model.ckpt')
def main(data):
    """Train a rank-K weighted matrix-factorisation model and evaluate it.

    `data` must be a dict with 'train', 'valid' and 'test' splits, each a
    dict of user_id/item_id/rating/confidence arrays as consumed by _train
    and _test.
    """
    K = 1  # latent factor rank
    print("rank", K)
    lambda_value = 0.1  # regularisation strength passed to init_models
    # NOTE(review): hard-coded matrix dimensions -- presumably the user and
    # item counts for this particular dataset; confirm before reuse.
    N, M = 560200, 140
    models = init_models(N, M, K, lambda_value)
    saver = tf.train.Saver()
    with tf.Session() as session:
        session.run(tf.global_variables_initializer())
        model_file_path = _train(session, saver, models, data['train'],
                                 data['valid'])
        print('Loading best checkpointed model')
        # Evaluate the best (early-stopped) checkpoint, not the final epoch.
        saver.restore(session, model_file_path)
        valid_rmse, test_rmse = _test(session, models, data['valid'],
                                      data['test'])
| 31.227273 | 78 | 0.546725 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 483 | 0.140611 |
b2fce30e886d32040df251037d8a8ded7ce043ca | 3,817 | py | Python | aito/client/requests/query_api_request.py | AitoDotAI/aito-python-tools | 891d433222b04f4ff8a4eeafbb9268516fd215dc | [
"MIT"
] | 6 | 2019-10-16T02:35:06.000Z | 2021-02-03T13:39:43.000Z | aito/client/requests/query_api_request.py | AitoDotAI/aito-python-tools | 891d433222b04f4ff8a4eeafbb9268516fd215dc | [
"MIT"
] | 23 | 2020-03-17T13:16:02.000Z | 2021-04-23T15:09:51.000Z | aito/client/requests/query_api_request.py | AitoDotAI/aito-python-tools | 891d433222b04f4ff8a4eeafbb9268516fd215dc | [
"MIT"
] | null | null | null | """Aito `Query API <https://aito.ai/docs/api/#query-api>`__ Request Class"""
import re
from abc import ABC
from typing import Dict, Optional, Union, List
from .aito_request import AitoRequest, _PatternEndpoint, _PostRequest
from ..responses import SearchResponse, PredictResponse, RecommendResponse, EvaluateResponse, SimilarityResponse, \
MatchResponse, RelateResponse, HitsResponse
class QueryAPIRequest(_PostRequest, _PatternEndpoint, AitoRequest, ABC):
    """Abstract base for requests to a `Query API <https://aito.ai/docs/api/#query-api>`__

    Concrete subclasses set :attr:`path` to one of the known Query API paths.
    """
    #: the Query API path (set by each concrete subclass)
    path: str = None  # get around the lack of abstract class attributes
    # All endpoint paths served by the Query API; used both for validation
    # and for the endpoint-matching regex below.
    _query_api_paths = ['_search', '_predict', '_recommend', '_evaluate', '_similarity', '_match', '_relate', '_query']
    def __init__(self, query: Dict):
        """
        :param query: the Aito query payload of the request
        :type query: Dict
        :raises NotImplementedError: if instantiated via a subclass that
            did not define :attr:`path`
        """
        if self.path is None:
            raise NotImplementedError(f'The API path must be implemented')
        endpoint = self._endpoint_from_path(self.path)
        super().__init__(method=self.method, endpoint=endpoint, query=query)
    @classmethod
    def _endpoint_pattern(cls):
        # Regex matching any versioned Query API endpoint,
        # e.g. "<prefix>/_search".
        return re.compile(f"^{cls._api_version_endpoint_prefix}/({'|'.join(cls._query_api_paths)})$")
    @classmethod
    def make_request(cls, method: str, endpoint: str, query: Optional[Union[Dict, List]]) -> 'AitoRequest':
        # Dispatch to the concrete subclass whose method/endpoint matches.
        for sub_cls in cls.__subclasses__():
            if method == sub_cls.method and endpoint == QueryAPIRequest._endpoint_from_path(sub_cls.path):
                return sub_cls(query=query)
        raise ValueError(f"invalid {cls.__name__} with '{method}({endpoint})'")
    @classmethod
    def _endpoint_from_path(cls, path: str):
        """Return the full versioned endpoint for a Query API *path*."""
        if path not in cls._query_api_paths:
            raise ValueError(f"path must be one of {'|'.join(cls._query_api_paths)}")
        return f'{cls._api_version_endpoint_prefix}/{path}'
class SearchRequest(QueryAPIRequest):
    """Request to the `Search API <https://aito.ai/docs/api/#post-api-v1-search>`__"""
    #: the Query API path
    path: str = '_search'
    #: the class of the response for this request class
    response_cls = SearchResponse
class PredictRequest(QueryAPIRequest):
    """Request to the `Predict API <https://aito.ai/docs/api/#post-api-v1-predict>`__"""
    #: the Query API path
    path: str = '_predict'
    #: the class of the response for this request class
    response_cls = PredictResponse
class RecommendRequest(QueryAPIRequest):
    """Request to the `Recommend API <https://aito.ai/docs/api/#post-api-v1-recommend>`__"""
    #: the Query API path
    path: str = '_recommend'
    #: the class of the response for this request class
    response_cls = RecommendResponse
class EvaluateRequest(QueryAPIRequest):
    """Request to the `Evaluate API <https://aito.ai/docs/api/#post-api-v1-evaluate>`__"""
    #: the Query API path
    path: str = '_evaluate'
    #: the class of the response for this request class
    response_cls = EvaluateResponse
class SimilarityRequest(QueryAPIRequest):
    """Request to the `Similarity API <https://aito.ai/docs/api/#post-api-v1-similarity>`__"""
    #: the Query API path
    path: str = '_similarity'
    #: the class of the response for this request class
    response_cls = SimilarityResponse
class MatchRequest(QueryAPIRequest):
    """Request to the `Match query <https://aito.ai/docs/api/#post-api-v1-match>`__"""
    #: the Query API path
    path: str = '_match'
    #: the class of the response for this request class
    response_cls = MatchResponse
class RelateRequest(QueryAPIRequest):
    """Request to the `Relate API <https://aito.ai/docs/api/#post-api-v1-relate>`__"""
    #: the Query API path
    path: str = '_relate'
    #: the class of the response for this request class
    response_cls = RelateResponse
class GenericQueryRequest(QueryAPIRequest):
    """Request to the `Generic Query API <https://aito.ai/docs/api/#post-api-v1-query>`__"""
    #: the Query API path
    path: str = '_query'
    #: the class of the response for this request class
    response_cls = HitsResponse
b2fcf5fc344b109dbc4a9623cb76fea9e977c60b | 3,417 | py | Python | tests/test_domain.py | shyams2/pychebfun | 0c1efee54829457b9e1b0d6c34259af6c002e105 | [
"BSD-3-Clause"
] | 72 | 2015-02-23T17:08:38.000Z | 2022-02-09T18:17:08.000Z | tests/test_domain.py | shyams2/pychebfun | 0c1efee54829457b9e1b0d6c34259af6c002e105 | [
"BSD-3-Clause"
] | 6 | 2015-01-19T14:12:23.000Z | 2021-11-05T08:16:27.000Z | tests/test_domain.py | shyams2/pychebfun | 0c1efee54829457b9e1b0d6c34259af6c002e105 | [
"BSD-3-Clause"
] | 14 | 2015-07-31T00:08:36.000Z | 2021-02-01T22:20:41.000Z | #!/usr/bin/env python
# coding: UTF-8
from __future__ import division
from pychebfun import Chebfun
import operator
import unittest
import pytest
from . import tools
import numpy as np
import numpy.testing as npt
#------------------------------------------------------------------------------
# Unit test for arbitrary interval Chebfuns
#------------------------------------------------------------------------------
class TestDomain(unittest.TestCase):
    """Checks that domain mismatches and invalid restrictions are rejected."""

    def test_mismatch(self):
        """Binary arithmetic on Chebfuns over different domains must raise."""
        left = Chebfun.identity()
        right = Chebfun.from_function(lambda t: t, domain=[2, 3])
        binops = (operator.add, operator.sub, operator.mul, operator.truediv)
        for binop in binops:
            with self.assertRaises(Chebfun.DomainMismatch):
                binop(left, right)

    def test_restrict(self):
        """Restricting outside the original domain must raise ValueError."""
        ident = Chebfun.identity()
        for bad_domain in ([-2, 0], [0, 2]):
            with self.assertRaises(ValueError):
                ident.restrict(bad_domain)
@pytest.mark.parametrize("ufunc", tools.ufunc_list, ids=tools.name_func)
def test_init(ufunc):
    """Applying a ufunc to a Chebfun on [0.25, 0.75] yields a Chebfun whose
    stored values equal the ufunc evaluated at the mapped nodes."""
    xx = Chebfun.from_function(lambda x: x,[0.25,0.75])
    ff = ufunc(xx)
    assert isinstance(ff, Chebfun)
    result = ff.values()
    # Expected: the ufunc applied at the interpolation nodes mapped from
    # [-1, 1] onto the Chebfun's own interval.
    expected = ufunc(ff._ui_to_ab(ff.p.xi))
    npt.assert_allclose(result, expected)
#------------------------------------------------------------------------------
# Test the restrict operator
#------------------------------------------------------------------------------
from . import data
@pytest.mark.parametrize('ff', [Chebfun.from_function(tools.f,[-3,4])])
@pytest.mark.parametrize('domain', data.IntervalTestData.domains)
def test_restrict(ff, domain):
    """Restricting the [-3, 4] Chebfun to each sub-domain keeps it close to
    the underlying function on points mapped into that sub-domain."""
    ff_ = ff.restrict(domain)
    xx = tools.map_ui_ab(tools.xs, domain[0],domain[1])
    tools.assert_close(tools.f, ff_, xx)
#------------------------------------------------------------------------------
# Add the arbitrary interval tests
#------------------------------------------------------------------------------
@pytest.fixture(params=list(range(5)))
def tdata(request):
    """Per-domain test bundle: function, derivative, domain, roots, integral
    and the corresponding Chebfun, indexed by the fixture parameter."""
    index = request.param
    # Simple attribute bag (no behavior needed).
    class TData(): pass
    tdata = TData()
    tdata.function = data.IntervalTestData.functions[0]
    tdata.function_d = data.IntervalTestData.first_derivs[0]
    tdata.domain = data.IntervalTestData.domains[index]
    tdata.roots = data.IntervalTestData.roots[0][index]
    tdata.integral = data.IntervalTestData.integrals[0][index]
    tdata.chebfun = Chebfun.from_function(tdata.function, tdata.domain)
    return tdata
class TestArbitraryIntervals(object):
    """Test the various operations for Chebfun on arbitrary intervals"""
    def test_evaluation(self, tdata):
        # The Chebfun must reproduce the source function on its own domain.
        xx = tools.map_ui_ab(tools.xs, tdata.domain[0], tdata.domain[1])
        tools.assert_close(tdata.chebfun, tdata.function, xx)
    def test_domain(self, tdata):
        # The stored domain endpoints match the requested ones.
        assert tdata.chebfun._domain[0] == tdata.domain[0]
        assert tdata.chebfun._domain[1] == tdata.domain[1]
    def test_first_deriv(self, tdata):
        # Differentiation matches the analytic first derivative.
        xx = tools.map_ui_ab(tools.xs, tdata.domain[0], tdata.domain[1])
        tools.assert_close(tdata.chebfun.differentiate(), tdata.function_d, xx)
    def test_definite_integral(self, tdata):
        # sum() is the definite integral over the domain.
        actual = tdata.integral
        npt.assert_allclose(tdata.chebfun.sum(), actual, rtol=1e-12)
    def test_roots(self, tdata):
        # roots() may come back unordered; sort before comparing.
        actual = tdata.roots
        npt.assert_allclose(np.sort(tdata.chebfun.roots()), actual, rtol=1e-12)
| 33.831683 | 79 | 0.59643 | 1,474 | 0.431373 | 0 | 0 | 1,119 | 0.32748 | 0 | 0 | 702 | 0.205443 |
b2fd53048e194cb59b5b4420a7e50d932868c531 | 1,878 | py | Python | glance/rpc/common.py | Quinton/glance | 7674bc8963a3bec21f719c48f40e8a3fc0846e6f | [
"Apache-2.0"
] | 5 | 2017-04-23T05:50:36.000Z | 2019-03-12T09:45:20.000Z | glance/rpc/common.py | Quinton/glance | 7674bc8963a3bec21f719c48f40e8a3fc0846e6f | [
"Apache-2.0"
] | null | null | null | glance/rpc/common.py | Quinton/glance | 7674bc8963a3bec21f719c48f40e8a3fc0846e6f | [
"Apache-2.0"
] | 2 | 2018-08-16T11:41:18.000Z | 2018-10-21T06:56:50.000Z | #!/usr/bin/env python
#encode=utf-8
#vim: tabstop=4 shiftwidth=4 softtabstop=4
#Created on 2013-8-17
#Copyright 2013 nuoqingyun xuqifeng
import copy
import logging
import traceback
class RPCException(Exception):
    """Base class for RPC-related errors.

    Subclasses override ``message`` with a %-style template; keyword
    arguments given at construction time are interpolated into it when no
    explicit message is supplied, and are also kept on ``self.kwargs``.
    """
    message = "An Unknown RPC related exception occurred"
    def __init__(self, message=None, **kwargs):
        self.kwargs = kwargs
        if not message:
            try:
                # Interpolate the class-level template, e.g. "%(value)s".
                message = self.message % kwargs
            except Exception:
                # Formatting failed (missing/mismatched keys); fall back to
                # the raw template rather than masking the original error.
                message = self.message
        super(RPCException, self).__init__(message)
class RemoteError(RPCException):
    """Signals that the remote endpoint raised an exception.

    Carries the remote exception's type name, value and traceback text,
    which are interpolated into the message template by the base class.
    """
    message = "Remote error: %(exc_type)s %(value)s\n%(traceback)s"
    def __init__(self, exc_type = None, value = None, traceback = None):
        self.exc_type = exc_type
        self.value = value
        self.traceback = traceback
        details = dict(exc_type=exc_type, value=value, traceback=traceback)
        super(RemoteError, self).__init__(**details)
class Timeout(RPCException):
    """Raised when no RPC response arrives within the allotted time."""
    message = "Timeout while waiting on RPC response"
class InvalidRPCConnectionReuse(RPCException):
    """Raised when an RPC Connection object is reused incorrectly."""
    message = "Invalid reuse of an RPC Connection"
class Connection(object):
    """Abstract transport connection; concrete drivers override every method."""
    def close(self):
        """Close the connection and release its resources."""
        raise NotImplementedError()
    def create_consumer(self, topic, proxy, fanout = False):
        """Register *proxy* as a consumer of *topic* (optionally fanout)."""
        raise NotImplementedError()
    def create_worker(self, topic, proxy, pool_name):
        """Register *proxy* as a worker on *topic* within *pool_name*."""
        raise NotImplementedError()
    def consumer_in_thread(self):
        """Run the consumer loop in a background thread."""
        raise NotImplementedError()
def _sage_log(log_func, mes, msg_data):
    """Stub: intended to log *mes*/*msg_data* via *log_func*.

    NOTE(review): currently a no-op; the name looks like a typo of
    '_safe_log' -- confirm before renaming, external callers may exist.
    """
    pass
def serialize_remote_exception(failure_info):
    """Stub: serialize *failure_info* for transmission over RPC.

    TODO: not implemented yet -- always returns None.
    """
    pass
def deserialize_remote_exception(conf, data):
    """Stub: rebuild a remote exception from serialized *data*.

    TODO: not implemented yet -- always returns None.
    """
    pass
| 22.357143 | 73 | 0.596912 | 1,428 | 0.760383 | 0 | 0 | 0 | 0 | 0 | 0 | 356 | 0.189563 |
b2fdab74611ca607c1a5e2e63e4ac639ef552870 | 12,029 | py | Python | tests/test_simple.py | ImportTaste/WebRequest | 0cc385622624de16ec980e0c12d9080d593cab74 | [
"WTFPL"
] | 8 | 2018-06-04T09:34:28.000Z | 2021-09-16T15:21:24.000Z | tests/test_simple.py | ImportTaste/WebRequest | 0cc385622624de16ec980e0c12d9080d593cab74 | [
"WTFPL"
] | 4 | 2018-03-03T07:45:27.000Z | 2019-12-26T20:38:18.000Z | tests/test_simple.py | ImportTaste/WebRequest | 0cc385622624de16ec980e0c12d9080d593cab74 | [
"WTFPL"
] | 1 | 2019-12-26T20:36:32.000Z | 2019-12-26T20:36:32.000Z | import unittest
import socket
import json
import base64
import zlib
import gzip
import bs4
from http.server import BaseHTTPRequestHandler, HTTPServer
from threading import Thread
import WebRequest
from . import testing_server
class TestPlainCreation(unittest.TestCase):
	"""Smoke tests: WebGetRobust constructs cleanly with various options."""

	def test_plain_instantiation_1(self):
		fetcher = WebRequest.WebGetRobust()
		self.assertIsNotNone(fetcher)

	def test_plain_instantiation_2(self):
		fetcher = WebRequest.WebGetRobust(cloudflare=True)
		self.assertIsNotNone(fetcher)

	def test_plain_instantiation_3(self):
		fetcher = WebRequest.WebGetRobust(use_socks=True)
		self.assertIsNotNone(fetcher)
class TestSimpleFetch(unittest.TestCase):
	"""Exercises WebGetRobust against a local mock HTTP server: plain,
	decoded, soup and json fetches; compression; filename/mimetype
	extraction; HEAD; redirects; HTTP auth; and cookie handling.
	A fresh fetcher and mock server are created per test."""
	def setUp(self):
		self.wg = WebRequest.WebGetRobust()
		# Configure mock server.
		self.mock_server_port, self.mock_server, self.mock_server_thread = testing_server.start_server(self, self.wg)
	def tearDown(self):
		self.mock_server.shutdown()
		self.mock_server_thread.join()
		self.wg = None
	def test_fetch_1(self):
		page = self.wg.getpage("http://localhost:{}".format(self.mock_server_port))
		self.assertEqual(page, 'Root OK?')
	def test_fetch_decode_1(self):
		# text/html content should be decoded automatically.
		page = self.wg.getpage("http://localhost:{}/html-decode".format(self.mock_server_port))
		self.assertEqual(page, 'Root OK?')
	def test_fetch_soup_1(self):
		# text/html content should be decoded automatically.
		page = self.wg.getSoup("http://localhost:{}/html/real".format(self.mock_server_port))
		self.assertEqual(page, bs4.BeautifulSoup('<html><body>Root OK?</body></html>', 'lxml'))
	def test_fetch_soup_2(self):
		page = self.wg.getSoup("http://localhost:{}/html-decode".format(self.mock_server_port))
		self.assertEqual(page, bs4.BeautifulSoup('<html><body><p>Root OK?</p></body></html>', 'lxml'))
	def test_fetch_soup_3(self):
		# getSoup fails to fetch content that's not of content-type text/html
		with self.assertRaises(WebRequest.ContentTypeError):
			self.wg.getSoup("http://localhost:{}/binary_ctnt".format(self.mock_server_port))
	def test_fetch_decode_json(self):
		# application/json content should be decoded automatically.
		page = self.wg.getJson("http://localhost:{}/json/valid".format(self.mock_server_port))
		self.assertEqual(page, {'oh': 'hai'})
		page = self.wg.getJson("http://localhost:{}/json/no-coding".format(self.mock_server_port))
		self.assertEqual(page, {'oh': 'hai'})
		with self.assertRaises(json.decoder.JSONDecodeError):
			page = self.wg.getJson("http://localhost:{}/json/invalid".format(self.mock_server_port))
	def test_fetch_compressed(self):
		# gzip and deflate responses are transparently decompressed.
		page = self.wg.getpage("http://localhost:{}/compressed/gzip".format(self.mock_server_port))
		self.assertEqual(page, 'Root OK?')
		page = self.wg.getpage("http://localhost:{}/compressed/deflate".format(self.mock_server_port))
		self.assertEqual(page, 'Root OK?')
	def test_file_and_name_1(self):
		page, fn = self.wg.getFileAndName("http://localhost:{}/filename/path-only.txt".format(self.mock_server_port))
		self.assertEqual(page, b'LOLWAT?')
		self.assertEqual(fn, 'path-only.txt')
	def test_file_and_name_2(self):
		page, fn = self.wg.getFileAndName("http://localhost:{}/filename/content-disposition".format(self.mock_server_port))
		self.assertEqual(page, b'LOLWAT?')
		self.assertEqual(fn, 'lolercoaster.txt')
	def test_file_and_name_3(self):
		page, fn = self.wg.getFileAndName("http://localhost:{}/filename_mime/content-disposition-quotes-1".format(self.mock_server_port))
		self.assertEqual(page, b'LOLWAT?')
		self.assertEqual(fn, 'lolercoaster.html')
	def test_file_and_name_4(self):
		page, fn = self.wg.getFileAndName("http://localhost:{}/filename_mime/content-disposition-quotes-2".format(self.mock_server_port))
		self.assertEqual(page, b'LOLWAT?')
		self.assertEqual(fn, 'lolercoaster.html')
	def test_file_and_name_5(self):
		page, fn = self.wg.getFileAndName("http://localhost:{}/filename_mime/content-disposition-quotes-spaces-1".format(self.mock_server_port))
		self.assertEqual(page, b'LOLWAT?')
		self.assertEqual(fn, 'loler coaster.html')
	def test_file_and_name_6(self):
		page, fn = self.wg.getFileAndName("http://localhost:{}/filename_mime/content-disposition-quotes-spaces-2".format(self.mock_server_port))
		self.assertEqual(page, b'LOLWAT?')
		self.assertEqual(fn, 'loler coaster.html')
	def test_file_and_name_7(self):
		page, fn = self.wg.getFileAndName(requestedUrl="http://localhost:{}/filename_mime/content-disposition-quotes-spaces-2".format(self.mock_server_port))
		self.assertEqual(page, b'LOLWAT?')
		self.assertEqual(fn, 'loler coaster.html')
	def test_file_and_name_8(self):
		page, fn = self.wg.getFileAndName(requestedUrl="http://localhost:{}/filename_mime/content-disposition-quotes-spaces-2".format(self.mock_server_port), addlHeaders={"Referer" : 'http://www.example.org'})
		self.assertEqual(page, b'LOLWAT?')
		self.assertEqual(fn, 'loler coaster.html')
	def test_file_and_name_9(self):
		page, fn = self.wg.getFileAndName("http://localhost:{}/filename_mime/content-disposition-quotes-spaces-2".format(self.mock_server_port), addlHeaders={"Referer" : 'http://www.example.org'})
		self.assertEqual(page, b'LOLWAT?')
		self.assertEqual(fn, 'loler coaster.html')
	def test_file_and_name_10(self):
		# No path component after the trailing slash -> empty filename.
		page, fn = self.wg.getFileAndName("http://localhost:{}/filename/path-only-trailing-slash/".format(self.mock_server_port))
		self.assertEqual(page, b'LOLWAT?')
		self.assertEqual(fn, '')
	def test_file_name_mime_1(self):
		page, fn, mimet = self.wg.getFileNameMime(
			"http://localhost:{}/filename_mime/path-only.txt".format(self.mock_server_port))
		self.assertEqual(page, b'LOLWAT?')
		self.assertEqual(fn, 'path-only.txt')
		self.assertEqual(mimet, 'text/plain')
	def test_file_name_mime_2(self):
		page, fn, mimet = self.wg.getFileNameMime(
			"http://localhost:{}/filename_mime/content-disposition".format(self.mock_server_port))
		self.assertEqual(page, b'LOLWAT?')
		self.assertEqual(fn, 'lolercoaster.txt')
		self.assertEqual(mimet, 'text/plain')
	def test_file_name_mime_3(self):
		page, fn, mimet = self.wg.getFileNameMime(
			"http://localhost:{}/filename_mime/content-disposition-html-suffix".format(self.mock_server_port))
		self.assertEqual(page, b'LOLWAT?')
		self.assertEqual(fn, 'lolercoaster.html')
		self.assertEqual(mimet, 'text/plain')
	def test_file_name_mime_5(self):
		page, fn, mimet = self.wg.getFileNameMime(
			"http://localhost:{}/filename/path-only-trailing-slash/".format(self.mock_server_port))
		self.assertEqual(page, b'LOLWAT?')
		self.assertEqual(fn, '')
		self.assertEqual(mimet, 'text/plain')
	def test_file_name_mime_4(self):
		# Explicit text/html mimetype -> content is decoded to str.
		page, fn, mimet = self.wg.getFileNameMime(
			"http://localhost:{}/filename_mime/explicit-html-mime".format(self.mock_server_port))
		self.assertEqual(page, 'LOLWAT?')
		self.assertEqual(fn, 'lolercoaster.html')
		self.assertEqual(mimet, 'text/html')
	def test_get_head_1(self):
		inurl_1 = "http://localhost:{}".format(self.mock_server_port)
		nurl_1 = self.wg.getHead(inurl_1)
		self.assertEqual(inurl_1, nurl_1)
	def test_get_head_2(self):
		inurl_2 = "http://localhost:{}/filename_mime/content-disposition".format(self.mock_server_port)
		nurl_2 = self.wg.getHead(inurl_2)
		self.assertEqual(inurl_2, nurl_2)
	def test_redirect_handling_1(self):
		inurl_1 = "http://localhost:{}/redirect/from-1".format(self.mock_server_port)
		ctnt_1 = self.wg.getpage(inurl_1)
		self.assertEqual(ctnt_1, b"Redirect-To-1")
	def test_redirect_handling_2(self):
		inurl_2 = "http://localhost:{}/redirect/from-2".format(self.mock_server_port)
		ctnt_2 = self.wg.getpage(inurl_2)
		self.assertEqual(ctnt_2, b"Redirect-To-2")
	def test_redirect_handling_3(self):
		# getHead follows redirects and reports the final URL.
		inurl_3 = "http://localhost:{}/redirect/from-1".format(self.mock_server_port)
		outurl_3 = "http://localhost:{}/redirect/to-1".format(self.mock_server_port)
		nurl_3 = self.wg.getHead(inurl_3)
		self.assertEqual(outurl_3, nurl_3)
	def test_redirect_handling_4(self):
		inurl_4 = "http://localhost:{}/redirect/from-2".format(self.mock_server_port)
		outurl_4 = "http://localhost:{}/redirect/to-2".format(self.mock_server_port)
		nurl_4 = self.wg.getHead(inurl_4)
		self.assertEqual(outurl_4, nurl_4)
	def test_redirect_handling_5(self):
		# This is a redirect without the actual redirect
		with self.assertRaises(WebRequest.FetchFailureError):
			inurl_5 = "http://localhost:{}/redirect/bad-1".format(self.mock_server_port)
			self.wg.getHead(inurl_5)
	def test_redirect_handling_6(self):
		# This is a infinitely recursive redirect.
		with self.assertRaises(WebRequest.FetchFailureError):
			inurl_6 = "http://localhost:{}/redirect/bad-2".format(self.mock_server_port)
			self.wg.getHead(inurl_6)
	def test_redirect_handling_7(self):
		# This is a infinitely recursive redirect.
		with self.assertRaises(WebRequest.FetchFailureError):
			inurl_6 = "http://localhost:{}/redirect/bad-3".format(self.mock_server_port)
			self.wg.getHead(inurl_6)
	def test_redirect_handling_8(self):
		inurl_7 = "http://localhost:{}/redirect/from-3".format(self.mock_server_port)
		# Assumes localhost seems to resolve to the listening address (here it's 0.0.0.0). Is this ever not true? IPv6?
		outurl_7 = "http://0.0.0.0:{}/".format(self.mock_server_port)
		nurl_7 = self.wg.getHead(inurl_7)
		self.assertEqual(outurl_7, nurl_7)
	# For the auth tests, we have to restart the test-server with the wg that's configured for password management
	def test_http_auth_1(self):
		self.mock_server.shutdown()
		self.mock_server_thread.join()
		self.wg = None
		new_port_1 = testing_server.get_free_port()
		wg_1 = WebRequest.WebGetRobust(creds=[("localhost:{}".format(new_port_1), "lol", "wat")])
		# Configure mock server.
		new_port_1, self.mock_server, self.mock_server_thread = testing_server.start_server(self, wg_1, port_override=new_port_1)
		page = wg_1.getpage("http://localhost:{}/password/expect".format(new_port_1))
		self.assertEqual(page, b'Password Ok?')
	def test_http_auth_2(self):
		# Wrong password -> server responds with the failure body.
		self.mock_server.shutdown()
		self.mock_server_thread.join()
		self.wg = None
		new_port_2 = testing_server.get_free_port()
		wg_2 = WebRequest.WebGetRobust(creds=[("localhost:{}".format(new_port_2), "lol", "nope")])
		# Configure mock server.
		new_port_2, self.mock_server, self.mock_server_thread = testing_server.start_server(self, wg_2, port_override=new_port_2)
		page = wg_2.getpage("http://localhost:{}/password/expect".format(new_port_2))
		self.assertEqual(page, b'Password Bad!')
	def test_get_item_1(self):
		inurl_1 = "http://localhost:{}".format(self.mock_server_port)
		content_1, fileN_1, mType_1 = self.wg.getItem(inurl_1)
		self.assertEqual(content_1, 'Root OK?')
		self.assertEqual(fileN_1, '')
		self.assertEqual(mType_1, "text/html")
	def test_get_item_2(self):
		inurl_2 = "http://localhost:{}/filename_mime/content-disposition".format(self.mock_server_port)
		content_2, fileN_2, mType_2 = self.wg.getItem(inurl_2)
		# Lack of an explicit mimetype makes this not get decoded
		self.assertEqual(content_2, b'LOLWAT?')
		self.assertEqual(fileN_2, 'lolercoaster.txt')
		self.assertEqual(mType_2, None)
	def test_get_item_3(self):
		inurl_3 = "http://localhost:{}/filename/path-only.txt".format(self.mock_server_port)
		content_3, fileN_3, mType_3 = self.wg.getItem(inurl_3)
		self.assertEqual(content_3, b'LOLWAT?')
		self.assertEqual(fileN_3, 'path-only.txt')
		self.assertEqual(mType_3, None)
	def test_get_cookies_1(self):
		inurl_1 = "http://localhost:{}/cookie_test".format(self.mock_server_port)
		inurl_2 = "http://localhost:{}/cookie_require".format(self.mock_server_port)
		self.wg.clearCookies()
		cookies = self.wg.getCookies()
		self.assertEqual(list(cookies), [])
		# Without the cookie, the protected page reports it missing.
		page_resp_nocook = self.wg.getpage(inurl_2)
		self.assertEqual(page_resp_nocook, '<html><body>Cookie is missing</body></html>')
		# Fetch the cookie-setting page, then retry the protected page.
		_ = self.wg.getpage(inurl_1)
		cookies = self.wg.getCookies()
		print(cookies)
		page_resp_cook = self.wg.getpage(inurl_2)
		self.assertEqual(page_resp_cook, '<html><body>Cookie forwarded properly!</body></html>')
| 39.569079 | 203 | 0.750104 | 11,792 | 0.980298 | 0 | 0 | 0 | 0 | 0 | 0 | 3,629 | 0.301688 |
b2fdbd3d23a1257e8c2fb2f6d739b834e644b93d | 5,220 | py | Python | cplcom/moa/device/mcdaq.py | matham/cplcom | 54b1dc8445ff97bab248418d861354beb7c4e656 | [
"MIT"
] | null | null | null | cplcom/moa/device/mcdaq.py | matham/cplcom | 54b1dc8445ff97bab248418d861354beb7c4e656 | [
"MIT"
] | null | null | null | cplcom/moa/device/mcdaq.py | matham/cplcom | 54b1dc8445ff97bab248418d861354beb7c4e656 | [
"MIT"
] | null | null | null | '''Barst Measurement Computing DAQ Wrapper
==========================================
'''
from functools import partial
from pybarst.mcdaq import MCDAQChannel
from kivy.properties import NumericProperty, ObjectProperty
from moa.threads import ScheduledEventLoop
from moa.device.digital import ButtonViewPort
from cplcom.moa.device import DeviceExceptionBehavior
__all__ = ('MCDAQDevice', )
class MCDAQDevice(DeviceExceptionBehavior, ButtonViewPort, ScheduledEventLoop):
    '''A :class:`moa.device.digital.ButtonViewPort` wrapper around a
    :class:`pybarst.mcdaq.MCDAQChannel` instance which controls a Switch
    and Sense 8/8.
    For this class, :class:`moa.device.digital.ButtonViewPort.dev_map` must be
    provided upon creation and it's a dict whose keys are the property names
    and whose values are the Switch and Sense 8/8 channel numbers that the
    property controls.
    E.g. for a light switch connected to channel 3 on the Switch and Sense
    8/8 output port define the class::
        class MyMCDAQDevice(MCDAQDevice):
            light = BooleanProperty(False)
    And then create the instance with::
        dev = MyMCDAQDevice(dev_map={'light': 3})
    And then we can set it high by calling e.g.::
        dev.set_state(high=['light'])
    An input device is defined similarly and the state of each property
    reflects the value of the port. A Switch & Sense that has both input
    and output ports still needs one device instance for each direction.
    '''
    __settings_attrs__ = ('SAS_chan', )
    # Handle of the pending/continuous read request, if any.
    _read_event = None
    def _write_callback(self, value, mask, result):
        # Runs (on the event-loop thread) after a hardware write completes:
        # mirror the written channel bits into the mapped properties.
        # NOTE: iteritems() -- this module is Python 2 code.
        self.timestamp = result
        for idx, name in self.chan_dev_map.iteritems():
            if mask & (1 << idx):
                setattr(self, name, bool(value & (1 << idx)))
        self.dispatch('on_data_update', self)
    def _read_callback(self, result, **kwargs):
        # Runs after a hardware read; result is (timestamp, port value).
        t, val = result
        self.timestamp = t
        for idx, name in self.chan_dev_map.iteritems():
            setattr(self, name, bool(val & (1 << idx)))
        self.dispatch('on_data_update', self)
    def set_state(self, high=[], low=[], **kwargs):
        # NOTE(review): mutable default args -- safe only because they are
        # never mutated here.
        if self.activation != 'active':
            raise TypeError('Can only set state of an active device. Device '
                            'is currently "{}"'.format(self.activation))
        if 'o' not in self.direction:
            raise TypeError('Cannot write state for a input device')
        dev_map = self.dev_map
        # Build the bitmask/value pair from the named channels.
        mask = 0
        val = 0
        for name in high:
            idx = dev_map[name]
            val |= (1 << idx)
            mask |= (1 << idx)
        for name in low:
            mask |= (1 << dev_map[name])
        # The write happens asynchronously on the internal thread.
        self.request_callback(
            self.chan.write, callback=partial(self._write_callback, val, mask),
            mask=mask, value=val)
    def get_state(self):
        if self.activation != 'active':
            raise TypeError('Can only read state of an active device. Device '
                            'is currently "{}"'.format(self.activation))
        if 'i' in self.direction: # input devices read continuously anyway
            return
        self._read_event = self.request_callback(
            self.chan.read, callback=self._read_callback)
    def activate(self, *largs, **kwargs):
        """Open the Barst channel and, for inputs, start a continuous read."""
        kwargs['state'] = 'activating'
        if not super(MCDAQDevice, self).activate(*largs, **kwargs):
            return False
        self.start_thread()
        self.chan = MCDAQChannel(chan=self.SAS_chan, server=self.server.server)
        def finish_activate(*largs):
            # Only flip to 'active' once the channel is actually open.
            self.activation = 'active'
            if 'i' in self.direction:
                self._read_event = self.request_callback(
                    self.chan.read, repeat=True, callback=self._read_callback)
        self.request_callback(self._start_channel, finish_activate)
        return True
    def _start_channel(self):
        # Runs on the internal thread: open the channel and zero outputs.
        chan = self.chan
        chan.open_channel()
        if 'o' in self.direction:
            chan.write(mask=0xFF, value=0)
    def deactivate(self, *largs, **kwargs):
        """Cancel pending reads, zero outputs and stop the worker thread."""
        kwargs['state'] = 'deactivating'
        if not super(MCDAQDevice, self).deactivate(*largs, **kwargs):
            return False
        self.remove_request(self.chan.read, self._read_event)
        self._read_event = None
        def finish_deactivate(*largs):
            self.activation = 'inactive'
            self.stop_thread()
        self.request_callback(self._stop_channel, finish_deactivate)
        return True
    def _stop_channel(self):
        # Runs on the internal thread: leave outputs low and stop any
        # continuous read on the hardware side.
        if 'o' in self.direction:
            self.chan.write(mask=0xFF, value=0)
        if 'i' in self.direction and self.chan.continuous:
            self.chan.cancel_read(flush=True)
    chan = ObjectProperty(None)
    '''The internal :class:`pybarst.mcdaq.MCDAQChannel` instance.
    It is read only and is automatically created.
    '''
    server = ObjectProperty(None, allownone=True)
    '''The internal barst :class:`pybarst.core.server.BarstServer`. It
    must be provided to the instance.
    '''
    SAS_chan = NumericProperty(0)
    '''The channel number of the Switch & Sense 8/8 as configured in InstaCal.
    Defaults to zero.
    '''
| 35.033557 | 79 | 0.627203 | 4,824 | 0.924138 | 0 | 0 | 0 | 0 | 0 | 0 | 1,807 | 0.346169 |
b2fdd34a89c4f597f4f4706f3635728cd6c36c6a | 2,827 | py | Python | train_utils.py | BatyrM/QL-Net | b245aadeb106810d075064137f26d773b2dbd679 | [
"MIT"
] | null | null | null | train_utils.py | BatyrM/QL-Net | b245aadeb106810d075064137f26d773b2dbd679 | [
"MIT"
] | null | null | null | train_utils.py | BatyrM/QL-Net | b245aadeb106810d075064137f26d773b2dbd679 | [
"MIT"
] | null | null | null | import torch
import torch.nn as nn
from torch.autograd import Variable
import torch.optim as optim
def accuracy(output, target, topk=(1,)):
    """Compute the top-k precision (as a percent tensor) for each k in *topk*.

    :param output: (batch, classes) score tensor.
    :param target: (batch,) tensor of class indices.
    :param topk: tuple of k values to evaluate.
    :returns: list of scalar tensors, one per k.
    """
    k_max = max(topk)
    n_samples = target.size(0)
    # Indices of the k_max highest-scoring classes per sample, transposed
    # to shape (k_max, batch) so each row is one rank level.
    _, top_idx = output.topk(k_max, 1, True, True)
    top_idx = top_idx.t()
    hits = top_idx.eq(target.view(1, -1).expand_as(top_idx))
    # A sample counts for top-k if the target appears in the first k rows.
    return [100 * hits[:k].reshape(-1).sum(0) / n_samples for k in topk]
def adjust_learning_rate(lr, optimizer, epoch):
    """Set every param group's LR to the base *lr* decayed by 10x every 6 epochs.

    (The previous docstring claimed decay "after 3 and 6 epochs", which did
    not match the ``epoch // 6`` implementation.)

    :param lr: base learning rate.
    :param optimizer: torch optimizer whose param groups are updated in place.
    :param epoch: current epoch number (0-based).
    """
    lr = lr * (0.1 ** (epoch // 6))
    for param_group in optimizer.param_groups:
        param_group['lr'] = lr
def train(net,trainloader, testloader, num_epoch, lr, device):
    """Train *net* with SGD + cross-entropy, validating after every epoch and
    checkpointing whenever validation accuracy improves.

    :param net: the model to train (modified in place).
    :param trainloader: training (inputs, labels) batches.
    :param testloader: validation batches passed to ``test``.
    :param num_epoch: number of epochs.
    :param lr: base learning rate (step-decayed by adjust_learning_rate).
    :param device: torch device string/object for batches.
    :returns: the trained network.
    """
    criterion = nn.CrossEntropyLoss() # classification loss
    optimizer = optim.SGD(net.parameters(), lr=lr, momentum=0.9, weight_decay=5e-4)
    best = 0.0
    for epoch in range(num_epoch): # loop over the dataset multiple times
        net.train()
        # Step-decay the learning rate as training progresses.
        adjust_learning_rate(lr, optimizer, epoch)
        # NOTE(review): running_loss is never updated or read.
        running_loss = 0.0
        for i, (inputs, labels) in enumerate(trainloader, 0):
            inputs = inputs.to(device)
            labels = labels.to(device)
            # zero the parameter gradients
            optimizer.zero_grad()
            # forward + backward + optimize
            outputs = net(inputs)
            loss = criterion(outputs, labels)
            loss.backward()
            optimizer.step()
            prec1 = accuracy(outputs.data, labels, topk=(1,))[0]
            if i % 30 == 0: # log every 30 mini-batches
                print('Train Epoch: {} [{}/{} ({:.0f}%)]\tLoss: {:.6f}\tAccuracy: {:.2f}'.format(
                    epoch, i * len(inputs), len(trainloader.dataset),
                    100. * i / len(trainloader), loss.item(), prec1.item()))
        print("----- Validation ----------")
        score = test(net, testloader, device)
        if score > best:
            print("Saving model")
            # NOTE(review): checkpoint filename is hard-coded.
            torch.save(net.state_dict(), 'mnist_baseline.pth')
            best = score
        print("---------------------------")
    print('Finished Training')
    return net
def test(net, testloader, device):
    """Evaluate *net* on *testloader*; print and return top-1 accuracy (%).

    :param net: model to evaluate (switched to eval mode).
    :param testloader: iterable of (images, labels) batches.
    :param device: torch device for the batches.
    """
    net.eval()
    correct = 0.0
    total = 0.0
    # NOTE(review): 'i' only counts batches and is never read -- candidate
    # for removal.
    i = 0.0
    for (images, labels) in testloader:
        images, labels = images.to(device), labels.to(device)
        with torch.no_grad():
            outputs = net(images)
        # Predicted class = argmax over the class dimension.
        _, predicted = torch.max(outputs.data, 1)
        total += labels.size(0)
        correct += (predicted == labels).sum()
        i=i+1.0
    accuracy = 100.0 * correct.item() / total
    # NOTE(review): message hard-codes "10000 test images" regardless of
    # the loader's actual size.
    print('Accuracy of the network on the 10000 test images: %.2f %%' % (accuracy))
    return accuracy
| 36.714286 | 97 | 0.573753 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 510 | 0.180403 |
b2feb55d6f844492c6231b317cce3362c8ea498f | 69 | py | Python | Bronze/Bronze_V/17496.py | masterTyper/baekjoon_solved_ac | b9ce14d9bdaa5b5b06735ad075fb827de9f44b9c | [
"MIT"
] | null | null | null | Bronze/Bronze_V/17496.py | masterTyper/baekjoon_solved_ac | b9ce14d9bdaa5b5b06735ad075fb827de9f44b9c | [
"MIT"
] | null | null | null | Bronze/Bronze_V/17496.py | masterTyper/baekjoon_solved_ac | b9ce14d9bdaa5b5b06735ad075fb827de9f44b9c | [
"MIT"
] | null | null | null | N, T, C, P = map(int, input().split())
print(((N - 1) // T) * C * P) | 23 | 38 | 0.434783 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
b2ffff75cff848e9cc4d8a6143bf4d9bf43e64d3 | 5,702 | py | Python | sapy_script/SAP.py | fkfouri/sapy_script | 476041288367e2098b955bc2377f442ce503e822 | [
"MIT"
] | 3 | 2018-12-03T15:51:54.000Z | 2020-11-20T01:05:39.000Z | sapy_script/SAP.py | whrocha/sapy_script | 476041288367e2098b955bc2377f442ce503e822 | [
"MIT"
] | null | null | null | sapy_script/SAP.py | whrocha/sapy_script | 476041288367e2098b955bc2377f442ce503e822 | [
"MIT"
] | 3 | 2018-07-28T21:53:32.000Z | 2018-08-22T13:51:17.000Z | from multiprocessing import Pool, Manager
from time import sleep
from wmi import WMI
from win32com.client import GetObject
from subprocess import Popen
from collections import Iterable
from tqdm import tqdm
from os import getpid
from sapy_script.Session import Session
session_process = None
all_processes_id = []
def _on_init(sid, p_ids):
    """Worker-process initializer: bind this process to one SAP GUI session.

    :param sid: system session id identifying the target SAP connection.
    :param p_ids: shared list of worker pids; this pid's index in the list
        selects which session of the connection this process owns.
    """
    p_ids.append(getpid())
    global session_process
    app = SAP.app()
    # Scan the open connections until the one with the matching session id
    # is found.
    i = 0
    while True:
        con = app.Children(i)
        if con.Children(0).Info.systemsessionid == sid:
            # Each worker takes the session whose index equals its own
            # position in the shared pid list.
            session = con.Children(p_ids.index(getpid()))
            session_process = Session(session)
            break
        i = i + 1
def _task_executor(task):
    """Run one queued task: invoke its 'func' entry on its 'data' payload."""
    handler = task['func']
    payload = task['data']
    handler(payload)
class SAP:
    def __init__(self, max_sessions=16):
        """
        :param max_sessions: upper bound on concurrently opened SAP GUI
            sessions for this connection.
        """
        self._con = None
        self._tasks = []
        self.max_sessions = max_sessions
        # NOTE(review): this instance attribute shadows the ``session``
        # staticmethod defined below -- confirm which one callers expect.
        self.session = lambda i=0: Session(self._con.Children(i))
    @staticmethod
    def app():
        """Ensure SAP GUI (saplgpad.exe) is running and return its scripting engine.

        Launches the executable if no such process exists, then polls once a
        second until the COM scripting engine becomes available.
        """
        wmi_obj = WMI()
        sap_exists = len(wmi_obj.Win32_Process(name='saplgpad.exe')) > 0
        if not sap_exists:
            Popen(['C:\Program Files (x86)\SAP\FrontEnd\SAPgui\saplgpad.exe'])
        while True:
            try:
                # Leftover experiments kept for reference:
                #temp = GetObject("SAPGUI").GetScriptingEngine
                #temp.Change("teste 456", "", "", "", "", ".\LocalSystem", "")
                #objService.Change(,, , , , , ".\LocalSystem", "")
                return GetObject("SAPGUI").GetScriptingEngine
            except:
                # Engine not ready yet -- wait and retry.
                sleep(1)
                pass
def connect(self, environment, client=None, user=None, password=None, lang=None, force=False):
con = SAP.app().OpenConnection(environment, True)
session = Session(con.Children(0))
if client is not None:
session.findById("wnd[0]/usr/txtRSYST-MANDT").Text = client
if user is not None:
session.findById("wnd[0]/usr/txtRSYST-BNAME").Text = user
if password is not None:
session.findById("wnd[0]/usr/pwdRSYST-BCODE").Text = password
if lang is not None:
session.findById("wnd[0]/usr/txtRSYST-LANGU").Text = lang
session.findById("wnd[0]").sendVKey(0)
# Eventual tela de mudanca de senha
change_pwd = False
try:
session.findById("wnd[1]/usr/pwdRSYST-NCODE").text = ''
session.findById("wnd[1]/usr/pwdRSYST-NCOD2").text = ''
change_pwd = True
except:
pass
if change_pwd:
raise ValueError('Please, set a new Password')
# Derruba conexão SAP
if force:
try:
session.findById("wnd[1]/usr/radMULTI_LOGON_OPT1").select()
session.findById("wnd[1]/tbar[0]/btn[0]").press()
except:
pass
else:
try:
session.findById("wnd[1]/usr/radMULTI_LOGON_OPT1").select()
session.findById("wnd[1]").sendVKey(12)
return False
except:
pass
# Teste da Conexao
if session.is_connected():
self._con = con
return True
self._con = None
return False
@property
def connected(self):
return self.session().is_connected()
@staticmethod
def session():
global session_process
return session_process
def sid(self):
return self.session().Info.systemsessionid
def logout(self):
session = self.session()
session.findById("wnd[0]/tbar[0]/okcd").text = "/nex"
session.findById("wnd[0]").sendVKey(0)
del session
self._con = None
@property
def number_of_sessions(self):
return 0 if self._con is None else len(self._con.Children)
@number_of_sessions.setter
def number_of_sessions(self, value):
size = self.number_of_sessions
if size == 0:
return
value = min(max(int(value), 1), self.max_sessions)
minus = value < size
arr = list(range(size, value))
arr.extend(reversed(range(value, size)))
for i in arr:
if minus:
session = self.session(i)
session.findById("wnd[0]/tbar[0]/okcd").text = "/i"
session.findById("wnd[0]").sendVKey(0)
else:
self.session().createSession()
sleep(0.5)
def clear_tasks(self):
self._tasks = []
def add_task(self, func, data):
for dt in data:
self._tasks.append({'func': func, 'data': dt})
def execute_tasks(self, resize_sessions=False):
total = len(self._tasks)
if total == 0:
return
if resize_sessions:
self.number_of_sessions = total
size = self.number_of_sessions
if size == 0:
return
sess_manager = Manager().list([])
pool = Pool(processes=self.number_of_sessions, initializer=_on_init, initargs=(self.sid(), sess_manager))
response = list(tqdm(pool.imap_unordered(_task_executor, self._tasks)))
pool.close()
pool.join()
return list(response)
def execute_function(self, func, data, resize_sessions=False):
if not isinstance(data, Iterable):
data = [data]
self.clear_tasks()
self.add_task(func=func, data=data)
response = self.execute_tasks(resize_sessions=resize_sessions)
self.clear_tasks()
return response
@staticmethod
def multi_arguments(func):
def convert_args(pr):
return func(**pr)
return convert_args
| 28.368159 | 113 | 0.573834 | 4,970 | 0.871471 | 0 | 0 | 1,677 | 0.294056 | 0 | 0 | 718 | 0.125899 |
65004332cb733aa8aa9fc8e64faf35799f2ce289 | 2,158 | py | Python | shepherd/blueprints/editor/__init__.py | Systemetric/shepherd | 28473503130cddd2c40702240f3deaad3a21e52b | [
"BSD-2-Clause"
] | null | null | null | shepherd/blueprints/editor/__init__.py | Systemetric/shepherd | 28473503130cddd2c40702240f3deaad3a21e52b | [
"BSD-2-Clause"
] | 8 | 2017-12-13T15:27:52.000Z | 2019-01-27T21:35:14.000Z | shepherd/blueprints/editor/__init__.py | Systemetric/shepherd | 28473503130cddd2c40702240f3deaad3a21e52b | [
"BSD-2-Clause"
] | null | null | null | import json
import os
import os.path as path
import re
from flask import Blueprint, request
blueprint = Blueprint("editor", __name__)
robotsrc_path = path.join(os.getcwd(), "robotsrc")
if not path.exists(robotsrc_path):
os.mkdir(robotsrc_path)
main_path = path.join(robotsrc_path, 'main.py')
main_file = open(main_path, 'w')
main_file.write('# DO NOT DELETE\n')
main_file.close()
blocks_path = path.join(robotsrc_path, 'blocks.json')
@blueprint.route('/')
def get_files():
    """Return a JSON listing of the editable project files.

    The payload contains the path of main.py, the (defaulted) blocks.json
    configuration, and the name+content of every other .py/.xml project
    file under robotsrc.
    """
    # Everything editable except main.py itself.
    project_paths = [f for f in os.listdir(robotsrc_path)
                     if path.isfile(path.join(robotsrc_path, f))
                     and (f.endswith('.py') or f.endswith(".xml") or f == "blocks.json")
                     and f != 'main.py']

    def read_project(project_path):
        # Package one file as {filename, content}.
        with open(path.join(robotsrc_path, project_path), 'r') as project_file:
            content = project_file.read()
        return {
            'filename': project_path,
            'content': content
        }

    # Load blocks.json if present; tolerate a malformed file.
    blocks = {}
    if path.exists(blocks_path):
        with open(blocks_path, 'r') as blocks_file:
            try:
                blocks = json.load(blocks_file)
            except ValueError:
                pass
    # Guarantee the keys the frontend expects.
    if "requires" not in blocks:
        blocks["requires"] = []
    if "header" not in blocks:
        blocks["header"] = ""
    if "footer" not in blocks:
        blocks["footer"] = ""
    if "blocks" not in blocks:
        blocks["blocks"] = []
    return json.dumps({
        'main': main_path,
        'blocks': blocks,
        'projects': list(map(read_project, project_paths))
    })
@blueprint.route("/save/<string:filename>", methods=["POST"])
def save_file(filename):
    """Write the POSTed request body to *filename* inside robotsrc.

    Only names containing exactly one dot (e.g. ``foo.py``) are accepted,
    which rejects ``..`` traversal tricks and double extensions — same
    rule as the original ``re.findall`` check, without the non-raw regex
    pattern (``"\\."``) that raises invalid-escape warnings.
    Always returns an empty response body.
    """
    if filename.count(".") == 1:
        with open(path.join(robotsrc_path, filename), 'w') as f:
            f.write(request.data.decode('utf-8'))
    return ""
@blueprint.route("/delete/<string:filename>", methods=["DELETE"])
def delete_file(filename):
    """Delete *filename* from robotsrc; blocks.json is protected.

    Same one-dot filename rule as save_file (replaces the original non-raw
    ``re.findall("\\.")`` pattern, which emits invalid-escape warnings).
    Always returns an empty response body.
    """
    if filename == "blocks.json":
        return ""
    if filename.count(".") == 1:
        os.unlink(path.join(robotsrc_path, filename))
    return ""
| 28.394737 | 88 | 0.596386 | 0 | 0 | 0 | 0 | 1,708 | 0.791474 | 0 | 0 | 323 | 0.149676 |
6501e436cf727b0f646b61fcf716e2f64d47d65c | 1,131 | py | Python | hai_tests/test_event_emitter.py | valohai/hai | f49c4eae2eb74b1738699e32b4b2aeb0f4d922dd | [
"MIT"
] | 2 | 2018-10-03T11:13:06.000Z | 2020-08-07T12:44:22.000Z | hai_tests/test_event_emitter.py | valohai/hai | f49c4eae2eb74b1738699e32b4b2aeb0f4d922dd | [
"MIT"
] | 16 | 2018-02-07T11:08:53.000Z | 2021-11-26T09:21:57.000Z | hai_tests/test_event_emitter.py | valohai/hai | f49c4eae2eb74b1738699e32b4b2aeb0f4d922dd | [
"MIT"
] | null | null | null | import pytest
from hai.event_emitter import EventEmitter
class Thing(EventEmitter):
    """Minimal EventEmitter subclass declaring two legal event types."""
    event_types = {'one', 'two'}
@pytest.mark.parametrize('omni', (False, True))
def test_event_emitter(omni):
    """Handlers fire for their subscribed events; '*' subscribes to all.

    Also checks that off('one', ...) does not detach a handler that was
    registered under '*'.
    """
    t = Thing()
    events = []

    def handle(sender, **args):
        # Handlers receive the emitter as sender plus event kwargs.
        assert sender is t
        events.append(args)

    if omni:
        t.on('*', handle)
    else:
        t.on('one', handle)
    t.emit('one')
    t.emit('two')
    t.off('one', handle)
    t.emit('one', {'oh': 'no'})
    if omni:
        # '*' handler saw everything, including the post-off emit.
        assert events == [
            {'event': 'one'},
            {'event': 'two'},
            {'event': 'one', 'oh': 'no'},
        ]
    else:
        # 'one' handler saw only the first emit before being removed.
        assert events == [
            {'event': 'one'},
        ]
def test_event_emitter_exceptions():
    """Handler exceptions are swallowed by default, raised with quiet=False."""
    t = Thing()

    def handle(**args):
        raise OSError('oh no')

    t.on('*', handle)
    # Default emit: exception is suppressed (presumably quiet=True default).
    t.emit('one')
    with pytest.raises(IOError):
        t.emit('one', quiet=False)
def test_event_emitter_unknown_event_types():
    """Subscribing to or emitting an undeclared event type raises ValueError."""
    t = Thing()
    with pytest.raises(ValueError):
        t.on('hullo', None)
    with pytest.raises(ValueError):
        t.emit('hello')
| 18.85 | 47 | 0.528736 | 59 | 0.052166 | 0 | 0 | 598 | 0.528736 | 0 | 0 | 142 | 0.125553 |
6502f4ca30fdd305a49eeefeb8dc2c19d45c0e83 | 2,598 | py | Python | dit/divergences/tests/test_jensen_shannon_divergence.py | chebee7i/dit | 59626e34c7938fddeec140522dd2a592ba4f42ef | [
"BSD-2-Clause"
] | null | null | null | dit/divergences/tests/test_jensen_shannon_divergence.py | chebee7i/dit | 59626e34c7938fddeec140522dd2a592ba4f42ef | [
"BSD-2-Clause"
] | null | null | null | dit/divergences/tests/test_jensen_shannon_divergence.py | chebee7i/dit | 59626e34c7938fddeec140522dd2a592ba4f42ef | [
"BSD-2-Clause"
] | null | null | null | """
Tests for dit.divergences.jensen_shannon_divergence.
"""
from nose.tools import assert_almost_equal, assert_raises
from dit import Distribution
from dit.exceptions import ditException
from dit.divergences.jensen_shannon_divergence import (
jensen_shannon_divergence as JSD,
jensen_shannon_divergence_pmf as JSD_pmf
)
def test_jsd0():
    """ Test the JSD of a distribution but with weights misspecified."""
    d1 = Distribution("AB", [0.5, 0.5])
    # Passing a distribution where a weight sequence is expected must fail.
    assert_raises(ditException, JSD, d1, d1)
def test_jsd1():
    """ Test the JSD of a distribution with itself """
    d1 = Distribution("AB", [0.5, 0.5])
    jsd = JSD([d1, d1])
    # JSD of identical distributions is exactly zero.
    assert_almost_equal(jsd, 0)
def test_jsd2():
    """ Test the JSD with half-overlapping distributions """
    d1 = Distribution("AB", [0.5, 0.5])
    d2 = Distribution("BC", [0.5, 0.5])
    jsd = JSD([d1, d2])
    # Half the mass is disjoint, giving 0.5 bit of divergence.
    assert_almost_equal(jsd, 0.5)
def test_jsd3():
    """ Test the JSD with disjoint distributions """
    d1 = Distribution("AB", [0.5, 0.5])
    d2 = Distribution("CD", [0.5, 0.5])
    jsd = JSD([d1, d2])
    # Disjoint supports hit the JSD maximum of 1 bit for two distributions.
    assert_almost_equal(jsd, 1.0)
def test_jsd4():
    """ Test the JSD with half-overlapping distributions with weights """
    d1 = Distribution("AB", [0.5, 0.5])
    d2 = Distribution("BC", [0.5, 0.5])
    # Unequal mixture weights scale the divergence below the 0.5 of the
    # uniform-weight case.
    jsd = JSD([d1, d2], [0.25, 0.75])
    assert_almost_equal(jsd, 0.40563906222956625)
def test_jsd5():
    """ Test that JSD fails when more weights than dists are given """
    d1 = Distribution("AB", [0.5, 0.5])
    d2 = Distribution("BC", [0.5, 0.5])
    # Three weights for two distributions is a length mismatch.
    assert_raises(ditException, JSD, [d1, d2], [0.1, 0.6, 0.3])
def test_jsd_pmf1():
    """ Test the JSD of a distribution with itself """
    d1 = [0.5, 0.5]
    jsd = JSD_pmf([d1, d1])
    # Identical pmfs diverge by zero.
    assert_almost_equal(jsd, 0)
def test_jsd_pmf2():
    """ Test the JSD with half-overlapping distributions """
    d1 = [0.5, 0.5, 0.0]
    d2 = [0.0, 0.5, 0.5]
    jsd = JSD_pmf([d1, d2])
    # Half-overlapping supports: 0.5 bit, mirroring test_jsd2.
    assert_almost_equal(jsd, 0.5)
def test_jsd_pmf3():
    """ Test the JSD with disjoint distributions """
    d1 = [0.5, 0.5, 0.0, 0.0]
    d2 = [0.0, 0.0, 0.5, 0.5]
    jsd = JSD_pmf([d1, d2])
    # Disjoint supports give the maximum of 1 bit.
    assert_almost_equal(jsd, 1.0)
def test_jsd_pmf4():
    """ Test the JSD with half-overlapping distributions with weights """
    d1 = [0.5, 0.5, 0.0]
    d2 = [0.0, 0.5, 0.5]
    # Same expected value as the weighted Distribution case (test_jsd4).
    jsd = JSD_pmf([d1, d2], [0.25, 0.75])
    assert_almost_equal(jsd, 0.40563906222956625)
def test_jsd_pmf5():
    """ Test that JSD fails when more weights than dists are given """
    d1 = [0.5, 0.5, 0.0]
    d2 = [0.0, 0.5, 0.5]
    # Four weights for two pmfs is a length mismatch.
    assert_raises(ditException, JSD_pmf, [d1, d2], [0.1, 0.6, 0.2, 0.1])
| 30.928571 | 73 | 0.624326 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 746 | 0.287144 |
65036c3303afa1e8b9728043f619a82fc6c9e04f | 261 | py | Python | demo/test_scans.py | zhanwj/multi-task-pytorch | 7d57645ec8be0ca0c258cfa99fb788e3cd37f106 | [
"MIT"
] | 2 | 2019-06-11T16:16:11.000Z | 2020-07-21T10:34:40.000Z | demo/test_scans.py | zhanwj/multi-task-pytorch | 7d57645ec8be0ca0c258cfa99fb788e3cd37f106 | [
"MIT"
] | null | null | null | demo/test_scans.py | zhanwj/multi-task-pytorch | 7d57645ec8be0ca0c258cfa99fb788e3cd37f106 | [
"MIT"
] | 2 | 2019-05-21T11:07:29.000Z | 2019-06-11T16:17:02.000Z | import torch
maxdisp = 10
disp_scans = torch.arange(maxdisp).view(1,maxdisp,1,1)
zeros_scans = torch.arange(maxdisp).view(1,maxdisp,1,1)
semseg = torch.arange(16).view(4,4)
zeros_scans = torch.cat([zeros_scans.repeat(1, repeat,1, 1) for i in range(1)],dim=0)
| 32.625 | 85 | 0.731801 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
65054cfd998e3e6858fee00ff01c36b5dddea1ff | 383 | py | Python | models.py | collingreen/yaib_plugin_leavemessage | c1e7254edee5255167c2015ee2566f9770b35412 | [
"MIT"
] | null | null | null | models.py | collingreen/yaib_plugin_leavemessage | c1e7254edee5255167c2015ee2566f9770b35412 | [
"MIT"
] | 1 | 2015-06-06T06:28:45.000Z | 2015-06-06T06:28:45.000Z | models.py | collingreen/yaib_plugin_leavemessage | c1e7254edee5255167c2015ee2566f9770b35412 | [
"MIT"
] | null | null | null | from sqlalchemy import Table, Column, String, DateTime, Text
from modules.persistence import Base, getModelBase
CustomBase = getModelBase('leavemessage')
class Message(Base, CustomBase):
    """ORM model for a message left for an offline user (leavemessage plugin)."""
    user = Column(String(200))        # full hostmask/user of the sender
    nick = Column(String(100))        # sender's nick
    message_time = Column(DateTime)   # when the message was left
    to_nick = Column(String(100))     # intended recipient's nick
    channel = Column(String(50))      # channel the message was left in
| 25.533333 | 60 | 0.723238 | 224 | 0.584856 | 0 | 0 | 0 | 0 | 0 | 0 | 14 | 0.036554 |
65055cb608c13b9ada0803b4c98f800c169f7118 | 1,490 | py | Python | data/raw_data/test.py | orion-orion/Cloze_Test | 021e550e4323d17832f992b9cd7000552b568bc8 | [
"MIT"
] | 1 | 2020-02-13T11:13:09.000Z | 2020-02-13T11:13:09.000Z | data/raw_data/test.py | lonelyprince7/cloze_test | 021e550e4323d17832f992b9cd7000552b568bc8 | [
"MIT"
] | 1 | 2020-02-08T06:34:19.000Z | 2020-02-12T13:02:19.000Z | data/raw_data/test.py | lonelyprince7/cloze_test | 021e550e4323d17832f992b9cd7000552b568bc8 | [
"MIT"
] | 1 | 2020-02-13T06:31:17.000Z | 2020-02-13T06:31:17.000Z | '''
Descripttion:
Version: 1.0
Author: ZhangHongYu
Date: 2022-02-05 18:23:00
LastEditors: ZhangHongYu
LastEditTime: 2022-05-17 16:26:12
'''
import os
import sys
import json
import argparse
from transformers import AlbertTokenizer
from pytorch_pretrained_bert import BertTokenizer, BertForMaskedLM
# Scan a directory of cloze-test JSON files and report the fraction whose
# tokenized article exceeds 512 characters (the BERT-style length limit).
# NOTE(review): this script shadows the builtins `max`, `str`, `dict` and
# `file`, and `str` only ever holds the LAST sentence's joined tokens when
# the length checks run — presumably the whole article was intended; verify.
file_path = sys.argv[1]
#bert_model = BertForMaskedLM.from_pretrained('/data/jianghao/ralbert-cloth/model/albert-xxlarge-v2/pytorch_model.bin')
PAD, MASK, CLS, SEP = '[PAD]', '[MASK]', '[CLS]', '[SEP]'
bert_tokenizer = AlbertTokenizer.from_pretrained('albert-xxlarge-v2')
max=-1   # longest token string seen
cnt=0    # files exceeding 512 characters
tot=0    # files processed
for file in os.listdir(file_path):
    if file.endswith(".json"):
        with open(os.path.join(file_path,file),'r') as f:
            dict = json.load(f)
        sentences=dict['article'].split('.')
        str=""
        for sentence in sentences:
            # Cloze blanks are marked '_' in the source; map them to [MASK].
            sentence=sentence.replace('_','[MASK]')
            tokens = bert_tokenizer.tokenize(sentence)
            if len(tokens) == 0:
                continue
            # Ensure BERT-style [CLS] ... [SEP] framing per sentence.
            if tokens[0] != CLS:
                tokens = [CLS] + tokens
            if tokens[-1] != SEP:
                tokens.append(SEP)
            str = ''.join(tokens)
            # print(str)
            # print('done')
        tot=tot+1
        if len(str)>max:
            max=len(str)
        if len(str)>512:
            cnt=cnt+1
            #os.system("rm "+os.path.join(file_path,file))
# Fraction of over-length files.  NOTE(review): ZeroDivisionError if the
# directory contains no .json files.
print(cnt/tot)
| 31.041667 | 119 | 0.573826 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 419 | 0.280455 |
6506db5995970d6837a7164f54f13ccdaecfc008 | 1,651 | py | Python | steampipe_alchemy/models/aws_route53_resolver_rule.py | RyanJarv/steampipe_alchemy | c8a31303252c1bd8d83d0f9c429d7d0ef7e1690f | [
"BSD-3-Clause"
] | 9 | 2021-04-21T04:21:01.000Z | 2021-06-19T19:33:36.000Z | steampipe_alchemy/models/aws_route53_resolver_rule.py | RyanJarv/steampipe_alchemy | c8a31303252c1bd8d83d0f9c429d7d0ef7e1690f | [
"BSD-3-Clause"
] | null | null | null | steampipe_alchemy/models/aws_route53_resolver_rule.py | RyanJarv/steampipe_alchemy | c8a31303252c1bd8d83d0f9c429d7d0ef7e1690f | [
"BSD-3-Clause"
] | 1 | 2021-04-26T21:08:20.000Z | 2021-04-26T21:08:20.000Z | from sqlalchemy import Column
from sqlalchemy.types import JSON, Text, Boolean, TIMESTAMP, BigInteger
from sqlalchemy.dialects import postgresql as psql
from steampipe_alchemy.mixins import FormatMixins
from steampipe_alchemy import Base
class AwsRoute53ResolverRule(Base, FormatMixins):
__tablename__ = 'aws_route53_resolver_rule'
tags = Column('tags', JSON, nullable=True)
akas = Column('akas', JSON, nullable=True)
target_ips = Column('target_ips', JSON, nullable=True)
tags_src = Column('tags_src', JSON, nullable=True)
resolver_rule_associations = Column('resolver_rule_associations', JSON, nullable=True)
domain_name = Column('domain_name', Text, nullable=True)
owner_id = Column('owner_id', Text, nullable=True)
resolver_endpoint_id = Column('resolver_endpoint_id', Text, nullable=True)
rule_type = Column('rule_type', Text, nullable=True)
share_status = Column('share_status', Text, nullable=True)
status_message = Column('status_message', Text, nullable=True)
creation_time = Column('creation_time', Text, nullable=True)
modification_time = Column('modification_time', Text, nullable=True)
title = Column('title', Text, nullable=True)
partition = Column('partition', Text, nullable=True)
region = Column('region', Text, nullable=True)
name = Column('name', Text, nullable=True)
account_id = Column('account_id', Text, nullable=True)
id = Column('id', Text, nullable=True)
arn = Column('arn', Text, primary_key=True, nullable=True)
status = Column('status', Text, nullable=True)
creator_request_id = Column('creator_request_id', Text, nullable=True) | 53.258065 | 90 | 0.740763 | 1,411 | 0.854634 | 0 | 0 | 0 | 0 | 0 | 0 | 290 | 0.175651 |
650ab0f46dda1e9c953f58c0e88233c1aedea04d | 8,825 | py | Python | harvester/sharekit/extraction.py | nppo/search-portal | aedf21e334f178c049f9d6cf37cafd6efc07bc0d | [
"MIT"
] | 1 | 2022-01-10T00:26:12.000Z | 2022-01-10T00:26:12.000Z | harvester/sharekit/extraction.py | nppo/search-portal | aedf21e334f178c049f9d6cf37cafd6efc07bc0d | [
"MIT"
] | 48 | 2021-11-11T13:43:09.000Z | 2022-03-30T11:33:37.000Z | harvester/sharekit/extraction.py | nppo/search-portal | aedf21e334f178c049f9d6cf37cafd6efc07bc0d | [
"MIT"
] | null | null | null | import re
from mimetypes import guess_type
from django.conf import settings
from datagrowth.processors import ExtractProcessor
from datagrowth.utils import reach
from core.constants import HIGHER_EDUCATION_LEVELS, RESTRICTED_MATERIAL_SETS
class SharekitMetadataExtraction(ExtractProcessor):
    """Extractors that map one Sharekit API record ("node") to harvester fields.

    Each classmethod receives the JSON node (its payload under
    ``node["attributes"]``) and returns one normalized value; the mapping
    from output field to extractor lives in SHAREKIT_EXTRACTION_OBJECTIVE.
    """
    youtube_regex = re.compile(r".*(youtube\.com|youtu\.be).*", re.IGNORECASE)

    @classmethod
    def get_record_state(cls, node):
        # Records without explicit status are considered active.
        return node.get("meta", {}).get("status", "active")

    #############################
    # GENERIC
    #############################

    @classmethod
    def get_files(cls, node):
        """Return file + link descriptors as {mime_type, url, title} dicts.

        Uploaded files come first (skipping entries without mime type or
        URL); external links follow, typed as text/html with a positional
        fallback title.
        """
        files = node["attributes"].get("files", []) or []
        links = node["attributes"].get("links", []) or []
        output = [
            {
                "mime_type": file["resourceMimeType"],
                "url": file["url"],
                "title": file["fileName"]
            }
            for file in files if file["resourceMimeType"] and file["url"]
        ]
        output += [
            {
                "mime_type": "text/html",
                "url": link["url"],
                "title": link.get("urlName", None) or f"URL {ix+1}"
            }
            for ix, link in enumerate(links)
        ]
        return output

    @classmethod
    def get_url(cls, node):
        # URL of the first file/link, stripped; None when there are none.
        files = cls.get_files(node)
        if not files:
            return
        return files[0]["url"].strip()

    @classmethod
    def get_mime_type(cls, node):
        # Mime type of the first file/link; None when there are none.
        files = cls.get_files(node)
        if not files:
            return
        return files[0]["mime_type"]

    @classmethod
    def get_technical_type(cls, node):
        """Resolve a technical type: explicit field, then mime-type mapping,
        then a guess from the file URL's extension, else "unknown"."""
        technical_type = node["attributes"].get("technicalFormat", None)
        if technical_type:
            return technical_type
        files = cls.get_files(node)
        if not files:
            return
        technical_type = settings.MIME_TYPE_TO_TECHNICAL_TYPE.get(files[0]["mime_type"], None)
        if technical_type:
            return technical_type
        file_url = files[0]["url"]
        if not file_url:
            return
        mime_type, encoding = guess_type(file_url)
        return settings.MIME_TYPE_TO_TECHNICAL_TYPE.get(mime_type, "unknown")

    @classmethod
    def get_material_types(cls, node):
        # Normalize to a list, dropping falsy entries.
        material_types = node["attributes"].get("typesLearningMaterial", [])
        if not material_types:
            return []
        elif isinstance(material_types, list):
            return [material_type for material_type in material_types if material_type]
        else:
            return [material_types]

    @classmethod
    def get_copyright(cls, node):
        return node["attributes"].get("termsOfUse", None)

    @classmethod
    def get_from_youtube(cls, node):
        # True when the primary URL points at YouTube.
        url = cls.get_url(node)
        if not url:
            return False
        return cls.youtube_regex.match(url) is not None

    @classmethod
    def get_authors(cls, node):
        authors = node["attributes"].get("authors", []) or []
        return [
            {
                "name": author["person"]["name"],
                "email": author["person"]["email"]
            }
            for author in authors
        ]

    @classmethod
    def get_publishers(cls, node):
        """Publishers list, with "HBO Verpleegkunde" appended for records
        tagged with any hbovpk keyword."""
        publishers = node["attributes"].get("publishers", []) or []
        if isinstance(publishers, str):
            publishers = [publishers]
        keywords = node["attributes"].get("keywords", []) or []
        # Check HBOVPK tags
        hbovpk_keywords = [keyword for keyword in keywords if keyword and "hbovpk" in keyword.lower()]
        if hbovpk_keywords:
            publishers.append("HBO Verpleegkunde")
        return publishers

    @classmethod
    def get_lom_educational_levels(cls, node):
        # Unique, non-empty LOM educational level values.
        educational_levels = node["attributes"].get("educationalLevels", [])
        if not educational_levels:
            return []
        return list(set([
            educational_level["value"] for educational_level in educational_levels
            if educational_level["value"]
        ]))

    @classmethod
    def get_lowest_educational_level(cls, node):
        """Map the record's educational levels to the lowest numeric rank.

        Returns -1 when no levels are present, 0 when any level falls below
        the known higher-education levels, else the minimum matching rank
        (starting from 3).
        """
        educational_levels = cls.get_lom_educational_levels(node)
        current_numeric_level = 3 if len(educational_levels) else -1
        for education_level in educational_levels:
            for higher_education_level, numeric_level in HIGHER_EDUCATION_LEVELS.items():
                if not education_level.startswith(higher_education_level):
                    continue
                # One of the records education levels matches a higher education level.
                # We re-assign current level and stop processing this education level,
                # as it shouldn't match multiple higher education levels
                current_numeric_level = min(current_numeric_level, numeric_level)
                break
            else:
                # No higher education level found inside current education level.
                # Dealing with an "other" means a lower education level than we're interested in.
                # So this record has the lowest possible level. We're done processing this seed.
                current_numeric_level = 0
                break
        return current_numeric_level

    @classmethod
    def get_ideas(cls, node):
        # Split compound "A - B" vocabulary values into unique idea terms.
        compound_ideas = [vocabulary["value"] for vocabulary in node["attributes"].get("vocabularies", [])]
        if not compound_ideas:
            return []
        ideas = []
        for compound_idea in compound_ideas:
            ideas += compound_idea.split(" - ")
        return list(set(ideas))

    @classmethod
    def get_is_restricted(cls, data):
        # Restricted when the record's self link belongs to a restricted set.
        link = data["links"]["self"]
        for restricted_set in RESTRICTED_MATERIAL_SETS:
            if restricted_set in link:
                return True
        return False

    @classmethod
    def get_analysis_allowed(cls, node):
        # We disallow analysis for non-derivative materials as we'll create derivatives in that process
        # NB: any material that is_restricted will also have analysis_allowed set to False
        copyright = SharekitMetadataExtraction.get_copyright(node)
        return (copyright is not None and "nd" not in copyright) and copyright != "yes"

    @classmethod
    def get_is_part_of(cls, node):
        return reach("$.attributes.partOf", node)

    @classmethod
    def get_research_themes(cls, node):
        # Normalize to a list (the API may return a scalar).
        theme_value = node["attributes"].get("themesResearchObject", [])
        if not theme_value:
            return []
        return theme_value if isinstance(theme_value, list) else [theme_value]

    @classmethod
    def get_empty_list(cls, node):
        # Placeholder extractor for fields Sharekit does not provide.
        return []

    @classmethod
    def get_none(cls, node):
        # Placeholder extractor for fields Sharekit does not provide.
        return None

    @classmethod
    def get_learning_material_themes(cls, node):
        # Normalize to a list (the API may return a scalar).
        theme_value = node["attributes"].get("themesLearningMaterial", [])
        if not theme_value:
            return []
        return theme_value if isinstance(theme_value, list) else [theme_value]
# Field-to-extractor mapping consumed by the ExtractProcessor: values are
# either JSONPath strings into the Sharekit node or extractor callables.
# A leading "#" on a key marks the field as non-content metadata.
SHAREKIT_EXTRACTION_OBJECTIVE = {
    "url": SharekitMetadataExtraction.get_url,
    "files": SharekitMetadataExtraction.get_files,
    "title": "$.attributes.title",
    "language": "$.attributes.language",
    "keywords": "$.attributes.keywords",
    "description": "$.attributes.abstract",
    "mime_type": SharekitMetadataExtraction.get_mime_type,
    "technical_type": SharekitMetadataExtraction.get_technical_type,
    "material_types": SharekitMetadataExtraction.get_material_types,
    "copyright": SharekitMetadataExtraction.get_copyright,
    "copyright_description": SharekitMetadataExtraction.get_none,
    "aggregation_level": "$.attributes.aggregationlevel",
    "authors": SharekitMetadataExtraction.get_authors,
    "publishers": SharekitMetadataExtraction.get_publishers,
    "publisher_date": "$.attributes.publishedAt",
    "lom_educational_levels": SharekitMetadataExtraction.get_lom_educational_levels,
    "lowest_educational_level": SharekitMetadataExtraction.get_lowest_educational_level,
    "disciplines": SharekitMetadataExtraction.get_empty_list,
    "ideas": SharekitMetadataExtraction.get_ideas,
    "from_youtube": SharekitMetadataExtraction.get_from_youtube,
    "#is_restricted": SharekitMetadataExtraction.get_is_restricted,
    "analysis_allowed": SharekitMetadataExtraction.get_analysis_allowed,
    "is_part_of": SharekitMetadataExtraction.get_is_part_of,
    "has_parts": "$.attributes.hasParts",
    "doi": "$.attributes.doi",
    "research_object_type": "$.attributes.typeResearchObject",
    "research_themes": SharekitMetadataExtraction.get_research_themes,
    "parties": SharekitMetadataExtraction.get_empty_list,
    "learning_material_themes": SharekitMetadataExtraction.get_learning_material_themes,
    "consortium": "$.attributes.consortium"
}
| 37.394068 | 107 | 0.643853 | 6,785 | 0.768839 | 0 | 0 | 6,451 | 0.730992 | 0 | 0 | 2,040 | 0.231161 |
650b61eb839964413b4047a7102a2ba07a9d68e0 | 1,518 | py | Python | jake/test/test_audit.py | lvcarlosja/jake | 0ecbcdd89352d27f50e35d1d73b624b86456e568 | [
"Apache-2.0"
] | null | null | null | jake/test/test_audit.py | lvcarlosja/jake | 0ecbcdd89352d27f50e35d1d73b624b86456e568 | [
"Apache-2.0"
] | 4 | 2021-07-29T18:51:06.000Z | 2021-12-13T20:50:20.000Z | jake/test/test_audit.py | lvcarlosja/jake | 0ecbcdd89352d27f50e35d1d73b624b86456e568 | [
"Apache-2.0"
] | null | null | null | #
# Copyright 2019-Present Sonatype Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
""" test_audit.py , for all your testing of audit py needs """
import unittest
import json
from pathlib import Path
from ..audit.audit import Audit
from ..types.results_decoder import ResultsDecoder
class TestAudit(unittest.TestCase):
    """Unit tests for the Audit class."""

    def setUp(self):
        # Fresh Audit instance per test.
        self.func = Audit()

    def test_call_audit_results_prints_output(self):
        """audit_results returns the number of vulnerabilities found in a
        canned OSS Index response fixture."""
        filename = Path(__file__).parent / "ossindexresponse.txt"
        with open(filename, "r") as stdin:
            response = json.loads(
                stdin.read(),
                cls=ResultsDecoder)
        self.assertEqual(self.func.audit_results(response),
                         self.expected_results())

    @staticmethod
    def expected_results():
        """Vulnerability count expected from the ossindexresponse.txt fixture."""
        return 3
| 33 | 81 | 0.725296 | 725 | 0.477602 | 0 | 0 | 84 | 0.055336 | 0 | 0 | 901 | 0.593544 |
650bd2653086235614c46bb1b73a337dfc0ba477 | 6,046 | py | Python | vintage_commands.py | ktuan89/Vintage | 81f178043d1dad4ec9bd50ad4db2df9ef994f098 | [
"MIT",
"Unlicense"
] | null | null | null | vintage_commands.py | ktuan89/Vintage | 81f178043d1dad4ec9bd50ad4db2df9ef994f098 | [
"MIT",
"Unlicense"
] | null | null | null | vintage_commands.py | ktuan89/Vintage | 81f178043d1dad4ec9bd50ad4db2df9ef994f098 | [
"MIT",
"Unlicense"
] | null | null | null | import sublime, sublime_plugin
import os
#import subprocess
def is_legal_path_char(c):
    """Return True when *c* may appear in a file path.

    Rejects whitespace/newline, double quotes and common shell/markup
    delimiters.  XXX make this platform-specific?
    """
    illegal = ' \n"|*<>{}[]()'
    return c not in illegal
def move_while_path_character(view, start, is_at_boundary, increment=1):
    """Step *start* by *increment* while the character there is path-legal.

    Stops (and returns the current position) as soon as the character at
    the position is not a legal path character, or as soon as stepping
    reaches the boundary predicate ``is_at_boundary``.
    """
    pos = start
    while is_legal_path_char(view.substr(pos)):
        pos += increment
        if is_at_boundary(pos):
            break
    return pos
class ViOpenFileUnderSelectionCommand(sublime_plugin.TextCommand):
    """Open the file named by the current selection (or current line).

    With an empty selection the whole line's text is used as the path; a
    trailing ':' (e.g. from grep output) is stripped.  Nothing happens if
    the path does not exist.
    """
    def run(self, edit):
        #sel = self.view.sel()[0]
        region = self.view.sel()[0]
        # Dead code kept for reference: former word-under-caret expansion
        # using move_while_path_character.
        """if not sel.empty():
            file_name = self.view.substr(sel)
        else:
            caret_pos = self.view.sel()[0].begin()
            current_line = self.view.line(caret_pos)
            left = move_while_path_character(
                self.view,
                caret_pos,
                lambda x: x < current_line.begin(),
                increment=-1)
            right = move_while_path_character(
                self.view,
                caret_pos,
                lambda x: x > current_line.end(),
                increment=1)
            file_name = self.view.substr(sublime.Region(left + 1, right))"""
        if region.empty():
            line = self.view.line(region)
            file_name = self.view.substr(line)
        else:
            file_name = self.view.substr(region)
        # Dead code kept for reference: resolving relative to the current file.
        """file_name = os.path.join(os.path.dirname(self.view.file_name()),
            file_name)"""
        if file_name.endswith(":"):
            file_name = file_name[0:-1]
        if os.path.exists(file_name):
            self.view.window().open_file(file_name)
class CopyCurrentWord(sublime_plugin.TextCommand):
    """Copy the word under each empty cursor to the system clipboard.

    Non-empty selections are skipped; with multiple empty cursors the last
    one processed wins, since the clipboard holds a single value.
    """
    def run(self, edit):
        for region in self.view.sel():
            if region.empty():
                sublime.set_clipboard(self.view.substr(self.view.word(region.begin())))
class OpenFileInXcode(sublime_plugin.TextCommand):
    """Open the current buffer's file in Xcode (macOS only).

    Uses subprocess with an argument list instead of the previous
    ``os.system("open ... '<name>'")`` string: filenames containing a
    single quote broke the shell command, and shell metacharacters in a
    filename could be executed.
    """
    def run(self, edit):
        file_name = self.view.file_name()
        if file_name is not None:
            # Local import: subprocess is only needed on this code path.
            import subprocess
            subprocess.call(["open", "-a", "/Applications/Xcode.app", file_name])
class ViSaveAndExit(sublime_plugin.WindowCommand):
    """Vim-style :wq — save the active view, close it, and close the window
    when no views remain."""
    def run(self):
        self.window.run_command('save')
        self.window.run_command('close')
        if len(self.window.views()) == 0:
            # Last view closed: close the (now empty) window too.
            self.window.run_command('close')
#class MoveFocusedViewToBeginning(sublime_plugin.EventListener):
# def on_activated(self, view):
# view.window().set_view_index(view, 0, 0)
class ExtendedSwitcherHaha(sublime_plugin.WindowCommand):
    """Quick-panel switcher over open files.

    ``list_mode`` selects the scope: "active_group" lists the current tab
    group's views, "window" lists every view in the window.  Behaviour is
    driven by ExtendedSwitcher.sublime-settings (skip current file, show
    full path, dirty-file marker, sorting).

    Fix: removed a stray Python-2 debug statement (``print "Here here
    here"``) left at the top of run(); it spammed the console and is a
    syntax error under Python 3.
    """

    # declarations (reset per run)
    open_files = []
    open_views = []
    window = []
    settings = []

    def run(self, list_mode):
        """Build the file list for *list_mode* and show the quick panel."""
        self.open_files = []
        self.open_views = []
        self.window = sublime.active_window()
        self.settings = sublime.load_settings('ExtendedSwitcher.sublime-settings')
        for f in self.getViews(list_mode):
            # if skip the current active is enabled do not add the current file
            if self.settings.get('skip_current_file') == True:
                if f.id() == self.window.active_view().id():
                    continue
            self.open_views.append(f)  # add the view object
            file_name = f.file_name()  # get the full path
            if file_name:
                if f.is_dirty():
                    # mark files with unsaved changes
                    file_name += self.settings.get('mark_dirty_file_char')
                if self.settings.get('show_full_file_path') == True:
                    self.open_files.append(os.path.basename(file_name) + ' - ' + os.path.dirname(file_name))
                else:
                    self.open_files.append(os.path.basename(file_name))
            else:
                self.open_files.append("Untitled")
        if self.check_for_sorting() == True:
            self.sort_files()
        self.window.show_quick_panel(self.open_files, self.tab_selected)  # show the file list

    def tab_selected(self, selected):
        """Quick-panel callback: focus the chosen view (-1 = cancelled)."""
        if selected > -1:
            self.window.focus_view(self.open_views[selected])
        return selected

    def sort_files(self):
        """Sort the display names alphabetically and reorder open_views to match."""
        open_files = self.open_files
        open_views = []
        open_files.sort()
        for f in open_files:
            for fv in self.open_views:
                if fv.file_name():
                    # strip the " - dirname" suffix before comparing basenames
                    f = f.replace(" - " + os.path.dirname(fv.file_name()), '')
                    if (f == os.path.basename(fv.file_name())) or (f == os.path.basename(fv.file_name()) + self.settings.get('mark_dirty_file_char')):
                        open_views.append(fv)
                        self.open_views.remove(fv)
                if f == "Untitled" and not fv.file_name():
                    open_views.append(fv)
                    self.open_views.remove(fv)
        self.open_views = open_views

    def check_for_sorting(self):
        """Return the 'sort' setting flag (None when the key is absent)."""
        if self.settings.has("sort"):
            return self.settings.get("sort", False)

    def getViews(self, list_mode):
        """Return the views to list for *list_mode* (falls back to all views)."""
        views = []
        # get only the open files for the active_group
        if list_mode == "active_group":
            views = self.window.views_in_group(self.window.active_group())
        # all open views if list_mode is "window" or the group had none
        if (list_mode == "window") or (len(views) < 1):
            views = self.window.views()
        return views
| 35.775148 | 148 | 0.561528 | 5,453 | 0.901919 | 0 | 0 | 0 | 0 | 0 | 0 | 2,073 | 0.342871 |
650c687c2aa892784fed03faf887190ac6a55992 | 3,718 | py | Python | bitten/tests/notify.py | dokipen/bitten | d4d2829c63eec84bcfab05ec7035a23e85d90c00 | [
"BSD-3-Clause"
] | 1 | 2016-08-28T03:13:03.000Z | 2016-08-28T03:13:03.000Z | bitten/tests/notify.py | dokipen/bitten | d4d2829c63eec84bcfab05ec7035a23e85d90c00 | [
"BSD-3-Clause"
] | null | null | null | bitten/tests/notify.py | dokipen/bitten | d4d2829c63eec84bcfab05ec7035a23e85d90c00 | [
"BSD-3-Clause"
] | null | null | null | #-*- coding: utf-8 -*-
#
# Copyright (C) 2007 Ole Trenner, <ole@jayotee.de>
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution.
import unittest
from trac.db import DatabaseManager
from trac.test import EnvironmentStub, Mock
from trac.web.session import DetachedSession
from bitten.model import schema, Build, BuildStep, BuildLog
from bitten.notify import BittenNotify, BuildNotifyEmail
class BittenNotifyBaseTest(unittest.TestCase):
    """Shared fixture: a stub Trac environment with the Bitten schema installed."""

    def setUp(self):
        self.env = EnvironmentStub(enable=['trac.*', 'bitten.notify.*'])
        # Mock repository whose changesets echo the requested revision.
        repos = Mock(get_changeset=lambda rev: Mock(author='author', rev=rev))
        self.env.get_repository = lambda authname=None: repos
        # Create all Bitten tables in the stub database.
        db = self.env.get_db_cnx()
        cursor = db.cursor()
        connector, _ = DatabaseManager(self.env)._get_connector()
        for table in schema:
            for stmt in connector.to_sql(table):
                cursor.execute(stmt)
        db.commit()
class BittenNotifyTest(BittenNotifyBaseTest):
    """Unit tests for the BittenNotify dispatcher class."""
    def setUp(self):
        BittenNotifyBaseTest.setUp(self)
        self.dispatcher = BittenNotify(self.env)
        self.failed_build = Build(self.env, status=Build.FAILURE)
        self.successful_build = Build(self.env, status=Build.SUCCESS)
    def test_do_notify_on_failed_build(self):
        self.set_option(BittenNotify.notify_on_failure, 'true')
        should = self.dispatcher._should_notify(self.failed_build)
        self.assertTrue(should,
                        'notifier should be called for failed builds.')
    def test_do_not_notify_on_failed_build(self):
        self.set_option(BittenNotify.notify_on_failure, 'false')
        should = self.dispatcher._should_notify(self.failed_build)
        self.assertFalse(should,
                         'notifier should not be called for failed build.')
    def test_do_notify_on_successful_build(self):
        self.set_option(BittenNotify.notify_on_success, 'true')
        should = self.dispatcher._should_notify(self.successful_build)
        self.assertTrue(should,
                        'notifier should be called for successful builds when configured.')
    def test_do_not_notify_on_successful_build(self):
        self.set_option(BittenNotify.notify_on_success, 'false')
        should = self.dispatcher._should_notify(self.successful_build)
        self.assertFalse(should,
                         'notifier should not be called for successful build.')
    def set_option(self, option, value):
        # helper: write a notification option into the stub config
        self.env.config.set(option.section, option.name, value)
class BuildNotifyEmailTest(BittenNotifyBaseTest):
    """unit tests for BittenNotifyEmail class"""
    def setUp(self):
        BittenNotifyBaseTest.setUp(self)
        self.env.config.set('notification','smtp_enabled','true')
        self.notifications_sent_to = []
        def send(to, cc, hdrs=None):
            # Fix: default was a shared mutable {} (mutable-default-argument
            # pitfall); None is the safe sentinel and the value is unused here.
            self.notifications_sent_to = to
        def noop(*args, **kw):
            pass
        # mock out the SMTP plumbing so notify() only records recipients
        self.email = Mock(BuildNotifyEmail, self.env,
                          begin_send=noop,
                          finish_send=noop,
                          send=send)
        self.build = Build(self.env, status=Build.SUCCESS, rev=123)
    def test_notification_is_sent_to_author(self):
        self.email.notify(self.build)
        self.assertTrue('author' in self.notifications_sent_to,
                        'Recipient list should contain the author')
    # TODO functional tests of generated mails
def suite():
    """Aggregate this module's test cases into a single test suite."""
    tests = unittest.TestSuite()
    for case in (BittenNotifyTest, BuildNotifyEmailTest):
        tests.addTest(unittest.makeSuite(case, 'test'))
    return tests
if __name__ == '__main__':
    unittest.main(defaultTest='suite')
| 38.329897 | 83 | 0.687197 | 2,959 | 0.795858 | 0 | 0 | 0 | 0 | 0 | 0 | 741 | 0.199301 |
650d0ad17e404144a026ce3f06aafc17ea1fda8f | 1,962 | py | Python | sparse gamma def/gamma_def_score.py | blei-lab/ars-reparameterization | b20a84c28537d85e0aaf62cbbaacb6de9370f0a3 | [
"MIT"
] | 33 | 2017-03-11T10:00:32.000Z | 2022-03-08T14:23:45.000Z | ars-reparameterization/sparse gamma def/gamma_def_score.py | astirn/neural-inverse-cdf-sampling | 80eb2eb7cf396a4e53df62bc126e9a1828f55ca9 | [
"MIT"
] | 2 | 2018-02-05T17:14:00.000Z | 2019-08-02T14:37:25.000Z | ars-reparameterization/sparse gamma def/gamma_def_score.py | astirn/neural-inverse-cdf-sampling | 80eb2eb7cf396a4e53df62bc126e9a1828f55ca9 | [
"MIT"
] | 10 | 2017-03-05T13:31:01.000Z | 2020-03-29T01:09:01.000Z | from autograd import grad
import autograd.numpy as np
import autograd.numpy.random as npr
import autograd.scipy.special as sp
from gamma_def import *
# Define helper functions for score fnc estimator
def logQ(sample, alpha, m):
    """
    Log-density of the Gamma(shape=alpha, mean=m) variational family,
    evaluated elementwise (vectorized over numpy arrays).

    With rate = alpha/m:
        log q(x) = alpha*log(alpha/m) + (alpha-1)*log(x)
                   - alpha*x/m - log Gamma(alpha)
    """
    temp = alpha*(np.log(alpha)-np.log(m))
    temp += (alpha-1.)*np.log(sample)
    temp -= alpha*sample/m
    # Fix: log(sp.gamma(alpha)) overflows to inf for alpha >~ 171;
    # gammaln computes log Gamma directly and stays finite.
    temp -= sp.gammaln(alpha)
    return temp
def grad_logQ(sample, alpha, m):
    """
    Gradient of the variational log-density w.r.t. the parameters,
    vectorized: returns shape (len(alpha), 2), where column 0 is
    d/d(alpha) and column 1 is d/d(m).
    """
    d_alpha = (np.log(alpha) - np.log(m) + 1. + np.log(sample)
               - sample/m - sp.digamma(alpha))
    d_m = alpha*sample/m**2 - alpha/m
    return np.column_stack((d_alpha, d_m))
# Define score function estimator
def score_estimator(alpha, m, x, K, alphaz, S=100):
    """
    Score-function (REINFORCE) estimator of the ELBO gradient w.r.t. the
    variational parameters (alpha, m), with a control variate.

    2*S samples are drawn: the second S estimate the control-variate
    coefficient a* = Cov(f, h)/Var(h); the first S form the final
    estimate, keeping the two halves independent.

    Note: relies on logp() from gamma_def (module-level import).
    Fix: removed dead locals from the original (N, D, num_z, L and the
    `gradient` array) -- they were computed but never used.
    """
    f = np.zeros((2*S, alpha.shape[0], 2))  # score-weighted samples
    h = np.zeros((2*S, alpha.shape[0], 2))  # raw scores (CV basis)
    for s in range(2*S):
        lmbda = npr.gamma(alpha, 1.)
        # clamp away from zero so log(sample) stays finite
        lmbda[lmbda < 1e-300] = 1e-300
        zw = m*lmbda/alpha
        lQ = logQ(zw, alpha, m)
        gradLQ = grad_logQ(zw, alpha, m)
        lP = logp(zw, K, x, alphaz)
        temp = lP - np.sum(lQ)
        f[s, :, :] = temp*gradLQ
        h[s, :, :] = gradLQ
    # control variate coefficient, estimated on the second half of samples
    covFH = np.zeros((alpha.shape[0], 2))
    covFH[:, 0] = np.diagonal(np.cov(f[S:, :, 0], h[S:, :, 0], rowvar=False)[:alpha.shape[0], alpha.shape[0]:])
    covFH[:, 1] = np.diagonal(np.cov(f[S:, :, 1], h[S:, :, 1], rowvar=False)[:alpha.shape[0], alpha.shape[0]:])
    a = covFH / np.var(h[S:, :, :], axis=0)
    return np.mean(f[:S, :, :], axis=0) - a*np.mean(h[:S, :, :], axis=0)
| 29.727273 | 104 | 0.574414 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 317 | 0.16157 |
650d548dfcf197b677b3b51c6953dea2bc1cb40b | 305 | py | Python | codes/Ex036.py | BelfortJoao/Curso-phyton01 | 79376233be228f39bf548f90b8d9bd5419ac067a | [
"MIT"
] | 3 | 2021-08-17T14:02:14.000Z | 2021-08-19T02:37:30.000Z | codes/Ex036.py | BelfortJoao/Curso-phyton01 | 79376233be228f39bf548f90b8d9bd5419ac067a | [
"MIT"
] | null | null | null | codes/Ex036.py | BelfortJoao/Curso-phyton01 | 79376233be228f39bf548f90b8d9bd5419ac067a | [
"MIT"
] | null | null | null | x = float(input('Qual o valor da casa que quer comprar? '))
y = int(input("em quantos anos quer comprar a casa? "))
z = int(input("Qual seu salario? "))
w = y*12
if x / w > (z/100)*30:
print("Voce não pode comprar a casa")
else:
print('Voce pode comprar a casa a parcela é de {:.2f}'.format(x/y))
| 33.888889 | 71 | 0.636066 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 180 | 0.586319 |
650e93f40d797c0460bad1ce4c72fe47deb0c2b7 | 3,313 | py | Python | datasets/base_dataset.py | iclr2022submission4/cgca | 3e6ea65c0ebf72a8291dde3ffdb06b50e4d2900a | [
"MIT"
] | 13 | 2022-01-10T05:28:26.000Z | 2022-02-02T10:22:42.000Z | datasets/base_dataset.py | iclr2022submission4/cgca | 3e6ea65c0ebf72a8291dde3ffdb06b50e4d2900a | [
"MIT"
] | null | null | null | datasets/base_dataset.py | iclr2022submission4/cgca | 3e6ea65c0ebf72a8291dde3ffdb06b50e4d2900a | [
"MIT"
] | null | null | null | import torch
import random
from torch.utils.data import Dataset, DataLoader
from abc import ABC
from models.base_model import Model
from torch.utils.tensorboard import SummaryWriter
from typing import List
class BaseDataset(Dataset, ABC):
    """Common dataset base: endless sampling iterator, batch collation,
    and evaluation/visualization hooks shared by concrete datasets."""
    name = 'base'

    def __init__(self, config: dict, mode: str = 'train'):
        self.config = config
        self.mode = mode
        self.device = config['device']
        self.data_dim = config['data_dim']
        # summary tag; subclasses override `name`
        self.summary_name = self.name

    '''
    Note that dataset's __getitem__() returns (x_coord, x_feat, y_coord, y_feat, name)
    But the collated batch returns type of (SparseTensorWrapper, SparseTensorWrapper)
    '''
    def __getitem__(self, idx) \
            -> (torch.tensor, torch.tensor, torch.tensor, torch.tensor, List[str]):
        # Fix: the original raised `NotImplemented` -- a constant, not an
        # exception class -- which itself raises a confusing TypeError.
        raise NotImplementedError

    def __iter__(self):
        # endless stream of uniformly sampled items
        while True:
            idx = random.randint(0, len(self) - 1)
            yield self[idx]

    def collate_fn(self, batch: List) -> dict:
        # convert a list of dicts into a dict of lists
        return {k: [d[k] for d in batch] for k in batch[0]}

    def evaluate(self, model: Model, writer: SummaryWriter, step):
        """Run model.evaluate over the whole dataset and write summaries."""
        training = model.training
        model.eval()
        data_loader = DataLoader(
            self,
            batch_size=self.config['eval_batch_size'],
            num_workers=self.config['num_workers'],
            collate_fn=self.collate_fn,
            drop_last=False,
        )
        print('')
        eval_losses = []
        for eval_step, data in enumerate(data_loader):
            mode = self.mode
            # disambiguate the summary tag when several datasets are evaluated
            if len(self.config['eval_datasets']) != 1:
                mode += '_' + self.summary_name
            eval_loss = model.evaluate(data, step, mode)
            eval_losses.append(eval_loss)
            print('\r[Evaluating, Step {:7}, Loss {:5}]'.format(
                eval_step, '%.3f' % eval_loss), end=''
            )
        print('')
        model.write_dict_summaries(step)
        model.train(training)

    def test(self, model: Model, writer: SummaryWriter, step):
        raise NotImplementedError()

    def _resolve_vis_indices(self):
        """Turn config['vis']['indices'] into concrete dataset indices.

        An int means "this many points, equally spaced"; the overfitting
        debug mode overrides everything with the single configured index.
        (Extracted: this logic was duplicated in visualize/visualize_test.)
        """
        vis_indices = self.config['vis']['indices']
        if isinstance(vis_indices, int):
            vis_indices = torch.linspace(0, len(self) - 1, vis_indices).int().tolist()
        if isinstance(self.config['overfit_one_ex'], int):
            vis_indices = torch.tensor([self.config['overfit_one_ex']])
        return vis_indices

    def visualize(self, model: Model, options: List, step):
        """Call each visualization method named in `options` on the model."""
        training = model.training
        model.eval()
        vis_indices = self._resolve_vis_indices()
        for option in options:
            # each option names a visualizing method on the model
            if hasattr(model, option):
                getattr(model, option)(self, vis_indices, step)
            else:
                raise ValueError(
                    'model {} has no method {}'.format(
                        model.__class__.__name__, option
                    )
                )
        model.train(training)

    def visualize_test(self, model: Model, writer: SummaryWriter, step):
        """Run the model's test-time visualization on the resolved indices."""
        training = model.training
        model.eval()
        vis_indices = self._resolve_vis_indices()
        model.visualize_test(self, vis_indices, step)
        model.train(training)
| 29.061404 | 83 | 0.705101 | 3,103 | 0.936613 | 94 | 0.028373 | 0 | 0 | 0 | 0 | 802 | 0.242077 |
650ec3bb5d3381c505f9bd3240d3f221d5e35e00 | 660 | py | Python | open_data/dataset/migrations/0005_keyword_squashed_0006_remove_keyword_relevancy.py | balfroim/OpenData | f0334dae16c2806e81f7d2d53adeabc72403ecce | [
"MIT"
] | null | null | null | open_data/dataset/migrations/0005_keyword_squashed_0006_remove_keyword_relevancy.py | balfroim/OpenData | f0334dae16c2806e81f7d2d53adeabc72403ecce | [
"MIT"
] | null | null | null | open_data/dataset/migrations/0005_keyword_squashed_0006_remove_keyword_relevancy.py | balfroim/OpenData | f0334dae16c2806e81f7d2d53adeabc72403ecce | [
"MIT"
] | null | null | null | # Generated by Django 3.2 on 2021-04-21 13:01
from django.db import migrations, models
class Migration(migrations.Migration):
    # Squashed migration: 0005 created Keyword (originally with a
    # `relevancy` field) and 0006 removed that field; the squashed result
    # creates Keyword without it.
    replaces = [('dataset', '0005_keyword'), ('dataset', '0006_remove_keyword_relevancy')]
    dependencies = [
        ('dataset', '0004_alter_theme_id'),
    ]
    operations = [
        migrations.CreateModel(
            name='Keyword',
            fields=[
                ('word', models.CharField(max_length=64, primary_key=True, serialize=False)),
                ('datasets', models.ManyToManyField(blank=True, related_name='keywords', to='dataset.ProxyDataset')),
            ],
        ),
    ]
| 28.695652 | 118 | 0.587879 | 563 | 0.85303 | 0 | 0 | 0 | 0 | 0 | 0 | 196 | 0.29697 |
650f4d544268699293dfae61c4d5b0971b890ccb | 50 | py | Python | src/converters/__init__.py | Peilonrayz/json_to_object | ae5ba42dcab71010302f42d78dbfd559c12496c9 | [
"MIT"
] | null | null | null | src/converters/__init__.py | Peilonrayz/json_to_object | ae5ba42dcab71010302f42d78dbfd559c12496c9 | [
"MIT"
] | null | null | null | src/converters/__init__.py | Peilonrayz/json_to_object | ae5ba42dcab71010302f42d78dbfd559c12496c9 | [
"MIT"
] | null | null | null | from .converter import Converter, Converters, ron
| 25 | 49 | 0.82 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
65106635994d91a479c18b140e8932588b6e8457 | 2,385 | py | Python | result/forms.py | Uqhs-1/uqhs | 1c7199d8c23a9d9eb3f75b1e36633a145fd2cd40 | [
"MIT"
] | 3 | 2020-06-16T20:03:31.000Z | 2021-01-17T20:45:51.000Z | result/forms.py | Uqhs-1/uqhs | 1c7199d8c23a9d9eb3f75b1e36633a145fd2cd40 | [
"MIT"
] | 8 | 2020-02-08T09:04:08.000Z | 2021-06-09T18:31:03.000Z | result/forms.py | Uqhs-1/uqhs | 1c7199d8c23a9d9eb3f75b1e36633a145fd2cd40 | [
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
Created on Wed Jan 23 08:25:37 2019
@author: AdeolaOlalekan
"""
from django import forms
from django.contrib.auth.forms import UserCreationForm
from django.contrib.auth.models import User
from .models import BTUTOR, CNAME, Edit_User, QSUBJECT, Post
class login_form(forms.Form):
    """Simple login form with a username and a password."""
    username = forms.CharField(max_length=18)
    password1 = forms.CharField(widget=forms.PasswordInput)
    def clean_data(self):
        # Fix: the original read cleaned_data['username'] into the same
        # variable first -- a dead store; only password1 was ever returned.
        # NOTE(review): this is not a Django clean_<field> hook, so it only
        # runs when called explicitly.
        return self.cleaned_data['password1']
###############################################################################
class ProfileForm(forms.ModelForm):
    """Profile edit form over Edit_User; the linked `user` is excluded
    and must be assigned by the view."""
    class Meta:
        model = Edit_User
        fields = ('title', 'first_name', 'last_name', 'bio', 'phone', 'city', 'country', 'organization', 'location', 'birth_date', 'department', 'photo',)
        exclude = ['user']
class SignUpForm(UserCreationForm):
    """User registration form that additionally requires an e-mail."""
    email = forms.EmailField(max_length=254, help_text='Required for a valid signup!')
    class Meta:
        model = User
        fields = ('username', 'email', 'password1', 'password2')
    def save(self, commit=True):
        """Attach the e-mail to the user built by UserCreationForm.

        Security fix: the original re-assigned the *plaintext*
        cleaned_data['password1'] to user.password, overwriting the hash
        that UserCreationForm.save() sets via set_password() and storing
        the raw password in the database.
        """
        user = super(SignUpForm, self).save(commit=False)
        user.email = self.cleaned_data['email']
        if commit:
            user.save()
        return user
class subject_class_term_Form(forms.ModelForm):
    """ModelForm over BTUTOR exposing the Class and subject fields."""
    class Meta:
        model = BTUTOR
        fields = ('Class', 'subject',)
class class_term(forms.ModelForm):
    """ModelForm over BTUTOR exposing the Class and term fields."""
    class Meta:
        model = BTUTOR
        fields = ('Class', 'term', )
class PostForm(forms.ModelForm):
    """ModelForm over Post: account username, subject and body text."""
    class Meta:
        model = Post
        fields = ('Account_Username', 'subject', 'text')
class a_student_form_new(forms.ModelForm):
    """ModelForm over QSUBJECT for entering a student's scores
    (test, agn, atd, exam) and tutor."""
    class Meta:
        model = QSUBJECT
        fields = ('student_name','test', 'agn','atd', 'exam','tutor',)
class student_name(forms.ModelForm):
    """ModelForm over CNAME capturing a student's basic identity."""
    class Meta:
        model = CNAME
        fields = ('last_name', 'first_name', 'gender', "birth_date",)
class new_student_name(forms.Form):
    """Search form: look up a student by surname."""
    student_name = forms.CharField(help_text="enter student's surename to search.")
    def clean_renewal_date(self):
        # NOTE(review): misnamed -- Django auto-invokes clean_<field>() only
        # for declared fields, and this form has no 'renewal_date' field, so
        # this hook never runs automatically; it looks copy-pasted and was
        # probably meant to be clean_student_name(). Kept as-is to avoid
        # changing the interface.
        data = self.cleaned_data['student_name']
        return data
# | 29.8125 | 154 | 0.609224 | 1,932 | 0.810063 | 0 | 0 | 0 | 0 | 0 | 0 | 632 | 0.26499 |
6512ccce127a82c68b221b5453c2086be0c2bb8b | 3,165 | py | Python | 00-basic/04_01_data_structures_lists.py | TranXuanHoang/Python | 6e62282540a7f1e2b2d0dff99b1803715bf6c4b0 | [
"CNRI-Python"
] | null | null | null | 00-basic/04_01_data_structures_lists.py | TranXuanHoang/Python | 6e62282540a7f1e2b2d0dff99b1803715bf6c4b0 | [
"CNRI-Python"
] | null | null | null | 00-basic/04_01_data_structures_lists.py | TranXuanHoang/Python | 6e62282540a7f1e2b2d0dff99b1803715bf6c4b0 | [
"CNRI-Python"
] | null | null | null | # Lists
# Basics
foods = ['rice', 'Meat', 'vegetables', 'Eggs']
print(foods)
# Same as foods[len(foods):] = ['butter']
foods.append('butter')
print(foods)
# Same as foods[len(foods):] = ['tomatoes', 'chili sauce']
foods.extend(['tomatoes', 'Chili sauce'])
print(foods)
# Reverse order of elements in the list
foods.reverse()
print(foods)
# Copy the list
copy_of_foods = foods.copy()
print(copy_of_foods)
# Sort in ascending order
foods.sort()
print(foods)
# Sort in descending order without considering lower or upper case
copy_of_foods.sort(key=str.lower, reverse=True)
print(copy_of_foods)
# Using Lists as Stacks
stack_normal = ['+', 4, '*', 7, '-', 3, 6]
stack_error = ['+', 4, '?', 7, '-', 3, 6]
def evaluate(stack):
expression = ''
round = 0
while len(stack) >= 3:
first_operand = stack.pop()
second_operand = stack.pop()
operator = stack.pop()
subexpression = str(first_operand) + ' ' + operator + \
' ' + str(second_operand)
if round == 0:
expression = '(' + subexpression + ')'
else:
expression = '(' + expression + ' ' + operator + \
' ' + str(second_operand) + ')'
round += 1
if operator == '+':
stack.append(first_operand + second_operand)
elif operator == '-':
stack.append(first_operand - second_operand)
elif operator == '*':
stack.append(first_operand * second_operand)
elif operator == '/':
stack.append(first_operand / second_operand)
else:
stack.append('Error [Invalid Operator]: ' + subexpression)
break
result = str(stack.pop())
if 'Error' in result:
return result
else:
return expression + ' = ' + result
print(evaluate(stack_normal))
print(evaluate(stack_error))
# Using List as Queues
# NOTE: mid-file import deviates from PEP 8 (tutorial style); deque gives
# O(1) appends/pops at both ends, unlike list.pop(0)/insert(0, x).
from collections import deque
queue = deque(["(", "c", "+", "d", ")"])
print(queue)
queue.append('/')
queue.append('d')
print(queue)
queue.appendleft('*')
queue.appendleft('a')
print(queue)
# List Comprehensions
drinks = [' Beer ', ' Tea', 'Coca Cola ', ' Pepsi', 'Water']
trimmed_drinks = [drink.strip()
                  for drink in drinks] # trim all trailing spaces
print(drinks)
print(trimmed_drinks)
# filter drinks whose name length is longer that or equal to 5
print([drink for drink in trimmed_drinks if len(drink) >= 5])
foods = ['rice', 'Meat', 'vegetables', 'Eggs']
# nested comprehension: cartesian product of foods and drinks
menus = [(food.upper(), drink.lower())
         for food in foods for drink in trimmed_drinks]
print(menus)
vector = [
    [1, 2, 3],
    ['Monday', 'Tuesday', 'Wednesday'],
    ['Morning', 'Afternoon', 'Night']
]
# [1, 2, 3, 'Monday', 'Tuesday', 'Wednesday', 'Morning', 'Afternoon', 'Night']
flatten_vector = [el for row in vector for el in row]
print(flatten_vector)
# [
#     [1, 'Monday', 'Morning'],
#     [2, 'Tuesday', 'Afternoon'],
#     [3, 'Wednesday', 'Night']
# ]
transposed_vector = [[row[i] for row in vector] for i in range(3)]
print(transposed_vector)
| 25.524194 | 79 | 0.581043 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 919 | 0.290363 |
6513f213b7ae02509adbae8bebb7d8031a8a3323 | 110 | py | Python | pond5/blueprints/webui/views.py | IvanFrezzaJr/pond5 | 02cc39262b33eac59727ee416645f7006a99b099 | [
"MIT"
] | null | null | null | pond5/blueprints/webui/views.py | IvanFrezzaJr/pond5 | 02cc39262b33eac59727ee416645f7006a99b099 | [
"MIT"
] | null | null | null | pond5/blueprints/webui/views.py | IvanFrezzaJr/pond5 | 02cc39262b33eac59727ee416645f7006a99b099 | [
"MIT"
] | null | null | null | from flask import render_template
def index():
    """Render the landing page template with a fixed title."""
    return render_template("index.html", title='pond5 test')
| 18.333333 | 60 | 0.745455 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 24 | 0.218182 |
6514ff55c2d00ee1deed3eaa7d8b92b095c218ad | 141 | py | Python | featureflags/__init__.py | enverbisevac/ff-python-server-sdk | e7c809229d13517e0bf4b28fc0a556e693c9034e | [
"Apache-2.0"
] | null | null | null | featureflags/__init__.py | enverbisevac/ff-python-server-sdk | e7c809229d13517e0bf4b28fc0a556e693c9034e | [
"Apache-2.0"
] | null | null | null | featureflags/__init__.py | enverbisevac/ff-python-server-sdk | e7c809229d13517e0bf4b28fc0a556e693c9034e | [
"Apache-2.0"
] | null | null | null | """Top-level package for Feature Flag Server SDK."""
__author__ = """Enver Bisevac"""
__email__ = "enver@bisevac.com"
__version__ = "0.1.0"
| 23.5 | 52 | 0.695035 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 97 | 0.687943 |
651500c04d2f53be7a66dad2701232feba9db8b0 | 4,919 | py | Python | MachineLearning/potential_field.py | JhaAman/lihax | c0421f62d7b86908a7c74251c1dc35b1407c4568 | [
"MIT"
] | null | null | null | MachineLearning/potential_field.py | JhaAman/lihax | c0421f62d7b86908a7c74251c1dc35b1407c4568 | [
"MIT"
] | null | null | null | MachineLearning/potential_field.py | JhaAman/lihax | c0421f62d7b86908a7c74251c1dc35b1407c4568 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
import math, socket, struct, numpy as np, sys
# Get the gradient of the potential of an obstacle
# particle at (ox, oy) with the origin at (mx, my)
# Get the potential of an obstacle particle at (ox, oy)
# with the origin at (mx, my)
def potential(mx, my, ox, oy):
    """Return the inverse-distance potential 1/d between the origin point
    (mx, my) and an obstacle at (ox, oy).

    Fix: the original computed this expression but never returned it,
    so the function always returned None.
    """
    return 1.0 / ((mx - ox)**2 + (my - oy)**2)**0.5
class PotentialField():
    """UDP-driven potential-field controller: receives lidar scans on one
    socket, computes a repulsive-field gradient, and sends packed
    (speed, steer) commands on another.

    NOTE(review): this module is Python 2 -- see the `print "STOPPED!!!"`
    statement at the bottom of receive_lidar().
    """
    def __init__(self):
        #socket initialization
        self.host_ip = socket.gethostname()
        self.receiving_port = 5510
        self.sending_port = 6510
        self.sockR = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        self.sockS = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        self.sockS.connect((self.host_ip, self.sending_port))
        self.sockR.bind((self.host_ip, self.receiving_port))
        # cumulative speed - used to build up momentum
        self.speed_c = 0
    def grad(self,dist, mx, my, ox, oy):
        # Gradient of the 1/d potential of an obstacle at (ox, oy) with the
        # origin at (mx, my).
        # NOTE(review): the `dist` parameter is unused -- callers pass the
        # lidar range, but only the coordinates matter here.
        c = -1/((mx - ox)**2 + (my - oy)**2)**1.5
        return c*(mx - ox), c*(my - oy)
    # calculate the total gradient from an array of lidar ranges
    # with origin at (my_x, my_y)
    def calc_gradient(self, ranges, my_x, my_y):
        gradient_x = 0 # sum of dU/dx
        gradient_y = 0 # sum of dU/dy
        # ignore the edges of the lidar FOV, usually noisy
        for i in range(len(ranges) - 180, 180, -1):
            r = ranges[i]
            deg = -(270.0/1080) * i # convert index of range to degree of range
            deg += 225 # lidar FOV starts at -45 deg
            px = r * math.cos(math.radians(deg)) # convert from polar to x coord
            py = r * math.sin(math.radians(deg)) # convert from polar to y coord
            gx, gy = self.grad(r, my_x, my_y, px, py) # compute gradient at rectangular coordinates
            # add point's gradient into sum
            gradient_x += gx
            gradient_y += gy
        return (gradient_x, gradient_y)
    # lidar subscriber callback
    # Runs forever: each UDP packet is 1080 float32 lidar ranges; tuning
    # constants are exposed as keyword arguments.
    def receive_lidar(self, STEER_BIAS=0, PUSH_MULTIPLIER=19.7, STEER_GRAD_PROPORTION=20.0, SPEED_GRAD_PROPORTION=-0.001, MOMENTUM_MU=0.95, UPDATE_INFLUENCE=0.11, REVERSE_SPEED_MULTIPLIER=-2.3, MIN_SPEED_CLAMP=-0.9, MAX_SPEED_CLAMP=1.0):
        while True:
            packet = self.sockR.recvfrom(65565)[0]
            ranges = struct.unpack("1080f", packet)
            # compute gradient sums from lidar ranges
            grad_x, grad_y = self.calc_gradient(ranges, 0, 0)
            grad_x += STEER_BIAS * self.grad(0.1, 0, 0, 0.1, 0)[0]
            # place repelling particle behind origin (the car) to
            # push the car forward. 14 is a multiplier to give more push.
            grad_y += PUSH_MULTIPLIER * self.grad(0.1, 0, 0, 0, -0.1)[1]
            # magnitude of gradient (euclidian dist)
            grad_magnitude = math.sqrt(grad_x**2 + grad_y**2)
            # steering proportional to potential gradient w.r.t. x
            steer = grad_x / STEER_GRAD_PROPORTION # OR? math.atan2(grad_x, grad_y)
            # the speed update at this instance: proportional to gradient magnitude
            # and sign depends of sign of gradient w.r.t y
            speed = (SPEED_GRAD_PROPORTION * grad_magnitude * np.sign(grad_y))*100-194
            # update the cumulative momentum using the speed update at this instance.
            # speed_c is multiplied by some constant < 1 to simulate friction and
            # speed is multiplied by some constant > 0 to determine the influence of the
            # speed update at this instance.
            self.speed_c = MOMENTUM_MU*self.speed_c + UPDATE_INFLUENCE * speed
            # if speed is less than -1, clamp it. also, the steering is multiplied
            # by a negative constant < -1 to make it back out in a way that
            # orients the car in the direction it would want to turn if it were
            # not too close.
            speed_now = self.speed_c
            if self.speed_c < 0:
                if self.speed_c > -0.2:
                    speed_now = -0.7
                steer *= REVERSE_SPEED_MULTIPLIER
                # print("reversing")
            if self.speed_c < MIN_SPEED_CLAMP:
                speed_now = MIN_SPEED_CLAMP
            elif self.speed_c > MAX_SPEED_CLAMP:
                # if speed is greater than 1, clamp it
                speed_now = MAX_SPEED_CLAMP
            # create and publish drive message using steer and speed_c
            # print "Speed: " + str(speed)
            # print "Speed c: " + str(self.speed_c)
            # print "Speed now: " + str(speed_now)
            message = struct.pack("2f", speed_now, -steer)
            self.sockS.send(message)
        # NOTE(review): everything below is unreachable -- the while True
        # loop above has no break.
        self.sockR.close()
        self.sockS.close()
        print "STOPPED!!!"
        sys.exit(1)
# script entry: build the controller (binds/connects its UDP sockets) and
# start the endless lidar processing loop
pf = PotentialField()
pf.receive_lidar()
| 42.042735 | 237 | 0.584468 | 4,537 | 0.922342 | 0 | 0 | 0 | 0 | 0 | 0 | 1,787 | 0.363285 |
6515124d4e3250bc474b369d4832ef055edc0d00 | 5,031 | py | Python | decomp/c/gen.py | nihilus/epanos | 998a9e3f45337df98b1bbc40b64954b079a1d4de | [
"MIT"
] | 1 | 2017-07-25T00:05:09.000Z | 2017-07-25T00:05:09.000Z | decomp/c/gen.py | nihilus/epanos | 998a9e3f45337df98b1bbc40b64954b079a1d4de | [
"MIT"
] | null | null | null | decomp/c/gen.py | nihilus/epanos | 998a9e3f45337df98b1bbc40b64954b079a1d4de | [
"MIT"
] | null | null | null | from itertools import imap, chain
from pycparser import c_generator, c_ast
from decomp import data, ida
from decomp.c import decl as cdecl, types as ep_ct
from decomp.cpu import ida as cpu_ida
XXX_INTRO_HACK = cpu_ida.ida_current_cpu().insns.support_header + '''
#include <stdint.h>
typedef union EPANOS_REG {
uint8_t u8;
int32_t i32;
uint32_t u32;
int64_t i64;
uint64_t u64;
float s;
double d;
} EPANOS_REG;
typedef struct EPANOS_ARGS {
EPANOS_REG v0;
EPANOS_REG v1;
EPANOS_REG a0;
EPANOS_REG a1;
EPANOS_REG a2;
EPANOS_REG a3;
EPANOS_REG a4;
EPANOS_REG a5;
EPANOS_REG a6;
EPANOS_REG a7;
EPANOS_REG f0;
EPANOS_REG f2;
EPANOS_REG f12;
EPANOS_REG f13;
EPANOS_REG f14;
EPANOS_REG f15;
EPANOS_REG f16;
EPANOS_REG f17;
EPANOS_REG f18;
EPANOS_REG f19;
} EPANOS_ARGS;
'''
gen_from_node = c_generator.CGenerator().visit
flatten = chain.from_iterable
def c_for_insn(ea, our_fns, extern_reg_map, stkvars):
    # Generator: walk instructions starting at ea, yielding the C fragment
    # for each, until the CPU backend reports BADADDR (end of the stream).
    while True:
        (ea, c) = cpu_ida.ida_current_cpu().gen.fmt_insn(ea, our_fns, extern_reg_map, stkvars, from_delay=False)
        yield c
        if ea == ida.BADADDR:
            break
def generate(ea, decl, our_fns, extern_reg_map, stkvar_map, stkvar_decls):
    '''ea_t -> c_ast() -> frozenset(str) -> {str : reg_sig} ->
    {str : {int : tinfo_t}} {str : [c_ast]} -> c_ast'''
    # Functions without stack variables simply get empty maps/decl lists.
    try:
        stkvars = stkvar_map[decl.name]
        var_decls = stkvar_decls[decl.name]
    except KeyError:
        stkvars = {}
        var_decls = []
    start_ea = ida.get_func(ea).startEA
    # body = emulated-register declarations + stack var decls + one C
    # fragment per translated instruction
    body = [XXX_STACKVAR_HACK()] + [var_decls] + [x for x in
        c_for_insn(start_ea, our_fns, extern_reg_map, stkvars)]
    funcdef = c_ast.FuncDef(decl, None, c_ast.Compound(flatten(body)))
    return funcdef
def XXX_STACKVAR_HACK():
    # XXX FIXME this will be going away once we've added elision of unnecessary
    # stack variables (probably will just stick declarations into the AST)
    # Builds one `EPANOS_REG <name>;` pycparser declaration per emulated
    # MIPS register, plus an int declaration for the FP condition flag.
    regs = list(c_ast.Decl(x, [], [], [], c_ast.TypeDecl(x, [], c_ast.IdentifierType(['EPANOS_REG'])), None, None)
                for x in
                list('t%s' % str(n) for n in range(4, 8))
                + list('s%s' % str(n) for n in range(0, 8))
                + ['at', 't8', 't9', 'gp', 'sp', 'ra', 'fp', 'f1']
                + list('f%s' % str(n) for n in range(3, 12))
                + list('f%s' % str(n) for n in range(20, 32)))
    regs += [c_ast.Decl('EPANOS_fp_cond', [], [], [], c_ast.TypeDecl('EPANOS_fp_cond', [], c_ast.IdentifierType(['int'])), None, None)]
    return regs
def run(externs, our_fns, cpp_filter, cpp_all, decompile=True):
    '''frozenset(str) -> frozenset(str) -> str -> str -> opt:bool -> [c_ast]'''
    # Top-level driver: gathers segments/literals from IDA, builds the
    # extern register map and stack-variable maps, then (optionally) prints
    # the decompiled translation unit.
    # NOTE(review): uses dict.iteritems and print statements -- Python 2.
    global OUR_FNS, EXTERN_REG_MAP, STKVAR_MAP # for repl convenience
    OUR_FNS = our_fns
    fn_segs = data.get_segs(['extern', '.text'])
    rodata_segs = data.get_segs(['.rodata', '.srdata'])
    data_segs = data.get_segs(['.data', '.bss'])
    lit_segs = data.get_segs(['.lit4', '.lit8'])
    num_lits = data.get_num_literals(lit_segs)
    str_lits = data.get_str_literals(rodata_segs)
    data_txt = data.get_data(data_segs, cpp_filter)
    # XXX FIXME this will be going away once we've added emitting numeric and
    # string constants directly at their site of use
    if decompile is True:
        for (k, v) in num_lits.iteritems():
            ty = type(v)
            if ty is ep_ct.cfloat:
                print 'float %s = %s;' % (k, v)
            elif ty is ep_ct.cdouble:
                print 'double %s = %s;' % (k, v)
            else:
                raise Exception('o no')
        for (k, v) in str_lits.iteritems():
            print 'const char *%s = %s;' % (k, data.c_stringify(v))
    protos = map(cdecl.make_internal_fn_decl, our_fns)
    (lib_fns, tds) = data.get_fns_and_types(fn_segs, externs, cpp_all)
    all_tds = {x.name: x for x in tds}
    typedefs = cdecl.resolve_typedefs(all_tds)
    EXTERN_REG_MAP = data.get_fn_arg_map(lib_fns, typedefs)
    STKVAR_MAP = data.get_stkvars(our_fns)
    stkvar_decls = data.make_stkvar_txt(our_fns, STKVAR_MAP, cpp_filter)
    if decompile is True:
        print XXX_INTRO_HACK
        # one generated FuncDef per internal prototype
        return gen_from_node(c_ast.FileAST(
            data_txt +
            protos +
            list(generate(ida.loc_by_name(decl.name), decl, our_fns,
                          EXTERN_REG_MAP, STKVAR_MAP, stkvar_decls)
                 for decl in protos)))
    else:
        return
def repl_make_insn(ea, from_delay):
    # for testing: print the C that will be generated from a line of assembly.
    # note that if you ask for the ea of an insn in a delay slot, you get only
    # that instruction; if you ask for a delayed instruction, you get both
    # NOTE(review): relies on the module globals OUR_FNS / EXTERN_REG_MAP /
    # STKVAR_MAP being populated by a prior call to run().
    try:
        stkvars = STKVAR_MAP[ida.get_func_name(ea)]
    except KeyError:
        stkvars = {}
    return list(gen_from_node(x) for x in
                cpu_ida.ida_current_cpu().gen.fmt_insn(
                    ea, OUR_FNS, EXTERN_REG_MAP, stkvars, from_delay).c)
| 33.993243 | 135 | 0.624329 | 0 | 0 | 246 | 0.048897 | 0 | 0 | 0 | 0 | 1,531 | 0.304313 |
6515bd49015bb821ccbe010e9e5fbc460bed9de7 | 1,112 | py | Python | ocelot/io/__init__.py | cmutel/Ocelot | 20e9639570c43f84ae255750a6c402ebabe00981 | [
"BSD-3-Clause"
] | 21 | 2016-06-01T14:10:07.000Z | 2022-02-28T01:56:31.000Z | ocelot/io/__init__.py | cmutel/Ocelot | 20e9639570c43f84ae255750a6c402ebabe00981 | [
"BSD-3-Clause"
] | 152 | 2016-05-16T21:33:22.000Z | 2019-06-24T12:57:14.000Z | ocelot/io/__init__.py | cmutel/Ocelot | 20e9639570c43f84ae255750a6c402ebabe00981 | [
"BSD-3-Clause"
] | 12 | 2016-09-05T15:35:59.000Z | 2021-07-03T19:28:47.000Z | # -*- coding: utf-8 -*-
__all__ = (
"extract_directory",
"cleanup_data_directory",
"dataset_schema",
"validate_directory",
"validate_directory_against_xsd",
)
from .extract_ecospold2 import extract_ecospold2_directory
from ..filesystem import check_cache_directory, get_from_cache, cache_data
import os
def extract_directory(data_path, use_cache=True, use_mp=True):
    """Extract ecospold2 files in directory ``data_path``.

    Uses and writes to the cache when ``use_cache`` is ``True``.

    Returns datasets in Ocelot internal format."""
    data_path = os.path.abspath(data_path)
    if use_cache and check_cache_directory(data_path):
        print("Using cached ecospold2 data")
        return get_from_cache(data_path)
    datasets = extract_ecospold2_directory(data_path, use_mp)
    if use_cache:
        cache_data(datasets, data_path)
    return datasets
from .cleanup import cleanup_data_directory
from .validate_ecospold2 import validate_directory_against_xsd, validate_directory
from .validate_internal import dataset_schema
| 31.771429 | 82 | 0.75 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 327 | 0.294065 |
65164ebc73c88c773a56c1c8312715ea40cc9b6c | 626 | py | Python | Data-Structure/Basic-Struct/Stack-Queues/Evaluation-of-Expressions/Eval.py | tz70s/lesson | a647105682bb0d1420c450d0cd7c56fe1de6e21b | [
"Apache-2.0"
] | 1 | 2015-08-27T17:27:43.000Z | 2015-08-27T17:27:43.000Z | Data-Structure/Basic-Struct/Stack-Queues/Evaluation-of-Expressions/Eval.py | tz70s/lesson | a647105682bb0d1420c450d0cd7c56fe1de6e21b | [
"Apache-2.0"
] | null | null | null | Data-Structure/Basic-Struct/Stack-Queues/Evaluation-of-Expressions/Eval.py | tz70s/lesson | a647105682bb0d1420c450d0cd7c56fe1de6e21b | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python3
import stack
import queue
opset = {'*','+','(',")"}
def readinfix():
    """Read 'infix.txt' and return its contents as a list of characters.

    Fix: the original opened the file without ever closing it; a context
    manager guarantees the handle is released.
    """
    with open("infix.txt") as fh:
        li = list(fh.read())
    print (li)
    return li
def eval():
    # NOTE(review): shadows the builtin eval(); renaming it would break the
    # __main__ call below, so it is only flagged here.
    evlist = readinfix()
    postlist = []
    st = stack.Stack()
    # copy operands straight to the output; push operators/parens
    for op in evlist:
        if op in opset:
            st.push(op)
        else:
            postlist.append(op)
            print (op)
    # NOTE(review): as rendered, this `continue` sits outside the for loop
    # above, which is a SyntaxError -- this tail (and the duplicated
    # st.pop(), which silently discards a second element) looks like it was
    # meant to live inside the loop body.  Confirm the original indentation.
    if st.isEmpty():
        continue
    elif not st.isEmpty():
        postlist.append(st.pop())
        print (st.pop())
    print (postlist)
if __name__ == "__main__":
eval()
| 17.885714 | 41 | 0.469649 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 55 | 0.087859 |
65185342728b8ea46c26ce2b78a8072737b087c9 | 2,102 | py | Python | example/py/CHSHInequality/chsh_inequality.py | samn33/qlazy | b215febfec0a3b8192e57a20ec85f14576745a89 | [
"Apache-2.0"
] | 15 | 2019-04-09T13:02:58.000Z | 2022-01-13T12:57:08.000Z | example/py/CHSHInequality/chsh_inequality.py | samn33/qlazy | b215febfec0a3b8192e57a20ec85f14576745a89 | [
"Apache-2.0"
] | 3 | 2020-02-26T16:21:18.000Z | 2022-03-31T00:46:53.000Z | example/py/CHSHInequality/chsh_inequality.py | samn33/qlazy | b215febfec0a3b8192e57a20ec85f14576745a89 | [
"Apache-2.0"
] | 3 | 2021-01-28T05:38:55.000Z | 2021-10-30T12:19:19.000Z | import random
from qlazy import QState
def classical_strategy(trials=1000):
win_cnt = 0
for _ in range(trials):
# random bits by Charlie (x,y)
x = random.randint(0,1)
y = random.randint(0,1)
# response by Alice (a)
a = 0
# response by Bob (b)
b = 0
# count up if win
if (x and y) == (a+b)%2:
win_cnt += 1
print("== result of classical strategy (trials:{0:d}) ==".format(trials))
print("* win prob. = ", win_cnt/trials)
def quantum_strategy(trials=1000):
    """Simulate the CHSH game using shared entanglement and rotated measurements.

    Prints the observed win probability over ``trials`` rounds.
    """
    win_cnt = 0
    for _ in range(trials):
        # random bits by Charlie (x,y)
        x = random.randint(0,1)
        y = random.randint(0,1)
        # make entangled 2 qubits (one for Alice and another for Bob)
        qs = QState(2).h(0).cx(0,1)
        # response by Alice (a)
        if x == 0:
            # measurement of Z-basis (= Ry(0.0)-basis)
            # NOTE(review): ``.lst`` is presumably the measured outcome as an
            # int for a single-shot measurement — verify against the qlazy API.
            sa = qs.m([0], shots=1, angle=0.0, phase=0.0).lst
            if sa == 0:
                a = 0
            else:
                a = 1
        else:
            # measurement of X-basis (or Ry(0.5*PI)-basis)
            sa = qs.mx([0], shots=1).lst
            # sa = qs.m([0], shots=1, angle=0.5, phase=0.0).lst
            if sa == 0:
                a = 0
            else:
                a = 1
        # response by Bob (b)
        if y == 0:
            # measurement of Ry(0.25*PI)-basis
            sb = qs.m([1], shots=1, angle=0.25, phase=0.0).lst
            if sb == 0:
                b = 0
            else:
                b = 1
        else:
            # measurement of Ry(-0.25*PI)-basis
            sb = qs.m([1], shots=1, angle=-0.25, phase=0.0).lst
            if sb == 0:
                b = 0
            else:
                b = 1
        # count up if win
        if (x and y) == (a+b)%2:
            win_cnt += 1
    print("== result of quantum strategy (trials:{0:d}) ==".format(trials))
    print("* win prob. = ", win_cnt/trials)
if __name__ == '__main__':
    # Run both strategies with the default 1000 trials each and print results.
    classical_strategy()
    quantum_strategy()
| 25.634146 | 77 | 0.450523 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 593 | 0.282112 |
65192f76bf69dd1b91580c7c3d8c5be7720e2e54 | 410 | py | Python | peerplays/blockchainobject.py | farisshajahan/python-peerplays | 260e43d39307554f5c35574fe7a6f5451a51697f | [
"MIT"
] | 10 | 2019-03-14T03:09:42.000Z | 2021-03-17T10:20:05.000Z | peerplays/blockchainobject.py | farisshajahan/python-peerplays | 260e43d39307554f5c35574fe7a6f5451a51697f | [
"MIT"
] | 8 | 2019-04-02T17:07:20.000Z | 2020-04-30T08:24:01.000Z | peerplays/blockchainobject.py | farisshajahan/python-peerplays | 260e43d39307554f5c35574fe7a6f5451a51697f | [
"MIT"
] | 4 | 2019-04-02T17:00:22.000Z | 2021-08-09T11:28:20.000Z | # -*- coding: utf-8 -*-
from .instance import BlockchainInstance
from graphenecommon.blockchainobject import (
BlockchainObject as GrapheneBlockchainObject,
BlockchainObjects as GrapheneBlockchainObjects,
ObjectCache,
)
@BlockchainInstance.inject
class BlockchainObject(GrapheneBlockchainObject):
    # Thin subclass: behavior comes from graphenecommon; the decorator binds
    # the PeerPlays blockchain instance into the class.
    pass
@BlockchainInstance.inject
class BlockchainObjects(GrapheneBlockchainObjects):
    # Thin subclass: behavior comes from graphenecommon; the decorator binds
    # the PeerPlays blockchain instance into the class.
    pass
| 22.777778 | 51 | 0.807317 | 118 | 0.287805 | 0 | 0 | 172 | 0.419512 | 0 | 0 | 23 | 0.056098 |
651a2825c2a207bb2573070d6b6a820935667fe9 | 102 | py | Python | Code/Python/LeetCode/tempCodeRunnerFile.py | dks1018/CoffeeShopCoding | 13ac1700673c86c601eb2758570920620a956e4c | [
"ADSL"
] | null | null | null | Code/Python/LeetCode/tempCodeRunnerFile.py | dks1018/CoffeeShopCoding | 13ac1700673c86c601eb2758570920620a956e4c | [
"ADSL"
] | null | null | null | Code/Python/LeetCode/tempCodeRunnerFile.py | dks1018/CoffeeShopCoding | 13ac1700673c86c601eb2758570920620a956e4c | [
"ADSL"
] | null | null | null | arr_1 = ["1","2","3","4","5","6","7"]
arr_2 = []
for n in arr_1:
arr_2.insert(0,n)
print(arr_2)
| 12.75 | 37 | 0.5 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 21 | 0.205882 |
651ec842e96c1b1f0d0e9ffe6067c0199b8a1424 | 124 | py | Python | pyvat/utils.py | ponyville/pyvat | 75fb781d3c00cf323544eb929a96344c1978e2c8 | [
"Apache-2.0"
] | 48 | 2015-07-22T12:02:20.000Z | 2022-02-07T16:54:13.000Z | pyvat/utils.py | ponyville/pyvat | 75fb781d3c00cf323544eb929a96344c1978e2c8 | [
"Apache-2.0"
] | 34 | 2015-03-27T17:47:38.000Z | 2022-02-08T18:14:55.000Z | pyvat/utils.py | ponyville/pyvat | 75fb781d3c00cf323544eb929a96344c1978e2c8 | [
"Apache-2.0"
] | 40 | 2015-04-08T14:03:06.000Z | 2022-02-09T12:29:04.000Z | from decimal import Decimal
def ensure_decimal(value):
    """Coerce ``value`` to :class:`~decimal.Decimal`.

    An existing ``Decimal`` is returned unchanged (same object); anything
    else is passed to the ``Decimal`` constructor.
    """
    if isinstance(value, Decimal):
        return value
    return Decimal(value)
| 20.666667 | 66 | 0.782258 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
651f360c505b719ccafc01d1fe50f1f24c9b71d7 | 117 | py | Python | pype/modules/idle_manager/__init__.py | simonebarbieri/pype | a6dc83aa1300738749cbe8e5e2e6d2d1794e0289 | [
"MIT"
] | null | null | null | pype/modules/idle_manager/__init__.py | simonebarbieri/pype | a6dc83aa1300738749cbe8e5e2e6d2d1794e0289 | [
"MIT"
] | null | null | null | pype/modules/idle_manager/__init__.py | simonebarbieri/pype | a6dc83aa1300738749cbe8e5e2e6d2d1794e0289 | [
"MIT"
] | null | null | null | from .idle_module import (
IdleManager,
IIdleManager
)
__all__ = (
"IdleManager",
"IIdleManager"
)
| 10.636364 | 26 | 0.632479 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 27 | 0.230769 |
652012211d75f39757469a76ffbd1e2d590d912a | 289 | py | Python | server/src/tests/samples/typeConstraint3.py | higoshi/pyright | 183c0ef56d2c010d28018149949cda1a40aa59b8 | [
"MIT"
] | null | null | null | server/src/tests/samples/typeConstraint3.py | higoshi/pyright | 183c0ef56d2c010d28018149949cda1a40aa59b8 | [
"MIT"
] | null | null | null | server/src/tests/samples/typeConstraint3.py | higoshi/pyright | 183c0ef56d2c010d28018149949cda1a40aa59b8 | [
"MIT"
] | null | null | null | # This sample exercises the type analyzer's assert type constraint logic.
from typing import Union
def foo(a: Union[str, int]) -> int:
    # Analyzer test sample: the first return is intentionally ill-typed (the
    # assert has not yet narrowed ``a``), so the checker must flag it; the
    # second return comes after narrowing and must be accepted.
    if True:
        # This should generate an error because
        # a could be a str.
        return a
    assert isinstance(a, int)
    return a
| 19.266667 | 73 | 0.643599 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 131 | 0.453287 |
65204673556284e6d6670bed61daa5eb343644d4 | 2,764 | py | Python | Attention.py | Pok0923/Time-series-forecasting-model | 58c0b1fd151b7fa73be323e8e1a6072876f06d10 | [
"MIT"
] | null | null | null | Attention.py | Pok0923/Time-series-forecasting-model | 58c0b1fd151b7fa73be323e8e1a6072876f06d10 | [
"MIT"
] | null | null | null | Attention.py | Pok0923/Time-series-forecasting-model | 58c0b1fd151b7fa73be323e8e1a6072876f06d10 | [
"MIT"
] | null | null | null | import tensorflow as tf
from tensorflow import keras
from tensorflow.keras import backend as K
from keras.models import Sequential,Model
from keras.layers import *
from keras.optimizers import SGD,Adam
class OurLayer(Layer):
    # Base Keras layer allowing subclasses to call other (possibly unbuilt)
    # layers while adopting those layers' weights and updates as their own.
    def reuse(self, layer, *args, **kwargs):
        """Call ``layer`` on the given inputs, building it first if necessary.

        Trainable/non-trainable weights and update ops of ``layer`` are
        registered on ``self`` so Keras trains and serializes them as part
        of this layer.
        """
        if not layer.built:
            if len(args) > 0:
                inputs = args[0]
            else:
                inputs = kwargs['inputs']
            if isinstance(inputs, list):
                # Multi-input layer: build() expects one shape per input tensor.
                input_shape = [K.int_shape(x) for x in inputs]
            else:
                input_shape = K.int_shape(inputs)
            layer.build(input_shape)
        outputs = layer.call(*args, **kwargs)
        # Adopt the inner layer's weights (deduplicated) so they are trained
        # together with this layer.
        for w in layer.trainable_weights:
            if w not in self._trainable_weights:
                self._trainable_weights.append(w)
        for w in layer.non_trainable_weights:
            if w not in self._non_trainable_weights:
                self._non_trainable_weights.append(w)
        # Propagate update ops (e.g. moving averages), also deduplicated.
        for u in layer.updates:
            if not hasattr(self, '_updates'):
                self._updates = []
            if u not in self._updates:
                self._updates.append(u)
        return outputs
class SelfAttention(OurLayer):
    """Multi-head self-attention layer built on top of ``Attention_1``.

    NOTE(review): ``Attention_1`` is not defined in this file; presumably it
    comes from one of the star imports — verify before use.
    """
    def __init__(self, heads, size_per_head, key_size=None,
                 mask_right=False, **kwargs):
        super(SelfAttention, self).__init__(**kwargs)
        self.heads = heads
        self.size_per_head = size_per_head
        # Total feature dimension of the output across all heads.
        self.out_dim = heads * size_per_head
        self.key_size = key_size if key_size else size_per_head
        self.mask_right = mask_right
    def build(self, input_shape):
        super(SelfAttention, self).build(input_shape)
        self.attention = Attention_1(
            self.heads,
            self.size_per_head,
            self.key_size,
            self.mask_right
        )
    def call(self, inputs):
        # Self-attention: the same tensor is used as query, key and value.
        # Inputs may optionally be [x, x_mask].
        if isinstance(inputs, list):
            x, x_mask = inputs
            o = self.reuse(self.attention, [x, x, x, x_mask, x_mask])
        else:
            x = inputs
            o = self.reuse(self.attention, [x, x, x])
        return o
    def compute_output_shape(self, input_shape):
        # Keeps (batch, timesteps); projects features to out_dim.
        if isinstance(input_shape, list):
            return (input_shape[0][0], input_shape[0][1], self.out_dim)
        else:
            return (input_shape[0], input_shape[1], self.out_dim)
def selfattention_timeseries(nb_class, input_dim,):
    # Build a simple regression model: self-attention -> global average
    # pooling -> dropout -> single ReLU output unit.
    # NOTE(review): ``nb_class`` is accepted but never used; the model always
    # has exactly one output.
    model_input = Input(shape=input_dim)
    #model_input = SinCosPositionEmbedding(4)(model_input)
    O_seq = SelfAttention(16,32)(model_input)
    O_seq = GlobalAveragePooling1D()(O_seq)
    O_seq = Dropout(0.5)(O_seq)
    outputs = Dense(1,activation='relu')(O_seq)
    model = Model(inputs=model_input, outputs=outputs)
    return model | 36.368421 | 71 | 0.608538 | 2,153 | 0.778944 | 0 | 0 | 0 | 0 | 0 | 0 | 78 | 0.02822 |
6523b70720c5c976b6f562eb7fe63867146d03b1 | 774 | py | Python | tests/remix/test_audio_chunk_merger.py | nroldanf/infiniteremixer | 5ffc2fc85e9cbebf6196d5baa374ba4630f2e3c0 | [
"MIT"
] | null | null | null | tests/remix/test_audio_chunk_merger.py | nroldanf/infiniteremixer | 5ffc2fc85e9cbebf6196d5baa374ba4630f2e3c0 | [
"MIT"
] | null | null | null | tests/remix/test_audio_chunk_merger.py | nroldanf/infiniteremixer | 5ffc2fc85e9cbebf6196d5baa374ba4630f2e3c0 | [
"MIT"
] | null | null | null | import numpy as np
import pytest
from infiniteremixer.remix.audiochunkmerger import AudioChunkMerger
@pytest.fixture
def chunk_manager():
    # Provide a fresh AudioChunkMerger instance to each test.
    return AudioChunkMerger()
def test_chunk_manager_is_instantiated(chunk_manager):
    # Smoke test: the fixture yields an AudioChunkMerger.
    assert isinstance(chunk_manager, AudioChunkMerger)
def test_audio_chunks_are_concatenated(chunk_manager, mocker):
    # Stub file loading so every "file" decodes to the same short signal.
    mocker.patch(
        "infiniteremixer.remix.audiochunkmerger.AudioChunkMerger" ".load_audio_file",
        return_value=np.array([1, 2]),
    )
    audio_file_paths = [
        "dummy_audio_file1.wav",
        "dummy_audio_file2.wav",
        "dummy_audio_file3.wav",
    ]
    concatenated_signal = chunk_manager.concatenate(audio_file_paths)
    # Three stubbed chunks of [1, 2] concatenated in order.
    assert np.array_equal(concatenated_signal, np.array([1, 2, 1, 2, 1, 2]))
| 27.642857 | 85 | 0.744186 | 0 | 0 | 0 | 0 | 66 | 0.085271 | 0 | 0 | 144 | 0.186047 |
6524869e78875d68bc7830017790dd24a2253209 | 738 | py | Python | app/libpinanny/__init__.py | MelonSmasher/piNanny | 2bc3a7b5eda83615e21518a2f36c3844b241d201 | [
"MIT"
] | null | null | null | app/libpinanny/__init__.py | MelonSmasher/piNanny | 2bc3a7b5eda83615e21518a2f36c3844b241d201 | [
"MIT"
] | 2 | 2021-03-10T05:28:36.000Z | 2021-09-02T05:40:44.000Z | app/libpinanny/__init__.py | MelonSmasher/piNanny | 2bc3a7b5eda83615e21518a2f36c3844b241d201 | [
"MIT"
] | null | null | null | from subprocess import PIPE, Popen
# Converts celsius temps to fahrenheit
def c2f(celsius):
    """Convert a temperature from degrees Celsius to degrees Fahrenheit."""
    scale = 9.0 / 5
    return celsius * scale + 32
# Gets the CPU temperature in degrees C
def get_cpu_temperature():
    # Queries the Raspberry Pi firmware via ``vcgencmd measure_temp``, whose
    # stdout looks like ``temp=48.3'C``.
    process = Popen(['vcgencmd', 'measure_temp'], stdout=PIPE)
    output, _error = process.communicate()
    # ``output`` is bytes; str() wraps it as e.g. "b\"temp=48.3'C\\n\"", so
    # splitting on '=' then on the apostrophe isolates the numeric part.
    return float(str(str(output).split('=')[1]).split("'")[0])
def debugOutCFH(sensor, valueC, valueF, valueH):
    """Print a labelled debug readout of temperature (C/F) and humidity (%)."""
    print(f'Debug Values [{sensor}]:')
    print(f'C: {valueC}')
    print(f'F: {valueF}')
    print(f'H: {valueH}%')
    print('')
def debugOutCF(sensor, valueC, valueF):
    """Print a labelled debug readout of temperature in Celsius and Fahrenheit."""
    print(f'Debug Values [{sensor}]:')
    print(f'C: {valueC}')
    print(f'F: {valueF}')
    print('')
| 25.448276 | 62 | 0.609756 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 179 | 0.242547 |
6525a7237e80a2dde7d058af9b7795a2a3cd2f21 | 4,247 | py | Python | paas-ce/paas/paas/esb/apps/guide/component_template/en/hcp/get_host_list.py | renmcc/bk-PaaS | 1c9e4e9cfb40fc3375cd6b5f08af8c84203de246 | [
"Apache-2.0"
] | 2 | 2019-09-22T13:54:53.000Z | 2021-07-29T02:31:40.000Z | paas-ce/paas/paas/esb/apps/guide/component_template/en/hcp/get_host_list.py | renmcc/bk-PaaS | 1c9e4e9cfb40fc3375cd6b5f08af8c84203de246 | [
"Apache-2.0"
] | 10 | 2021-02-08T20:32:31.000Z | 2022-03-11T23:47:06.000Z | paas-ce/paas/paas/esb/apps/guide/component_template/en/hcp/get_host_list.py | renmcc/bk-PaaS | 1c9e4e9cfb40fc3375cd6b5f08af8c84203de246 | [
"Apache-2.0"
] | 3 | 2019-08-22T09:05:31.000Z | 2021-03-23T14:21:19.000Z | # -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community Edition) available.
Copyright (C) 2017-2018 THL A29 Limited, a Tencent company. All rights reserved.
Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at
http://opensource.org/licenses/MIT
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
""" # noqa
import json
from django import forms
from common.forms import BaseComponentForm, TypeCheckField
from components.component import Component
from .toolkit import configs
class GetHostList(Component):
    """
    apiLabel get host list
    apiMethod GET
    ### Functional Description
    Get host list
    ### Request Parameters
    {{ common_args_desc }}
    #### Interface Parameters
    | Field | Type | Required | Description |
    |-----------|------------|--------|------------|
    | app_id | int | Yes | Business ID |
    | ip_list | array | No | Host IP address, including ip and bk_cloud_id, bk_cloud_id represents cloud area ID |
    ### Request Parameters Example
    ```python
    {
        "bk_app_code": "esb_test",
        "bk_app_secret": "xxx",
        "bk_token": "xxx-xxx-xxx-xxx-xxx",
        "bk_biz_id": 1,
        "ip_list": [
            {
                "ip": "10.0.0.1",
                "bk_cloud_id": 0
            },
            {
                "ip": "10.0.0.2"
                "bk_cloud_id": 0
            }
        ]
    }
    ```
    ### Return Result Example
    ```python
    {
        "result": true,
        "code": 0,
        "message": "",
        "data": [
            {
                "inner_ip": "10.0.0.1",
                "bk_cloud_id": 0,
                "host_name": "db-1",
                "maintainer": "admin"
            },
            {
                "inner_ip": "10.0.0.2",
                "bk_cloud_id": 2,
                "host_name": "db-2",
                "maintainer": "admin"
            }
        ]
    }
    ```
    """
    # Name of the system to which the component belongs
    sys_name = configs.SYSTEM_NAME
    # Form Processing Parameters Validation
    class Form(BaseComponentForm):
        bk_biz_id = forms.CharField(label='Business ID', required=True)
        ip_list = TypeCheckField(label='Host IP address', promise_type=list, required=False)
        # The data returned in clean method is available through the component's form_data property
        def clean(self):
            return self.get_cleaned_data_when_exist(keys=['bk_biz_id', 'ip_list'])
    # Component Processing Access
    def handle(self):
        # Get the data processed in Form clean
        data = self.form_data
        # Set Current Operator
        data['operator'] = self.current_user.username
        # Request System Interface
        try:
            response = self.outgoing.http_client.post(
                host=configs.host,
                path='/hcp/get_host_list/',
                data=json.dumps(data),
            )
        except Exception:
            # TODO: To delete, only fake data for testing
            # NOTE(review): this broad except returns canned data and masks
            # every real request failure — confirm the stub is removed before
            # production use.
            response = {
                'code': 0,
                'data': [
                    {
                        'inner_ip': '10.0.0.1',
                        'bk_cloud_id': 0,
                        'host_name': 'just_for_test',
                        'maintainer': 'admin',
                    },
                ]
            }
        # Analyze the Results
        code = response['code']
        if code == 0:
            result = {
                'result': True,
                'data': response['data'],
            }
        else:
            result = {
                'result': False,
                'message': response['message']
            }
        # Set the component return result, and payload is the actual return result of component
        self.response.payload = result
| 30.120567 | 305 | 0.536379 | 3,334 | 0.781712 | 0 | 0 | 0 | 0 | 0 | 0 | 2,831 | 0.663775 |
6525b9fa24410fbd6e88a3ac0d7c4387bef24c34 | 1,354 | py | Python | pretix_googlepaypasses/views.py | pc-coholic/pretix-googlepaypasses | 1591ebf2b53294989ffca6ffcb185842196e034d | [
"Apache-2.0"
] | 2 | 2018-11-02T11:32:21.000Z | 2018-12-07T07:18:06.000Z | pretix_googlepaypasses/views.py | pc-coholic/pretix-googlepaypasses | 1591ebf2b53294989ffca6ffcb185842196e034d | [
"Apache-2.0"
] | 5 | 2018-10-30T19:49:56.000Z | 2018-11-26T21:11:42.000Z | pretix_googlepaypasses/views.py | pc-coholic/pretix-googlepaypasses | 1591ebf2b53294989ffca6ffcb185842196e034d | [
"Apache-2.0"
] | null | null | null | import json
import logging
from json import JSONDecodeError
from django.http import (
HttpResponse, HttpResponseBadRequest, HttpResponseForbidden,
)
from django.views.decorators.csrf import csrf_exempt
from django.views.decorators.http import require_POST
from pretix.base.models import Organizer
from . import tasks
logger = logging.getLogger(__name__)
@csrf_exempt
@require_POST
def webhook(request, *args, **kwargs):
# Google is not actually sending their documented UA m(
# if request.META['HTTP_USER_AGENT'] != 'Google-Valuables':
if request.META['HTTP_USER_AGENT'] != "Mozilla/5.0 (compatible; Googlebot/2.1; +http://www.google.com/bot.html)":
return HttpResponseForbidden()
if request.META.get('CONTENT_TYPE') != 'application/json':
return HttpResponseBadRequest()
try:
webhook_json = json.loads(request.body.decode('utf-8'))
except JSONDecodeError:
return False
if all(k in webhook_json for k in ('signature', 'intermediateSigningKey', 'protocolVersion', 'signedMessage')):
organizer = Organizer.objects.filter(
slug=request.resolver_match.kwargs['organizer'],
).first()
tasks.process_webhook.apply_async(
args=(request.body.decode('utf-8'), organizer.settings.googlepaypasses_issuer_id)
)
return HttpResponse()
| 31.488372 | 117 | 0.716396 | 0 | 0 | 0 | 0 | 990 | 0.731167 | 0 | 0 | 329 | 0.242984 |
652624be473f589627bafdda2fb46234674c0cff | 12,300 | py | Python | alf/utils/checkpoint_utils.py | breakds/alf | b3d60048daee2c9625ba44f778e49570d0d029a7 | [
"Apache-2.0"
] | 1 | 2021-11-17T17:08:04.000Z | 2021-11-17T17:08:04.000Z | alf/utils/checkpoint_utils.py | ipsec/alf | 15fd71896eac5ad0987dbe14a9f630b32e0e131f | [
"Apache-2.0"
] | null | null | null | alf/utils/checkpoint_utils.py | ipsec/alf | 15fd71896eac5ad0987dbe14a9f630b32e0e131f | [
"Apache-2.0"
] | null | null | null | # Copyright (c) 2020 Horizon Robotics and ALF Contributors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from absl import logging
import glob
import os
import torch
from torch import nn
import warnings
import alf
def is_checkpoint_enabled(module):
    """Whether ``module`` will be checkpointed.

    By default, a module used in ``Algorithm`` will be checkpointed. The
    checkpointing can be disabled by calling ``enable_checkpoint(module, False)``.

    Args:
        module (torch.nn.Module): module in question

    Returns:
        bool: True if the parameters of this module will be checkpointed
    """
    return getattr(module, "_alf_checkpoint_enabled", True)
def enable_checkpoint(module, flag=True):
    """Enable or disable checkpointing for ``module``.

    Args:
        module (torch.nn.Module): the module whose checkpointing flag to set.
        flag (bool): True to enable checkpointing, False to disable.
    """
    setattr(module, "_alf_checkpoint_enabled", flag)
class Checkpointer(object):
    """A checkpoint manager for saving and loading checkpoints."""
    def __init__(self, ckpt_dir, **kwargs):
        """A class for making checkpoints.
        Example usage:
        .. code-block:: python
            alg_root = MyAlg(params=[p1, p2], sub_algs=[a1, a2], optimizer=opt)
            ckpt_mngr = ckpt_utils.Checkpointer(ckpt_dir,
                alg=alg_root)
        Args:
            ckpt_dir: The directory to save checkpoints. Create ckpt_dir if
                it doesn't exist.
            kwargs: Items to be included in the checkpoint. Each item needs
                to have state_dict and load_state_dict implemented.
                For instance of Algorithm, only the root need to be passed in,
                all the children modules and optimizers are automatically
                extracted and checkpointed. If a child module is also passed
                in, it will be treated as the root to be recursively processed.
        """
        self._modules = kwargs
        self._ckpt_dir = ckpt_dir
        # -1 means "no checkpoint loaded or saved yet".
        self._global_step = -1
        os.makedirs(self._ckpt_dir, exist_ok=True)
    @alf.configurable
    def load(self,
             global_step="latest",
             ignored_parameter_prefixes=None,
             including_optimizer=True,
             including_replay_buffer=True,
             including_data_transformers=True,
             strict=True):
        """Load checkpoint
        Args:
            global_step (int|str): the number of training steps which is used to
                specify the checkpoint to be loaded. If global_step is 'latest',
                the most recent checkpoint named 'latest' will be loaded.
            ignored_parameter_prefixes (list[str]): ignore the parameters whose
                name has one of these prefixes in the checkpoint. ``None``
                (the default) means no prefix is ignored.
            including_optimizer (bool): whether load optimizer checkpoint
            including_replay_buffer (bool): whether load replay buffer checkpoint.
            including_data_transformers (bool): whether load data transformer checkpoint.
            strict (bool, optional): whether to strictly enforce that the keys
                in ``state_dict`` match the keys returned by this module's
                ``torch.nn.Module.state_dict`` function. If ``strict=True``, will
                keep lists of missing and unexpected keys and raise error when
                any of the lists is non-empty; if ``strict=False``, missing/unexpected
                keys will be omitted and no error will be raised.
                (Default: ``True``)
        Returns:
            current_step_num (int): the current step number for the loaded
                checkpoint. current_step_num is set to - 1 if the specified
                checkpoint does not exist.
        """
        # Bug fix: the previous mutable default ([]) was mutated by the
        # append below, accumulating "_data_transformer" across calls and
        # also mutating caller-passed lists. Copy into a fresh list instead.
        ignored_parameter_prefixes = list(ignored_parameter_prefixes or [])
        if not including_data_transformers:
            ignored_parameter_prefixes.append("_data_transformer")
        def _remove_ignored_parameters(checkpoint):
            # Drop entries whose key starts with any ignored prefix.
            to_delete = []
            for k in checkpoint.keys():
                for prefix in ignored_parameter_prefixes:
                    if k.startswith(prefix):
                        to_delete.append(k)
                        break
            for k in to_delete:
                checkpoint.pop(k)
        def _convert_legacy_parameter(checkpoint):
            """
            Due to different implmentation of FC layer, the old checkpoints cannot
            be loaded directly. Hence we check if the checkpoint uses old FC layer
            and convert to the new FC layer format.
            _log_alpha for SacAlgorithm was changed from [1] Tensor to [] Tensor.
            """
            d = {}
            for k, v in checkpoint.items():
                if k.endswith('._linear.weight') or k.endswith(
                        '._linear.bias'):
                    d[k] = v
                elif k.endswith('._log_alpha') and v.shape == (1, ):
                    d[k] = v[0]
            for k, v in d.items():
                del checkpoint[k]
                logging.info("Converted legacy parameter %s" % k)
                if k.endswith('.weight'):
                    checkpoint[k[:-13] + 'weight'] = v
                elif k.endswith('.bias'):
                    checkpoint[k[:-11] + 'bias'] = v
                else:
                    checkpoint[k] = v
        def _load_one(module, checkpoint):
            if isinstance(module, nn.Module):
                missing_keys, unexpected_keys = module.load_state_dict(
                    checkpoint, strict=strict)
            else:
                module.load_state_dict(checkpoint)
                missing_keys, unexpected_keys = [], []
            # Keys belonging to pieces we deliberately did not load are not
            # reported as missing.
            if not including_optimizer:
                missing_keys = list(
                    filter(lambda k: k.find('_optimizers.') < 0, missing_keys))
            if not including_replay_buffer:
                missing_keys = list(
                    filter(lambda k: not k.startswith('_replay_buffer.'),
                           missing_keys))
            if strict:
                error_msgs = []
                if len(unexpected_keys) > 0:
                    error_msgs.insert(
                        0, 'Unexpected key(s) in state_dict: {}. '.format(
                            ', '.join(
                                '"{}"'.format(k) for k in unexpected_keys)))
                if len(missing_keys) > 0:
                    error_msgs.insert(
                        0, 'Missing key(s) in state_dict: {}. '.format(
                            ', '.join('"{}"'.format(k) for k in missing_keys)))
                if len(error_msgs) > 0:
                    raise RuntimeError(
                        'Error(s) in loading state_dict for {}:\n\t{}'.format(
                            module.__class__.__name__,
                            "\n\t".join(error_msgs)))
        def _merge_checkpoint(merged, new):
            # Merge per-module sub-dicts of ``new`` into ``merged``.
            for mk in self._modules.keys():
                if not isinstance(new[mk], dict):
                    continue
                for k in new[mk].keys():
                    merged[mk][k] = new[mk][k]
        if global_step == "latest":
            global_step = self._get_latest_checkpoint_step()
            if global_step is None:
                warnings.warn("There is no checkpoint in directory %s. "
                              "Train from scratch" % self._ckpt_dir)
                return self._global_step
        f_path = os.path.join(self._ckpt_dir, "ckpt-{0}".format(global_step))
        if not os.path.isfile(f_path):
            warnings.warn(
                "Checkpoint '%s' does not exist. Train from scratch." % f_path)
            return self._global_step
        map_location = None
        if not torch.cuda.is_available():
            # Allow loading GPU-saved checkpoints on CPU-only machines.
            map_location = torch.device('cpu')
        checkpoint = torch.load(f_path, map_location=map_location)
        if including_optimizer:
            opt_checkpoint = torch.load(
                f_path + '-optimizer', map_location=map_location)
            _merge_checkpoint(checkpoint, opt_checkpoint)
        if including_replay_buffer:
            replay_buffer_checkpoint = torch.load(
                f_path + '-replay_buffer', map_location=map_location)
            _merge_checkpoint(checkpoint, replay_buffer_checkpoint)
        self._global_step = checkpoint["global_step"]
        for k in self._modules.keys():
            _remove_ignored_parameters(checkpoint[k])
            _convert_legacy_parameter(checkpoint[k])
            _load_one(self._modules[k], checkpoint[k])
        logging.info(
            "Checkpoint 'ckpt-{}' is loaded successfully.".format(global_step))
        return self._global_step
    def _get_latest_checkpoint_step(self):
        """Return the largest numeric step among "ckpt-*" files, or None."""
        file_names = glob.glob(os.path.join(self._ckpt_dir, "ckpt-*"))
        if not file_names:
            return None
        latest_step = None
        for file_name in file_names:
            try:
                step = int(os.path.basename(file_name)[5:])
            except ValueError:
                # Skip non-numeric suffixes such as "ckpt-3-optimizer".
                continue
            if latest_step is None:
                latest_step = step
            elif step > latest_step:
                latest_step = step
        return latest_step
    def has_checkpoint(self, global_step="latest"):
        """Whether there is a checkpoint in the checkpoint directory.
        Args:
            global_step (int|str): If an int, return True if file "ckpt-{global_step}"
                is in the checkpoint directory. If "lastest", return True if
                "latest" is in the checkpoint directory.
        """
        if global_step == "latest":
            global_step = self._get_latest_checkpoint_step()
            if global_step is None:
                return False
        f_path = os.path.join(self._ckpt_dir, "ckpt-{0}".format(global_step))
        return os.path.isfile(f_path)
    def _separate_state(self, state):
        """Split a state dict into model / optimizer / replay-buffer parts."""
        model_state = {}
        optimizer_state = {}
        replay_buffer_state = {}
        for k, v in state.items():
            # Optimizer entries are recognized by name and by the structure
            # of torch optimizer state dicts (a 'param_groups' key).
            if k.find('_optimizers.') >= 0 and isinstance(
                    v, dict) and 'param_groups' in v:
                optimizer_state[k] = v
            elif k.startswith('_replay_buffer.'):
                replay_buffer_state[k] = v
            elif not k.startswith('_offline_replay_buffer.'):
                model_state[k] = v
        return model_state, optimizer_state, replay_buffer_state
    def save(self, global_step):
        """Save states of all modules to checkpoint
        Args:
            global_step (int): the number of training steps corresponding to the
                current state to be saved. It will be appended to the name of
                the checkpoint as a suffix. This function will also save a copy
                of the latest checkpoint in a file named 'latest'.
        """
        self._global_step = global_step
        f_path = os.path.join(self._ckpt_dir, "ckpt-{0}".format(global_step))
        # Unwrap DataParallel so the checkpoint stores the inner module's keys.
        state = {
            k: v.module.state_dict()
            if type(v) == torch.nn.DataParallel else v.state_dict()
            for k, v in self._modules.items()
        }
        model_state = {}
        optimizer_state = {}
        replay_buffer_state = {}
        for k, v in state.items():
            ms, opts, rs = self._separate_state(v)
            model_state[k] = ms
            optimizer_state[k] = opts
            replay_buffer_state[k] = rs
        model_state['global_step'] = global_step
        # Three sibling files so optimizer/replay-buffer can be skipped on load.
        torch.save(model_state, f_path)
        torch.save(optimizer_state, f_path + '-optimizer')
        torch.save(replay_buffer_state, f_path + '-replay_buffer')
        logging.info(
            "Checkpoint 'ckpt-{}' is saved successfully.".format(global_step))
| 39.805825 | 89 | 0.579106 | 10,794 | 0.877561 | 0 | 0 | 6,553 | 0.532764 | 0 | 0 | 5,216 | 0.424065 |
6526bd14e91e02ab2897caf04024aa618b204008 | 956 | py | Python | automon/integrations/slack/config.py | TheShellLand/automonisaur | b5f304a44449b8664c93d8a8a3c3cf2d73aa0ce9 | [
"MIT"
] | 2 | 2021-09-15T18:35:44.000Z | 2022-01-18T05:36:54.000Z | automon/integrations/slack/config.py | TheShellLand/automonisaur | b5f304a44449b8664c93d8a8a3c3cf2d73aa0ce9 | [
"MIT"
] | 16 | 2021-08-29T22:51:53.000Z | 2022-03-09T16:08:19.000Z | automon/integrations/slack/config.py | TheShellLand/automonisaur | b5f304a44449b8664c93d8a8a3c3cf2d73aa0ce9 | [
"MIT"
] | null | null | null | import os
from automon.log import Logging
log = Logging(name=__name__, level=Logging.ERROR)
class ConfigSlack:
    # Slack connection settings, read from environment variables at import
    # time; unset variables fall back to empty strings.
    slack_name = os.getenv('SLACK_USER') or ''
    slack_webhook = os.getenv('SLACK_WEBHOOK') or ''
    slack_proxy = os.getenv('SLACK_PROXY') or ''
    slack_token = os.getenv('SLACK_TOKEN') or ''
    # Channel routing, one env var per severity/purpose.
    SLACK_DEFAULT_CHANNEL = os.getenv('SLACK_DEFAULT_CHANNEL') or ''
    SLACK_INFO_CHANNEL = os.getenv('SLACK_INFO_CHANNEL') or ''
    SLACK_DEBUG_CHANNEL = os.getenv('SLACK_DEBUG_CHANNEL') or ''
    SLACK_ERROR_CHANNEL = os.getenv('SLACK_ERROR_CHANNEL') or ''
    SLACK_CRITICAL_CHANNEL = os.getenv('SLACK_CRITICAL_CHANNEL') or ''
    SLACK_WARN_CHANNEL = os.getenv('SLACK_WARN_CHANNEL') or ''
    SLACK_TEST_CHANNEL = os.getenv('SLACK_TEST_CHANNEL') or ''
    # Runs once when the class body is executed (i.e. at import time).
    if not slack_token:
        log.warn(f'missing SLACK_TOKEN')
    def __init__(self, slack_name: str = ''):
        # Per-instance override of the user name; the env var still wins.
        self.slack_name = os.getenv('SLACK_USER') or slack_name or ''
| 35.407407 | 70 | 0.707113 | 859 | 0.898536 | 0 | 0 | 0 | 0 | 0 | 0 | 262 | 0.274059 |
652a2bec3a1bbb15dcd3f0465f2e3a5760856d24 | 182 | py | Python | z2z_metadata/__init__.py | Mitchellpkt/z2z-metadata | 261a694225efc21d65a9f3be69a9017bbc29864e | [
"MIT"
] | null | null | null | z2z_metadata/__init__.py | Mitchellpkt/z2z-metadata | 261a694225efc21d65a9f3be69a9017bbc29864e | [
"MIT"
] | null | null | null | z2z_metadata/__init__.py | Mitchellpkt/z2z-metadata | 261a694225efc21d65a9f3be69a9017bbc29864e | [
"MIT"
] | null | null | null | """Top-level package for z2z Metadata analysis."""
__author__ = """Isthmus // Mitchell P. Krawiec-Thayer"""
__email__ = 'project_z2z_metadata@mitchellpkt.com'
__version__ = '0.0.1'
| 30.333333 | 56 | 0.730769 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 138 | 0.758242 |
652bea27b54e41523374e9b0f3eb5a5744c81344 | 81 | py | Python | pyeparse/tests/__init__.py | Eric89GXL/pyeparse | c1907c39276aacb0fad80034d69b537b07f82786 | [
"BSD-3-Clause"
] | null | null | null | pyeparse/tests/__init__.py | Eric89GXL/pyeparse | c1907c39276aacb0fad80034d69b537b07f82786 | [
"BSD-3-Clause"
] | null | null | null | pyeparse/tests/__init__.py | Eric89GXL/pyeparse | c1907c39276aacb0fad80034d69b537b07f82786 | [
"BSD-3-Clause"
] | null | null | null | # Authors: Denis Engemann <denis.engemann@gmail.com>
#
# License: BSD (3-clause)
| 20.25 | 52 | 0.716049 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 78 | 0.962963 |
652cc48a5f826ddc6ae73b306f5bcedc4dba7eb7 | 1,659 | py | Python | Murphi/ModularMurphi/TemplateClass.py | icsa-caps/HieraGen | 4026c1718878d2ef69dd13d3e6e10cab69174fda | [
"MIT"
] | 6 | 2020-07-07T15:45:13.000Z | 2021-08-29T06:44:29.000Z | Murphi/ModularMurphi/TemplateClass.py | icsa-caps/HieraGen | 4026c1718878d2ef69dd13d3e6e10cab69174fda | [
"MIT"
] | null | null | null | Murphi/ModularMurphi/TemplateClass.py | icsa-caps/HieraGen | 4026c1718878d2ef69dd13d3e6e10cab69174fda | [
"MIT"
] | null | null | null | import inspect
import os
import re
class TemplateHandler:
    """Loads text templates from a directory and substitutes ``$N$`` markers."""
    # Constant definitions
    tab = " "
    nl = "\n"
    sem = ";"
    end = sem + nl
    def __init__(self, template_dir: str):
        # Resolve the template directory relative to this module's location.
        self.templatepath = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe()))) + \
                            "/../" + template_dir
    ####################################################################################################################
    # REPLACE DYNAMIC
    ####################################################################################################################
    def _openTemplate(self, filename):
        """Return the template text with '#'-prefixed lines stripped."""
        # Bug fix: use a context manager so the file handle is closed
        # (the original's open(...).read() leaked the handle).
        with open(self.templatepath + "/" + filename, "r") as fh:
            text = fh.read()
        return re.sub(r'^\#.*\n?', '', text, flags=re.MULTILINE)
    def _stringReplKeys(self, refstring, replacekeys):
        """Replace $0$..$k$ markers with the corresponding entries of replacekeys."""
        inputstr = refstring
        for ind, keyword in enumerate(replacekeys):
            inputstr = self._stringRepl(inputstr, ind, keyword)
        return inputstr
    def _stringRepl(self, string, ind, keyword):
        # NOTE: ``keyword`` is used as a regex replacement template, so
        # backslashes in it are interpreted by re.sub (kept for behavioral
        # compatibility with existing templates).
        return re.sub(r"\$" + str(ind) + r"\$", keyword, string)
    def _addtabs(self, string, count):
        """Prefix every line with ``count`` tabs; each line gains a trailing newline."""
        tabstring = self.tab * count
        return "".join(tabstring + line + self.nl for line in string.splitlines())
    @staticmethod
    def _testInt(string):
        """True if ``string`` parses as an int."""
        try:
            int(string)
            return True
        except ValueError:
            return False
    @staticmethod
    def _testOperator(string):
        """True if ``string`` is purely alphabetic."""
        return string.isalpha()
| 29.105263 | 120 | 0.485835 | 1,621 | 0.977095 | 0 | 0 | 270 | 0.162749 | 0 | 0 | 320 | 0.192887 |
652d0559a2243d1e92871449e1c32a5d4ef6cbbb | 8,660 | py | Python | bot/exts/filters/token_remover.py | adhikariprajitraj/bot | 207eb16e7be7a339bb3e670ffd74283e755a73a7 | [
"MIT"
] | null | null | null | bot/exts/filters/token_remover.py | adhikariprajitraj/bot | 207eb16e7be7a339bb3e670ffd74283e755a73a7 | [
"MIT"
] | null | null | null | bot/exts/filters/token_remover.py | adhikariprajitraj/bot | 207eb16e7be7a339bb3e670ffd74283e755a73a7 | [
"MIT"
] | null | null | null | import base64
import binascii
import logging
import re
import typing as t
from discord import Colour, Message, NotFound
from discord.ext.commands import Cog
from bot import utils
from bot.bot import Bot
from bot.constants import Channels, Colours, Event, Icons
from bot.exts.moderation.modlog import ModLog
from bot.utils.messages import format_user
log = logging.getLogger(__name__)
LOG_MESSAGE = (
"""Censored a seemingly valid token sent by {author} in {channel}, \
token was `{user_id}.{timestamp}.{hmac}`"""
)
UNKNOWN_USER_LOG_MESSAGE = "Decoded user ID: `{user_id}` (Not present in server)."
KNOWN_USER_LOG_MESSAGE = (
"Decoded user ID: `{user_id}` **(Present in server)**.\n"
"This matches `{user_name}` and means this is likely a valid **{kind}** token."
)
DELETION_MESSAGE_TEMPLATE = (
"""Hey {mention}! I noticed you posted a seemingly valid Discord API \
token in your message and have removed your message.
This means that your token has been **compromised**.
Please change your token **immediately** at:
<https://discordapp.com/developers/applications/me>\n\n
Feel free to re-post it with the token removed.
If you believe this was a mistake, please let us know!"""
)
DISCORD_EPOCH = 1_420_070_400
TOKEN_EPOCH = 1_293_840_000
# Three parts delimited by dots: user ID, creation timestamp, HMAC.
# The HMAC isn't parsed further, but it's in the regex to ensure it at least exists in the string.
# Each part only matches base64 URL-safe characters.
# Padding has never been observed, but the padding character '=' is matched just in case.
TOKEN_RE = re.compile(r"([\w\-=]+)\.([\w\-=]+)\.([\w\-=]+)", re.ASCII)
class Token(t.NamedTuple):
    """A Discord Bot token, split into its three dot-separated segments."""
    # Each field holds one still-encoded base64 URL-safe segment as captured
    # by TOKEN_RE; decoding happens in TokenRemover's helper methods.
    user_id: str
    timestamp: str
    hmac: str
class TokenRemover(Cog):
    """Scans messages for potential discord.py bot tokens and removes them."""
    def __init__(self, bot: Bot) -> None:
        # Bot instance; also provides the stats client used in take_action.
        self.bot = bot
    @property
    def mod_log(self) -> ModLog:
        """Get currently loaded ModLog cog instance."""
        return self.bot.get_cog("ModLog")
    @Cog.listener()
    async def on_message(self, msg: Message) -> None:
        """
        Check each message for a string that matches Discord's token pattern.
        See: https://discordapp.com/developers/docs/reference#snowflakes
        """
        # Ignore DMs; can't delete messages in there anyway.
        if not msg.guild or msg.author.bot:
            return
        found_token = self.find_token_in_message(msg)
        if found_token:
            await self.take_action(msg, found_token)
    @Cog.listener()
    async def on_message_edit(self, before: Message, after: Message) -> None:
        """
        Check each edit for a string that matches Discord's token pattern.
        See: https://discordapp.com/developers/docs/reference#snowflakes
        """
        # Re-run the same scan on the edited content.
        await self.on_message(after)
    async def take_action(self, msg: Message, found_token: Token) -> None:
        """Remove the `msg` containing the `found_token` and send a mod log message."""
        # NOTE(review): presumably suppresses ModLog's own handling of this
        # deletion so it isn't double-logged — confirm against ModLog.ignore.
        self.mod_log.ignore(Event.message_delete, msg.id)
        try:
            await msg.delete()
        except NotFound:
            # Someone else (or another bot) removed it first; nothing to do.
            log.debug(f"Failed to remove token in message {msg.id}: message already deleted.")
            return
        # Tell the author why their message disappeared.
        await msg.channel.send(DELETION_MESSAGE_TEMPLATE.format(mention=msg.author.mention))
        log_message = self.format_log_message(msg, found_token)
        userid_message, mention_everyone = self.format_userid_log_message(msg, found_token)
        log.debug(log_message)
        # Send pretty mod log embed to mod-alerts
        await self.mod_log.send_log_message(
            icon_url=Icons.token_removed,
            colour=Colour(Colours.soft_red),
            title="Token removed!",
            text=log_message + "\n" + userid_message,
            thumbnail=msg.author.avatar_url_as(static_format="png"),
            channel_id=Channels.mod_alerts,
            ping_everyone=mention_everyone,
        )
        self.bot.stats.incr("tokens.removed_tokens")
    @classmethod
    def format_userid_log_message(cls, msg: Message, token: Token) -> t.Tuple[str, bool]:
        """
        Format the portion of the log message that includes details about the detected user ID.
        If the user is resolved to a member, the format includes the user ID, name, and the
        kind of user detected.
        If we resolve to a member and it is not a bot, we also return True to ping everyone.
        Returns a tuple of (log_message, mention_everyone)
        """
        user_id = cls.extract_user_id(token.user_id)
        user = msg.guild.get_member(user_id)
        if user:
            return KNOWN_USER_LOG_MESSAGE.format(
                user_id=user_id,
                user_name=str(user),
                kind="BOT" if user.bot else "USER",
            ), not user.bot
        else:
            return UNKNOWN_USER_LOG_MESSAGE.format(user_id=user_id), False
    @staticmethod
    def format_log_message(msg: Message, token: Token) -> str:
        """Return the generic portion of the log message to send for `token` being censored in `msg`."""
        return LOG_MESSAGE.format(
            author=format_user(msg.author),
            channel=msg.channel.mention,
            user_id=token.user_id,
            timestamp=token.timestamp,
            # Mask the secret part so the log itself never contains the HMAC.
            hmac='x' * len(token.hmac),
        )
    @classmethod
    def find_token_in_message(cls, msg: Message) -> t.Optional[Token]:
        """Return a seemingly valid token found in `msg` or `None` if no token is found."""
        # Use finditer rather than search to guard against method calls prematurely returning the
        # token check (e.g. `message.channel.send` also matches our token pattern)
        for match in TOKEN_RE.finditer(msg.content):
            token = Token(*match.groups())
            if (
                (cls.extract_user_id(token.user_id) is not None)
                and cls.is_valid_timestamp(token.timestamp)
                and cls.is_maybe_valid_hmac(token.hmac)
            ):
                # Short-circuit on first match
                return token
        # No matching substring
        return
    @staticmethod
    def extract_user_id(b64_content: str) -> t.Optional[int]:
        """Return a user ID integer from part of a potential token, or None if it couldn't be decoded."""
        b64_content = utils.pad_base64(b64_content)
        try:
            decoded_bytes = base64.urlsafe_b64decode(b64_content)
            string = decoded_bytes.decode('utf-8')
            if not (string.isascii() and string.isdigit()):
                # This case triggers if there are fancy unicode digits in the base64 encoding,
                # that means it's not a valid user id.
                return None
            return int(string)
        except (binascii.Error, ValueError):
            # Not valid base64, or not valid UTF-8 once decoded.
            return None
    @staticmethod
    def is_valid_timestamp(b64_content: str) -> bool:
        """
        Return True if `b64_content` decodes to a valid timestamp.
        If the timestamp is greater than the Discord epoch, it's probably valid.
        See: https://i.imgur.com/7WdehGn.png
        """
        b64_content = utils.pad_base64(b64_content)
        try:
            decoded_bytes = base64.urlsafe_b64decode(b64_content)
            timestamp = int.from_bytes(decoded_bytes, byteorder="big")
        except (binascii.Error, ValueError) as e:
            log.debug(f"Failed to decode token timestamp '{b64_content}': {e}")
            return False
        # Seems like newer tokens don't need the epoch added, but add anyway since an upper bound
        # is not checked.
        if timestamp + TOKEN_EPOCH >= DISCORD_EPOCH:
            return True
        else:
            log.debug(f"Invalid token timestamp '{b64_content}': smaller than Discord epoch")
            return False
    @staticmethod
    def is_maybe_valid_hmac(b64_content: str) -> bool:
        """
        Determine if a given HMAC portion of a token is potentially valid.
        If the HMAC has 3 or less characters, it's probably a dummy value like "xxxxxxxxxx",
        and thus the token can probably be skipped.
        """
        # Count case-insensitively distinct characters; real HMACs have many.
        unique = len(set(b64_content.lower()))
        if unique <= 3:
            log.debug(
                f"""Considering the HMAC {b64_content} a dummy because it has {unique} \
case-insensitively unique characters"""
            )
            return False
        else:
            return True
def setup(bot: Bot) -> None:
    """Load the TokenRemover cog."""
    cog = TokenRemover(bot)
    bot.add_cog(cog)
| 37.008547 | 105 | 0.639145 | 6,878 | 0.794226 | 0 | 0 | 5,373 | 0.620439 | 1,940 | 0.224018 | 3,699 | 0.427136 |