max_stars_repo_path stringlengths 3 269 | max_stars_repo_name stringlengths 4 119 | max_stars_count int64 0 191k | id stringlengths 1 7 | content stringlengths 6 1.05M | score float64 0.23 5.13 | int_score int64 0 5 |
|---|---|---|---|---|---|---|
shared/data/simulators/drunk.py | DougMahoney/metatools | 12 | 12758151 | <reponame>DougMahoney/metatools<gh_stars>10-100
import random
class DrunkenWalk(object):
    """A random walker that can get progressively more or less random over time."""

    # Reference magnitudes for the tuning parameters.
    _reasonable = dict(
        money=100.0,
        tolerance=3.0,  # ABV div 3, sobriety gained per recoveryRate steps
        alcoholism=100,
        recoveryRate=1000.0
    )
    _maxBounce = 0.2
    _antiTopple = (0.0, 0.0)  # (0.2,0.8)

    def __init__(self, initValue=0, inebriation=0.12,
                 money=None, tolerance=None, alcoholism=None, recoveryRate=None,
                 handrails=(0, 10.0), leaning=None, stride=None,
                 boozeMenu=None):
        """Perform a random walk. The more inebriated, the more the value wanders.

        If money is given, more booze will be bought and inebriation gets worse
        each step.  Inebriation goes down each step depending on the tolerance.
        Alcoholism determines how many steps before another drink is purchased.
        """
        self.value = initValue
        self.inebriation = inebriation
        self.money = money or 0
        self.alcoholism = alcoholism or 0
        self.tolerance = tolerance or 0
        self.recoveryRate = recoveryRate or 0
        # A non-zero tolerance without a recovery rate would never wear off;
        # fall back to the class-level reasonable default.
        if self.tolerance and not self.recoveryRate:
            self.recoveryRate = self._reasonable['recoveryRate']
        self.leaning = leaning or 0.5
        self.stride = stride or 2
        # Avoid the mutable-default-argument pitfall: build a fresh fallback
        # menu per instance instead of sharing one list across all instances.
        self.boozeMenu = boozeMenu if boozeMenu is not None else [(10, 0.12)]
        self.steps = 0
        self.handrails = handrails

    def stumble(self):
        """Advance the walk one step and return the new value."""
        self.steps += 1
        # Periodically buy a drink if this walker is an alcoholic.
        if self.alcoholism and (self.steps % self.alcoholism == 0):
            self.drink()
        # Sober up a little each step, never below zero.
        if self.tolerance:
            self.inebriation -= self.tolerance / self.recoveryRate
            self.inebriation = max((0, self.inebriation))
        self.leaning += (self.leaning * 0.1)
        # Clamp the leaning into the configured interval, if any.
        if self._antiTopple:
            left, right = self._antiTopple
            if self.leaning > right:
                self.leaning = right
            elif self.leaning < left:
                self.leaning = left
        self.value += (random.random() - (self.leaning + 0.5)) * self.inebriation * self.stride
        # Bounce off the handrails, reversing half of the current leaning.
        if self.handrails:
            left, right = self.handrails
            if self.value < left:
                self.value = left
                self.leaning = 0.0 + abs(self.leaning / 2)
            elif self.value > right:
                self.value = right
                self.leaning = 0.0 - abs(self.leaning / 2)
        return self.value

    def traipse(self, steps=None):
        """Take several steps (default: one drinking cycle) and return the value."""
        for _ in range(steps or self.alcoholism):
            self.stumble()
        return self.value

    def drink(self):
        """Buy a random affordable drink, pay for it, and raise inebriation."""
        if self.money:
            # random.choice() requires a sequence (the original passed a
            # generator, raising TypeError) and we need both cost and ABV,
            # so keep the (cost, abv) pairs together.
            affordable = [(cost, abv) for cost, abv in self.boozeMenu
                          if cost <= self.money]
            if not affordable:
                return  # cannot afford anything on the menu
            cost, abv = random.choice(affordable)
            self.money -= cost
            # Guard against division by zero when tolerance was never set.
            self.inebriation += abv / self.tolerance if self.tolerance else abv
shenmeGUI/bindings.py | tigerjang/ShenMeGUI | 1 | 12758152 | <filename>shenmeGUI/bindings.py<gh_stars>1-10
class BindingTypes:
    """Namespace of binding-kind constants (plain integers)."""
    JSFunction = 1  # the bound target is a JavaScript function
    JSObject = 2    # the bound target is a JavaScript object
class Binding(object):
    """A single binding from a source to a destination.

    ``type`` is presumably one of the BindingTypes constants -- confirm with
    callers.  Note the parameter name shadows the built-in ``type``; it is
    kept unchanged for API compatibility.
    """
    def __init__(self, type, src, dest):
        self.type = type  # binding kind
        self.src = src    # binding source
        self.dest = dest  # binding destination
| 1.734375 | 2 |
Alpha & Beta/Django/WaifuTracker/main_app/urls.py | Mdlkxzmcp/various_python | 0 | 12758153 | from django.conf.urls import url
from django.conf import settings
from django.views.static import serve
from . import views
# URL routes for the main app (regex-based url(), pre-Django-2 style).
urlpatterns = [
    url(r'^$', views.index, name='index'),
    # Detail page for a single waifu, matched by numeric id (positional group).
    url(r'^([0-9]+)/$', views.detail, name='detail'),
    # Public profile page, matched by username (word characters only).
    url(r'^user/(\w+)/$', views.profile, name='profile'),
    url(r'^post_url/$', views.post_waifu, name='post_waifu'),
    url(r'^register/$', views.register, name='register'),
    url(r'^login/$', views.login_view, name='login'),
    url(r'^logout/$', views.logout_view, name='logout'),
    url(r'^heart_waifu/$', views.heart_waifu, name='heart_waifu'),
    url(r'^search/$', views.search, name='search'),
]
# Serve uploaded media through Django itself in development only; production
# should let the web server handle MEDIA_ROOT.
if settings.DEBUG:
    urlpatterns += [
        url(r'^media/(?P<path>.*)$', serve, {'document_root': settings.MEDIA_ROOT, }),
    ] | 1.765625 | 2 |
jupyterhub_config.py | MathHubInfo/JupyterHub-Deployment | 1 | 12758154 | # adapted from original JupyterHub_config.py file
# which is Copyright (c) Jupyter Development Team, see LICENSE
import os
from dockerspawner import DockerSpawner
# `get_config()` is injected into the namespace by JupyterHub at load time.
c = get_config()

# use our custom spawner for the jupyterhub config
class MathHubSpawner(DockerSpawner):
    """DockerSpawner that forwards MathHub environment variables into the
    spawned container and pins the single-user image from the environment."""
    def __init__(self, *args, **kwargs):
        super(MathHubSpawner, self).__init__(*args, **kwargs)
        # Propagate MathHub-specific settings into the spawned container.
        self.env_keep += ['MMT_FRONTEND_BASE_URL', 'UPLOAD_REDIRECT_PREFIX']
        # NOTE(review): `container_image` is the legacy dockerspawner trait
        # name; newer releases call this `image` -- confirm installed version.
        self.container_image = os.environ['DOCKER_NOTEBOOK_IMAGE']

c.JupyterHub.spawner_class = MathHubSpawner

# Command used to spawn jupyter hub inside of DockerSpawner
spawn_cmd = os.environ.get('DOCKER_SPAWN_CMD', "start-singleuser.sh")
c.DockerSpawner.extra_create_kwargs.update({ 'command': spawn_cmd })

# Docker Spawner network configuration: containers join the same Docker
# network as the hub and talk over internal IPs.
c.DockerSpawner.network_name = os.environ['DOCKER_NETWORK_NAME']
c.DockerSpawner.extra_host_config = { 'network_mode': c.DockerSpawner.network_name }
c.DockerSpawner.use_internal_ip = True

# Docker Volumes and Notebook directory: one named volume per user, plus a
# read-only content volume shared by all containers.
c.DockerSpawner.notebook_dir = os.environ.get('DOCKER_NOTEBOOK_DIR') or '/home/jovyan/work'
c.DockerSpawner.volumes = { 'jupyterhub-user-{username}': c.DockerSpawner.notebook_dir }
c.DockerSpawner.volumes[os.environ.get('MMT_VOLUME_HOST')] = {'bind' : '/content/', 'mode' : 'ro' }
c.DockerSpawner.remove_containers = True
c.DockerSpawner.debug = True

# Jupyter Hub Public URL
c.JupyterHub.port = 80
# Jupyter Hub Internal URL (for docker containers)
c.JupyterHub.hub_ip = 'jupyterhub'
c.JupyterHub.hub_port = 8080

# cookie secret can be found in the data_volume container under /data
data_dir = os.environ.get('DATA_VOLUME_CONTAINER', '/data')
c.JupyterHub.cookie_secret_file = os.path.join(data_dir, 'jupyterhub_cookie_secret')

# postgresql database config (credentials come from the environment)
c.JupyterHub.db_url = 'postgresql://postgres:{password}@{host}/{db}'.format(
    host=os.environ['POSTGRES_HOST'],
    password=os.environ['POSTGRES_PASSWORD'],
    db=os.environ['POSTGRES_DB']
)

# Authentication Setup for JupyterHub
# TODO: Try using GitLab OAuth here
# NOTE(review): TmpAuthenticator lets anyone in -- development use only.
c.JupyterHub.authenticator_class = 'tmpauthenticator.TmpAuthenticator' | 2.078125 | 2 |
clouddb/__init__.py | adregner/python-clouddb | 1 | 12758155 | <reponame>adregner/python-clouddb<gh_stars>1-10
# -*- encoding: utf-8 -*-
__author__ = "<NAME> <<EMAIL>>"
from clouddb.connection import Connection
from clouddb.models import * | 1.398438 | 1 |
BasicExerciseAndKnowledge/w3cschool/n20_ball_bounces_back .py | Jonathan1214/learn-python | 0 | 12758156 | #coding:utf-8
# 题目:一球从100米高度自由落下,每次落地后反跳回原高度的一半;再落下,求它在第10次落地时,共经过多少米?第10次反弹多高?
h = 100 #反弹后的高度
sum_h = 100
for n in range(10):
h /= 2.0
sum_h += h * 2
print sum_h, h
| 3.703125 | 4 |
boa3/compiler/codegenerator/initstatementsvisitor.py | hal0x2328/neo3-boa | 25 | 12758157 | <reponame>hal0x2328/neo3-boa<filename>boa3/compiler/codegenerator/initstatementsvisitor.py
import ast
from typing import Dict, List, Tuple
from boa3 import constants
from boa3.analyser.astanalyser import IAstAnalyser
from boa3.model.symbol import ISymbol
class InitStatementsVisitor(IAstAnalyser):
    """
    Separates the global statements that should be included in the '_deploy'
    internal method from those that belong in the '_initialize' internal method.

    The methods with the name starting with 'visit_' are implementations of methods from the :class:`NodeVisitor` class.
    These methods are used to walk through the Python abstract syntax tree.
    """
    def __init__(self, symbols: Dict[str, ISymbol]):
        # Give the base analyser an empty module tree; the real statements are
        # visited later through separate_global_statements().
        super().__init__(ast.parse(""), log=True)
        self.symbols = symbols.copy()
        # Statements whose targets are reassigned later -> '_deploy'.
        self._deploy_instructions: List[ast.AST] = []
        # All remaining global statements -> '_initialize'.
        self._init_instructions: List[ast.AST] = []

    @classmethod
    def separate_global_statements(cls,
                                   symbol_table: Dict[str, ISymbol],
                                   statements: List[ast.AST]) -> Tuple[List[ast.AST], List[ast.AST]]:
        """Split *statements* into (deploy, initialize) instruction lists."""
        visitor = InitStatementsVisitor(symbol_table)
        # Wrap the statements in an empty module so they are visited as one tree.
        root_ast = ast.parse("")
        root_ast.body = statements
        visitor.visit(root_ast)
        return visitor._deploy_instructions, visitor._init_instructions

    def get_symbol_id(self, node: ast.AST, parent: ast.AST) -> str:
        """Resolve the symbol id for *node*, namespacing imported variables."""
        symbol_id = self.visit(node)
        # filter to find the imported variables
        if symbol_id not in self.symbols and hasattr(parent, 'origin') and isinstance(parent.origin, ast.AST):
            # Imported symbols are prefixed with the hash of their origin tree.
            symbol_id = '{0}{2}{1}'.format(parent.origin.__hash__(), symbol_id, constants.VARIABLE_NAME_SEPARATOR)
        return symbol_id

    def append_symbol(self, symbol: ISymbol, node: ast.AST):
        """Route *node* to the deploy or init list, avoiding duplicates."""
        if self._should_be_on_deploy(symbol):
            if node not in self._deploy_instructions:
                self._deploy_instructions.append(node)
        else:
            if node not in self._init_instructions:
                self._init_instructions.append(node)

    def _should_be_on_deploy(self, symbol: ISymbol) -> bool:
        # Reassigned variables must get their first value during deploy.
        return hasattr(symbol, 'is_reassigned') and symbol.is_reassigned

    # region AST visitors

    def visit_Assign(self, node: ast.Assign):
        # Partition the assignment targets by where their first value belongs.
        deploy_var_nodes = []
        init_var_nodes = []
        deploy_symbol = None
        init_symbol = None
        for target in node.targets:
            target_id = self.get_symbol_id(target, node)
            symbol = self.get_symbol(target_id)
            if self._should_be_on_deploy(symbol):
                deploy_var_nodes.append(target)
                deploy_symbol = symbol
            else:
                init_var_nodes.append(target)
                init_symbol = symbol
        # If every target falls on the same side, keep the statement whole;
        # otherwise split it into two Assign nodes sharing the same value.
        if len(deploy_var_nodes) == len(node.targets) or len(init_var_nodes) == len(node.targets):
            self.append_symbol(deploy_symbol if deploy_symbol is not None else init_symbol,
                               node)
        else:
            deploy_ast = ast.Assign(targets=deploy_var_nodes,
                                    value=node.value)
            ast.copy_location(deploy_ast, node)
            init_ast = ast.Assign(targets=init_var_nodes,
                                  value=node.value)
            ast.copy_location(init_ast, node)
            # Preserve the origin marker used by get_symbol_id for imports.
            if hasattr(node, 'origin'):
                deploy_ast.origin = node.origin
                init_ast.origin = node.origin
            self.append_symbol(deploy_symbol, deploy_ast)
            self.append_symbol(init_symbol, init_ast)

    def visit_AnnAssign(self, node: ast.AnnAssign):
        # Annotated assignment has exactly one target; route it directly.
        var_id = self.get_symbol_id(node.target, node)
        symbol = self.get_symbol(var_id)
        self.append_symbol(symbol, node)

    def visit_AugAssign(self, node: ast.AugAssign):
        # Augmented assignment has exactly one target; route it directly.
        var_id = self.get_symbol_id(node.target, node)
        symbol = self.get_symbol(var_id)
        self.append_symbol(symbol, node)

    def visit_Name(self, name: ast.Name) -> str:
        """
        Visitor of a name node

        :param name: the python ast name identifier node
        :return: the identifier of the name
        """
        return name.id

    # endregion
| 2.203125 | 2 |
test/wpu.py | JittoThomas/IOT | 0 | 12758158 | import RPi.GPIO as GPIO
GPIO.setmode(GPIO.BCM) # set up BCM GPIO numbering
GPIO.setup(4, GPIO.IN, pull_up_down=GPIO.PUD_UP)
| 2.421875 | 2 |
example/prj_name/pkg_name/__init__.py | glimix/build-capi | 0 | 12758159 | <reponame>glimix/build-capi
def get_include():
    """Return the absolute path of the package's bundled ``include`` directory."""
    from os.path import dirname, join
    import pkg_name
    package_root = dirname(pkg_name.__file__)
    return join(package_root, 'include')
def get_lib():
    """Return the absolute path of the package's bundled ``lib`` directory."""
    from os.path import dirname, join
    import pkg_name
    package_root = dirname(pkg_name.__file__)
    return join(package_root, 'lib')
| 1.867188 | 2 |
config.py | yunuszend/Telegram_Vc_Bot | 0 | 12758160 | # Calls Music 1 - Telegram bot for streaming audio in group calls
# Copyright (C) 2021 <NAME>
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
# Modified by Inukaasith
from os import getenv
from dotenv import load_dotenv
load_dotenv()
# Per-chat playback queue, populated at runtime.
que = {}

SESSION_NAME = getenv("SESSION_NAME", "session")
BOT_TOKEN = getenv("BOT_TOKEN", "")
BOT_NAME = getenv("BOT_NAME", "")
# Per-chat admin cache, populated at runtime.
admins = {}
API_ID = int(getenv("API_ID"))
API_HASH = getenv("API_HASH", "")
# Fixed: the original line was missing its closing parenthesis (SyntaxError).
# Note int("") still raises ValueError when CHANNEL_ID is unset.
CHANNEL_ID = int(getenv("CHANNEL_ID", ""))
# Fixed: `os` was never imported; use the already-imported getenv instead.
CHANNEL_USERNAME = getenv("CHANNEL_USERNAME", "")
DURATION_LIMIT = int(getenv("DURATION_LIMIT", "10"))
COMMAND_PREFIXES = list(getenv("COMMAND_PREFIXES", "/ !").split())
SUDO_USERS = list(map(int, getenv("SUDO_USERS", "").split()))
| 2.078125 | 2 |
tools/python/odin_data/meta_writer/meta_writer_app.py | stfc-aeg/odin-data | 0 | 12758161 | """Meta Writer application
Configures and starts up an instance of the MetaListener for receiving and writing meta data.
<NAME>, Diamond Light Source
"""
from argparse import ArgumentParser
from odin_data.meta_writer.meta_listener import MetaListener
from odin_data.logconfig import setup_logging, add_graylog_handler, set_log_level
def parse_args():
    """Parse program arguments for the meta writer application."""
    # Argument table: (flags, add_argument keyword options).
    arg_specs = [
        (("-c", "--ctrl"),
         dict(default=5659, help="Control channel port to listen on")),
        (("-d", "--data-endpoints"),
         dict(default="tcp://127.0.0.1:5558,tcp://127.0.0.1:5559",
              help="Data endpoints - comma separated list")),
        (("-w", "--writer"),
         dict(default="odin_data.meta_writer.meta_writer.MetaWriter",
              help="Module path to detector specific meta writer class")),
        (("-l", "--log-level"),
         dict(default="INFO", help="Logging level")),
        (("--log-server",),
         dict(default=None,
              help="Graylog server address and port - e.g. 127.0.0.1:8000")),
        (("--static-log-fields",),
         dict(default=None,
              help="Comma separated list of key=value fields to be attached to every log message")),
    ]

    parser = ArgumentParser()
    for flags, options in arg_specs:
        parser.add_argument(*flags, **options)
    return parser.parse_args()
def main():
    """Entry point: configure logging, then run the meta listener."""
    args = parse_args()

    # Optional key=value pairs attached to every log record.
    static_fields = None
    if args.static_log_fields is not None:
        pairs = (field.split("=") for field in args.static_log_fields.split(","))
        static_fields = dict(pairs)

    # Optional Graylog forwarding, given as host:port.
    if args.log_server is not None:
        graylog_host, graylog_port = args.log_server.split(":")
        add_graylog_handler(
            graylog_host, int(graylog_port), static_fields=static_fields
        )

    set_log_level(args.log_level)
    setup_logging()

    endpoints = args.data_endpoints.split(",")
    MetaListener(args.ctrl, endpoints, args.writer).run()
# Script entry point: only run when executed directly, not when imported.
if __name__ == "__main__":
    main()
| 2.6875 | 3 |
pytezos/cli/cli.py | bantalon/pytezos | 0 | 12758162 | <gh_stars>0
from glob import glob
from os.path import abspath, dirname, join, exists
from pprint import pprint
import fire
from pytezos import pytezos, ContractInterface
from pytezos.rpc.errors import RpcError
from pytezos.operation.result import OperationResult
from pytezos.context.mixin import default_network
from pytezos.michelson.types.base import generate_pydoc
from pytezos.cli.github import create_deployment, create_deployment_status
kernel_js_path = join(dirname(dirname(__file__)), 'assets', 'kernel.js')
kernel_json = {
"argv": ['pytezos', 'kernel', 'run', "-file", "{connection_file}"],
"display_name": "Michelson",
"language": "michelson",
"codemirror_mode": "michelson"
}
def make_bcd_link(network, address):
return f'https://better-call.dev/{network}/{address}'
def get_contract(path):
if path is None:
files = glob('*.tz')
assert len(files) == 1
contract = ContractInterface.from_file(abspath(files[0]))
elif exists(path):
contract = ContractInterface.from_file(path)
else:
network, address = path.split(':')
contract = pytezos.using(shell=network).contract(address)
return contract
class PyTezosCli:
def storage(self, action, path=None):
""" Manage contract storage.
:param action: One of `schema`, `default`
:param path: Path to the .tz file, or the following uri: <network>:<KT-address>
"""
contract = get_contract(path)
if action == 'schema':
print(generate_pydoc(type(contract.storage.data), title='storage'))
elif action == 'default':
pprint(contract.storage.dummy())
else:
assert False, action
def parameter(self, action, path=None):
""" Manage contract parameter.
:param action: One of `schema`
:param path: Path to the .tz file, or the following uri: <network>:<KT-address>
"""
contract = get_contract(path)
if action == 'schema':
print(contract.parameter.__doc__)
else:
assert False, action
def activate(self, path, network=default_network):
""" Activates and reveals key from the faucet file.
:param path: Path to the .json file downloaded from https://faucet.tzalpha.net/
:param network: Default is Babylonnet
"""
ptz = pytezos.using(key=path, shell=network)
print(f'Activating {ptz.key.public_key_hash()} in the {network}')
if ptz.balance() == 0:
try:
opg = ptz.activate_account().autofill().sign()
print(f'Injecting activation operation:')
pprint(opg.json_payload())
opg.inject(_async=False)
except RpcError as e:
pprint(e)
exit(-1)
else:
print(f'Activation succeeded! Claimed balance: {ptz.balance()} ꜩ')
else:
print('Already activated')
try:
opg = ptz.reveal().autofill().sign()
print(f'Injecting reveal operation:')
pprint(opg.json_payload())
opg.inject(_async=False)
except RpcError as e:
pprint(e)
exit(-1)
else:
print(f'Your key {ptz.key.public_key_hash()} is now active and revealed')
def deploy(self, path, storage=None, network=default_network, key=None,
github_repo_slug=None, github_oauth_token=None, dry_run=False):
""" Deploy contract to the specified network.
:param path: Path to the .tz file
:param storage: Storage in JSON format (not Micheline)
:param network:
:param key:
:param github_repo_slug:
:param github_oauth_token:
:param dry_run: Set this flag if you just want to see what would happen
"""
ptz = pytezos.using(shell=network, key=key)
print(f'Deploying contract using {ptz.key.public_key_hash()} in the {network}')
contract = get_contract(path)
try:
opg = ptz.origination(script=contract.script(initial_storage=storage)).autofill().sign()
print(f'Injecting origination operation:')
pprint(opg.json_payload())
if dry_run:
pprint(opg.preapply())
exit(0)
else:
opg = opg.inject(_async=False)
except RpcError as e:
pprint(e)
exit(-1)
else:
originated_contracts = OperationResult.originated_contracts(opg)
assert len(originated_contracts) == 1
bcd_link = make_bcd_link(network, originated_contracts[0])
print(f'Contract was successfully deployed: {bcd_link}')
if github_repo_slug:
deployment = create_deployment(github_repo_slug, github_oauth_token,
environment=network)
pprint(deployment)
status = create_deployment_status(github_repo_slug, github_oauth_token,
deployment_id=deployment['id'],
state='success',
environment=network,
environment_url=bcd_link)
pprint(status)
def main():
return fire.Fire(PyTezosCli)
if __name__ == '__main__':
main()
| 2.015625 | 2 |
CareerSuggestionGUI.py | akshaynagpal/career_path_suggestion | 3 | 12758163 | <reponame>akshaynagpal/career_path_suggestion
from Tkinter import *
import tkMessageBox
from numpy import array
from fuzzywuzzy import fuzz
from fuzzywuzzy import process
import csv
cd_list = []
# opening csv file
# NOTE(review): hard-coded absolute Windows path with unusual escaping;
# this breaks on any other machine -- consider a relative path.
with open("C:\/Users\/advance\/Documents\/GitHub\/career_path_prediction\/Career_data.csv",'rb') as csvfile:
    filereader = csv.reader(csvfile,delimiter=',')
    for row in filereader:
        cd_list.append(row)
data_array = array(cd_list) # making an array from list of lists
def predictor(str1,inp):
    """Fuzzy-match the user's target profession against the career data.

    str1 selects the current education level: 'HS' (high school) or 'B'
    (bachelors); it controls which degree columns are included in each
    result.  inp is the free-text target profession.  Returns a list of
    result rows: [label, match score, degree steps..., matched profession].
    Column layout of data_array: 0=bachelors, 3=masters, 6=doctoral,
    9=profession name.
    """
    # print str1
    user_input = inp
    user_future = user_input.title()
    user_output = []
    count_result = -1
    if str1 == 'HS': # high school selected
        flag = 0
        # First pass: simple ratio match against the profession column.
        for i in range(1,data_array.shape[0]):
            if fuzz.ratio(user_future,data_array[i][9])>60:
                flag = 1
                count_result += 1
                user_output.append(["Result "+ str(count_result + 1)])
                user_output[count_result].append(str(fuzz.ratio(user_future,data_array[i][9])))
                if data_array[i][0] != '':
                    user_output[count_result].append("Bachelors :" + data_array[i][0])
                if data_array[i][3] != '':
                    user_output[count_result].append("Masters :" + data_array[i][3])
                if data_array[i][6] != '':
                    user_output[count_result].append("Doctoral :" + data_array[i][6])
                user_output[count_result].append(data_array[i][9])
        if flag == 0: # No simple ratio match, try partial match
            print "No simple ratio match...trying for partial match..."
            # Second pass: partial-ratio match with a stricter threshold.
            for i in range(1,data_array.shape[0]):
                if fuzz.partial_ratio(user_future,data_array[i][9])>80:
                    flag = 1
                    count_result += 1
                    user_output.append(["Result "+ str(count_result+1)])
                    user_output[count_result].append(str(fuzz.partial_ratio(user_future,data_array[i][9])))
                    if data_array[i][0] != '':
                        user_output[count_result].append("Bachelors :" + data_array[i][0])
                    if data_array[i][3] != '':
                        user_output[count_result].append("Masters :" + data_array[i][3])
                    if data_array[i][6] != '':
                        user_output[count_result].append("Doctoral :" + data_array[i][6])
                    user_output[count_result].append(data_array[i][9])
        return user_output
    elif str1 == 'B': # Bachelors selected
        flag = 0
        # Same two-pass search, but the bachelors column is skipped.
        for i in range(1,data_array.shape[0]):
            if fuzz.ratio(user_future,data_array[i][9])>60: # relevance > 60
                flag = 1
                count_result += 1
                user_output.append(["Result "+ str(count_result+1)])
                user_output[count_result].append(str(fuzz.ratio(user_future,data_array[i][9])))
                if data_array[i][3] != '':
                    user_output[count_result].append("Masters: " + data_array[i][3])
                if data_array[i][6] != '':
                    user_output[count_result].append("Doctoral: " + data_array[i][6])
                user_output[count_result].append(data_array[i][9])
        if flag == 0:
            print "no simple ratio match...trying for partial match..."
            for i in range(1,data_array.shape[0]):
                if fuzz.partial_ratio(user_future,data_array[i][9])>80: # partial relevance > 80
                    flag = 1
                    count_result += 1
                    user_output.append(["Result "+ str(count_result+1)])
                    user_output[count_result].append(str(fuzz.partial_ratio(user_future,data_array[i][9])))
                    if data_array[i][3] != '':
                        user_output[count_result].append("Masters: " + data_array[i][3])
                    if data_array[i][6] != '':
                        user_output[count_result].append("Doctoral: " + data_array[i][6])
                    user_output[count_result].append(data_array[i][9])
        return user_output
def help_callback():
    """Show the Help dialog (wired to the Help menu entry)."""
    tkMessageBox.showinfo(
        "Help INFORMATION",
        "help help help"
    )
    return
def callback():
    """Handler for the 'Get Career Path' button.

    Reads the education level from listbox1 (index 0 = high school,
    1 = bachelors) and the target profession from entry e, runs predictor(),
    sorts results by match score (descending) and shows them in a dialog.
    """
    if str(listbox1.curselection()) == str(('0',)):
        print "Event : High school Selected!!"
        text_box_input = str(e.get())
        x = predictor('HS',text_box_input)
        #print x
        # Sort by the numeric match score stored at index 1, best first.
        x.sort(key = lambda x : int(x[1]),reverse=True)
        #print x
        if not x:
            tkMessageBox.showinfo(
                "RESULT",
                '\n Oops! No results found! Try again with a better search keyword!'
            )
        else:
            tkMessageBox.showinfo(
                "RESULT",
                '\n'.join(map(' -> '.join, x))
            )
    elif str(listbox1.curselection()) == str(('1',)):
        print "Event : Bachelor's Selected!!"
        text_box_input = str(e.get())
        x = predictor('B',text_box_input)
        #print x
        x.sort(key = lambda x : int(x[1]),reverse=True)
        #print x
        if not x:
            tkMessageBox.showinfo(
                "RESULT",
                '\nOops! No results found! Try again with a better search keyword!'
            )
        else:
            tkMessageBox.showinfo(
                "RESULT",
                '\n'.join(map(' -> '.join, x))
            )
    return
# Build the main window and widgets, then enter the Tk event loop.
root = Tk()
root.wm_title("Career Suggestion")
root.geometry("250x250") #width x height

#creating menu
menu = Menu(root)
root.config(menu=menu)
helpmenu=Menu(menu)
menu.add_cascade(label="Help",menu=helpmenu)
helpmenu.add_command(label="Help",command=help_callback)

frame = Frame(root, width=100, height=100)

# Target profession text entry.
w = Label(root,text = "Enter your target profession:")
w.pack(fill=X)
e = Entry(root)
e.pack(fill=X)

# Education level selection (index 0 = High School, 1 = Bachelors).
w0 = Label(root,text = "Select your current education:")
w0.pack(fill=X)
listbox1 = Listbox(root)
listbox1.pack(fill=X)
listbox1.insert(END,"High School")
listbox1.insert(END,"Bachelors")

b = Button(root,text="Get Career Path",width=10,command=callback)
b.pack(fill=X)

root.mainloop()
| 3.125 | 3 |
makeGroupFasta.py | hzi-bifo/MI-TIP | 0 | 12758164 | #!/usr/bin/env python
'''
Put sequences of the same regions into a same file.
Gene loss is also detected in this script, and for these groups it is not needed to run alignment.
'''
from os import listdir
import os
from os.path import join
import sys
import re
from Bio import SeqIO
from Bio.Seq import Seq
from Bio.Alphabet import generic_dna
from Bio.SeqRecord import SeqRecord
import argparse
from os import path
def check_dir(d):
if not os.path.exists(d):
os.makedirs(d)
def cluster(strain, f, output_dir):
# strain= re.search('.+\/([^\/]+)\.fa', f).group(1)
gene_gain= []
for record in SeqIO.parse(f.strip(), 'fasta'):
group= record.id
group_f= path.join(output_dir, group+'.fa')
seq= record.seq
if re.search('^\w+$', str(seq)) is None:## gene loss (no reads in this region)
continue
else:
gene_gain.append(group_f)
new_seq_record= SeqRecord(Seq(str(seq), generic_dna), id= strain, name= '', description= '')
## print the sequence
with open(group_f, 'a') as handle:
SeqIO.write(new_seq_record, handle, 'fasta')
return(gene_gain)
def run(files_list, out_d, com_gene_f):
fh= open(files_list, 'r')
files= {l.strip().split('\t')[0]:l.strip().split('\t')[1] for l in fh.readlines()}
#files= [l.strip() for l in fh.readlines()]
common_genes= []
for strain in files:
gene_gain= cluster(strain, files[strain], out_d)
common_genes= (gene_gain if len(common_genes)==0 else list(set(common_genes) & set(gene_gain)))
fh.close()
# write the common genes, which are not nessacerily core genes
ch= open(com_gene_f, 'w')
map(lambda g: ch.write(g+"\n"), common_genes)
ch.close()
if __name__=='__main__':
parser= argparse.ArgumentParser(description='Collect sequences of same gene from gene files of strains')
parser.add_argument('--l', dest= 'f_list', required= True, help= 'list of sequence files of each strain')
parser.add_argument('--d', dest= 'g_d', required= True, help= 'folder of output fasta files')
parser.add_argument('--c', dest= 'common_genes', required= True, help= 'output file to record common genes')
args= parser.parse_args()
files_list= args.f_list
out_d= args.g_d
check_dir(out_d)
com_gene_f= args.common_genes
run(files_list, out_d, com_gene_f)
| 2.65625 | 3 |
src/assignments/assignment2.py | acc-cosc-1336/cosc-1336-spring-2018-Skynet2020 | 0 | 12758165 | <gh_stars>0
def faculty_evaluation_result(nev, rar, som, oft, voft, alw):
''' Write code to calculate faculty evaluation rating according to asssignment instructions
:param nev: Never
:param rar: Rarely
:param som: Sometimes
:param oft: Often
:param voft: Very Often
:param alw: Always
:return: rating as a string'''
total = nev + rar + som + oft + voft + alw
nev_ratio = nev / total
rar_ratio = rar / total
som_ratio = som / total
oft_ratio = oft / total
voft_ratio = voft / total
alw_ratio = alw / total
if alw_ratio + voft_ratio >= 0.9:
return "Excellent"
elif alw_ratio + voft_ratio + oft_ratio >= 0.8:
return "Very Good"
elif alw_ratio + voft_ratio + oft_ratio + som_ratio >= 0.7:
return "Good"
elif alw_ratio + voft_ratio + oft_ratio + som_ratio + rar_ratio >= 0.6:
return "Needs Improvement"
else:
return "Unacceptable"
def get_ratings(nev,rar,som, oft,voft, alw):
'''
Students aren't expected to know this material yet!
'''
ratings = []
total = nev + rar + som + oft + voft + alw
ratings.append(round(alw / total, 2))
ratings.append(round(voft / total, 2))
ratings.append(round(oft / total, 2))
ratings.append(round(som / total, 2))
ratings.append(round(rar / total, 2))
ratings.append(round(nev / total, 2))
return ratings
| 3.21875 | 3 |
ssd_resnet10.py | supby/faced_test | 0 | 12758166 | import cv2
import tensorflow as tf
import numpy as np
modelFile = "./opencv_face_detector_uint8.pb"
configFile = "./opencv_face_detector.pbtxt"
net = cv2.dnn.readNetFromTensorflow(modelFile, configFile)
cap = cv2.VideoCapture(0)
while(True):
ret, frame = cap.read()
frameHeight = frame.shape[0]
frameWidth = frame.shape[1]
blob = cv2.dnn.blobFromImage(frame, 1.0, (300, 300), [104, 117, 123], False, False)
net.setInput(blob)
detections = net.forward()
bboxes = []
for i in range(detections.shape[2]):
confidence = detections[0, 0, i, 2]
if confidence > 0.3:
x1 = int(detections[0, 0, i, 3] * frameWidth)
y1 = int(detections[0, 0, i, 4] * frameHeight)
x2 = int(detections[0, 0, i, 5] * frameWidth)
y2 = int(detections[0, 0, i, 6] * frameHeight)
cv2.rectangle(frame, (int(x1), int(y1)), (int(x2), int(y2)), (23, 230, 210), thickness=2)
cv2.imshow('frame', frame)
if cv2.waitKey(1) & 0xFF == ord('q'):
break
| 2.671875 | 3 |
setup.py | wellcometrust/grants_tagger | 2 | 12758167 | <reponame>wellcometrust/grants_tagger
from setuptools import setup, find_packages
import os
from grants_tagger import __version__
setup(
name='grants-tagger',
author='<NAME>',
author_email='<EMAIL>',
description='A machine learning model to tag grants',
packages=find_packages(),
version=__version__,
entry_points = {
'console_scripts': 'grants_tagger=grants_tagger.__main__:app'
},
install_requires=[
'pandas',
'xlrd',
'scikit-learn==0.23.2',
'nltk',
'matplotlib',
'wellcomeml[tensorflow]==1.2.0',
'docutils==0.15',
'scipy==1.4.1',
'wasabi',
'typer',
'scispacy',
'dvc',
'tqdm',
'sagemaker',
'streamlit'
],
tests_require=[
'pytest',
'pytest-cov',
'openpyxl'
]
)
| 1.46875 | 1 |
tests/test_pychpp.py | DioPires/pychpp | 0 | 12758168 | import os
import datetime
import pytest
import re
from pychpp import __version__
from pychpp import CHPP
from pychpp.ht_team import HTTeam, HTYouthTeam
from pychpp.ht_user import HTUser
from pychpp.ht_player import HTPlayer, HTYouthPlayer, HTLineupPlayer
from pychpp.ht_arena import HTArena
from pychpp.ht_region import HTRegion
from pychpp.ht_match import HTMatch
from pychpp.ht_match_lineup import HTMatchLineup
from pychpp.ht_matches_archive import HTMatchesArchive, HTMatchesArchiveItem
from pychpp.ht_skill import HTSkill, HTSkillYouth
from pychpp.ht_challenge import HTChallengeManager
from pychpp.ht_league import HTLeague
from pychpp.ht_rank import HTRank
from pychpp.ht_world import HTCountry, HTCup, HTCountryLeague, HTRegionItem, HTWorld
from pychpp.ht_error import HTUnauthorizedAction, UnknownLeagueError
# CHPP credentials come from the environment; tests fail fast with KeyError
# if any variable is missing.
PYCHPP_CONSUMER_KEY = os.environ["PYCHPP_CONSUMER_KEY"]
PYCHPP_CONSUMER_SECRET = os.environ["PYCHPP_CONSUMER_SECRET"]
PYCHPP_ACCESS_TOKEN_KEY = os.environ["PYCHPP_ACCESS_TOKEN_KEY"]
PYCHPP_ACCESS_TOKEN_SECRET = os.environ["PYCHPP_ACCESS_TOKEN_SECRET"]
PYCHPP_SCOPE = os.environ["PYCHPP_SCOPE"]

# Expected hattrick.org URL shapes for the entities returned by the API.
YOUTH_PLAYER_PATTERN = r"https://www.hattrick.org/goto.ashx\?path=/Club/Players/YouthPlayer.aspx\?YouthPlayerID=(\d+)"
PLAYER_PATTERN = r"https://www.hattrick.org/goto.ashx\?path=/Club/Players/Player.aspx\?playerId=(\d+)"
YOUTH_TEAM_PATTERN = r"https://www.hattrick.org/goto.ashx\?path=/Club/Youth/\?YouthTeamID=(\d+)"
ARENA_PATTERN = r"https://www.hattrick.org/goto.ashx\?path=/Club/Arena/\?ArenaID=(\d+)"
USER_PATTERN = r"https://www.hattrick.org/goto.ashx\?path=/Club/Manager/\?userId=(\d+)"
REGION_PATTERN = r"https://www.hattrick.org/goto.ashx\?path=/World/Regions/Region.aspx\?RegionID=(\d+)"
MATCH_ARCHIVE_PATTERN = r"https://www.hattrick.org/goto.ashx\?path=%2FClub%2FMatches%2FArchive.aspx%3F(TeamID%3D(\d*))?(%26)?(season%3D(\d*))?"
MATCH_PATTERN = r"https://www.hattrick.org/goto.ashx\?path=/Club/Matches/Match.aspx\?matchID=(\d+)"
COUNTRY_LEAGUE_PATTERN = r"https://www.hattrick.org/goto.ashx\?path=/World/Leagues/League.aspx\?LeagueID=(\d+)"
CUP_PATTERN = r"https://www.hattrick.org/goto.ashx\?path=/World/Cup/Cup.aspx\?CupID=(\d+)"
def test_version():
    # Pinned package version; bump together with the package metadata.
    assert __version__ == '0.2.6'
def test_request_token():
    """Request an OAuth request-token pair from the live CHPP API (network test)."""
    chpp = CHPP(consumer_key=PYCHPP_CONSUMER_KEY,
                consumer_secret=PYCHPP_CONSUMER_SECRET,
                )
    auth = chpp.get_auth(scope='')
    # get_auth must return exactly the token pair and the authorization URL.
    assert isinstance(auth, dict)
    for key in auth.keys():
        assert key in ('request_token', 'request_token_secret', 'url',)
    assert isinstance(auth['request_token'], str) and auth['request_token']
    assert isinstance(auth['request_token_secret'],
                      str) and auth['request_token_secret']
    assert (isinstance(auth['url'], str)
            and 'https://chpp.hattrick.org/oauth/authorize.aspx?scope=&oauth_token=' in auth['url'])
@pytest.fixture
def chpp():
    """An authenticated CHPP client built from environment credentials."""
    return CHPP(consumer_key=PYCHPP_CONSUMER_KEY,
                consumer_secret=PYCHPP_CONSUMER_SECRET,
                access_token_key=PYCHPP_ACCESS_TOKEN_KEY,
                access_token_secret=PYCHPP_ACCESS_TOKEN_SECRET,
                )
def test_get_current_team(chpp):
    """Fetch the authenticated user's own team and check its shape (network test)."""
    team = chpp.team()
    assert isinstance(team, HTTeam)
    assert isinstance(team.ht_id, int)
    assert isinstance(team.name, str)
    assert isinstance(team.url, str)
    assert isinstance(team.is_bot, bool)
    # A team may not have a youth team at all.
    youth_team = team.youth_team
    assert isinstance(youth_team, HTYouthTeam) or youth_team is None
    # The team's owner must be the authenticated user.
    user = team.user
    test_user = chpp.user()
    assert user.ht_id == test_user.ht_id
    players = team.players
    assert isinstance(players, list)
    for p in players:
        assert isinstance(p, HTPlayer)
def test_get_specific_team(chpp):
    """Fetching a known team by id returns its fixed, public attributes."""
    team = chpp.team(ht_id=591993)
    assert isinstance(team, HTTeam)
    assert team.ht_id == 591993
    assert team.name == "thekiki's"
    assert team.short_name == 'thekikis'
    assert team.is_primary_club is True
    assert team.is_bot is False
    assert team.power_rating > 0
    assert team.url == "https://www.hattrick.org/goto.ashx?path=/Club/?TeamID=591993"
    user = team.user
    assert isinstance(user, HTUser)
    assert user.ht_id == 6336642
    assert user.username == 'thekiki76'
    assert user.supporter_tier == 'platinum'
    assert user.url == "https://www.hattrick.org/goto.ashx?path=/Club/Manager/?userId=6336642"
    youthteam = team.youth_team
    assert isinstance(youthteam, HTYouthTeam)
    assert youthteam.name == 'thebabykikis'
    assert re.match(YOUTH_TEAM_PATTERN, youthteam.url)
    arena = team.arena
    assert isinstance(arena, HTArena)
    assert arena.name == "thekiki's evil"
    assert re.match(ARENA_PATTERN, arena.url)
def test_get_secondary_team(chpp):
    """A secondary club has is_primary_club False and its own user/arena."""
    # NOTE(review): the "<NAME>" literals below look like anonymisation
    # placeholders from a dataset export — restore the real names.
    team = chpp.team(ht_id=44307)
    assert isinstance(team, HTTeam)
    assert team.ht_id == 44307
    assert team.name == "<NAME>"
    assert team.short_name == 'Grynvalla'
    assert team.is_primary_club is False
    assert team.url == "https://www.hattrick.org/goto.ashx?path=/Club/?TeamID=44307"
    user = team.user
    assert isinstance(user, HTUser)
    assert user.ht_id == 182085
    assert user.username == "Kvarak"
    assert user.url == "https://www.hattrick.org/goto.ashx?path=/Club/Manager/?userId=182085"
    youthteam = team.youth_team
    assert isinstance(youthteam, HTYouthTeam)
    assert youthteam.name == "<NAME>"
    assert re.match(YOUTH_TEAM_PATTERN, youthteam.url)
    arena = team.arena
    assert isinstance(arena, HTArena)
    assert arena.name == "Grynvallen"
    assert re.match(ARENA_PATTERN, arena.url)
def test_get_current_user(chpp):
    """The current user object has typed id, name and a well-formed URL."""
    user = chpp.user()
    assert isinstance(user, HTUser)
    assert isinstance(user.ht_id, int)
    assert isinstance(user.username, str)
    assert isinstance(user.url, str)
    assert re.match(USER_PATTERN, user.url)
def test_get_player(chpp):
    """Fetch a known player and verify fixed attributes and skill structure."""
    player = chpp.player(ht_id=432002549)
    assert isinstance(player, HTPlayer)
    # Skills is a name -> skill mapping whose keys come from the canonical set.
    # (The original asserted `isinstance(player.skills, dict)` twice; the
    # duplicate was removed.)
    assert isinstance(player.skills, dict)
    assert set(player.skills).issubset(HTSkill.SKILLS_NAME)
    assert player.owner_notes is None
    assert player.ht_id == 432002549
    assert player.agreeability == 2
    assert player.aggressiveness == 3
    assert player.honesty == 3
    assert player.url == "https://www.hattrick.org/goto.ashx?path=/Club/Players/Player.aspx?playerId=432002549"
    assert len(player.skills) == 8
    for skill_name in player.skills:
        assert skill_name in ("stamina", "keeper", "defender", "playmaker",
                              "winger", "scorer", "passing", "set_pieces")
    assert isinstance(player.tsi, int)
    assert isinstance(player.injury_level, int)
def test_get_youth_player(chpp):
    """If the current user has a youth team, its first player is well-formed."""
    youthteam = chpp.youth_team()
    assert isinstance(youthteam, HTYouthTeam)
    # ht_id == 0 means the user has no youth team; skip the player checks.
    if youthteam.ht_id != 0:
        youthplayer = youthteam.players[0]
        assert isinstance(youthplayer, HTYouthPlayer)
        assert {i for i in youthplayer.skills.keys()}.issubset(
            HTSkillYouth.SKILLS_TAG)
        assert re.match(YOUTH_PLAYER_PATTERN, youthplayer.url)
def test_get_current_user_arena(chpp):
    """The current user's arena has typed attributes and a well-formed URL."""
    arena = chpp.arena()
    assert isinstance(arena, HTArena)
    assert isinstance(arena.ht_id, int) or arena.ht_id is None
    assert isinstance(arena.name, str)
    assert isinstance(arena.url, str)
    assert re.match(ARENA_PATTERN, arena.url)
def test_get_specific_arena(chpp):
    """Fetching a known arena by id returns its fixed attributes and team."""
    arena = chpp.arena(ht_id=295023)
    assert isinstance(arena, HTArena)
    assert arena.ht_id == 295023
    assert arena.name == 'Les piments verts Arena'
    assert arena.url == "https://www.hattrick.org/goto.ashx?path=/Club/Arena/?ArenaID=295023"
    team = arena.team
    assert isinstance(team, HTTeam)
    # For this fixture the arena id happens to equal the owning team id.
    assert team.ht_id == 295023
    assert team.name == 'Les piments verts'
    assert team.url == "https://www.hattrick.org/goto.ashx?path=/Club/?TeamID=295023"
def test_get_current_user_region(chpp):
    """The current user's region exposes typed demographic/weather fields."""
    region = chpp.region()
    assert isinstance(region, HTRegion)
    assert isinstance(region.ht_id, int)
    assert isinstance(region.name, str)
    assert isinstance(region.number_of_users, int)
    assert isinstance(region.number_of_online, int)
    assert isinstance(region.weather, int)
    assert isinstance(region.tomorrow_weather, int)
    assert isinstance(region.url, str)
    assert re.match(REGION_PATTERN, region.url)
def test_get_specific_region(chpp):
    """Fetching a known region by id returns its fixed name and URL."""
    region = chpp.region(ht_id=149)
    assert isinstance(region, HTRegion)
    assert region.ht_id == 149
    assert region.name == "Provence-Alpes-Côte d'Azur"
    assert isinstance(region.number_of_users, int)
    assert isinstance(region.number_of_online, int)
    assert isinstance(region.weather, int)
    assert isinstance(region.tomorrow_weather, int)
    assert region.url == "https://www.hattrick.org/goto.ashx?path=/World/Regions/Region.aspx?RegionID=149"
def test_get_current_user_matches_archive(chpp):
    """Matches archive is iterable/indexable and respects the date filter."""
    ma1 = chpp.matches_archive()
    assert isinstance(ma1, HTMatchesArchive)
    assert isinstance(ma1.url, str)
    assert re.match(MATCH_ARCHIVE_PATTERN, ma1.url)
    m = ma1[0]
    assert isinstance(m, HTMatchesArchiveItem)
    assert isinstance(m.home_team, HTTeam)
    assert isinstance(m.url, str)
    assert re.match(MATCH_PATTERN, m.url)
    # A known team and a fixed date window give a deterministic first match.
    ma2 = chpp.matches_archive(ht_id=1165592,
                               first_match_date=datetime.datetime(2020, 1, 1),
                               last_match_date=datetime.datetime(2020, 3, 31), )
    assert ma2[0].ht_id == 652913955
    assert ma2[0].home_team_name == "Les Poitevins de La Chapelle"
    assert ma2[0].away_team_name == "FC Traversonne"
    assert ma2[0].date == datetime.datetime(2020, 1, 1, 15, 10)
    assert ma2[0].type == 5
    assert ma2[0].context_id == 0
    assert ma2[0].rule_id == 0
    assert ma2[0].cup_level == 0
    assert ma2[0].cup_level_index == 0
    assert ma2[0].home_goals == 2
    assert ma2[0].away_goals == 0
    assert ma2[0].url == "https://www.hattrick.org/goto.ashx?path=/Club/Matches/Match.aspx?matchID=652913955"
    for m in ma2:
        assert datetime.datetime(
            2020, 1, 1) <= m.date <= datetime.datetime(2020, 3, 31)
def test_get_other_user_matches_archives(chpp):
    """Archives for another team can be filtered by date window or by season."""
    ma1 = chpp.matches_archive(ht_id=1755906,
                               first_match_date=datetime.datetime(2018, 4, 10),
                               last_match_date=datetime.datetime(2018, 4, 30),
                               )
    assert re.match(MATCH_ARCHIVE_PATTERN, ma1.url)
    for m in ma1:
        # NOTE(review): the upper bound below (June 30) is looser than the
        # requested last_match_date (April 30) — confirm this is intentional.
        assert datetime.datetime(
            2018, 4, 10) <= m.date <= datetime.datetime(2018, 6, 30)
        assert 1755906 in (m.home_team_id, m.away_team_id)
        assert re.match(MATCH_PATTERN, m.url)
    ma2 = chpp.matches_archive(ht_id=1755906,
                               season=60,
                               )
    assert re.match(MATCH_ARCHIVE_PATTERN, ma2.url)
    for m in ma2:
        # Season 60 runs from 2015-10-26 to 2016-02-14.
        assert datetime.datetime(
            2015, 10, 26) <= m.date <= datetime.datetime(2016, 2, 14)
        assert 1755906 in (m.home_team_id, m.away_team_id)
        assert re.match(MATCH_PATTERN, m.url)
def test_get_match(chpp):
    """Fetch a finished match with events and check fixed facts about it."""
    m = chpp.match(ht_id=547513790, events=True)
    assert isinstance(m, HTMatch)
    assert m.ht_id == 547513790
    assert m.url == "https://www.hattrick.org/goto.ashx?path=/Club/Matches/Match.aspx?matchID=547513790"
    assert m.date == datetime.datetime(2015, 12, 19, 21, 0)
    assert m.home_team_name == "<NAME>"
    assert m.away_team_name == "<NAME>"
    assert m.added_minutes == 0
    assert m.arena_id == 1162154
    # BUG fix: the original asserted `len(m.events) >= 0`, which is always
    # true.  events[14] is dereferenced below, so require at least 15 events.
    assert len(m.events) > 14
    assert m.events[14]["minute"] == 72
    assert m.events[14]["match_part"] == 2
    assert m.events[14]["id"] == 285
    assert m.events[14]["variation"] == 3
    assert m.events[14]["subject_team_id"] == 292366
    assert m.events[14]["subject_player_id"] == 373737451
    assert m.events[14]["object_player_id"] == 314946894
    # Description is localized
    # assert "free kick" in m.events[14]["description"]
def test_is_challengeable(chpp):
    """is_challengeable needs the manage_challenges scope; otherwise it raises."""
    challenge = HTChallengeManager(chpp)
    if "manage_challenges" in PYCHPP_SCOPE:
        ich = challenge.is_challengeable(team_ht_id=1750803)
        assert isinstance(ich, dict)
        for b in ich.values():
            assert isinstance(b, bool)
    else:
        with pytest.raises(HTUnauthorizedAction):
            ich = challenge.is_challengeable(team_ht_id=1750803)
def test_league(chpp):
    """Fetching a known league series returns its name, country and ranks."""
    league = chpp.league(ht_id=36378)
    assert isinstance(league, HTLeague)
    assert league.ht_id == 36378
    assert league.name == "VI.390"
    assert league.country_id == 5
    assert league.url == "https://www.hattrick.org/goto.ashx?path=/World/Series/?LeagueLevelUnitID=36378"
    assert isinstance(league.ranks, list)
    for r in league.ranks:
        assert isinstance(r, HTRank)
    # Ranks are ordered by table position (1-based).
    assert league.ranks[3].position == 4
def test_get_match_lineup(chpp):
    """Match lineups resolve players for both senior and youth sources."""
    match_lineup = chpp.match_lineup(ht_id=660688698, team_id=86324)
    assert isinstance(match_lineup, HTMatchLineup)
    assert isinstance(match_lineup.match, HTMatch)
    assert match_lineup.ht_id == 660688698
    assert match_lineup.home_team_name == "Gazela.f.c"
    assert match_lineup.away_team_id == 86324
    assert match_lineup.away_team_name == "<NAME>"
    assert match_lineup.arena_id == 1420520
    assert match_lineup.game_type == 1
    assert re.match(MATCH_PATTERN, match_lineup.url)
    assert isinstance(match_lineup.arena, HTArena)
    assert len(match_lineup.lineup_players) == 20
    assert isinstance(match_lineup.lineup_players[0], HTLineupPlayer)
    assert isinstance(match_lineup.lineup_players[0].player, HTPlayer)
    assert match_lineup.lineup_players[0].ht_id == 453372825
    assert match_lineup.lineup_players[0].first_name == "Teodoro"
    assert match_lineup.lineup_players[0].role_id == 100
    assert match_lineup.lineup_players[0].role_name == "Keeper"
    # Role id 120 is not mapped to a named role.
    assert match_lineup.lineup_players[15].role_id == 120
    assert match_lineup.lineup_players[15].role_name == "Unknown role"
    assert re.match(PLAYER_PATTERN, match_lineup.lineup_players[15].url)
    # Youth source returns youth player objects instead.
    match_lineup = chpp.match_lineup(
        ht_id=116104524, team_id=2828377, source='youth')
    assert isinstance(match_lineup.lineup_players[0], HTLineupPlayer)
    assert isinstance(match_lineup.lineup_players[0].player, HTYouthPlayer)
    assert re.match(YOUTH_PLAYER_PATTERN, match_lineup.lineup_players[0].url)
def test_get_world_details(chpp):
    """World details can be fetched for one league (with regions) or all."""
    portugal_details = chpp.world(ht_id=25, include_regions=True)
    assert isinstance(portugal_details, HTWorld)
    assert isinstance(portugal_details.leagues[0], HTCountryLeague)
    assert isinstance(portugal_details.leagues[0].country, HTCountry)
    assert isinstance(portugal_details.leagues[0].cups[0], HTCup)
    assert len(portugal_details.leagues) == 1
    # league() resolves both by id and (case-insensitively) by name.
    assert portugal_details.league(ht_id=25).league_name == "Portugal"
    assert portugal_details.league(name="portugal").ht_id == 25
    assert portugal_details.league(ht_id=25).country.country_name == "Portugal"
    portugal_regions = portugal_details.league(ht_id=25).country.regions
    assert len(portugal_regions) >= 1
    assert isinstance(portugal_regions[0], HTRegionItem)
    assert isinstance(portugal_regions[0].region, HTRegion)
    assert len(portugal_details.league(ht_id=25).cups) >= 1
    # Asking a single-league world for another league id must raise.
    with pytest.raises(UnknownLeagueError):
        portugal_details.league(ht_id=26)
    world_details = chpp.world()
    assert len(world_details.leagues) > 1
    # Regions are only populated when include_regions=True was requested.
    assert world_details.leagues[0].country.regions is None
    assert re.match(COUNTRY_LEAGUE_PATTERN, portugal_details.league(ht_id=25).url)
    assert re.match(REGION_PATTERN, portugal_regions[0].region.url)
    assert re.match(CUP_PATTERN, portugal_details.league(ht_id=25).cups[0].url)
| 1.953125 | 2 |
Convert.py | mcsim415/NaverWebtoonDownloader | 1 | 12758169 | <gh_stars>1-10
import errno
import glob
import os

from PIL import Image
def ConvertImage(image_key, image_value):
    """Convert <image_key>-<index>.jpg to out/<image_key>.png.

    image_value is the sorted list of integer part-indexes collected for this
    key; only the first part is converted (callers pass single-part keys).
    """
    # BUG fix: the original referenced an undefined global `i` (NameError);
    # use the first index from image_value instead.
    index = image_value[0]
    src = image_key + '-' + str(index) + '.jpg'
    im = Image.open(src)
    print('Get ' + src)
    print('[Image Convert] Start Change image to out/' + image_key + '.png')
    im.save('out//' + image_key + '.png')
if __name__ == '__main__' :
    """Collect <key>-<index>.jpg files and convert single-part keys to PNG."""
    print('[Image Convert] Start Changing Images')
    target_dir = './'
    files = glob.glob(target_dir + '*.jpg')
    name_list = {}
    # Make the output directory, ignoring "already exists".
    try:
        if not (os.path.isdir('out')):
            os.makedirs(os.path.join('out'))
    except OSError as e:
        # BUG fix: `errno` was used here without ever being imported, which
        # turned any OSError into a NameError (now imported at module top).
        if e.errno != errno.EEXIST:
            print('[Image Convert] Failed to create directory.')
            raise
    for f in files:
        # BUG fix: the original split on '\\', which only works with Windows
        # path separators; os.path.basename is portable.
        name = os.path.basename(f)
        key = name.split('-')[0]
        value = name.split('-')[1].split('.')[0]
        if key in name_list.keys():
            name_list[key].append(int(value))
        else:
            name_list[key] = [int(value)]
        name_list[key].sort()
    for key, value in name_list.items():
        # Only single-part images are converted here.
        if len(value) == 1:
            ConvertImage(key, value)
    print('[Image Convert] Complete Changing Images')
stanCode_projects/Fractal tree/Part 2 (Fractal).py | iiirischen/MystanCodeProjects | 0 | 12758170 | <reponame>iiirischen/MystanCodeProjects
import tkinter as tk
import math
# Module-level tkinter state shared by the drawing functions below.
point = []

# Main application window: fixed size, titled "Fractal".
top = tk.Tk()
top.title('Fractal')
top.resizable(False, False)
canvas = tk.Canvas(width=1300, height=700)
canvas.pack(side=tk.BOTTOM)
# Level slider; configured in main().
scale = tk.Scale(top)
def main():
    """Configure the level slider and enter the tkinter main loop."""
    # Slider: recursion level 0..10; draw() is invoked on every change.
    scale.config(orient="horizontal", length=300, from_=0, to=10, tickinterval=1, label="Level", command=draw)
    scale.pack()
    tk.mainloop()
def draw(val):
    """Slider callback: clear the canvas and redraw the tree at level *val*."""
    level = int(val)
    canvas.delete("line")
    canvas.delete("flower")
    if 0 < level < 7:
        # Trunk (h1) length, chosen so the tree of `level` branch tiers fits
        # the 600px height given the 3/4 shrink factor per tier.
        h = 600 / (1 + 3 * (1 - (3/4)**(level-1)))
    else:
        # Levels above 7 switch to flowers, so cap the branch depth at 6.
        h = 600 / (1 + 3 * (1 - (3/4)**6))
    # Start from the bottom centre, growing straight up (pi/2).
    line(level, 0, canvas.winfo_width() / 2, canvas.winfo_height(), h, 15, math.pi / 2)
def line(level, count, x, y, length, width, theta):
    """Recursively draw one branch at angle *theta*, then its two children.

    Recursion stops at *level*; tiers beyond 7 draw flowers instead of
    branches.  Lower tiers are brown ("trunk"), upper tiers green.
    """
    if level == count:
        pass
    elif level > 7 and count >= 7:
        # draw flower
        circle(x, y, 15, 4, count, level)
    else:
        # draw branch: endpoint from polar offset (canvas y grows downward).
        x2 = x - length * math.cos(theta)
        y2 = y - length * math.sin(theta)
        if count < 4:
            color = 'burlywood4'
        else:
            color = 'PaleGreen3'
        canvas.create_line(x, y, x2, y2, width=width, fill=color, tags="line")
        # Child length shrink alternates between even and odd tiers so that
        # the combined two-tier factor stays 3/4.
        if count % 2 == 0:  # even tier
            # right branch
            line(level, count+1, x2, y2, length * (3 / 4) * (2/(3**0.5)), width - 2, theta + (math.pi / 6))
            # left branch
            line(level, count+1, x2, y2, length * (3 / 4) * (2/(3**0.5)), width - 2, theta - (math.pi / 6))
        else:  # odd tier
            # right
            line(level, count + 1, x2, y2, length * 2 / (3 ** 0.5) * 9 / 16, width - 2, theta + (math.pi / 6))
            # left
            line(level, count + 1, x2, y2, length * 2 / (3 ** 0.5) * 9 / 16, width - 2, theta - (math.pi / 6))
def circle(x, y, r, w, count, level):
    """Recursively draw a flower: a ring plus six half-size rings around it.

    The six children sit at 60-degree intervals at distance r; recursion
    stops when *count* reaches *level*.
    """
    if level == count:
        pass
    else:
        draw_circle(x, y, r, w)
        # left flower
        circle(x - r, y, r * 1/2, w - 1, count + 1, level)
        # left up flower
        circle(x - r/2, y - r * (3**0.5) / 2, r * 1 / 2, w - 1, count + 1, level)
        # right up flower
        circle(x + r/2, y - r * (3**0.5) / 2, r * 1 / 2, w - 1, count + 1, level)
        # right flower
        circle(x + r, y, r * 1 / 2, w - 1, count + 1, level)
        # right bottom flower
        circle(x + r/2, y + r*(3**0.5) / 2, r * 1 / 2, w - 1, count + 1, level)
        # left bottom flower
        circle(x - r / 2, y + r*(3**0.5) / 2, r * 1 / 2, w - 1, count + 1, level)
def draw_circle(x, y, r, w):
    """Draw one pink flower ring of radius *r* and outline width *w*."""
    canvas.create_oval(x-r, y-r, x+r, y+r, outline='PaleVioletRed1', tags='flower', width=w)
if __name__ == '__main__':
main()
| 3.59375 | 4 |
PyRoute/HexMap.py | elsiehupp/traveller_pyroute | 12 | 12758171 | """
Created on Mar 8, 2014
@author: tjoneslo
"""
import os
import logging
from pypdflite import PDFLite
from pypdflite import PDFCursor
from pypdflite.pdfobjects.pdfline import PDFLine
from pypdflite.pdfobjects.pdfellipse import PDFEllipse
from pypdflite.pdfobjects.pdftext import PDFText
from Galaxy import Sector, Galaxy
from Star import Star
from StatCalculation import StatCalculation
class HexMap(object):
"""
Draw the trade routes as calculated, sector by sector onto PDF files.
Used pypdflite to directly generate the PDF files.
"""
    def __init__(self, galaxy, routes, min_btn=8):
        # galaxy: the Galaxy whose sectors/stars/borders are rendered.
        # routes: which annotation style to draw ('trade', 'comm' or 'xroute').
        # min_btn: smallest bilateral trade number that still gets a line.
        self.galaxy = galaxy
        self.routes = routes
        self.ym = 9  # half a hex height
        self.xm = 6  # half the length of one side
        self.colorStart = 0
        self.min_btn = min_btn
        self.y_start = 43  # top margin of the hex grid, in PDF points
        self.x_start = 15  # left margin of the hex grid, in PDF points
def write_maps(self):
"""
Starting point for writing PDF files.
Call this to output the trade maps
"""
logging.getLogger("PyRoute.HexMap").info("writing {:d} sector maps...".format(len(self.galaxy.sectors)))
for sector in self.galaxy.sectors.values():
pdf = self.document(sector)
self.write_base_map(pdf, sector)
self.draw_borders(pdf, sector)
comm_routes = [star for star in self.galaxy.stars.edges(sector.worlds, True) \
if star[2].get('xboat', False) or star[2].get('comm', False)]
for (star, neighbor, data) in comm_routes:
self.comm_line(pdf, [star, neighbor])
sector_trade = [star for star in self.galaxy.stars.edges(sector.worlds, True) \
if star[2]['trade'] > 0 and StatCalculation.trade_to_btn(star[2]['trade']) >= self.min_btn]
logging.getLogger('PyRoute.HexMap').debug("Worlds with trade: {}".format(len(sector_trade)))
sector_trade.sort(key=lambda line: line[2]['trade'])
for (star, neighbor, data) in sector_trade:
self.galaxy.stars[star][neighbor]['trade btn'] = StatCalculation.trade_to_btn(data['trade'])
self.trade_line(pdf, [star, neighbor], data)
# Get all the worlds in this sector
# for (star, neighbor, data) in self.galaxy.stars.edges(sector.worlds, True):
# if star.sector != sector:
# continue#
# if data['trade'] > 0 and self.trade_to_btn(data['trade']) >= self.min_btn:
# self.galaxy.stars[star][neighbor]['trade btn'] = self.trade_to_btn(data['trade'])
# self.trade_line(pdf, [star, neighbor], data)
# elif star.sector != neighbor.sector:
# data = self.galaxy.stars.get_edge_data(neighbor, star)
# if data is not None and \
# data['trade'] > 0 and \
# self.trade_to_btn(data['trade']) >= self.min_btn:
# self.trade_line(pdf, [star, neighbor], data)
for star in sector.worlds:
self.system(pdf, star)
if sector.coreward:
self.coreward_sector(pdf, sector.coreward.name)
if sector.rimward:
self.rimward_sector(pdf, sector.rimward.name)
if sector.spinward:
self.spinward_sector(pdf, sector.spinward.name)
if sector.trailing:
self.trailing_sector(pdf, sector.trailing.name)
self.writer.close()
    def write_base_map(self, pdf, sector):
        # Background layers: sector title, subsector grid, then the hex grid.
        self.sector_name(pdf, sector.name)
        self.subsector_grid(pdf)
        self.hex_grid(pdf, self._draw_all, 0.5)
    def sector_name(self, pdf, name):
        # Draw the sector name centred at the top of the page (306 is half of
        # the 612pt page width), then restore the previous font.
        cursor = PDFCursor(5, -5, True)
        def_font = pdf.get_font()
        pdf.set_font('times', size=30)
        width = pdf.get_font()._string_width(name)
        cursor.x = 306 - (width / 2)
        pdf.add_text(name, cursor)
        pdf.set_font(font=def_font)
    def coreward_sector(self, pdf, name):
        # Label the coreward (top-edge) neighbouring sector, centred.
        cursor = PDFCursor(5, self.y_start - 15, True)
        def_font = pdf.get_font()
        pdf.set_font('times', size=10)
        width = pdf.get_font()._string_width(name) / 2
        cursor.x = 306 - width
        pdf.add_text(name, cursor)
        pdf.set_font(font=def_font)
    def rimward_sector(self, pdf, name):
        # Label the rimward (bottom-edge) neighbouring sector, centred.
        cursor = PDFCursor(306, 767, True)
        def_font = pdf.get_font()
        pdf.set_font('times', size=10)
        cursor.x_plus(-pdf.get_font()._string_width(name) / 2)
        pdf.add_text(name, cursor)
        pdf.set_font(font=def_font)
    def spinward_sector(self, pdf, name):
        # Label the spinward (left-edge) neighbouring sector, rotated 90 deg.
        cursor = PDFCursor(self.x_start - 5, 390, True)
        def_font = pdf.get_font()
        pdf.set_font('times', size=10)
        cursor.y_plus(pdf.get_font()._string_width(name) / 2)
        text = PDFText(pdf.session, pdf.page, None, cursor=cursor)
        text.text_rotate(90)
        text._text(name)
        pdf.set_font(font=def_font)
    def trailing_sector(self, pdf, name):
        # Label the trailing (right-edge) neighbouring sector, rotated -90 deg.
        cursor = PDFCursor(598, 390, True)
        def_font = pdf.get_font()
        pdf.set_font('times', size=10)
        cursor.y_plus(-(pdf.get_font()._string_width(name) / 2))
        text = PDFText(pdf.session, pdf.page, None, cursor=cursor)
        text.text_rotate(-90)
        text._text(name)
        pdf.set_font(font=def_font)
    def subsector_grid(self, pdf):
        # Light-gray 4x4 subsector grid over the map area: vertical lines
        # every 144pt, horizontal lines every 180pt.
        color = pdf.get_color()
        color.set_color_by_name('lightgray')
        pdf.set_draw_color(color)
        vlineStart = PDFCursor(0, self.y_start + self.xm)
        vlineEnd = PDFCursor(0, self.y_start + self.xm + (180 * 4))
        for x in range(self.x_start, 595, 144):
            vlineStart.x = x
            vlineEnd.x = x
            pdf.add_line(cursor1=vlineStart, cursor2=vlineEnd)
        hlineStart = PDFCursor(self.x_start, 0)
        hlineEnd = PDFCursor(591, 0)
        for y in range(self.y_start + self.xm, 780, 180):
            hlineStart.y = y
            hlineEnd.y = y
            pdf.add_line(cursor1=hlineStart, cursor2=hlineEnd)
    def _hline(self, pdf, width, colorname):
        # Build the reusable horizontal hex-edge segment; dx/dy are the
        # per-column / per-row step sizes used by hex_grid to advance it.
        hlineStart = PDFCursor(0, 0)
        hlineStart.x = 3
        hlineStart.y = self.y_start - self.ym
        hlineStart.dx = self.xm * 3
        hlineStart.dy = self.ym * 2
        hlineEnd = PDFCursor(0, 0)
        hlineEnd.x = self.xm * 2.5
        hlineEnd.y = self.y_start - self.ym
        hlineEnd.dx = self.xm * 3
        hlineEnd.dy = self.ym * 2
        color = pdf.get_color()
        color.set_color_by_name(colorname)
        hline = PDFLine(pdf.session, pdf.page, hlineStart, hlineEnd, stroke='solid', color=color, size=width)
        return (hlineStart, hlineEnd, hline)
    def _hline_restart_y(self, x, hlineStart, hlineEnd):
        # Reset the horizontal segment to the top of column x; odd columns
        # sit half a hex lower than even columns.
        if (x & 1):
            hlineStart.y = self.y_start - self.ym
            hlineEnd.y = self.y_start - self.ym
        else:
            hlineStart.y = self.y_start - 2 * self.ym
            hlineEnd.y = self.y_start - 2 * self.ym
    def _lline(self, pdf, width, colorname):
        # Build the reusable "left" diagonal hex-edge segment (see _hline).
        llineStart = PDFCursor(-10, 0)
        llineStart.x = self.x_start
        llineStart.dx = self.xm * 3
        llineStart.dy = self.ym * 2
        llineEnd = PDFCursor(-10, 0)
        llineEnd.x = self.x_start + self.xm
        llineEnd.dx = self.xm * 3
        llineEnd.dy = self.ym * 2
        color = pdf.get_color()
        color.set_color_by_name(colorname)
        lline = PDFLine(pdf.session, pdf.page, llineStart, llineEnd, stroke='solid', color=color, size=width)
        return (llineStart, llineEnd, lline)
    def _lline_restart_y(self, x, llineStart, llineEnd):
        # Reset the left diagonal for column x; the slope direction flips
        # between odd and even columns.
        if (x & 1):
            llineStart.y = self.y_start - 2 * self.ym
            llineEnd.y = self.y_start - self.ym
        else:
            llineStart.y = self.y_start - self.ym
            llineEnd.y = self.y_start - 2 * self.ym
    def _rline(self, pdf, width, colorname):
        # Build the reusable "right" diagonal hex-edge segment (see _hline).
        rlineStart = PDFCursor(0, 0)
        rlineStart.x = self.x_start + self.xm
        rlineStart.dx = self.xm * 3
        rlineStart.dy = self.ym * 2
        rlineEnd = PDFCursor(0, 0)
        rlineEnd.x = self.x_start
        rlineEnd.dx = self.xm * 3
        rlineEnd.dy = self.ym * 2
        color = pdf.get_color()
        color.set_color_by_name(colorname)
        rline = PDFLine(pdf.session, pdf.page, rlineStart, rlineEnd, stroke='solid', color=color, size=width)
        return (rlineStart, rlineEnd, rline)
    def _rline_restart_y(self, x, rlineStart, rlineEnd):
        # Reset the right diagonal for column x; the slope direction flips
        # between odd and even columns.
        if (x & 1):
            rlineStart.y = self.y_start - 3 * self.ym
            rlineEnd.y = self.y_start - 2 * self.ym
        else:
            rlineStart.y = self.y_start - 2 * self.ym
            rlineEnd.y = self.y_start - 3 * self.ym
    def hex_grid(self, pdf, draw, width, colorname='gray'):
        # Walk the 33-column x 41-row hex grid, advancing the three segment
        # templates and invoking the *draw* callback at every (column, row).
        hlineStart, hlineEnd, hline = self._hline(pdf, width, colorname)
        llineStart, llineEnd, lline = self._lline(pdf, width, colorname)
        rlineStart, rlineEnd, rline = self._rline(pdf, width, colorname)
        for x in range(33):
            hlineStart.x_plus()
            hlineEnd.x_plus()
            # Re-seat all three segments at the top of this column.
            self._hline_restart_y(x, hlineStart, hlineEnd)
            self._lline_restart_y(x, llineStart, llineEnd)
            self._rline_restart_y(x, rlineStart, rlineEnd)
            for y in range(41):
                hlineStart.y_plus()
                hlineEnd.y_plus()
                llineStart.y_plus()
                llineEnd.y_plus()
                rlineStart.y_plus()
                rlineEnd.y_plus()
                draw(x, y, hline, lline, rline)
            llineStart.x_plus()
            llineEnd.x_plus()
            rlineStart.x_plus()
            rlineEnd.x_plus()
def _draw_all(self, x, y, hline, lline, rline):
if (x < 32):
hline._draw()
lline._draw()
if (y > 0):
rline._draw()
    def _draw_borders(self, x, y, hline, lline, rline):
        # Grid callback for hex_grid: draw only edges lying on an allegiance
        # border.  The borders dict maps axial (q, r) to a bitmask:
        # 1 = horizontal edge, 2 = right diagonal, 4 = left diagonal.
        q, r = self.convert_hex_to_axial(x + self.sector.dx, y + self.sector.dy - 1)
        if self.galaxy.borders.borders.get((q, r), False):
            if self.galaxy.borders.borders[(q, r)] & 1:
                hline._draw()
            if self.galaxy.borders.borders[(q, r)] & 2 and y > 0:
                rline._draw()
            if self.galaxy.borders.borders[(q, r)] & 4:
                lline._draw()
    def draw_borders(self, pdf, sector):
        # self.sector is read by the _draw_borders callback during hex_grid.
        self.sector = sector
        self.hex_grid(pdf, self._draw_borders, 1.5, 'salmon')
@staticmethod
def convert_hex_to_axial(row, col):
x = row
z = col - (row - (row & 1)) / 2
return (x, z)
    def system(self, pdf, star):
        """Draw one star system in its hex: zone ring, UWP, name, codes."""
        def_font = pdf.get_font()
        pdf.set_font('times', size=4)
        # Pixel position of the hex; odd columns sit half a hex lower.
        col = (self.xm * 3 * (star.col))
        if (star.col & 1):
            row = (self.y_start - self.ym * 2) + (star.row * self.ym * 2)
        else:
            row = (self.y_start - self.ym) + (star.row * self.ym * 2)
        point = PDFCursor(col, row)
        self.zone(pdf, star, point.copy())
        # Line 1: the UWP string, centred in the hex.
        width = self.string_width(pdf.get_font(), star.uwp)
        point.y_plus(7)
        point.x_plus(self.ym - (width // 2))
        pdf.add_text(star.uwp, point)
        if len(star.name) > 0:
            # Line 2: the system name, truncated until it fits the hex width.
            for chars in range(len(star.name), 0, -1):
                width = self.string_width(pdf.get_font(), star.name[:chars])
                if width <= self.xm * 3.5:
                    break
            point.y_plus(3.5)
            point.x = col
            point.x_plus(self.ym - (width // 2))
            pdf.add_text(star.name[:chars], point)
        # Line 3: allegiance code, capital marker (+/*) and gas giant count.
        added = star.alg_code
        if star.tradeCode.subsector_capital:
            added += '+'
        elif star.tradeCode.sector_capital or star.tradeCode.other_capital:
            added += '*'
        else:
            added += ' '
        added += '{:d}'.format(star.ggCount)
        point.y_plus(3.5)
        point.x = col
        width = pdf.get_font()._string_width(added)
        point.x_plus(self.ym - (width // 2))
        pdf.add_text(added, point)
        # Line 4: route-style-specific annotation.
        added = ''
        tradeIn = StatCalculation.trade_to_btn(star.tradeIn)
        tradeThrough = StatCalculation.trade_to_btn(star.tradeIn + star.tradeOver)
        if self.routes == 'trade':
            added += "{:X}{:X}{:X}{:d}".format(star.wtn, tradeIn, tradeThrough, star.starportSize)
        elif self.routes == 'comm':
            added += "{}{} {}".format(star.baseCode, star.ggCount, star.importance)
        elif self.routes == 'xroute':
            added += " {}".format(star.importance)
        width = pdf.get_font()._string_width(added)
        point.y_plus(3.5)
        point.x = col
        point.x_plus(self.ym - (width // 2))
        pdf.add_text(added, point)
        pdf.set_font(def_font)
def trade_line(self, pdf, edge, data):
tradeColors = [(255, 0, 0), # Red
(224, 224, 16), # yellow - darker
(0, 255, 0), # green
(0, 255, 255), # Cyan
(96, 96, 255), # blue - lighter
(128, 0, 128), # purple
(148, 0, 211), # violet
]
start = edge[0]
end = edge[1]
trade = StatCalculation.trade_to_btn(data['trade']) - self.min_btn
if trade < 0:
return
if trade > 6:
logging.getLogger('PyRoute.HexMap').warn("trade calculated over %d" % self.min_btn + 6)
trade = 6
tradeColor = tradeColors[trade]
color = pdf.get_color()
color.set_color_by_number(tradeColor[0], tradeColor[1], tradeColor[2])
starty = self.y_start + (self.ym * 2 * (start.row)) - (self.ym * (1 if start.col & 1 else 0))
startx = (self.xm * 3 * (start.col)) + self.ym
endRow = end.row
endCol = end.col
endCircle = True
if (end.sector != start.sector):
endCircle = False
if end.sector.x < start.sector.x:
endCol -= 32
if end.sector.x > start.sector.x:
endCol += 32
if end.sector.y < start.sector.y:
endRow -= 40
if end.sector.y > start.sector.y:
endRow += 40
endy = self.y_start + (self.ym * 2 * (endRow)) - (self.ym * (1 if endCol & 1 else 0))
endx = (self.xm * 3 * endCol) + self.ym
(startx, starty), (endx, endy) = self.clipping(startx, starty, endx, endy)
else:
endy = self.y_start + (self.ym * 2 * (endRow)) - (self.ym * (1 if endCol & 1 else 0))
endx = (self.xm * 3 * endCol) + self.ym
lineStart = PDFCursor(startx, starty)
lineEnd = PDFCursor(endx, endy)
line = PDFLine(pdf.session, pdf.page, lineStart, lineEnd, stroke='solid', color=color, size=1)
line._draw()
radius = PDFCursor(2, 2)
circle = PDFEllipse(pdf.session, pdf.page, lineStart, radius, color, size=3)
circle._draw()
if endCircle:
circle = PDFEllipse(pdf.session, pdf.page, lineEnd, radius, color, size=3)
circle._draw()
    def comm_line(self, pdf, edge):
        """Draw one communication/X-boat route edge in fixed green."""
        start = edge[0]
        end = edge[1]
        color = pdf.get_color()
        color.set_color_by_number(102, 178, 102)
        starty = self.y_start + (self.ym * 2 * (start.row)) - (self.ym * (1 if start.col & 1 else 0))
        startx = (self.xm * 3 * (start.col)) + self.ym
        endRow = end.row
        endCol = end.col
        if (end.sector != start.sector):
            # The far end is in a neighbouring sector: shift it into this
            # sector's coordinate frame and clip the segment to the map.
            if end.sector.x < start.sector.x:
                endCol -= 32
            if end.sector.x > start.sector.x:
                endCol += 32
            if end.sector.y < start.sector.y:
                endRow -= 40
            if end.sector.y > start.sector.y:
                endRow += 40
            endy = self.y_start + (self.ym * 2 * (endRow)) - (self.ym * (1 if endCol & 1 else 0))
            endx = (self.xm * 3 * endCol) + self.ym
            (startx, starty), (endx, endy) = self.clipping(startx, starty, endx, endy)
        else:
            endy = self.y_start + (self.ym * 2 * (endRow)) - (self.ym * (1 if endCol & 1 else 0))
            endx = (self.xm * 3 * endCol) + self.ym
        lineStart = PDFCursor(startx, starty)
        lineEnd = PDFCursor(endx, endy)
        line = PDFLine(pdf.session, pdf.page, lineStart, lineEnd, stroke='solid', color=color, size=3)
        line._draw()
    def zone(self, pdf, star, point):
        # Draw the travel-zone ring around a system: crimson for Red/Forbidden
        # zones, goldenrod for Amber/Unabsorbed; no ring otherwise.
        point.x_plus(self.ym)
        point.y_plus(self.ym)
        color = pdf.get_color()
        if star.zone in ['R', 'F']:
            color.set_color_by_name('crimson')
        elif star.zone in ['A', 'U']:
            color.set_color_by_name('goldenrod')
        else:  # no zone -> do nothing
            return
        radius = PDFCursor(self.xm, self.xm)
        circle = PDFEllipse(pdf.session, pdf.page, point, radius, color, size=2)
        circle._draw()
    def document(self, sector):
        """Create a new PDF for *sector*, remember its writer in self.writer,
        and return the document ready for drawing."""
        path = os.path.join(self.galaxy.output_path, sector.sector_name() + " Sector.pdf")
        self.writer = PDFLite(path)
        title = "Sector %s" % sector
        subject = "Trade route map generated by PyRoute for Traveller"
        author = None
        keywords = None
        creator = "PyPDFLite"
        self.writer.set_information(title, subject, author, keywords, creator)
        self.writer.set_compression(True)
        document = self.writer.get_document()
        document.set_margins(4)
        return document
@staticmethod
def string_width(font, string):
w = 0
for i in string:
w += font.character_widths[i] if i in font.character_widths else 600
return w * font.font_size / 1000.0
def clipping(self, startx, starty, endx, endy):
points_t = [0.0, 1.0]
line_pt_1 = [startx, starty]
line_pt_2 = [endx, endy]
if startx == endx:
if starty > endy:
return ((startx, min(max(starty, endy), 780)),
(startx, max(min(starty, endy), 42)))
else:
return ((startx, max(min(starty, endy), 42)),
(startx, min(max(starty, endy), 780)))
if starty == endy:
if startx > endx:
return ((min(max(startx, endx), 600), starty),
(max(min(startx, endx), 15), starty))
else:
return ((max(min(startx, endx), 15), starty),
(min(max(startx, endx), 600), starty))
points_t.append(float(15 - startx) / (endx - startx))
points_t.append(float(600 - startx) / (endx - startx))
points_t.append(float(780 - starty) / (endy - starty))
points_t.append(float(42 - starty) / (endy - starty))
points_t.sort()
result = [(pt_1 + t * (pt_2 - pt_1)) for t in (points_t[2], points_t[3]) for (pt_1, pt_2) in
zip(line_pt_1, line_pt_2)]
logging.getLogger("PyRoute.HexMap").debug(result)
return (result[0], result[1]), (result[2], result[3])
if __name__ == '__main__':
    # Smoke-test: render two systems and a trade route into a one-sector PDF.
    sector = Sector('# Core', '# 0,0')
    galaxy = Galaxy(0, 0)
    # BUG fix: HexMap takes (galaxy, routes); the original HexMap(None)
    # raised TypeError, and document() dereferences galaxy.output_path.
    # NOTE(review): assumes Galaxy(0, 0) provides a usable output_path —
    # confirm before relying on this demo.
    hexMap = HexMap(galaxy, 'trade')
    pdf = hexMap.document(sector)
    hexMap.write_base_map(pdf, sector)
    star1 = Star(
        "0102 <NAME> E551112-7 Lo Po { -3 } (300-3) [1113] B - - 913 9 Im K2 IV M7 V ",
        galaxy.starline, 0, 0)
    star2 = Star(
        "0405 Azimuth B847427-B Ni Pa { 1 } (634+1) [455B] Bc N - 200 13 Im M2 V M7 V ",
        galaxy.starline, 0, 0)
    # BUG fix: trade_line takes the edge-data dict as a third argument; the
    # trade volume must map to a BTN >= min_btn for the line to be drawn.
    hexMap.trade_line(pdf, [star1, star2], {'trade': 10 ** 9})
    hexMap.system(pdf, star1)
    hexMap.system(pdf, star2)
    hexMap.writer.close()
| 2.890625 | 3 |
acondbs/migrations/versions/2b15f740eed9_add_a_field_time_created_to_github_.py | simonsobs/acondbs | 0 | 12758172 | <gh_stars>0
"""add a field "time_created" to github_tokens
Revision ID: 2<PASSWORD>
Revises: <PASSWORD>
Create Date: 2021-01-04 12:03:13.963073
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
# NOTE(review): the revision ids below contain '<PASSWORD>' placeholders from
# an anonymised export — restore the real Alembic revision hashes before use.
revision = '2<PASSWORD>'
down_revision = '<PASSWORD>'
branch_labels = None
depends_on = None
def upgrade():
    """Add the nullable DateTime column "time_created" to github_tokens."""
    # ### commands auto generated by Alembic - please adjust! ###
    # batch_alter_table is used so the migration also works on SQLite.
    with op.batch_alter_table('github_tokens', schema=None) as batch_op:
        batch_op.add_column(sa.Column('time_created', sa.DateTime(), nullable=True))
    # ### end Alembic commands ###
def downgrade():
    """Drop the "time_created" column added by upgrade()."""
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('github_tokens', schema=None) as batch_op:
        batch_op.drop_column('time_created')
    # ### end Alembic commands ###
| 1.234375 | 1 |
aoc2018/d07-2.py | jbudynek/advent-of-code | 0 | 12758173 | <gh_stars>0
# coding: utf-8
import numpy as np
import re
import copy
import sys
import networkx as nx
#import matplotlib.pyplot as plt
#import operator
#from collections import defaultdict
from collections import Counter
import time
def build_world(ii, DBG = True):
    """Parse instruction lines into a step-dependency DiGraph.

    Each line reads "Step C must be finished before step A can begin":
    character 5 is the prerequisite step, character 36 the dependent step.
    """
    world = nx.DiGraph()
    for line in ii:
        # "Step C must be finished before step A can begin"
        node_from = list(line)[5]
        node_to = list(line)[36]
        if(DBG):print(node_from,node_to)
        world.add_node(node_from)
        world.add_node(node_to)
        world.add_edge(node_from,node_to)
    if(DBG):print(world.edges)
    return world
# node -> tick at which a worker started the step (set by start_job).
start_time = {}
def get_start_time(node, DBG = True):
    # KeyError if the node was never started.
    return start_time[node]
def get_duration(node, DBG = True):
    # Step duration: 'A' -> 1, 'B' -> 2, ... plus the fixed OVERHEAD seconds.
    return ord(str(node)[0])-ord('A')+1+OVERHEAD
# worker id -> node currently being worked on (None once the job finishes).
worker_to_node = {}

def get_current_node(worker, DBG = True):
    """Return the node assigned to *worker*, or None if it has none."""
    return worker_to_node.get(worker)
# worker id -> "idle" | "working"
worker_status = {}

def set_worker_status(worker, status, DBG = True):
    """Record *status* for *worker*."""
    worker_status[worker] = status

def get_worker_status(worker, DBG = True):
    """Return the worker's status; unseen workers start out "idle"."""
    return worker_status.setdefault(worker, "idle")
# status = "idle","working"
def update_status(worker, node, tick, world, DBG = True):
    """Advance *worker*'s state at time *tick*.

    If the worker is working on *node* and enough ticks have elapsed to
    cover the node's duration, the node is marked done: the worker is
    released, the node is removed from the world graph, and "idle" is
    returned.  Otherwise the worker's current status is returned as-is.
    """
    if (DBG): print(worker, node, tick)
    status = get_worker_status(worker, DBG)
    if (status=="working"):
        if (DBG): print("working "+node)
        delta = tick - get_start_time(node)
        if (DBG): print("delta ",delta)
        if (delta >= get_duration(node)):
            if (DBG): print("node "+node+ " done")
            # BUG FIX: clear the assignment under the *worker* key.  The
            # original wrote worker_to_node[node] = None, which left the
            # worker still mapped to the removed node and polluted the
            # dict with node-name keys.
            worker_to_node[worker] = None
            set_worker_status(worker, "idle")
            world.remove_node(node)
            return("idle")
    return status
def start_job(worker, node,tick, world, DBG = True):
    """Assign *node* to *worker* starting at tick *tick* and mark the worker busy."""
    if (DBG): print("**start_job ",worker,node,tick)
    worker_to_node[worker] = node
    start_time[node] = tick
    set_worker_status(worker,"working")
def get_next_available_node(world, DBG = True):
    """Return the first unclaimed node with no unfinished prerequisites, or None."""
    # A node is ready when nothing points to it in the DAG and no worker
    # has already claimed it.
    ready = [
        candidate
        for candidate in world.nodes
        if not list(world.predecessors(candidate))
        and candidate not in worker_to_node.values()
    ]
    if (DBG): print("available " + str(ready))
    return ready[0] if ready else None
def is_done(world, DBG = True):
    """True once every node in the world graph has been completed (removed)."""
    return not world.number_of_nodes()
WORKERS = 5
OVERHEAD = 60
def function(ii, DBG = True):
    """Simulate WORKERS parallel workers completing the dependency graph.

    Returns the tick at which the final step completes (the part-2 answer).
    The trailing "return -1" is unreachable: the while-loop only exits via
    the is_done() return.
    """
    # build world = build DAG
    # topological sort
    # build N graphs (1 per worker)
    # ticks
    # on each graph label "doing" or "done" (propagate to other graphs)
    # when possible, take a node
    # log when "done"
    # when all is done ... we're done
    world = build_world(ii,DBG)
    #ts = list(nx.lexicographical_topological_sort(world))
    tick = 0
    #
    while (True):
        # One pass per tick: each worker first finishes any completed job,
        # then (if idle) claims the next available node.
        for worker in range(WORKERS):
            if(DBG): print ("*worker "+str(worker))
            node = get_current_node(worker, DBG)
            if(DBG): print ("*node "+str(node))
            status = update_status(worker, node, tick, world, DBG)
            if(DBG): print ("*status "+status)
            if (status=="idle"):
                node = get_next_available_node(world, DBG)
                if(DBG): print ("*available "+str(node))
                if not node == None:
                    start_job(worker,node,tick, world, DBG)
        if(is_done(world, DBG)):
            return tick
        tick = tick+1
    return -1
def test(cc=None, expected=None, DBG = False):
    """Run function() on input *cc*, compare against *expected*, print result and timing."""
    start_millis = int(round(time.time() * 1000))
    result = str(function(cc,DBG))
    stop_millis = int(round(time.time() * 1000))
    expected = str(expected)
    flag = (result == expected)
    print("*** "+str(cc) + " *** -> Result = "+str(result), " -> success = "+ str(flag) + " -> expected " + expected)
    print((stop_millis-start_millis),"ms",int((stop_millis-start_millis)/1000),"s",int((stop_millis-start_millis)/1000/60),"min")
# Worked sample input from the puzzle statement.
t1="""Step C must be finished before step A can begin.
Step C must be finished before step F can begin.
Step A must be finished before step B can begin.
Step A must be finished before step D can begin.
Step B must be finished before step E can begin.
Step D must be finished before step E can begin.
Step F must be finished before step E can begin."""
tt1 = t1.splitlines()
#test(tt1,15,True) #
#sys.exit()
# Read the real puzzle input and print the answer.
INPUT_FILE="input-d07.txt"
f = open(INPUT_FILE, "r")
contents = f.read()
puzzle_input = contents.splitlines()
f.close()
ret = function(puzzle_input,True) #
print(ret)
# 230 LOW
| 2.671875 | 3 |
check_field/run.py | kemasuda/otofu | 0 | 12758174 | <gh_stars>0
#%%
# check_field comes from the otofu observation-planning package.
import astropy.units as u
from otofu.obsplan import check_field
#%%
import matplotlib.pyplot as plt
import seaborn as sns
sns.set(style='ticks', font_scale=1.6*0.5, font='sans-serif')
plt.rcParams["figure.figsize"] = (14*0.5,7*0.5)
from matplotlib import rc
rc('text', usetex=False)
#%%
# Field of view passed to check_field when rendering each target's field.
fov = 0.45*u.deg
#%%
names = ["TOI-700", "LTT 1445A", "TRAPPIST-1"]
#%%
# NOTE(review): only the first target is processed (names[:1]) — presumably
# a quick test; drop the slice to check every field in `names`.
for name in names[:1]:
    coord, d = check_field(name, fov, plot=True)
src/appfl/protos/operator.py | APPFL/APPFL | 9 | 12758175 | <filename>src/appfl/protos/operator.py<gh_stars>1-10
import logging
from collections import OrderedDict
import torch
from torch.utils.data import DataLoader
import numpy as np
import copy
from appfl.misc.utils import *
from appfl.algorithm import *
from .federated_learning_pb2 import Job
class FLOperator:
    """Server-side operator for gRPC-based federated learning.

    Holds the global model, collects client training sizes and local
    updates, aggregates them through a fed_server algorithm object, and
    optionally validates the aggregated model after each round.
    """
    def __init__(self, cfg, model, loss_fn, test_dataset, num_clients):
        self.logger1 = create_custom_logger(logging.getLogger(__name__), cfg)
        cfg["logginginfo"]["comm_size"] = 1
        self.logger = logging.getLogger(__name__)
        self.operator_id = cfg.operator.id
        self.cfg = cfg
        self.num_clients = num_clients
        self.num_epochs = cfg.num_epochs
        self.round_number = 1
        self.best_accuracy = 0.0
        self.device = "cpu"
        # Deep-copy so the caller's model object is never mutated.
        self.model = copy.deepcopy(model)
        self.loss_fn = loss_fn
        """ Loading Model """
        if cfg.load_model == True:
            self.model = load_model(cfg)
        # Per-client bookkeeping, all keyed by client index 0..num_clients-1.
        self.client_training_size = OrderedDict()
        self.client_training_size_received = OrderedDict()
        self.client_weights = OrderedDict()
        self.client_states = OrderedDict()
        for c in range(num_clients):
            self.client_states[c] = OrderedDict()
            self.client_states[c]["penalty"] = OrderedDict()
        self.client_learning_status = OrderedDict()
        self.servicer = None  # Takes care of communication via gRPC
        self.dataloader = None
        if self.cfg.validation == True and len(test_dataset) > 0:
            self.dataloader = DataLoader(
                test_dataset,
                num_workers=0,
                batch_size=cfg.test_data_batch_size,
                shuffle=cfg.test_data_shuffle,
            )
        else:
            # No test data available: force validation off.
            self.cfg.validation = False
        # NOTE(review): eval() instantiates the server class named in the
        # config — ensure cfg.fed.servername only ever comes from a trusted
        # configuration source.
        self.fed_server: BaseServer = eval(self.cfg.fed.servername)(
            self.client_weights,
            self.model,
            self.loss_fn,
            self.num_clients,
            self.device,
            **self.cfg.fed.args,
        )
    """
    Return the tensor record of a global model requested by its name.
    """
    def get_tensor(self, name):
        return (
            np.array(self.fed_server.model.state_dict()[name])
            if name in self.fed_server.model.state_dict()
            else None
        )
    """
    Return the job status indicating the next job a client is supposed to do.
    """
    def get_job(self):
        # Default to WEIGHT until every client has reported its training size.
        job_todo = Job.WEIGHT
        self.logger.debug(
            f"[Round: {self.round_number: 04}] client_training_size_received: {self.client_training_size_received}"
        )
        if all(
            c in self.client_training_size_received for c in range(self.num_clients)
        ):
            job_todo = Job.TRAIN
        if self.round_number > self.num_epochs:
            job_todo = Job.QUIT
        return min(self.round_number, self.num_epochs), job_todo
    """
    Compute weights of clients based on their training data size.
    """
    def get_weight(self, client_id, training_size) -> float:
        self.client_training_size[client_id] = training_size
        self.client_training_size_received[client_id] = True
        self.logger.debug(
            f"[Round: {self.round_number: 04}] client_training_size_received: {self.client_training_size_received}"
        )
        # If we have received training size from all clients
        if all(
            c in self.client_training_size_received for c in range(self.num_clients)
        ):
            # Instantiate a fed_server if not instantiated yet.
            total_training_size = sum(
                self.client_training_size[c] for c in range(self.num_clients)
            )
            # Each client's weight is its share of the total training data.
            for c in range(self.num_clients):
                self.client_weights[c] = (
                    self.client_training_size[c] / total_training_size
                )
            self.fed_server.set_weights(self.client_weights)
            self.logger.debug(
                f"[Round: {self.round_number: 04}] self.client_weights: {self.client_weights}"
            )
            self.logger.debug(
                f"[Round: {self.round_number: 04}] self.client_training_size: {self.client_training_size}"
            )
            self.logger.debug(
                f"[Round: {self.round_number: 04}] self.fed_server.weights: {self.fed_server.weights}"
            )
            return self.client_weights[client_id]
        else:
            # Sentinel meaning "not all clients have reported yet".
            return -1.0
    """
    Update model weights of a global model. After updating, we increment the round number.
    """
    def update_model_weights(self):
        self.logger.info(f"[Round: {self.round_number: 04}] Updating model weights")
        self.logger.debug(
            f"[Round: {self.round_number: 04}] self.fed_server.weights: {self.fed_server.weights}"
        )
        self.fed_server.update([self.client_states])
        if self.cfg.validation == True:
            test_loss, accuracy = validation(self.fed_server, self.dataloader)
            if accuracy > self.best_accuracy:
                self.best_accuracy = accuracy
            self.logger.info(
                f"[Round: {self.round_number: 04}] Test set: Average loss: {test_loss:.4f}, Accuracy: {accuracy:.2f}%, Best Accuracy: {self.best_accuracy:.2f}%"
            )
        if (
            self.round_number % self.cfg.checkpoints_interval == 0
            or self.round_number == self.cfg.num_epochs
        ):
            """Saving model"""
            if self.cfg.save_model == True:
                save_model_iteration(self.round_number, self.model, self.cfg)
        self.round_number += 1
    """
    Check if we have received model weights from all clients for this round.
    """
    def is_round_finished(self):
        return all(
            (c, self.round_number) in self.client_learning_status
            for c in range(0, self.num_clients)
        )
    """
    Receive model weights from a client. When we have received weights from all clients,
    it will trigger a global model update.
    """
    def send_learning_results(self, client_id, round_number, penalty, primal, dual):
        self.logger.debug(
            f"[Round: {self.round_number: 04}] self.fed_server.weights: {self.fed_server.weights}"
        )
        # Deserialize the protobuf tensor records back into torch tensors.
        primal_tensors = OrderedDict()
        dual_tensors = OrderedDict()
        for tensor in primal:
            name = tensor.name
            shape = tuple(tensor.data_shape)
            flat = np.frombuffer(tensor.data_bytes, dtype=eval(tensor.data_dtype))
            nparray = np.reshape(flat, newshape=shape, order="C")
            primal_tensors[name] = torch.from_numpy(nparray)
        for tensor in dual:
            name = tensor.name
            shape = tuple(tensor.data_shape)
            flat = np.frombuffer(tensor.data_bytes, dtype=eval(tensor.data_dtype))
            nparray = np.reshape(flat, newshape=shape, order="C")
            dual_tensors[name] = torch.from_numpy(nparray)
        self.client_states[client_id]["primal"] = primal_tensors
        self.client_states[client_id]["dual"] = dual_tensors
        self.client_states[client_id]["penalty"][client_id] = penalty
        self.client_learning_status[(client_id, round_number)] = True
        self.logger.debug(
            f"[Round: {self.round_number: 04}] self.fed_server.weights: {self.fed_server.weights}"
        )
        # Round is finished when we have received model weights from all clients.
        if self.is_round_finished():
            self.logger.info(
                f"[Round: {self.round_number: 04}] Finished; all clients have sent their results."
            )
            self.update_model_weights()
| 2.109375 | 2 |
antistasi_template_checker/__main__.py | Giddius/Antistasi_Template_Checker | 0 | 12758176 | <filename>antistasi_template_checker/__main__.py<gh_stars>0
"""
[summary]
"""
# region [Imports]
# * Standard Library Imports ---------------------------------------------------------------------------->
import os
import sys
# * Third Party Imports --------------------------------------------------------------------------------->
import click
# * Local Imports --------------------------------------------------------------------------------------->
from antistasi_template_checker.engine.antistasi_template_parser import run, readit, pathmaker
# endregion[Imports]
# region [TODO]
# endregion [TODO]
# region [AppUserData]
# endregion [AppUserData]
# region [Logging]
# endregion[Logging]
# region [Constants]
# endregion[Constants]
def get_outfile_name(in_file):
    """Return the path of the error-report file for *in_file*.

    For "some/dir/mission.sqf" this yields "some/dir/mission_ERRORS.txt".
    """
    path = os.path.dirname(in_file)
    # BUG FIX: splitext must be applied to the bare file name, not the full
    # path.  The original used splitext(in_file)[0], which keeps the
    # directory component and so joined the directory in twice, e.g.
    # pathmaker("some/dir", "some/dir/mission_ERRORS.txt").  The unused
    # `name = os.path.basename(in_file)` variable shows the intent.
    mod_name = os.path.splitext(os.path.basename(in_file))[0]
    return pathmaker(path, f"{mod_name}_ERRORS.txt")
@click.group()
def cli():
    """Root click command group; subcommands are registered on it below."""
    pass
@cli.command(name="get_source")
def get_source_string():
    """Placeholder command: source inspection is unavailable in the frozen executable."""
    # NOTE(review): message wording ("an single") has a typo — fixing it is a
    # behavior change (printed output), so it is left untouched here.
    print('currently not possible with pyinstaller to inspect source data in an single file executable')
@cli.command(name="from_file")
@click.option('--case-insensitive/--case-sensitive', '-ci/-cs', default=False)
@click.option('--write-to-file/--dont-write-to-file', '-wf/-dwf', default=False)
@click.option('--pause-at-end/--no-pause-at-end', '-p/-np', default=False)
@click.option('--make-replacement/--dont-make-replacement', '-mr/-dmr', default=True)
@click.argument('in_files', nargs=-1)
def from_file(case_insensitive, write_to_file, pause_at_end, in_files, make_replacement):
    """Check one or more template files, optionally writing error reports and corrected copies."""
    for file in in_files:
        if os.path.isfile(file) is False:
            raise FileNotFoundError(file)
        content = readit(file)
        # result appears to be (report_text, replacements, not_corrected) —
        # NOTE(review): inferred from the indexing below; confirm against
        # antistasi_template_parser.run.
        result = run(content, case_insensitive=case_insensitive)
        if write_to_file is True:
            with open(get_outfile_name(file), 'w') as out_f:
                out_f.write(f"Errors in '{os.path.basename(file)}':\n\n\n")
                out_f.write(result[0])
        if make_replacement is True and len(result[1]) != 0:
            # Write a corrected copy next to the original as "<name>_CORRECTED<ext>".
            _new_file_name = os.path.splitext(os.path.basename(file))[0] + '_CORRECTED' + os.path.splitext(file)[-1]
            _new_file_path = pathmaker(os.path.dirname(file), _new_file_name)
            _new_content = content
            for old_item, new_item in result[1]:
                _new_content = _new_content.replace(old_item, new_item)
            with open(_new_file_path, 'w') as cor_f:
                cor_f.write(_new_content)
            # Report the findings that could not be auto-corrected.
            if len(result[2]) != 0:
                print('\n\n')
                print(f'not corrected in "{_new_file_name}":\n')
                for _line, _not_corrected_item in result[2]:
                    print(f'line: {str(_line)} <> "{_not_corrected_item}"\n')
                print('\n+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++\n')
    if pause_at_end is True:
        os.system('pause')
def main():
    """CLI entry point.

    For backwards compatibility, invocations that do not name a known
    subcommand are rewritten to route through the implicit "from_file"
    subcommand before being handed to click.
    """
    # BUG FIX: guard against running with no arguments at all — the original
    # indexed sys.argv[1] unconditionally and raised IndexError.  With no
    # arguments, click now simply prints the group usage text.
    if len(sys.argv) > 1 and sys.argv[1] not in ['from_file', 'get_source', '--help']:
        first = sys.argv.pop(0)
        sys.argv = [first, 'from_file'] + sys.argv
    cli(sys.argv[1:])
# region[Main_Exec]
if __name__ == '__main__':
main()
# endregion[Main_Exec]
| 2.296875 | 2 |
tmp.py | borutar/Telegram-bot | 0 | 12758177 | <filename>tmp.py
import math
def calculate_price(total_quantity):
    """Return the total price for *total_quantity* items at 20 per item.

    Volume discounts apply: 5% from 10 items, 10% from 15, 12% from 20,
    15% from 25 and 20% from 30 or more.  The discounted total is rounded
    up to the nearest multiple of 10.
    """
    price = 20 * total_quantity
    if total_quantity >= 30:
        price *= 0.80
    elif total_quantity >= 25:
        price *= 0.85
    elif total_quantity >= 20:
        price *= 0.88
    elif total_quantity >= 15:
        price *= 0.90
    elif total_quantity >= 10:
        price *= 0.95
    return int(math.ceil(price / 10.0)) * 10
print(calculate_price(20)) | 3.875 | 4 |
def make_out_word(out, word):
    """Insert *word* into the middle of the 4-character wrapper string *out*."""
    opening, closing = out[:2], out[2:]
    return opening + word + closing
| 3.3125 | 3 |
ansible/modules/network/netscaler/netscaler_servicegroup.py | EnjoyLifeFund/macHighSierra-py36-pkgs | 1 | 12758179 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright (c) 2017 Citrix Systems
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: netscaler_servicegroup
short_description: Manage service group configuration in Netscaler
description:
- Manage service group configuration in Netscaler.
- This module is intended to run either on the ansible control node or a bastion (jumpserver) with access to the actual netscaler instance.
version_added: "2.4"
author: <NAME> (@giorgos-nikolopoulos)
options:
servicegroupname:
description:
- >-
Name of the service group. Must begin with an ASCII alphabetic or underscore C(_) character, and must
contain only ASCII alphanumeric, underscore C(_), hash C(#), period C(.), space C( ), colon C(:), at C(@), equals
C(=), and hyphen C(-) characters. Can be changed after the name is created.
- "Minimum length = 1"
servicetype:
choices:
- 'HTTP'
- 'FTP'
- 'TCP'
- 'UDP'
- 'SSL'
- 'SSL_BRIDGE'
- 'SSL_TCP'
- 'DTLS'
- 'NNTP'
- 'RPCSVR'
- 'DNS'
- 'ADNS'
- 'SNMP'
- 'RTSP'
- 'DHCPRA'
- 'ANY'
- 'SIP_UDP'
- 'SIP_TCP'
- 'SIP_SSL'
- 'DNS_TCP'
- 'ADNS_TCP'
- 'MYSQL'
- 'MSSQL'
- 'ORACLE'
- 'RADIUS'
- 'RADIUSListener'
- 'RDP'
- 'DIAMETER'
- 'SSL_DIAMETER'
- 'TFTP'
- 'SMPP'
- 'PPTP'
- 'GRE'
- 'SYSLOGTCP'
- 'SYSLOGUDP'
- 'FIX'
- 'SSL_FIX'
description:
- "Protocol used to exchange data with the service."
cachetype:
choices:
- 'TRANSPARENT'
- 'REVERSE'
- 'FORWARD'
description:
- "Cache type supported by the cache server."
maxclient:
description:
- "Maximum number of simultaneous open connections for the service group."
- "Minimum value = C(0)"
- "Maximum value = C(4294967294)"
maxreq:
description:
- "Maximum number of requests that can be sent on a persistent connection to the service group."
- "Note: Connection requests beyond this value are rejected."
- "Minimum value = C(0)"
- "Maximum value = C(65535)"
cacheable:
description:
- "Use the transparent cache redirection virtual server to forward the request to the cache server."
- "Note: Do not set this parameter if you set the Cache Type."
type: bool
cip:
choices:
- 'enabled'
- 'disabled'
description:
- "Insert the Client IP header in requests forwarded to the service."
cipheader:
description:
- >-
Name of the HTTP header whose value must be set to the IP address of the client. Used with the Client
IP parameter. If client IP insertion is enabled, and the client IP header is not specified, the value
of Client IP Header parameter or the value set by the set ns config command is used as client's IP
header name.
- "Minimum length = 1"
usip:
description:
- >-
Use client's IP address as the source IP address when initiating connection to the server. With the
NO setting, which is the default, a mapped IP (MIP) address or subnet IP (SNIP) address is used as
the source IP address to initiate server side connections.
pathmonitor:
description:
- "Path monitoring for clustering."
pathmonitorindv:
description:
- "Individual Path monitoring decisions."
useproxyport:
description:
- >-
Use the proxy port as the source port when initiating connections with the server. With the NO
setting, the client-side connection port is used as the source port for the server-side connection.
- "Note: This parameter is available only when the Use Source IP C(usip) parameter is set to C(yes)."
type: bool
healthmonitor:
description:
- "Monitor the health of this service. Available settings function as follows:"
- "C(yes) - Send probes to check the health of the service."
- >-
C(no) - Do not send probes to check the health of the service. With the NO option, the appliance shows
the service as UP at all times.
type: bool
sp:
description:
- "Enable surge protection for the service group."
type: bool
rtspsessionidremap:
description:
- "Enable RTSP session ID mapping for the service group."
type: bool
clttimeout:
description:
- "Time, in seconds, after which to terminate an idle client connection."
- "Minimum value = C(0)"
- "Maximum value = C(31536000)"
svrtimeout:
description:
- "Time, in seconds, after which to terminate an idle server connection."
- "Minimum value = C(0)"
- "Maximum value = C(31536000)"
cka:
description:
- "Enable client keep-alive for the service group."
type: bool
tcpb:
description:
- "Enable TCP buffering for the service group."
type: bool
cmp:
description:
- "Enable compression for the specified service."
type: bool
maxbandwidth:
description:
- "Maximum bandwidth, in Kbps, allocated for all the services in the service group."
- "Minimum value = C(0)"
- "Maximum value = C(4294967287)"
monthreshold:
description:
- >-
Minimum sum of weights of the monitors that are bound to this service. Used to determine whether to
mark a service as UP or DOWN.
- "Minimum value = C(0)"
- "Maximum value = C(65535)"
downstateflush:
choices:
- 'enabled'
- 'disabled'
description:
- >-
Flush all active transactions associated with all the services in the service group whose state
transitions from UP to DOWN. Do not enable this option for applications that must complete their
transactions.
tcpprofilename:
description:
- "Name of the TCP profile that contains TCP configuration settings for the service group."
- "Minimum length = 1"
- "Maximum length = 127"
httpprofilename:
description:
- "Name of the HTTP profile that contains HTTP configuration settings for the service group."
- "Minimum length = 1"
- "Maximum length = 127"
comment:
description:
- "Any information about the service group."
appflowlog:
choices:
- 'enabled'
- 'disabled'
description:
- "Enable logging of AppFlow information for the specified service group."
netprofile:
description:
- "Network profile for the service group."
- "Minimum length = 1"
- "Maximum length = 127"
autoscale:
choices:
- 'DISABLED'
- 'DNS'
- 'POLICY'
description:
- "Auto scale option for a servicegroup."
memberport:
description:
- "member port."
graceful:
description:
- "Wait for all existing connections to the service to terminate before shutting down the service."
type: bool
servicemembers:
description:
- A list of dictionaries describing each service member of the service group.
suboptions:
ip:
description:
- IP address of the service. Must not overlap with an existing server entity defined by name.
port:
description:
- Server port number.
- Range C(1) - C(65535)
- "* in CLI is represented as 65535 in NITRO API"
hashid:
description:
- The hash identifier for the service.
- This must be unique for each service.
- This parameter is used by hash based load balancing methods.
- Minimum value = C(1)
serverid:
description:
- The identifier for the service.
- This is used when the persistency type is set to Custom Server ID.
servername:
description:
- Name of the server to which to bind the service group.
- The server must already be configured as a named server.
- Minimum length = 1
customserverid:
description:
- The identifier for this IP:Port pair.
- Used when the persistency type is set to Custom Server ID.
weight:
description:
- Weight to assign to the servers in the service group.
- Specifies the capacity of the servers relative to the other servers in the load balancing configuration.
- The higher the weight, the higher the percentage of requests sent to the service.
- Minimum value = C(1)
- Maximum value = C(100)
monitorbindings:
description:
- A list of monitornames to bind to this service
- Note that the monitors must have already been setup possibly using the M(netscaler_lb_monitor) module or some other method
suboptions:
monitorname:
description:
- The monitor name to bind to this servicegroup.
weight:
description:
- Weight to assign to the binding between the monitor and servicegroup.
disabled:
description:
- When set to C(yes) the service group state will be set to DISABLED.
- When set to C(no) the service group state will be set to ENABLED.
- >-
Note that due to limitations of the underlying NITRO API a C(disabled) state change alone
does not cause the module result to report a changed status.
type: bool
default: false
extends_documentation_fragment: netscaler
requirements:
- nitro python sdk
'''
EXAMPLES = '''
# The LB Monitors monitor-1 and monitor-2 must already exist
# Service members defined by C(ip) must not redefine an existing server's ip address.
# Service members defined by C(servername) must already exist.
- name: Setup http service with ip members
delegate_to: localhost
netscaler_servicegroup:
nsip: 172.18.0.2
nitro_user: nsroot
nitro_pass: <PASSWORD>
state: present
servicegroupname: service-group-1
servicetype: HTTP
servicemembers:
- ip: 10.78.78.78
port: 80
weight: 50
- ip: 10.79.79.79
port: 80
weight: 40
- servername: server-1
port: 80
weight: 10
monitorbindings:
- monitorname: monitor-1
weight: 50
- monitorname: monitor-2
weight: 50
'''
RETURN = '''
loglines:
description: list of logged messages by the module
returned: always
type: list
sample: ['message 1', 'message 2']
msg:
description: Message detailing the failure reason
returned: failure
type: str
sample: "Action does not exist"
diff:
description: List of differences between the actual configured object and the configuration specified in the module
returned: failure
type: dict
sample: { 'clttimeout': 'difference. ours: (float) 10.0 other: (float) 20.0' }
'''
from ansible.module_utils.basic import AnsibleModule
import copy
from ansible.module_utils.netscaler import ConfigProxy, get_nitro_client, netscaler_common_arguments, log, loglines, get_immutables_intersection
try:
from nssrc.com.citrix.netscaler.nitro.resource.config.basic.servicegroup import servicegroup
from nssrc.com.citrix.netscaler.nitro.resource.config.basic.servicegroup_servicegroupmember_binding import servicegroup_servicegroupmember_binding
from nssrc.com.citrix.netscaler.nitro.exception.nitro_exception import nitro_exception
from nssrc.com.citrix.netscaler.nitro.resource.config.basic.servicegroup_lbmonitor_binding import servicegroup_lbmonitor_binding
from nssrc.com.citrix.netscaler.nitro.resource.config.lb.lbmonitor_servicegroup_binding import lbmonitor_servicegroup_binding
PYTHON_SDK_IMPORTED = True
except ImportError as e:
PYTHON_SDK_IMPORTED = False
def servicegroup_exists(client, module):
    """Return True when a service group with the configured name exists on the appliance."""
    log('Checking if service group exists')
    count = servicegroup.count_filtered(client, 'servicegroupname:%s' % module.params['servicegroupname'])
    log('count is %s' % count)
    return count > 0
def servicegroup_identical(client, module, servicegroup_proxy):
    """Return True when the configured attributes match the actual service group."""
    log('Checking if service group is identical')
    actual = servicegroup.get_filtered(client, 'servicegroupname:%s' % module.params['servicegroupname'])
    return bool(servicegroup_proxy.has_equal_attributes(actual[0]))
def get_configured_service_members(client, module):
    """Build a ConfigProxy for every service member declared in the task parameters."""
    log('get_configured_service_members')
    readwrite_attrs = [
        'servicegroupname',
        'ip',
        'port',
        'hashid',
        'serverid',
        'servername',
        'customserverid',
        'weight'
    ]
    readonly_attrs = [
        'delay',
        'statechangetimesec',
        'svrstate',
        'tickssincelaststatechange',
        'graceful',
    ]
    if module.params['servicemembers'] is None:
        return []
    proxies = []
    for entry in module.params['servicemembers']:
        # Work on a copy so the module parameters stay untouched.
        attribute_values = copy.deepcopy(entry)
        attribute_values['servicegroupname'] = module.params['servicegroupname']
        proxies.append(ConfigProxy(
            actual=servicegroup_servicegroupmember_binding(),
            client=client,
            attribute_values_dict=attribute_values,
            readwrite_attrs=readwrite_attrs,
            readonly_attrs=readonly_attrs
        ))
    return proxies
def servicemembers_identical(client, module):
    """Return True when the appliance's service group members match the configured ones."""
    log('servicemembers_identical')
    try:
        # count() raises nitro exception instead of returning 0
        count = servicegroup_servicegroupmember_binding.count(client, module.params['servicegroupname'])
        if count > 0:
            servicegroup_members = servicegroup_servicegroupmember_binding.get(client, module.params['servicegroupname'])
        else:
            servicegroup_members = []
    except nitro_exception as e:
        # Errorcode 258 is treated here as "no such resource": empty member list.
        if e.errorcode == 258:
            servicegroup_members = []
        else:
            raise
    log('servicemembers %s' % servicegroup_members)
    module_servicegroups = get_configured_service_members(client, module)
    log('Number of service group members %s' % len(servicegroup_members))
    if len(servicegroup_members) != len(module_servicegroups):
        return False
    # Fallthrough to member evaluation
    # Every actual member must have at least one attribute-identical
    # configured counterpart.
    identical_count = 0
    for actual_member in servicegroup_members:
        for member in module_servicegroups:
            if member.has_equal_attributes(actual_member):
                identical_count += 1
                break
    if identical_count != len(servicegroup_members):
        return False
    # Fallthrough to success
    return True
def sync_service_members(client, module):
    """Replace all existing service group members with the configured ones."""
    log('sync_service_members')
    delete_all_servicegroup_members(client, module)
    for member_proxy in get_configured_service_members(client, module):
        member_proxy.add()
def delete_all_servicegroup_members(client, module):
    """Delete every member binding of the configured service group.

    Only members that expose both a non-None ip and a non-None servername
    are deleted — NOTE(review): this mirrors the original behavior; confirm
    against the NITRO binding semantics whether members can lack either
    attribute in practice.
    """
    log('delete_all_servicegroup_members')
    if servicegroup_servicegroupmember_binding.count(client, module.params['servicegroupname']) == 0:
        return
    servicegroup_members = servicegroup_servicegroupmember_binding.get(client, module.params['servicegroupname'])
    log('len %s' % len(servicegroup_members))
    log('count %s' % servicegroup_servicegroupmember_binding.count(client, module.params['servicegroupname']))
    for member in servicegroup_members:
        log('%s' % dir(member))
        log('ip %s' % member.ip)
        log('servername %s' % member.servername)
        if all([
            hasattr(member, 'ip'),
            member.ip is not None,
            hasattr(member, 'servername'),
            member.servername is not None,
        ]):
            # ip is cleared before issuing the delete — NOTE(review):
            # presumably the delete call keys on servername; confirm.
            member.ip = None
            member.servicegroupname = module.params['servicegroupname']
            servicegroup_servicegroupmember_binding.delete(client, member)
def get_configured_monitor_bindings(client, module):
    """Return {monitorname: ConfigProxy} for every monitor binding declared in the task."""
    log('Entering get_configured_monitor_bindings')
    bindings = {}
    configured = module.params.get('monitorbindings')
    if configured is None:
        return bindings
    readwrite_attrs = [
        'monitorname',
        'servicegroupname',
        'weight',
    ]
    readonly_attrs = []
    for entry in configured:
        # Copy so the original module parameters are left untouched.
        attribute_values_dict = copy.deepcopy(entry)
        attribute_values_dict['servicegroupname'] = module.params['servicegroupname']
        bindings[attribute_values_dict['monitorname']] = ConfigProxy(
            actual=lbmonitor_servicegroup_binding(),
            client=client,
            attribute_values_dict=attribute_values_dict,
            readwrite_attrs=readwrite_attrs,
            readonly_attrs=readonly_attrs,
        )
    return bindings
def get_actual_monitor_bindings(client, module):
    """Return {monitor name: binding} for the monitors currently bound to the service group."""
    log('Entering get_actual_monitor_bindings')
    bindings = {}
    try:
        # count() raises nitro exception instead of returning 0
        count = servicegroup_lbmonitor_binding.count(client, module.params['servicegroupname'])
    except nitro_exception as e:
        # Errorcode 258 is treated as "no bindings exist".
        if e.errorcode == 258:
            return bindings
        else:
            raise
    if count == 0:
        return bindings
    # Fallthrough to rest of execution
    for binding in servicegroup_lbmonitor_binding.get(client, module.params['servicegroupname']):
        # Log-message typo fixed: "Gettign" -> "Getting".
        log('Getting actual monitor with name %s' % binding.monitor_name)
        key = binding.monitor_name
        bindings[key] = binding
    return bindings
def monitor_bindings_identical(client, module):
    """Return True when the configured monitor bindings match those on the appliance.

    The built-in tcp-default / ping-default monitors are always present on
    the appliance and are excluded from the key comparison.
    """
    log('Entering monitor_bindings_identical')
    configured_bindings = get_configured_monitor_bindings(client, module)
    actual_bindings = get_actual_monitor_bindings(client, module)
    configured_key_set = set(configured_bindings.keys())
    actual_key_set = set(actual_bindings.keys())
    symmetrical_diff = configured_key_set ^ actual_key_set
    for default_monitor in ('tcp-default', 'ping-default'):
        if default_monitor in symmetrical_diff:
            log('Excluding %s monitor from key comparison' % default_monitor)
            symmetrical_diff.remove(default_monitor)
    if len(symmetrical_diff) > 0:
        return False
    # Compare key to key
    for key in configured_key_set:
        configured_proxy = configured_bindings[key]
        log('configured_proxy %s' % [configured_proxy.monitorname, configured_proxy.servicegroupname, configured_proxy.weight])
        log('actual_bindings %s' % [actual_bindings[key].monitor_name, actual_bindings[key].servicegroupname, actual_bindings[key].weight])
        # weight is normalized with float() — NOTE(review): NITRO appears to
        # return it as a string; confirm against the SDK.
        if any([configured_proxy.monitorname != actual_bindings[key].monitor_name,
                configured_proxy.servicegroupname != actual_bindings[key].servicegroupname,
                configured_proxy.weight != float(actual_bindings[key].weight)]):
            return False
    # Fallthrought to success
    return True
def sync_monitor_bindings(client, module):
    """Make the appliance's monitor bindings match the configured ones."""
    log('Entering sync_monitor_bindings')
    # Delete existing bindings
    for existing in get_actual_monitor_bindings(client, module).values():
        removal = lbmonitor_servicegroup_binding()
        removal.monitorname = existing.monitor_name
        removal.servicegroupname = module.params['servicegroupname']
        # Cannot remove default monitor bindings
        if removal.monitorname in ('tcp-default', 'ping-default'):
            continue
        lbmonitor_servicegroup_binding.delete(client, removal)
    # Apply configured bindings
    for configured in get_configured_monitor_bindings(client, module).values():
        log('Adding %s' % configured.monitorname)
        configured.add()
def diff(client, module, servicegroup_proxy):
    """Return the attribute differences between the configured and actual service group."""
    actual = servicegroup.get_filtered(client, 'servicegroupname:%s' % module.params['servicegroupname'])
    return servicegroup_proxy.diff_object(actual[0])
def do_state_change(client, module, servicegroup_proxy):
    """Enable or disable the service group according to the "disabled" module parameter."""
    if module.params['disabled']:
        log('Disabling service')
        return servicegroup.disable(client, servicegroup_proxy.actual)
    log('Enabling service')
    return servicegroup.enable(client, servicegroup_proxy.actual)
def main():
    """Entry point for the NetScaler servicegroup Ansible module.

    Builds the argument spec, logs in to the NITRO API, then ensures the
    service group (plus its service members, monitor bindings and
    enabled/disabled state) matches the requested ``state``, and exits
    reporting whether anything changed.
    """
    # Arguments mirroring the NITRO servicegroup resource attributes.
    module_specific_arguments = dict(
        servicegroupname=dict(type='str'),
        servicetype=dict(
            type='str',
            choices=[
                'HTTP',
                'FTP',
                'TCP',
                'UDP',
                'SSL',
                'SSL_BRIDGE',
                'SSL_TCP',
                'DTLS',
                'NNTP',
                'RPCSVR',
                'DNS',
                'ADNS',
                'SNMP',
                'RTSP',
                'DHCPRA',
                'ANY',
                'SIP_UDP',
                'SIP_TCP',
                'SIP_SSL',
                'DNS_TCP',
                'ADNS_TCP',
                'MYSQL',
                'MSSQL',
                'ORACLE',
                'RADIUS',
                'RADIUSListener',
                'RDP',
                'DIAMETER',
                'SSL_DIAMETER',
                'TFTP',
                'SMPP',
                'PPTP',
                'GRE',
                'SYSLOGTCP',
                'SYSLOGUDP',
                'FIX',
                'SSL_FIX',
            ]
        ),
        cachetype=dict(
            type='str',
            choices=[
                'TRANSPARENT',
                'REVERSE',
                'FORWARD',
            ]
        ),
        maxclient=dict(type='float'),
        maxreq=dict(type='float'),
        cacheable=dict(type='bool'),
        cip=dict(
            type='str',
            choices=[
                'enabled',
                'disabled',
            ]
        ),
        cipheader=dict(type='str'),
        usip=dict(type='bool'),
        pathmonitor=dict(type='bool'),
        pathmonitorindv=dict(type='bool'),
        useproxyport=dict(type='bool'),
        healthmonitor=dict(type='bool'),
        sp=dict(type='bool'),
        rtspsessionidremap=dict(type='bool'),
        clttimeout=dict(type='float'),
        svrtimeout=dict(type='float'),
        cka=dict(type='bool'),
        tcpb=dict(type='bool'),
        cmp=dict(type='bool'),
        maxbandwidth=dict(type='float'),
        monthreshold=dict(type='float'),
        downstateflush=dict(
            type='str',
            choices=[
                'enabled',
                'disabled',
            ]
        ),
        tcpprofilename=dict(type='str'),
        httpprofilename=dict(type='str'),
        comment=dict(type='str'),
        appflowlog=dict(
            type='str',
            choices=[
                'enabled',
                'disabled',
            ]
        ),
        netprofile=dict(type='str'),
        autoscale=dict(
            type='str',
            choices=[
                'DISABLED',
                'DNS',
                'POLICY',
            ]
        ),
        memberport=dict(type='int'),
        graceful=dict(type='bool'),
    )
    # Arguments maintained by hand (not part of the NITRO attribute list).
    hand_inserted_arguments = dict(
        servicemembers=dict(type='list'),
        monitorbindings=dict(type='list'),
        disabled=dict(
            type='bool',
            default=False,
        ),
    )
    argument_spec = dict()
    argument_spec.update(netscaler_common_arguments)
    argument_spec.update(module_specific_arguments)
    argument_spec.update(hand_inserted_arguments)
    module = AnsibleModule(
        argument_spec=argument_spec,
        supports_check_mode=True,
    )
    # Accumulated result; loglines is the module-level log buffer.
    module_result = dict(
        changed=False,
        failed=False,
        loglines=loglines,
    )
    # Fail the module if imports failed
    if not PYTHON_SDK_IMPORTED:
        module.fail_json(msg='Could not load nitro python sdk')
    # Fallthrough to rest of execution
    client = get_nitro_client(module)
    # Log in, translating transport-level failures into module failures.
    # NOTE(review): requests exceptions are matched by their type's string
    # representation, presumably to avoid a hard dependency on requests.
    try:
        client.login()
    except nitro_exception as e:
        msg = "nitro exception during login. errorcode=%s, message=%s" % (str(e.errorcode), e.message)
        module.fail_json(msg=msg)
    except Exception as e:
        if str(type(e)) == "<class 'requests.exceptions.ConnectionError'>":
            module.fail_json(msg='Connection error %s' % str(e))
        elif str(type(e)) == "<class 'requests.exceptions.SSLError'>":
            module.fail_json(msg='SSL Error %s' % str(e))
        else:
            module.fail_json(msg='Unexpected error during login %s' % str(e))
    # Instantiate service group configuration object
    # Attributes that may be written through NITRO.
    readwrite_attrs = [
        'servicegroupname',
        'servicetype',
        'cachetype',
        'maxclient',
        'maxreq',
        'cacheable',
        'cip',
        'cipheader',
        'usip',
        'pathmonitor',
        'pathmonitorindv',
        'useproxyport',
        'healthmonitor',
        'sp',
        'rtspsessionidremap',
        'clttimeout',
        'svrtimeout',
        'cka',
        'tcpb',
        'cmp',
        'maxbandwidth',
        'monthreshold',
        'downstateflush',
        'tcpprofilename',
        'httpprofilename',
        'comment',
        'appflowlog',
        'netprofile',
        'autoscale',
        'memberport',
        'graceful',
    ]
    # Attributes the appliance reports back but the module never writes.
    readonly_attrs = [
        'numofconnections',
        'serviceconftype',
        'value',
        'svrstate',
        'ip',
        'monstatcode',
        'monstatparam1',
        'monstatparam2',
        'monstatparam3',
        'statechangetimemsec',
        'stateupdatereason',
        'clmonowner',
        'clmonview',
        'groupcount',
        'riseapbrstatsmsgcode2',
        'serviceipstr',
        'servicegroupeffectivestate'
    ]
    # Attributes that can only be set at creation time; a change here means
    # the entity must be deleted and recreated.
    immutable_attrs = [
        'servicegroupname',
        'servicetype',
        'cachetype',
        'td',
        'cipheader',
        'state',
        'autoscale',
        'memberport',
        'servername',
        'port',
        'serverid',
        'monitor_name_svc',
        'dup_weight',
        'riseapbrstatsmsgcode',
        'delay',
        'graceful',
        'includemembers',
        'newname',
    ]
    # Conversions from Ansible parameter values to the NITRO representation.
    transforms = {
        'pathmonitorindv': ['bool_yes_no'],
        'cacheable': ['bool_yes_no'],
        'cka': ['bool_yes_no'],
        'pathmonitor': ['bool_yes_no'],
        'tcpb': ['bool_yes_no'],
        'sp': ['bool_on_off'],
        'usip': ['bool_yes_no'],
        'healthmonitor': ['bool_yes_no'],
        'useproxyport': ['bool_yes_no'],
        'rtspsessionidremap': ['bool_on_off'],
        'graceful': ['bool_yes_no'],
        'cmp': ['bool_yes_no'],
        'cip': [lambda v: v.upper()],
        'downstateflush': [lambda v: v.upper()],
        'appflowlog': [lambda v: v.upper()],
    }
    # Instantiate config proxy
    servicegroup_proxy = ConfigProxy(
        actual=servicegroup(),
        client=client,
        attribute_values_dict=module.params,
        readwrite_attrs=readwrite_attrs,
        readonly_attrs=readonly_attrs,
        immutable_attrs=immutable_attrs,
        transforms=transforms,
    )
    try:
        if module.params['state'] == 'present':
            log('Applying actions for state present')
            if not servicegroup_exists(client, module):
                if not module.check_mode:
                    log('Adding service group')
                    servicegroup_proxy.add()
                    if module.params['save_config']:
                        client.save_config()
                module_result['changed'] = True
            elif not servicegroup_identical(client, module, servicegroup_proxy):
                # Check if we try to change value of immutable attributes
                diff_dict = diff(client, module, servicegroup_proxy)
                immutables_changed = get_immutables_intersection(servicegroup_proxy, diff_dict.keys())
                if immutables_changed != []:
                    msg = 'Cannot update immutable attributes %s. Must delete and recreate entity.' % (immutables_changed,)
                    module.fail_json(msg=msg, diff=diff_dict, **module_result)
                if not module.check_mode:
                    servicegroup_proxy.update()
                    if module.params['save_config']:
                        client.save_config()
                module_result['changed'] = True
            else:
                module_result['changed'] = False
            # Check bindings
            if not monitor_bindings_identical(client, module):
                if not module.check_mode:
                    sync_monitor_bindings(client, module)
                    if module.params['save_config']:
                        client.save_config()
                module_result['changed'] = True
            if not servicemembers_identical(client, module):
                if not module.check_mode:
                    sync_service_members(client, module)
                    if module.params['save_config']:
                        client.save_config()
                module_result['changed'] = True
            # Apply the requested enabled/disabled state.
            if not module.check_mode:
                res = do_state_change(client, module, servicegroup_proxy)
                if res.errorcode != 0:
                    msg = 'Error when setting disabled state. errorcode: %s message: %s' % (res.errorcode, res.message)
                    module.fail_json(msg=msg, **module_result)
            # Sanity check for state
            if not module.check_mode:
                log('Sanity checks for state present')
                if not servicegroup_exists(client, module):
                    module.fail_json(msg='Service group is not present', **module_result)
                if not servicegroup_identical(client, module, servicegroup_proxy):
                    module.fail_json(
                        msg='Service group is not identical to configuration',
                        diff=diff(client, module, servicegroup_proxy),
                        **module_result
                    )
                if not servicemembers_identical(client, module):
                    module.fail_json(msg='Service group members differ from configuration', **module_result)
                if not monitor_bindings_identical(client, module):
                    module.fail_json(msg='Monitor bindings are not identical', **module_result)
        elif module.params['state'] == 'absent':
            log('Applying actions for state absent')
            if servicegroup_exists(client, module):
                if not module.check_mode:
                    servicegroup_proxy.delete()
                    if module.params['save_config']:
                        client.save_config()
                module_result['changed'] = True
            else:
                module_result['changed'] = False
            # Sanity check for state
            if not module.check_mode:
                log('Sanity checks for state absent')
                if servicegroup_exists(client, module):
                    module.fail_json(msg='Service group is present', **module_result)
    except nitro_exception as e:
        msg = "nitro exception errorcode=" + str(e.errorcode) + ",message=" + e.message
        module.fail_json(msg=msg, **module_result)
    client.logout()
    module.exit_json(**module_result)
if __name__ == "__main__":
main()
| 1.898438 | 2 |
imperative/python/test/unit/functional/test_loss.py | kxz18/MegEngine | 3 | 12758180 | # -*- coding: utf-8 -*-
# MegEngine is Licensed under the Apache License, Version 2.0 (the "License")
#
# Copyright (c) 2014-2021 Megvii Inc. All rights reserved.
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
import numpy as np
import pytest
import megengine.functional as F
from megengine import tensor
def test_cross_entropy_with_logits():
    """Cross entropy on extreme logits: matching labels give ~0 loss, opposing ones ~100."""
    logits = tensor([[0, 50], [0, -150]]).astype(np.float32)
    cases = [
        (tensor([1, 0]).astype(np.int32), 0.0),  # labels match the dominant logits
        (tensor([0, 1]).astype(np.int32), 100),  # labels oppose the dominant logits
        (np.array([1, 0]), 0.0),                 # a plain ndarray label is accepted too
    ]
    for labels, expected in cases:
        loss = F.nn.cross_entropy(logits, labels)
        np.testing.assert_allclose(loss.numpy(), expected)
def test_cross_entropy():
    """cross_entropy(with_logits=False) matches a NumPy reference on random,
    already-softmaxed inputs."""
    def softmax(x):
        # Row-wise softmax (no max-subtraction; inputs here are small).
        x = np.exp(x)
        x /= x.sum(1, keepdims=True)
        return x

    def ref(x, y):
        # Mean negative log-likelihood of the true-class probabilities.
        return np.mean([-np.log(x[i, y[i]]) for i in range(len(y))])

    x = (np.random.rand(5, 10) - 0.5) * 4
    y = np.random.randint(10, size=(5,))
    # Bias the true class upwards so its probability is comfortably non-zero.
    for i in range(len(x)):
        x[i, y[i]] += np.random.rand() * 2
    x = softmax(x)
    l_ref = ref(x, y)
    l = F.nn.cross_entropy(tensor(x, "float32"), tensor(y, "int32"), with_logits=False)
    np.testing.assert_allclose(l.numpy(), l_ref)
def test_cross_entropy_reduction():
    """Check that the 'none'/'sum'/'mean' reductions (and label smoothing) are
    mutually consistent, and that invalid reduction names raise ValueError."""
    logits = np.random.randn(16, 10)
    label = np.random.randint(10, size=[16])
    logits = tensor(logits, dtype="float32")
    label = tensor(label, dtype="int32")

    # 'none' losses must be permutation-equivariant with their samples.
    perm = np.random.permutation(16)
    logits_perm = tensor(logits[perm], dtype="float32")
    label_perm = tensor(label[perm], dtype="int32")
    loss = F.nn.cross_entropy(logits, label, reduction="none")
    loss_perm = F.nn.cross_entropy(logits_perm, label_perm, reduction="none")
    np.testing.assert_allclose(loss.numpy()[perm], loss_perm.numpy())

    # 'sum' equals the sum of 'none'; 'mean' equals 'sum' / batch size (16).
    loss_sum = F.nn.cross_entropy(logits, label, reduction="sum")
    np.testing.assert_allclose(loss.numpy().sum(), loss_sum.numpy(), rtol=2e-7)
    loss_mean = F.nn.cross_entropy(logits, label, reduction="mean")
    np.testing.assert_allclose(loss_mean.numpy(), loss_sum.numpy() / 16)

    # Label smoothing must commute with the reduction.
    loss_ls = F.nn.cross_entropy(logits, label, reduction="mean", label_smooth=0.1)
    loss_ls_none_reduce = F.nn.cross_entropy(
        logits, label, reduction="none", label_smooth=0.1
    )
    np.testing.assert_allclose(
        loss_ls.numpy(), loss_ls_none_reduce.numpy().mean(), rtol=2e-7
    )

    # Reduction names are case-sensitive and restricted to a fixed set.
    with pytest.raises(ValueError):
        F.nn.cross_entropy(logits, label, reduction="MEAN")
    with pytest.raises(ValueError):
        F.nn.cross_entropy(logits, label, reduction="max")
| 2.375 | 2 |
1-Neonatal Jaunice Detection/Scripts/hack_1.py | AshishSardana/make-a-thon | 9 | 12758181 | <filename>1-Neonatal Jaunice Detection/Scripts/hack_1.py
import numpy as np
import cv2
from matplotlib import pyplot as plt

# Build a bimodal 1-D sample: 25 values in [25, 100) and 25 in [175, 255).
x = np.random.randint(25,100,25)
y = np.random.randint(175,255,25)
z = np.hstack((x,y))
# kmeans expects float32 samples shaped (n_samples, n_features).
z = z.reshape((50,1))
z = np.float32(z)
plt.hist(z,256,[0,256]),plt.show()
# Define criteria = ( type, max_iter = 10 , epsilon = 1.0 )
criteria = (cv2.TERM_CRITERIA_EPS + cv2.TERM_CRITERIA_MAX_ITER, 10, 1.0)
# Set flags (Just to avoid line break in the code)
flags = cv2.KMEANS_RANDOM_CENTERS
# Apply KMeans
# NOTE(review): this is the OpenCV 2.x call signature; OpenCV >= 3 requires a
# bestLabels argument, i.e. cv2.kmeans(z, 2, None, criteria, 10, flags) —
# confirm which cv2 version this script targets.
compactness,labels,centers = cv2.kmeans(z,2, criteria,10,flags)
# Split the samples by their cluster assignment.
A = z[labels==0]
B = z[labels==1]
| 2.640625 | 3 |
config/default.py | by46/fasky | 0 | 12758182 | <reponame>by46/fasky
# Default configuration for the flasky application.
# Web server binding; an empty host means listen on all interfaces.
HTTP_HOST = ''
HTTP_PORT = 8080

# Flasky application settings
FLASKY_MAIL_SUBJECT_PREFIX = "(Info)"
FLASKY_MAIL_SENDER = '<EMAIL>'
FLASKY_ADMIN = '<EMAIL>'
# Fixed: the bare `<KEY>"` token was a syntax error that made this module
# unimportable. Keep a quoted placeholder here and supply the real key via a
# deployment-specific override — never commit the real secret to source control.
SECRET_KEY = "<KEY>"
LOG = "/var/flasky"

# WSGI Settings
WSGI_LOG = 'default'

# Flask-Log Settings
LOG_LEVEL = 'debug'
LOG_FILENAME = "logs/error.log"
LOG_BACKUP_COUNT = 10                  # rotated files to keep
LOG_MAX_BYTE = 1024 * 1024 * 10        # rotate at 10 MiB
LOG_FORMATTER = '%(asctime)s - %(levelname)s - %(message)s'
LOG_ENABLE_CONSOLE = True

# Flask-Mail settings
MAIL_SERVER = 'smtp.126.com'
MAIL_PORT = 25
MAIL_USE_TLS = False
MAIL_USE_SSL = False
MAIL_USERNAME = '<EMAIL>'
MAIL_PASSWORD = '<PASSWORD>$'
| 1.765625 | 2 |
dup/tasks.py | almazkun/dup | 0 | 12758183 | from dup.models import Website
def check_all_websites():
    """Run a status check for every stored Website."""
    for site in Website.objects.all():
        check_website(site.pk)
def check_website(website_pk: int) -> None:
    """Fetch the Website with the given primary key and refresh its status."""
    Website.objects.get(pk=website_pk).update_status()
| 2.15625 | 2 |
lldb/test/API/functionalities/module_cache/debug_index/TestDebugIndexCache.py | LaudateCorpus1/llvm-project | 605 | 12758184 | <filename>lldb/test/API/functionalities/module_cache/debug_index/TestDebugIndexCache.py
import glob
import json
import lldb
from lldbsuite.test.decorators import *
from lldbsuite.test.lldbtest import *
from lldbsuite.test import lldbutil
import os
import time
class DebugIndexCacheTestcase(TestBase):
    """Tests that the lldb module cache saves/skips debug-info index files
    depending on whether index caching is enabled."""

    mydir = TestBase.compute_mydir(__file__)

    def setUp(self):
        # Call super's setUp().
        TestBase.setUp(self)
        # Set the lldb module cache directory to a directory inside the build
        # artifacts directory so no other tests are interfered with.
        self.cache_dir = os.path.join(self.getBuildDir(), 'lldb-module-cache')

    def get_module_cache_files(self, basename):
        """Return the dwarf-index cache files for *basename* in the cache dir."""
        module_cache_glob = os.path.join(self.cache_dir,
                                         "llvmcache-*%s*dwarf-index*" % (basename))
        return glob.glob(module_cache_glob)

    def get_stats(self, log_path=None):
        """
        Get the output of the "statistics dump" and return the JSON as a
        python dictionary.
        """
        # If log_path is set, open the path and emit the output of the command
        # for debugging purposes.
        f = open(log_path, 'w') if log_path is not None else None
        try:
            return_obj = lldb.SBCommandReturnObject()
            command = "statistics dump "
            if f:
                f.write('(lldb) %s\n' % (command))
            self.ci.HandleCommand(command, return_obj, False)
            metrics_json = return_obj.GetOutput()
            if f:
                f.write(metrics_json)
        finally:
            # Fixed: the log file was previously leaked (opened, never closed).
            if f:
                f.close()
        return json.loads(metrics_json)

    def enable_lldb_index_cache(self):
        """Point the index cache at our private dir and switch caching on."""
        self.runCmd('settings set symbols.lldb-index-cache-path "%s"' % (self.cache_dir))
        self.runCmd('settings set symbols.enable-lldb-index-cache true')

    @no_debug_info_test
    def test_with_caching_enabled(self):
        """
        Test module cache functionality for debug info index caching.

        We test that a debug info index file is created for the debug
        information when caching is enabled with a file that contains
        at least one of each kind of DIE in ManualDWARFIndex::IndexSet.

        The input file has DWARF that will fill in every member of the
        ManualDWARFIndex::IndexSet class to ensure we can encode all of the
        required information.

        With caching enabled, we also verify that the appropriate statistics
        specify that the cache file was saved to the cache.
        """
        self.enable_lldb_index_cache()
        src_dir = self.getSourceDir()
        yaml_path = os.path.join(src_dir, "exe.yaml")
        yaml_base, ext = os.path.splitext(yaml_path)
        obj_path = self.getBuildArtifact("main.o")
        self.yaml2obj(yaml_path, obj_path)

        # Create a target with the object file we just created from YAML
        target = self.dbg.CreateTarget(obj_path)
        self.assertTrue(target, VALID_TARGET)
        debug_index_cache_files = self.get_module_cache_files('main.o')
        self.assertEqual(len(debug_index_cache_files), 1,
                "make sure there is one file in the module cache directory (%s) for main.o that is a debug info cache" % (self.cache_dir))

        # Verify that the module statistics have the information that specifies
        # if we loaded or saved the debug index and symtab to the cache
        stats = self.get_stats()
        module_stats = stats['modules'][0]
        self.assertFalse(module_stats['debugInfoIndexLoadedFromCache'])
        self.assertTrue(module_stats['debugInfoIndexSavedToCache'])
        self.assertFalse(module_stats['symbolTableLoadedFromCache'])
        self.assertTrue(module_stats['symbolTableSavedToCache'])
        # Verify the top level stats track how many things were loaded or saved
        # to the cache.
        self.assertEqual(stats["totalDebugInfoIndexLoadedFromCache"], 0)
        self.assertEqual(stats["totalDebugInfoIndexSavedToCache"], 1)
        self.assertEqual(stats["totalSymbolTablesLoadedFromCache"], 0)
        self.assertEqual(stats["totalSymbolTablesSavedToCache"], 1)

    @no_debug_info_test
    def test_with_caching_disabled(self):
        """
        Test module cache functionality for debug info index caching.

        We test that a debug info index file is not created for the debug
        information when caching is disabled with a file that contains
        at least one of each kind of DIE in ManualDWARFIndex::IndexSet.

        The input file has DWARF that will fill in every member of the
        ManualDWARFIndex::IndexSet class to ensure we can encode all of the
        required information.

        With caching disabled, we also verify that the appropriate
        statistics specify that the cache file was not saved to the cache.
        """
        src_dir = self.getSourceDir()
        yaml_path = os.path.join(src_dir, "exe.yaml")
        yaml_base, ext = os.path.splitext(yaml_path)
        obj_path = self.getBuildArtifact("main.o")
        self.yaml2obj(yaml_path, obj_path)

        # Create a target with the object file we just created from YAML
        target = self.dbg.CreateTarget(obj_path)
        self.assertTrue(target, VALID_TARGET)
        debug_index_cache_files = self.get_module_cache_files('main.o')
        self.assertEqual(len(debug_index_cache_files), 0,
                "make sure there is no file in the module cache directory (%s) for main.o that is a debug info cache" % (self.cache_dir))

        # Verify that the module statistics have the information that specifies
        # if we loaded or saved the debug index and symtab to the cache
        stats = self.get_stats()
        module_stats = stats['modules'][0]
        self.assertFalse(module_stats['debugInfoIndexLoadedFromCache'])
        self.assertFalse(module_stats['debugInfoIndexSavedToCache'])
        self.assertFalse(module_stats['symbolTableLoadedFromCache'])
        self.assertFalse(module_stats['symbolTableSavedToCache'])
        # Verify the top level stats track how many things were loaded or saved
        # to the cache.
        self.assertEqual(stats["totalDebugInfoIndexLoadedFromCache"], 0)
        self.assertEqual(stats["totalDebugInfoIndexSavedToCache"], 0)
        self.assertEqual(stats["totalSymbolTablesLoadedFromCache"], 0)
        self.assertEqual(stats["totalSymbolTablesSavedToCache"], 0)
| 1.992188 | 2 |
tests/parsing.py | markfoodyburton/PySysC | 0 | 12758185 | #
# Copyright (c) 2019 -2021 MINRES Technolgies GmbH
#
# SPDX-License-Identifier: Apache-2.0
#
from cppyy_backend._cppyy_generator import CppyyGenerator
from clang.cindex import Config
from pprint import pprint
import glob
import os.path
proj_dir='../../PySysC-SC/components'
flags=['-I/home/eyck/.conan/data/SystemC/2.3.2/minres/stable/package/672f3350f4be793d25afa19a6a77085e20d01ea5/include',
'-I'+proj_dir,
'-fvisibility=hidden',
'-D__PIC__',
'-Wno-macro-redefined',
'-std=c++11']
Config.set_library_file('/usr/lib/x86_64-linux-gnu/libclang-6.0.so')
lib = Config().lib
import ctypes
from clang.cindex import Type
items = [
("clang_Type_getNumTemplateArguments", [Type], ctypes.c_size_t),
]
for item in items:
func = getattr(lib, item[0])
if len(item) >= 2:
func.argtypes = item[1]
if len(item) >= 3:
func.restype = item[2]
if len(item) == 4:
func.errcheck = item[3]
g = CppyyGenerator(flags, dump_modules=True, dump_items=True, dump_includes=False, dump_privates=True, verbose=True)
mapping = g.create_mapping([os.path.join(proj_dir, 'initiator.h')])
pprint(mapping) | 1.78125 | 2 |
vendor/Twisted-10.0.0/doc/core/examples/threadedselect/Cocoa/SimpleWebClient/Twistzilla.py | bopopescu/cc-2 | 19 | 12758186 | <gh_stars>10-100
# Copyright (c) 2001-2004 Twisted Matrix Laboratories.
# See LICENSE for details.
# import needed classes/functions from Cocoa
from Foundation import *
from AppKit import *
# import Nib loading functionality from AppKit
from PyObjCTools import NibClassBuilder, AppHelper
from twisted.internet import _threadedselect
_threadedselect.install()
from twisted.internet import reactor, protocol
from twisted.web import http
from twisted.python import log
import sys, urlparse
# create ObjC classes as defined in MainMenu.nib
NibClassBuilder.extractClasses("MainMenu")
class TwistzillaClient(http.HTTPClient):
    """Minimal HTTP GET client that hands the raw response to a delegate."""

    def __init__(self, delegate, urls):
        # urls is a (host, port, path) triple; delegate receives the response.
        self.urls = urls
        self.delegate = delegate

    def connectionMade(self):
        # Issue a plain HTTP/1.0-style GET for the requested path.
        host, port, path = self.urls
        self.sendCommand('GET', str(path))
        self.sendHeader('Host', '%s:%d' % (host, port))
        self.sendHeader('User-Agent', 'CocoaTwistzilla')
        self.endHeaders()

    def handleResponse(self, data):
        # Forward the complete response body to the Cocoa delegate.
        self.delegate.gotResponse_(data)
class MyAppDelegate(NibClassBuilder.AutoBaseClass):
    """Cocoa application delegate wired up from MainMenu.nib.

    Bridges UI actions to Twisted: fetches a URL with TwistzillaClient and
    displays the raw response in the result text field.
    """

    def gotResponse_(self, html):
        # Replace the whole text view contents with the fetched document and
        # stop the progress spinner.
        s = self.resultTextField.textStorage()
        s.replaceCharactersInRange_withString_((0, s.length()), html)
        self.progressIndicator.stopAnimation_(self)

    def doTwistzillaFetch_(self, sender):
        # Clear previous output and start the spinner.
        s = self.resultTextField.textStorage()
        s.deleteCharactersInRange_((0, s.length()))
        self.progressIndicator.startAnimation_(self)
        # Split the typed URL into host, port (default 80) and path.
        u = urlparse.urlparse(self.messageTextField.stringValue())
        pos = u[1].find(':')
        if pos == -1:
            host, port = u[1], 80
        else:
            host, port = u[1][:pos], int(u[1][pos+1:])
        if u[2] == '':
            fname = '/'
        else:
            fname = u[2]
        host = host.encode('utf8')
        fname = fname.encode('utf8')
        # Kick off the async fetch; on failure show the traceback in the UI.
        protocol.ClientCreator(reactor, TwistzillaClient, self, (host, port, fname)).connectTCP(host, port).addErrback(lambda f:self.gotResponse_(f.getBriefTraceback()))

    def applicationDidFinishLaunching_(self, aNotification):
        """
        Invoked by NSApplication once the app is done launching and
        immediately before the first pass through the main event
        loop.
        """
        self.messageTextField.setStringValue_("http://www.twistedmatrix.com/")
        # Drive the Twisted reactor from the Cocoa event loop.
        reactor.interleave(AppHelper.callAfter)

    def applicationShouldTerminate_(self, sender):
        # Defer termination until the reactor has shut down cleanly.
        if reactor.running:
            reactor.addSystemEventTrigger(
                'after', 'shutdown', AppHelper.stopEventLoop)
            reactor.stop()
            return False
        return True
if __name__ == '__main__':
log.startLogging(sys.stdout)
AppHelper.runEventLoop()
| 1.875 | 2 |
PyDarkLogic/MainDarkLogic/action.py | BlackWalker01/BlackLogic | 6 | 12758187 | <filename>PyDarkLogic/MainDarkLogic/action.py<gh_stars>1-10
class Action:
    """Pairs a callable with an optional identifier."""

    def __init__(self, fun, id=None):
        self._fun, self._id = fun, id

    def fun(self):
        """Return the wrapped callable."""
        return self._fun

    def id(self):
        """Return the identifier given at construction, or None."""
        return self._id
| 1.992188 | 2 |
semantic_segmentation/main.py | pedrohtg/pytorch | 205 | 12758188 | <filename>semantic_segmentation/main.py
from argparse import ArgumentParser
import os
import random
from matplotlib import pyplot as plt
import torch
from torch import optim
from torch import nn
from torch.nn import functional as F
from torch.autograd import Variable
from torch.utils.data import DataLoader
from torchvision import models
from torchvision.utils import save_image
from data import CityscapesDataset, num_classes, full_to_colour, train_to_full
from model import FeatureResNet, SegResNet
# Setup
parser = ArgumentParser(description='Semantic segmentation')
parser.add_argument('--seed', type=int, default=42, help='Random seed')
parser.add_argument('--workers', type=int, default=8, help='Data loader workers')
parser.add_argument('--epochs', type=int, default=100, help='Training epochs')
parser.add_argument('--crop-size', type=int, default=512, help='Training crop size')
parser.add_argument('--lr', type=float, default=5e-5, help='Learning rate')
parser.add_argument('--momentum', type=float, default=0, help='Momentum')
parser.add_argument('--weight-decay', type=float, default=2e-4, help='Weight decay')
parser.add_argument('--batch-size', type=int, default=16, help='Batch size')
args = parser.parse_args()
random.seed(args.seed)
torch.manual_seed(args.seed)
if not os.path.exists('results'):
    os.makedirs('results')
plt.switch_backend('agg')  # Allow plotting when running remotely

# Data
train_dataset = CityscapesDataset(split='train', crop=args.crop_size, flip=True)
val_dataset = CityscapesDataset(split='val')
train_loader = DataLoader(train_dataset, batch_size=args.batch_size, shuffle=True, num_workers=args.workers, pin_memory=True)
val_loader = DataLoader(val_dataset, batch_size=1, num_workers=args.workers, pin_memory=True)

# Training/Testing
# Seed the segmentation encoder with ImageNet-pretrained ResNet-34 weights.
pretrained_net = FeatureResNet()
pretrained_net.load_state_dict(models.resnet34(pretrained=True).state_dict())
net = SegResNet(num_classes, pretrained_net).cuda()
crit = nn.BCELoss().cuda()

# Construct optimiser
params_dict = dict(net.named_parameters())
params = []
for key, value in params_dict.items():
    if 'bn' in key:
        # No weight decay on batch norm
        params += [{'params': [value], 'weight_decay': 0}]
    elif '.bias' in key:
        # No weight decay plus double learning rate on biases
        params += [{'params': [value], 'lr': 2 * args.lr, 'weight_decay': 0}]
    else:
        params += [{'params': [value]}]
optimiser = optim.RMSprop(params, lr=args.lr, momentum=args.momentum, weight_decay=args.weight_decay)
# Per-epoch per-class IoUs, plus their means (for plotting).
scores, mean_scores = [], []
def train(e):
    """Run one training epoch *e* over the full training loader."""
    net.train()
    for i, (input, target, _) in enumerate(train_loader):
        optimiser.zero_grad()
        # NOTE(review): cuda(async=True) is the pre-0.4 PyTorch API; `async`
        # is a reserved keyword on Python >= 3.7 (use non_blocking=True there).
        input, target = Variable(input.cuda(async=True)), Variable(target.cuda(async=True))
        output = F.sigmoid(net(input))
        loss = crit(output, target)
        print(e, i, loss.data[0])
        loss.backward()
        optimiser.step()
# Calculates class intersections over unions
def iou(pred, target):
    """Return per-class IoU between prediction and target label maps.

    The background class (last index) is skipped; classes absent from the
    target are reported as NaN so they can be excluded from averages.
    """
    ious = []
    # Ignore IoU for background class
    for cls in range(num_classes - 1):
        pred_inds = pred == cls
        target_inds = target == cls
        intersection = (pred_inds[target_inds]).long().sum().data.cpu()[0]  # Cast to long to prevent overflows
        union = pred_inds.long().sum().data.cpu()[0] + target_inds.long().sum().data.cpu()[0] - intersection
        if union == 0:
            ious.append(float('nan'))  # If there is no ground truth, do not include in evaluation
        else:
            ious.append(intersection / max(union, 1))
    return ious
def test(e):
    """Evaluate on the validation set after epoch *e*.

    Saves sample colour predictions, per-class IoUs, network weights and an
    IoU-vs-epoch plot under results/.
    """
    net.eval()
    total_ious = []
    for i, (input, _, target) in enumerate(val_loader):
        # NOTE(review): cuda(async=True)/volatile=True are pre-0.4 PyTorch
        # APIs; `async` is a reserved keyword on Python >= 3.7.
        input, target = Variable(input.cuda(async=True), volatile=True), Variable(target.cuda(async=True), volatile=True)
        output = F.log_softmax(net(input))
        b, _, h, w = output.size()
        # Per-pixel argmax over classes -> (b, h, w) prediction map.
        pred = output.permute(0, 2, 3, 1).contiguous().view(-1, num_classes).max(1)[1].view(b, h, w)
        total_ious.append(iou(pred, target))

        # Save images
        if i % 25 == 0:
            pred = pred.data.cpu()
            pred_remapped = pred.clone()
            # Convert to full labels
            for k, v in train_to_full.items():
                pred_remapped[pred == k] = v
            # Convert to colour image
            pred = pred_remapped
            pred_colour = torch.zeros(b, 3, h, w)
            for k, v in full_to_colour.items():
                # Paint pixels of class k with its (r, g, b) colour.
                pred_r = torch.zeros(b, 1, h, w)
                pred_r[(pred == k)] = v[0]
                pred_g = torch.zeros(b, 1, h, w)
                pred_g[(pred == k)] = v[1]
                pred_b = torch.zeros(b, 1, h, w)
                pred_b[(pred == k)] = v[2]
                pred_colour.add_(torch.cat((pred_r, pred_g, pred_b), 1))
            save_image(pred_colour[0].float().div(255), os.path.join('results', str(e) + '_' + str(i) + '.png'))

    # Calculate average IoU
    total_ious = torch.Tensor(total_ious).transpose(0, 1)
    ious = torch.Tensor(num_classes - 1)
    for i, class_iou in enumerate(total_ious):
        ious[i] = class_iou[class_iou == class_iou].mean()  # Calculate mean, ignoring NaNs
    print(ious, ious.mean())
    scores.append(ious)

    # Save weights and scores
    torch.save(net.state_dict(), os.path.join('results', str(e) + '_net.pth'))
    torch.save(scores, os.path.join('results', 'scores.pth'))

    # Plot scores
    mean_scores.append(ious.mean())
    es = list(range(len(mean_scores)))
    plt.plot(es, mean_scores, 'b-')
    plt.xlabel('Epoch')
    plt.ylabel('Mean IoU')
    plt.savefig(os.path.join('results', 'ious.png'))
    plt.close()
# Evaluate the untrained network once, then alternate training and
# evaluation every epoch.
test(0)
for e in range(1, args.epochs + 1):
    train(e)
    test(e)
| 2.359375 | 2 |
src/installer/src/tortuga/events/types/node.py | sutasu/tortuga | 33 | 12758189 | # Copyright 2008-2018 Univa Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import Dict
from marshmallow import fields
from .base import BaseEventSchema, BaseEvent
class NodeStateChangedSchema(BaseEventSchema):
    """
    Schema for the NodeStateChange events.
    """
    node = fields.Dict()              # the node, serialized, in its current state
    previous_state = fields.String()  # the state name prior to the change
class NodeStateChanged(BaseEvent):
    """
    Event that fires when a node state changes.
    """
    name = 'node-state-changed'
    schema_class = NodeStateChangedSchema

    def __init__(self, **kwargs):
        """
        Initializer.

        :param dict node: the affected node, serialized, in its current state
        :param str previous_state: the node's previous state name
        :param kwargs:

        """
        super().__init__(**kwargs)
        self.node: dict = kwargs.get('node', {})
        self.previous_state: str = kwargs.get('previous_state', None)
class NodeTagsChangedSchema(BaseEventSchema):
    """
    Schema for the NodeTagsChanged events.
    """
    node_id = fields.String()      # id of the node whose tags changed
    node_name = fields.String()    # name of the node whose tags changed
    tags = fields.Dict()           # tag mapping after the change
    previous_tags = fields.Dict()  # tag mapping before the change
class NodeTagsChanged(BaseEvent):
    """
    Event that fires when a node tags are changed.
    """
    name = 'node-tags-changed'
    schema_class = NodeTagsChangedSchema

    def __init__(self, **kwargs):
        """
        Initializer.

        :param str node_id: id of the node whose tags changed
        :param str node_name: name of the node whose tags changed
        :param dict tags: the node's tags after the change
        :param dict previous_tags: the previous version of the tags for the
                                   node
        :param kwargs:

        """
        super().__init__(**kwargs)
        self.node_id: str = kwargs.get('node_id', None)
        self.node_name: str = kwargs.get('node_name', None)
        self.tags: Dict[str, str] = kwargs.get('tags', {})
        self.previous_tags: Dict[str, str] = kwargs.get('previous_tags', {})
| 2.296875 | 2 |
tests/test_core/test_table.py | dinojugosloven/pymalcolm | 0 | 12758190 | <reponame>dinojugosloven/pymalcolm
import unittest
from collections import OrderedDict
import numpy
from annotypes import Anno, Array
from malcolm.core import Table
with Anno("Row A"):
AA = Array[str]
with Anno("Row B"):
AB = Array[int]
class MyTable(Table):
    """Two-column test table: string column *a* and int column *b*."""

    def __init__(self, a, b):
        # type: (AA, AB) -> None
        self.a = a
        self.b = b
class TestTable(unittest.TestCase):
    """Unit tests for malcolm.core.Table serialization and row access."""

    def setUp(self):
        # A 3-row table and the OrderedDict form it should serialize to.
        self.t = MyTable(AA(["x", "y", "z"]), AB([1, 2, 3]))
        self.serialized = OrderedDict()
        self.serialized["typeid"] = 'malcolm:core/Table:1.0'
        self.serialized["a"] = ["x", "y", "z"]
        self.serialized["b"] = [1, 2, 3]

    def test_init(self):
        # A bare Table exposes no annotated call types.
        t = Table()
        assert not t.call_types

    def test_from_rows(self):
        # Building from rows must serialize identically to column-wise init.
        x = MyTable.from_rows([
            ["x", 1],
            ["y", 2],
            ["z", 3]
        ])
        assert x.to_dict() == self.serialized

    def test_rows(self):
        # rows() yields one [a, b] list per table row.
        assert list(self.t.rows()) == [
            ["x", 1],
            ["y", 2],
            ["z", 3]
        ]

    def test_getitem(self):
        # Indexing returns the row at that position.
        assert self.t[1] == ["y", 2]

    def test_roundtrip(self):
        # from_dict/to_dict must round-trip losslessly.
        assert MyTable.from_dict(self.serialized).to_dict() == self.serialized

    def test_equal(self):
        # Equal contents compare equal, even across list/ndarray columns.
        t2 = MyTable(AA(["x", "y", "z"]), AB(numpy.arange(3) + 1))
        assert self.t == t2

    def test_not_equal(self):
        # Differing column values compare unequal.
        t2 = MyTable(AA(["x", "y", "z"]), AB(numpy.arange(3)))
        assert self.t != t2
| 2.671875 | 3 |
autorest/python/emsapi/models/adi_ems_web_api_v2_model_analytic_query_result.py | ge-flight-analytics/ems-api-wrappers | 2 | 12758191 | # coding=utf-8
# --------------------------------------------------------------------------
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class AdiEmsWebApiV2ModelAnalyticQueryResult(Model):
    """Represents the time-series data result of an analytic query.

    :param offsets: An array of query result offsets, each representing
     seconds from the start of the data
    :type offsets: list[float]
    :param results: An array of analytic result values for each of the
     analytics selected in the query containing values for
     each offset
    :type results:
     list[~emsapi.models.AdiEmsWebApiV2ModelAnalyticAnalyticResult]
    """

    # Both fields are mandatory; msrest validates against this mapping.
    _validation = {
        'offsets': {'required': True},
        'results': {'required': True},
    }

    # Wire-format key and msrest serialization type for each attribute.
    _attribute_map = {
        'offsets': {'key': 'offsets', 'type': '[float]'},
        'results': {'key': 'results', 'type': '[AdiEmsWebApiV2ModelAnalyticAnalyticResult]'},
    }

    def __init__(self, offsets, results):
        super(AdiEmsWebApiV2ModelAnalyticQueryResult, self).__init__()
        self.offsets = offsets
        self.results = results
| 2.140625 | 2 |
archive_for_wyko/Pyro-3.6/examples/BankExample/BankServer.py | ArcetriAdaptiveOptics/plico_interferometer_server | 0 | 12758192 | <gh_stars>0
#! /usr/bin/env python
#
# The banks server
#
# Registers two delegation-based bank objects (Rabobank, VSB) with a Pyro
# daemon and the Pyro Name Server, then serves requests forever.
# NOTE: Python 2 script (print statements).
import sys
import Pyro.naming
import Pyro.core
from Pyro.errors import PyroError,NamingError

import banks

group = ':banks1' # the namespace group for all test servers

# avoid network port trouble with the VSB bank server
Pyro.config.PYRO_PORT=Pyro.config.PYRO_PORT+1

# initialize the server and set the default namespace group
Pyro.core.initServer()
Pyro.config.PYRO_NS_DEFAULTGROUP=group

# locate the NS
print 'Searching Naming Service...'
daemon = Pyro.core.Daemon()
locator = Pyro.naming.NameServerLocator()
ns = locator.getNS()

# make sure our namespace group exists
try:
    ns.createGroup(group)
except NamingError:
    # group already exists -- fine
    pass
daemon.useNameServer(ns)

# connect a new object implementation (first unregister previous one)
try:
    ns.unregister('Rabobank')
    ns.unregister('VSB')
except NamingError:
    # nothing registered yet -- first run
    pass

# use Delegation approach for object implementation
obj1=Pyro.core.ObjBase()
obj1.delegateTo(banks.Rabobank())
daemon.connect(obj1,'Rabobank')

obj2=Pyro.core.ObjBase()
obj2.delegateTo(banks.VSB())
daemon.connect(obj2,'VSB')

# enter the service loop.
print 'Banks are ready for customers.'
daemon.requestLoop()
| 2.1875 | 2 |
Crypt0/Vigenere/VEnDecode.py | justinh5/CipherBox | 0 | 12758193 |
def encode(plaintext, key):
    """Vigenere-encode *plaintext* with *key*.

    Each ASCII letter is shifted forward by the offset of the next alphabetic
    key character (case of the plaintext is preserved). Non-alphabetic
    characters pass through unchanged and do not consume key characters, and
    non-alphabetic characters in the key are ignored.

    Raises:
        ValueError: if *key* contains no alphabetic characters (the original
            implementation raised an obscure IndexError in this case).
    """
    # Strip all non-alpha characters from the key up front.
    key2 = "".join(c for c in key if c.isalpha())
    if not key2:
        raise ValueError("key must contain at least one alphabetic character")
    out = []
    i = 0  # index of the next key character to use
    for ch in plaintext:
        if 'a' <= ch <= 'z':
            shift = ord(key2[i].lower()) - ord('a')
            out.append(chr((ord(ch) - ord('a') + shift) % 26 + ord('a')))
            i = (i + 1) % len(key2)  # wrap around the key
        elif 'A' <= ch <= 'Z':
            shift = ord(key2[i].upper()) - ord('A')
            out.append(chr((ord(ch) - ord('A') + shift) % 26 + ord('A')))
            i = (i + 1) % len(key2)
        else:
            # Non-alphabetic characters do not change and do not consume key.
            out.append(ch)
    return "".join(out)
def decode(ciphertext, key):
    """Decode a Vigenere *ciphertext* produced by :func:`encode`.

    Each alphabetic character is shifted backward by the offset of the
    corresponding key character; case is preserved and non-alphabetic
    characters pass through unchanged without consuming a key character.

    Args:
        ciphertext: Message to decode.
        key: Cipher key; its non-alphabetic characters are ignored.

    Returns:
        The decoded plaintext string.
    """
    # Strip non-alphabetic characters from the key once, up front
    # (the original rebuilt the string with += which is quadratic).
    key2 = ''.join(c for c in key if c.isalpha())
    out = []  # collect pieces and join once instead of string +=
    i = 0  # index into key2
    for x in ciphertext:
        if 'a' <= x <= 'z':  # lowercase letter
            out.append(chr((ord(x) - 97 - (ord(key2[i].lower()) - 97)) % 26 + 97))
            i += 1
        elif 'A' <= x <= 'Z':  # uppercase letter
            out.append(chr((ord(x) - 65 - (ord(key2[i].upper()) - 65)) % 26 + 65))
            i += 1
        else:  # non-alphabetic characters are copied verbatim
            out.append(x)
        if i == len(key2):  # wrap around to the start of the key
            i = 0
    return ''.join(out)
| 4.46875 | 4 |
similar_songs.py | joyfulflyer/billboard-song-converter | 0 | 12758194 | <filename>similar_songs.py
import itertools
import logging
import re
import string
import sys
import time
from collections import namedtuple
import db_retriever
import db_saver
import elastic
import entry_generators
# Filler words dropped from names/artists before comparison (lowercase).
ARTIST_WORDS_TO_IGNORE = ['feat', 'featuring', 'vs', 'ft', 'the']
# Batch size used when correcting entries; referenced in per-step log lines.
STEP = 5000
# Regex -> replacement rewrites applied to name/artist strings.
TRANSFORMS = {re.compile(r' and '): '&', re.compile(r' with '): '&'}
logger = logging.getLogger(__name__)
def handle_close_songs(session,
                       skip_user_input=False,
                       limit=5000,
                       force_create_new_songs=False):
    """Resolve entries that have no song id, in fixed-size batches.

    Entries are grouped by (name, artist); each group is resolved to a
    single song. Timing for every batch and for the whole run is logged.
    """
    overall_start = time.time()
    for entries in entry_generators.entries_without_song_id_steps(
            session, limit):
        batch_start = time.time()
        ordered = sorted(entries,
                         key=lambda entry: (entry.name, entry.artist))
        # itertools.groupby requires input sorted on the same key.
        for _, grouped in itertools.groupby(ordered,
                                            key=lambda entry:
                                            (entry.name, entry.artist)):
            _handle_group(list(grouped),
                          session,
                          skip_user_input=skip_user_input,
                          force_create_new_songs=force_create_new_songs)
        batch_elapsed = time.time() - batch_start
        logger.error(
            f'Took {time.strftime("%H:%M:%S", time.gmtime(batch_elapsed))} to correct {STEP} songs'
        )
    total_elapsed = time.time() - overall_start
    logger.error(
        f'Took {time.strftime("%H:%M:%S", time.gmtime(total_elapsed))} to correct {limit} songs'
    )
def _handle_group(group,
                  session,
                  skip_user_input=False,
                  force_create_new_songs=False):
    """Resolve one (name, artist) group to a single song id and persist it."""
    # All entries in the group share name/artist, so resolve once using
    # the first entry, then copy its song_id to the rest.
    first = group[0]
    _handle_potential_same_song(first,
                                session,
                                bypass=skip_user_input,
                                force=force_create_new_songs)
    for member in group:
        member.song_id = first.song_id
    session.commit()
def _handle_potential_same_song(entry, session, bypass=False, force=False):
    """Attach a song_id to *entry*, creating a new song when necessary.

    Resolution order: exact DB match -> Elasticsearch fuzzy match (with
    normalisation checks) -> interactive operator decision -> create new.
    `bypass` skips unresolved entries entirely; `force` creates a new
    song without prompting.
    """
    # Check to see if we have an exact match
    # If we don't, check search and ask
    # need to update search if alternate found - if the song id is the same
    song_query = db_retriever.get_song_id_query_for(entry.name, entry.artist,
                                                    session)
    song_count = song_query.count()
    db_song = None
    if song_count == 1:
        db_song = song_query.first()
    else:
        results = elastic.search_name_artist(name=entry.name,
                                             artist=entry.artist)
        if results.count > 0:
            first_result = results.result[0]
            same = further_comparison_checks(entry, first_result)
            if same:
                songId = first_result.meta.id
                # NOTE(review): namedtuple(...) returns the *class*; .id is
                # set as a class attribute here rather than instantiating.
                # Works, but unusual -- confirm this is intentional.
                db_song = namedtuple('Song', field_names=['id'])
                db_song.id = int(songId)
            else:
                should_create_new = ''
                if bypass:
                    # Unattended mode: leave the entry unresolved.
                    return
                if force:
                    # Empty answer falls through to "create new song" below.
                    should_create_new = ''
                else:
                    should_create_new = _get_input_for_song(
                        entry, results.result)
                if should_create_new.lower().strip() == 'n':
                    # Operator accepted the top search hit.
                    songId = first_result.meta.id
                    db_song = namedtuple('Song', field_names=['id'])
                    db_song.id = int(songId)
                elif should_create_new.isnumeric():
                    # Operator supplied an explicit song id.
                    db_song = namedtuple('Song', field_names=['id'])
                    db_song.id = int(should_create_new)
                elif should_create_new.lower().strip() == 'q':
                    # Operator asked to abort the whole run.
                    sys.exit(0)
                else:
                    db_song = _create_db(entry.name, entry.artist, session)
        else:
            # No search hits at all: this is a brand-new song.
            db_song = _create_db(entry.name, entry.artist, session)
    entry.song_id = db_song.id
def further_comparison_checks(entry, search_result):
    """Compare *entry* against a search hit through a ladder of
    progressively more aggressive normalisations; return True on the
    first level at which both name and artist agree."""
    base = SongComparison()
    base.entry_name = entry.name
    base.search_name = search_result.name[0]
    base.entry_artist = entry.artist
    base.search_artist = search_result.artist[0]
    # Level 1: trim + lowercase.
    lowered = _strip_and_lower(base)
    if lowered.compare():
        return True
    # Level 2: additionally drop "{...}" sections.
    no_sections = lowered.transform_with(_remove_curly_brackets)
    if no_sections.compare():
        return True
    # Level 3: lowercase with punctuation stripped.
    no_punct = lowered.transform_with(_strip_punctuation)
    if no_punct.compare():
        return True
    # Level 4: sections + punctuation + ignorable words/spaces removed.
    reduced = no_sections.transform_with(_strip_punctuation) \
        .transform_with(_remove_key_words_remove_spaces)
    if reduced.compare():
        return True
    # Level 5: as level 4, after the TRANSFORMS rewrites are applied first.
    rewritten = no_sections \
        .transform_with(_transform_name_artist) \
        .transform_with(_strip_punctuation) \
        .transform_with(_remove_key_words_remove_spaces)
    return rewritten.compare()
def _create_db(name, artist, session):
    # Persist a new Song row and mirror it into the search index so
    # future lookups can find it. Returns the created DB song object.
    db_song = db_saver.create_song(name, artist, session)
    elastic.create_searchable_from_song(db_song)
    return db_song
def _strip_and_lower(song_comparison):
return song_comparison.transform_with(lambda piece: piece.strip().lower())
def _strip_punctuation(s):
return s.translate(str.maketrans('', '', string.punctuation))
def _remove_key_words_remove_spaces(s):
    """Drop ignorable words (case-insensitively) and join the remaining
    words with no separator, removing all spaces as a side effect."""
    kept = [word for word in s.split(' ')
            if word.lower() not in ARTIST_WORDS_TO_IGNORE]
    return ''.join(kept)
def _remove_curly_brackets(s):
return re.sub(r'\{[^}]+\}', '', s).strip()
def _log_entry_result(entry, first_result):
    """Log an (entry, search-result, score) tuple for offline review.

    Emitted at ERROR level so it always appears in the configured log
    handlers. (Removed a dead trailing `pass` statement.)
    """
    logger.error(
        f"((\"{entry.name}\", \"{entry.artist}\"), (\"{first_result.name}\", \"{first_result.artist}\"), {first_result.meta.score}),"
    )
# This is probably not the most efficient way to do this
def _transform_name_artist(entry):
    """Apply each TRANSFORMS rewrite (e.g. " and " -> "&") to *entry*.

    Bug fix: the original used re.match, which only matches at the start
    of the string, so patterns such as r' and ' (which occur mid-string,
    surrounded by spaces) were effectively never found. re.search scans
    the whole string, which is what the substitution below assumes.
    """
    transformer = entry
    # for each key, transform it in to value
    for key, value in TRANSFORMS.items():
        if re.search(key, transformer):
            logger.error(f"Found match {key} for '{transformer}'")
            transformer = re.sub(key, value, transformer)
    return transformer
def _get_input_for_song(entry, results):
    """Show the entry plus all candidate matches and prompt the operator
    for a decision (y/n, an explicit id, or q to quit)."""
    prompt = [
        "Entry:  name= {:<55s} artist= {:<50s}\n".format(
            entry.name, entry.artist)
    ]
    prompt.extend(
        "Result: name= {:<55s} artist= {:<50s} id= {:<10s}\n".format(
            candidate.name[0], candidate.artist[0], candidate.meta.id)
        for candidate in results)
    prompt.append(
        "Create new song? (y/n) no entry creates a song or a number uses that as an id, q to quit: "
    )
    return input(''.join(prompt))
class SongComparison():
    """Holds an entry's and a search hit's name/artist for comparison."""
    entry_name = ""
    entry_artist = ""
    search_name = ""
    search_artist = ""

    def compare(self):
        # Match only when both the names and the artists agree exactly.
        names_match = self.entry_name == self.search_name
        artists_match = self.entry_artist == self.search_artist
        return names_match and artists_match

    # Return new copy with func called on all fields
    def transform_with(self, func):
        transformed = SongComparison()
        for attr in ('entry_name', 'entry_artist',
                     'search_name', 'search_artist'):
            setattr(transformed, attr, func(getattr(self, attr)))
        return transformed
if __name__ == '__main__':
    # Local import: Session pulls in DB configuration, only needed when
    # running this module as a script.
    import Session
    # Log to both a file and stdout; only ERROR-level lines (the timing
    # messages above are deliberately logged at ERROR).
    logging.basicConfig(
        level=logging.ERROR,
        format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
        handlers=[
            logging.FileHandler("correcting songs.log"),
            logging.StreamHandler()
        ])
    # get_session() returns a session factory; call it to get a session.
    session = Session.get_session()()
    handle_close_songs(session, skip_user_input=False)
| 2.375 | 2 |
optic_store/patches/v0_6/add_sales_person_name_to_invoice.py | MdAlAmin-aol/optic_store | 1 | 12758195 | <filename>optic_store/patches/v0_6/add_sales_person_name_to_invoice.py
# -*- coding: utf-8 -*-
# Copyright (c) 2019, 9T9IT and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
def execute():
    """Frappe patch: add a read-only ``os_sales_person_name`` custom field
    to Sales Order / Sales Invoice (fetched from the linked sales person's
    employee record) and backfill it for existing documents.
    """
    for doctype in ["Sales Order", "Sales Invoice"]:
        # Create the custom field only if a previous run hasn't already.
        if not frappe.db.exists(
            "Custom Field", "{}-{}".format(doctype, "os_sales_person_name")
        ):
            frappe.get_doc(
                {
                    "doctype": "Custom Field",
                    "allow_on_submit": 1,
                    "dt": doctype,
                    "fetch_from": "os_sales_person.employee_name",
                    "fieldname": "os_sales_person_name",
                    "fieldtype": "Data",
                    "insert_after": "os_sales_person",
                    "label": "Sales Person Name",
                    "read_only": 1,
                }
            ).insert()
        # Backfill: copy the employee name into every existing document
        # that has a sales person but no cached name yet.
        for doc in frappe.get_all(
            doctype, fields=["name", "os_sales_person", "os_sales_person_name"]
        ):
            if doc.os_sales_person and not doc.os_sales_person_name:
                frappe.db.set_value(
                    doctype,
                    doc.name,
                    "os_sales_person_name",
                    frappe.db.get_value(
                        "Employee", doc.os_sales_person, "employee_name"
                    ),
                )
| 1.882813 | 2 |
tests/test_util.py | csommelet/raw_tiles | 11 | 12758196 | <filename>tests/test_util.py
import unittest
class UtilTest(unittest.TestCase):
    """Tests for raw_tiles.util tile-geometry helpers."""

    def test_bbox_for_tile(self):
        """bbox_for_tile(z, x, y) returns (minx, miny, maxx, maxy) in
        mercator meters: z0 covers the whole world, each z1 tile covers
        one quadrant."""
        from raw_tiles.util import bbox_for_tile, MERCATOR_WORLD_SIZE

        # Half the mercator world size: distance from origin to the edge.
        c = 0.5 * MERCATOR_WORLD_SIZE

        # assertEquals is a deprecated alias (removed in Python 3.12);
        # use assertEqual.
        self.assertEqual((-c, -c, c, c), bbox_for_tile(0, 0, 0))
        self.assertEqual((-c, 0, 0, c), bbox_for_tile(1, 0, 0))
        self.assertEqual((-c, -c, 0, 0), bbox_for_tile(1, 0, 1))
        self.assertEqual((0, 0, c, c), bbox_for_tile(1, 1, 0))
        self.assertEqual((0, -c, c, 0), bbox_for_tile(1, 1, 1))
| 3.0625 | 3 |
Dedalo/offers/create_csv.py | josperdom1/AII | 0 | 12758197 | <reponame>josperdom1/AII
import csv
import os, os.path
offers_list = [ [266, "uni1" ,"empresa1", 3, 10000, "pais1", "provincia1", "ciudad1" , "decripcion1" , True, ["carrera1", "carrera2", "carrera2"]] ]
def offers_csv(offers):
    """Write *offers* to ./offers.csv (header plus one row per offer).

    Each offer is a list whose first ten items are:
    [id, university, enterprise, months, salary, country, province,
    city, description, immediate]. Any extra items (e.g. the degree
    list at index 10) are ignored here and handled by degrees_csv().
    """
    if os.path.isfile('offers.csv'):
        print ("File offers.csv updated")
    else:
        # open(..., 'w') below creates the file itself, so the previous
        # os.system("touch offers.csv") was redundant and non-portable.
        print ("File DO NOT exist, let's create offers.csv")
    with open('offers.csv', 'w', newline='') as file:
        writer = csv.writer(file)
        writer.writerow(["id", "University", "Enterprise", "Months", "Salary", "Country", "Province", "City", "Description", "Immediate"])
        for o in offers:
            # Only the first ten fields belong in this table.
            writer.writerow(o[:10])
def degrees_csv(offers):
    """Write ./degrees.csv: one row per (offer, degree) pair.

    Columns are a running id, the offer id (``o[0]``) and the degree
    name, taken from each offer's degree list at index 10.
    """
    if os.path.isfile('degrees.csv'):
        print ("File degrees.csv updating")
    else:
        # open(..., 'w') below creates the file itself, so the previous
        # os.system("touch degrees.csv") was redundant and non-portable.
        print ("File DO NOT exist, lets create degrees.csv")
    with open('degrees.csv', 'w', newline='') as file:
        writer = csv.writer(file)
        writer.writerow(["id", "idOferta", "Nombre"])
        i = 0
        for o in offers:
            for d in o[10]:
                i = i + 1
                writer.writerow([i, o[0], d])
biostar/accounts/tasks.py | genomax/biostar-central | 0 | 12758198 | import logging
import inspect
import functools
from urllib.request import urlopen, Request
from functools import partial
import toml as hjson
import mistune
from django.conf import settings
from django.template import loader
from biostar.utils.decorators import task
#
# Do not use logging in tasks! Deadlocking may occur!
#
# https://github.com/unbit/uwsgi/issues/1369
def message(msg, level=0):
    """Print *msg* to stdout. *level* is accepted but currently unused.

    Plain print is used instead of logging deliberately -- see the module
    comment about uwsgi deadlocks when logging from tasks.
    """
    # format(msg) is exactly what the f-string f"{msg}" evaluates to.
    print(format(msg))
@task
def detect_location(ip, user_id):
    """
    Fills the user location based on url.

    Looks up *ip* against the hostip.info API and stores the resulting
    city (or country) on the user's Profile, but only when location
    lookup is enabled in settings and the profile has no location yet.
    Runs as a background task; uses print (via message) instead of
    logging -- see the module comment about uwsgi deadlocks.
    """
    from biostar.accounts.models import Profile
    msg = f"location check for \tid={user_id}\tip={ip}"
    # The lookup needs to be turned on.
    if not settings.LOCATION_LOOKUP:
        message(f"skip {msg}")
        return
    message(f"execute {msg}")
    # Get the profile for the user
    profile = Profile.objects.filter(user__id=user_id).first()
    # Skip value if it has the word unknown in it
    def get(data, attr):
        value = data.get(attr, '')
        return "" if "unknown" in value.lower() else value.title()
    # Check and log location.
    if not profile.location:
        try:
            url = f"http://api.hostip.info/get_json.php?ip={ip}"
            message(url)
            message(f"{ip}, {profile.user}, {url}")
            req = Request(url=url, headers={'User-Agent': 'Mozilla/5.0'})
            # Short timeout: this is best-effort enrichment, not critical.
            resp = urlopen(req, timeout=3).read()
            data = hjson.loads(resp)
            city = get(data, "city")
            country = get(data, "country_name")
            # Prefer the city; fall back to the country.
            location = city or country
            msg = f"location result for \tid={user_id}\tip={ip}\tloc={location}"
            if location:
                Profile.objects.filter(user=profile.user).update(location=location)
                message(f"updated profile {msg}")
            else:
                message(f"empty location {msg}")
        except Exception as exc:
            # Best-effort: never let a lookup failure break the task queue.
            message(exc)
@task
def verification_email(user_id):
    """Background task: send the account verification email to the user
    with primary key *user_id*."""
    from biostar.accounts import auth, models
    user = models.User.objects.filter(id=user_id).first()
    auth.send_verification_email(user=user)
    return
@task
def create_messages(template, user_ids, sender=None, extra_context={}):
    """
    Create batch message from sender to a given recipient_list

    Renders *template* (markdown) once with ``sender`` plus
    *extra_context*, converts it to HTML, and creates one Message (with
    its own MessageBody row) per recipient in *user_ids*. When no sender
    is given, falls back to the first ADMINS user or any superuser.
    NOTE(review): ``extra_context={}`` is a mutable default argument --
    safe only as long as callers never mutate it.
    """
    from biostar.accounts.models import User, Message, MessageBody
    rec_list = User.objects.filter(id__in=user_ids)
    # Get the sender
    name, email = settings.ADMINS[0]
    sender = sender or User.objects.filter(email=email).first() or User.objects.filter(is_superuser=True).first()
    # Load the template and context
    tmpl = loader.get_template(template_name=template)
    context = dict(sender=sender)
    context.update(extra_context)
    body = tmpl.render(context)
    # escape=False: the template output is trusted markdown.
    html = mistune.markdown(body, escape=False)
    for rec in rec_list:
        body = MessageBody.objects.create(body=body, html=html)
        Message.objects.create(sender=sender, recipient=rec, body=body)
| 1.9375 | 2 |
app-sg/result_page.py | AleksanderZawisza/Human-Emotion-Classification-Kit | 1 | 12758199 | <reponame>AleksanderZawisza/Human-Emotion-Classification-Kit
import PySimpleGUI as sg
from PIL import Image
from io import BytesIO
import os
from utils import list_all_pictures, create_result_text, create_result_text_folder
def result_layout():
    """Build the PySimpleGUI layout for the results screen: two dropdowns
    (folder/image and per-folder image), a text panel for prediction
    results, an image preview, and Back / Main menu buttons."""
    # Left column: prediction-results text panel.
    col_l = sg.Column([[sg.Frame('Prediction results', [[sg.Text(key="-PREDICTION RESULTS-",
                                                                 background_color='white',
                                                                 auto_size_text=True,
                                                                 expand_y=True,
                                                                 expand_x=True)]],
                                 border_width=0, pad=(0, 0), size=(200, 400),
                                 element_justification='center')]])
    # Right column: image preview, filled in by show_image_result().
    col_r = sg.Column([[sg.Frame('Image preview', [[sg.Image(key="-IMAGE RESULT-")]],
                                 size=(600, 400), border_width=0, pad=(0, 0),
                                 element_justification='center')]])
    layout = [[sg.Frame('Choose image/folder from results:',
                        [[sg.DropDown([''], background_color='#e3e3e3', key='-FOLDERPIC DROPDOWN-',
                                      auto_size_text=True, expand_x=True, readonly=True, text_color='black',
                                      enable_events=True, )],
                         [sg.Text('Choose image from folder chosen above:')],
                         [sg.DropDown(values=[],
                                      background_color='#e3e3e3', key='-PIC DROPDOWN-',
                                      auto_size_text=True, expand_x=True, readonly=True, text_color='black',
                                      enable_events=True)]
                         ], size=(800, 100))],
              [sg.Frame("", [[col_l, col_r]], pad=(0, 0), border_width=0)],
              [sg.Frame("", [[
                  sg.Button('Back', enable_events=True, size=(10, 1), font=('Courier New', 12)),
                  sg.Button('Main menu', key='-MENU-', enable_events=True, size=(10, 1), font=('Courier New', 12))]],
                  element_justification='center', border_width=0, pad=((0, 0), (20, 0)),
                  vertical_alignment='center')],
              ]
    return layout
def show_image_result(chosen_path, window):
    """Load the image at *chosen_path*, scale it to fit the 550x380
    preview area while preserving aspect ratio, and show it in the
    "-IMAGE RESULT-" element of *window*."""
    im = Image.open(chosen_path)
    width, height = (550, 380)
    # Scale by the larger overflow factor so both dimensions fit.
    scale = max(im.width / width, im.height / height)
    w, h = int(im.width / scale), int(im.height / scale)
    # Image.CUBIC was a deprecated alias removed in Pillow 10;
    # Image.BICUBIC is the same filter under its canonical name.
    im = im.resize((w, h), resample=Image.BICUBIC)
    with BytesIO() as output:
        im.save(output, format="PNG")
        data = output.getvalue()
    window["-IMAGE RESULT-"].update(data=data)
def result_loop(window, saved_stuff, result_dict, change_dict):
    """Event loop for the results screen.

    Populates the folder/image dropdown from *saved_stuff* and reacts to
    selections by showing per-folder averages or per-image predictions
    from *result_dict*. Returns True to go to the main menu, False to go
    back one screen; closes the window if the user exits.
    ``change_dict`` is accepted but not used here.
    """
    window['-FOLDERPIC DROPDOWN-'].update(values=saved_stuff)
    window['-FOLDERPIC DROPDOWN-'].expand()
    window["-IMAGE RESULT-"].update(data=[])
    while True:
        event, values = window.read()
        if event == "Exit" or event == sg.WIN_CLOSED or event is None:
            break
        if 'Back' in event:
            # Swap back to the previous screen's column.
            window[f'-COL6-'].update(visible=False)
            window[f'-COL5-'].update(visible=True)
            return False
        if event == '-MENU-':
            window[f'-COL6-'].update(visible=False)
            return True
        if event == '-FOLDERPIC DROPDOWN-':
            chosen_path = values['-FOLDERPIC DROPDOWN-']
            try:
                if os.path.isdir(chosen_path):
                    # Folder selected: fill the per-image dropdown and
                    # show the folder-average results (no preview image).
                    pics = list_all_pictures([chosen_path])
                    window['-PIC DROPDOWN-'].update(values=pics)
                    result_text = create_result_text_folder(result_dict, chosen_path)
                    result_text = 'Average in folder: \n' + result_text
                    window['-PREDICTION RESULTS-'].update(result_text)
                    window["-IMAGE RESULT-"].update(data=[])
                else:
                    # Single image selected directly.
                    window['-PIC DROPDOWN-'].update(values=[])
                    show_image_result(chosen_path, window)
                    # print(result_dict)
                    result_text = create_result_text(result_dict[chosen_path])
                    result_text = 'Average in photo: \n' + result_text
                    window['-PREDICTION RESULTS-'].update(result_text)
            except Exception:
                # Best-effort UI update; ignore bad paths/missing results.
                pass
        if event == '-PIC DROPDOWN-':
            chosen_pic = values['-PIC DROPDOWN-']
            try:
                show_image_result(chosen_pic, window)
                # print(result_dict)
                result_text = create_result_text(result_dict[chosen_pic])
                result_text = 'Average in photo: \n' + result_text
                window['-PREDICTION RESULTS-'].update(result_text)
            except Exception:
                # Best-effort UI update; ignore bad paths/missing results.
                pass
    window.close()
| 2.78125 | 3 |
tests/test_pmf.py | akabbeke/warhammer_stats | 10 | 12758200 | <reponame>akabbeke/warhammer_stats<filename>tests/test_pmf.py
from unittest import TestCase
from warhammer_stats.pmf import PMF
class TestAttack(TestCase):
    """Sanity checks for the warhammer_stats PMF helper class."""

    def setUp(self):
        # A spread of representative distributions: dice, degenerate
        # (static) values, and a couple of derived/modified PMFs.
        self.pmf_examples = [
            PMF.dn(6),
            PMF.dn(5),
            PMF.dn(1),
            PMF.static(0),
            PMF.static(5),
            PMF.dn(6).convert_binomial(5),
            PMF.dn(6).re_roll_value(1),
        ]

    def assertRoundEqual(self, a, b):
        # Compare floats to 10 decimal places to sidestep FP noise.
        self.assertEqual(round(a, 10), round(b, 10))

    def test_dn(self):
        """
        Check that we properly generate d(n) PMFs
        """
        for i in range(1, 100):
            pmf = PMF.dn(i)
            # You should never be able to roll a 0 on a die
            self.assertEqual(pmf.get(0), 0)
            # Check the probability is uniform (generator instead of a
            # throwaway list inside all()).
            self.assertTrue(all(x == 1 / i for x in pmf.values[1:]))

    def test_static(self):
        """
        Test we properly generate the PMF for static values
        """
        for i in range(100):
            pmf = PMF.static(i)
            # All mass sits on the single value i.
            self.assertEqual(pmf.get(i), 1)
            self.assertEqual(sum(pmf.values), 1)

    def test_convolve_many(self):
        """
        test convolving two PMFs
        """
        for pmf_1 in self.pmf_examples:
            for pmf_2 in self.pmf_examples:
                pmf_convolved = PMF.convolve_many([pmf_1, pmf_2])
                # Support size adds; means add under convolution.
                self.assertEqual(len(pmf_convolved.values), len(pmf_1.values) + len(pmf_2.values) - 1)
                self.assertRoundEqual(pmf_convolved.mean(), pmf_1.mean() + pmf_2.mean())

    def test_flatten(self):
        """
        test that flattening two dists into one sums to one
        """
        for pmf_1 in self.pmf_examples:
            for pmf_2 in self.pmf_examples:
                for i in range(100):
                    # Mix the two PMFs with complementary weights.
                    flattened = PMF.flatten([pmf_1 * (i/100), pmf_2 * ((100-i)/100)])
                    self.assertRoundEqual(sum(flattened.values), 1)

    def test_convolution_pmfs(self):
        """
        test that pmfs from convolution have list type values
        """
        self.assertTrue(
            isinstance(PMF.convolve_many(self.pmf_examples).values, list)
        )
| 3.28125 | 3 |
dataset.py | Elmari/speech-to-text-benchmark | 0 | 12758201 | <reponame>Elmari/speech-to-text-benchmark<filename>dataset.py<gh_stars>0
import os
import sys
import soundfile
from bs4 import BeautifulSoup
class Dataset(object):
    """Abstract speech dataset: a sized collection of (wav_path, transcript)
    pairs. Subclasses implement size(), get() and __str__()."""
    def size(self):
        # Number of utterances in the dataset.
        raise NotImplementedError()
    def size_hours(self):
        # Total audio duration in hours.
        # NOTE(review): assumes every file is 16 kHz single-channel
        # (16000 samples/sec) -- confirm against the datasets used.
        return sum(soundfile.read(self.get(i)[0])[0].size / (16000 * 3600) for i in range(self.size()))
    def get(self, index):
        # Return the (wav_path, transcript) pair at *index*.
        raise NotImplementedError()
    def __str__(self):
        raise NotImplementedError()
    @classmethod
    def create(cls, dataset_type):
        # Factory: build a concrete dataset from its string identifier.
        if dataset_type == 'librispeech':
            return LibriSpeechDataset(
                os.path.join(os.path.dirname(__file__), os.path.normpath('resources/data/LibriSpeech/test-clean')))
        elif dataset_type == 'tudade':
            return TudaDeDataset('resources/data/german-speechdata-package-v2/test', False, False)
        else:
            raise ValueError("cannot create %s of type '%s'" % (cls.__name__, dataset_type))
class LibriSpeechDataset(Dataset):
    """LibriSpeech test-clean dataset.

    Walks root/<speaker>/<chapter>/, reads each chapter's transcript
    file, converts .flac utterances to .wav on first use, and collects
    (wav_path, transcript) pairs.
    """
    def __init__(self, root):
        self._data = list()
        print('Initializing librispeech')
        for speaker_id in os.listdir(root):
            speaker_dir = os.path.join(root, speaker_id)
            print('Speaker Dir %s' % speaker_dir)
            for chapter_id in os.listdir(speaker_dir):
                chapter_dir = os.path.join(speaker_dir, chapter_id)
                print('chapter %s' % chapter_dir)
                # Transcript file maps utterance id -> transcript text.
                transcript_path = os.path.join(chapter_dir, '%s-%s.trans.txt' % (speaker_id, chapter_id))
                with open(transcript_path, 'r') as f:
                    transcripts = dict(x.split(' ', maxsplit=1) for x in f.readlines())
                for flac_file in os.listdir(chapter_dir):
                    if flac_file.endswith('.flac'):
                        print("ends with flac")
                        wav_file = flac_file.replace('.flac', '.wav')
                        print("renamed")
                        wav_path = os.path.join(chapter_dir, wav_file)
                        # Convert flac -> wav once; reuse on later runs.
                        if not os.path.exists(wav_path):
                            print("wav does not exist")
                            try:
                                flac_path = os.path.join(chapter_dir, flac_file)
                                print("path is %s" % flac_path)
                                pcm, sample_rate = soundfile.read(flac_path)
                                soundfile.write(wav_path, pcm, sample_rate)
                            except Exception as e:
                                # NOTE(review): failure is only printed; the
                                # pair is still appended below even though
                                # the .wav may not exist -- confirm intended.
                                print("an error occured")
                            print("wrote file")
                        self._data.append((wav_path, transcripts[wav_file.replace('.wav', '')]))
                        print("appended to list")
    def size(self):
        # Number of utterances collected at init time.
        return len(self._data)
    def get(self, index):
        # (wav_path, transcript) pair at *index*.
        return self._data[index]
    def __str__(self):
        return 'LibriSpeech'
class TudaDeDataset(Dataset):
    """Tuda-DE German speech dataset.

    Pairs each <name>.xml metadata file under *root* with its
    <name>_Kinect-RAW.wav recording and extracts the (optionally
    cleaned, optionally punctuation-free) sentence as the transcript.
    """
    def __init__(self, root, cleaned, without_punctuation):
        self._data = list()
        # cleaned: use the <cleaned_sentence> tag instead of <sentence>.
        self.cleaned = cleaned
        # without_punctuation: strip common punctuation from transcripts.
        self.without_punctuation = without_punctuation
        for filename in os.listdir(root):
            if not filename.endswith('.xml'): continue
            print("processing file %s " % filename)
            fullname = os.path.join(root, filename)
            media = os.path.join(root, filename.replace('.xml', '_Kinect-RAW.wav'))
            # Skip metadata files whose recording is missing.
            if not os.path.exists(media): continue
            infile = open(fullname, "r")
            contents = infile.read()
            soup = BeautifulSoup(contents, 'lxml')
            if cleaned:
                transcript = soup.find('cleaned_sentence').string
            else:
                transcript = soup.find('sentence').string
            if transcript is not None:
                if without_punctuation:
                    # Strip punctuation one character class at a time.
                    transcript = transcript.replace(".", "")
                    transcript = transcript.replace(",", "")
                    transcript = transcript.replace("!", "")
                    transcript = transcript.replace("?", "")
                    transcript = transcript.replace(";", "")
                    transcript = transcript.replace("\"", "")
                    transcript = transcript.replace("(", "")
                    transcript = transcript.replace(")", "")
                    transcript = transcript.replace(":", "")
                self._data.append((media, transcript))
            else:
                # NOTE(review): a missing sentence aborts the whole process
                # via sys.exit() -- confirm this hard failure is intended.
                print("Is none: %s" % filename)
                sys.exit()
            infile.close()
    def size(self):
        # Number of utterances collected at init time.
        return len(self._data)
    def get(self, index):
        # (wav_path, transcript) pair at *index*.
        return self._data[index]
    def __str__(self):
        return 'Tuda-DE'
| 2.84375 | 3 |
ShopperMiles/providers/serializers.py | juansahe/shoppy | 0 | 12758202 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals, absolute_import
from rest_framework import serializers
from .models import Promotion,Provider,Bond
class PromotionSerializer(serializers.ModelSerializer):
    """DRF serializer exposing a Promotion's basic fields."""
    class Meta:
        model = Promotion
        fields = ('id', 'name', 'description' , 'img','provider')
class ProviderSerializer(serializers.ModelSerializer):
    """DRF serializer exposing a Provider's profile and contact fields."""
    class Meta:
        model = Provider
        fields = ('id','type_provider', 'name', 'description','phone_contact','name_contact','email_contact','img','web_site')
class BondSerializer(serializers.ModelSerializer):
    """DRF serializer exposing a Bond and its value/status fields."""
    class Meta:
        model = Bond
        fields = ('id', 'name', 'description' ,'value1','value2','value3','code','status','provider','img')
| 2.046875 | 2 |
tests/test_helpers.py | Kedoodle/wise-old-man-updater | 0 | 12758203 | import unittest
from datetime import datetime
from unittest.mock import patch
from src.updater.helpers import filter_outdated_players, get_player_usernames
class TestFilterOutdatedPlayers(unittest.TestCase):
    """Tests for filter_outdated_players with a frozen clock.

    The module's _now() is patched for the whole class so "outdated" is
    judged against a fixed instant: 2021-05-27T14:41:32.709490.
    """
    @classmethod
    def setUpClass(cls):
        # Freeze time for every test in this class.
        cls._mock_now_patcher = patch('src.updater.helpers._now')
        cls._mock_now = cls._mock_now_patcher.start()
        cls._mock_now.return_value = datetime(2021, 5, 27, 14, 41, 32, 709490)
    @classmethod
    def tearDownClass(cls):
        cls._mock_now_patcher.stop()
    def test_when_no_players(self):
        players = []
        outdated_players = filter_outdated_players(players)
        self.assertListEqual(outdated_players, [])
    def test_when_no_outdated_players(self):
        # Both players updated within the last minute of the frozen now.
        players = [
            {
                "username": "kedd",
                "updatedAt": "2021-05-27T14:41:00.000Z"
            },
            {
                "username": "alvx",
                "updatedAt": "2021-05-27T14:40:33.000Z"
            }
        ]
        outdated_players = filter_outdated_players(players)
        self.assertListEqual(outdated_players, [])
    def test_when_one_outdated_player(self):
        # First player is just over the threshold; second is within it.
        players = [
            {
                "username": "kedd",
                "updatedAt": "2021-05-27T14:40:00.000Z"
            },
            {
                "username": "alvx",
                "updatedAt": "2021-05-27T14:40:33.000Z"
            }
        ]
        outdated_players = filter_outdated_players(players)
        self.assertListEqual(outdated_players, [players[0]])
    def test_when_multiple_outdated_players(self):
        # First two are outdated (boundary case included); last is fresh.
        players = [
            {
                "username": "kedd",
                "updatedAt": "2021-05-27T14:40:00.000Z"
            },
            {
                "username": "alvx",
                "updatedAt": "2021-05-27T14:40:32.000Z"
            },
            {
                "username": "detredwings",
                "updatedAt": "2021-05-27T14:41:00.000Z"
            }
        ]
        outdated_players = filter_outdated_players(players)
        self.assertListEqual(outdated_players, players[:-1])
class TestGetPlayerUsernames(unittest.TestCase):
    """Tests for get_player_usernames: extracts the username of each
    player dict, preserving order."""
    def test_when_no_players(self):
        players = []
        usernames = get_player_usernames(players)
        self.assertListEqual(usernames, [])
    def test_when_one_player(self):
        players = [
            {
                "username": "kedd"
            }
        ]
        usernames = get_player_usernames(players)
        self.assertListEqual(usernames, ["kedd"])
    def test_when_multiple_players(self):
        players = [
            {
                "username": "kedd"
            },
            {
                "username": "alvx"
            }
        ]
        usernames = get_player_usernames(players)
        self.assertListEqual(usernames, ["kedd", "alvx"])
# Allow running this test module directly with `python test_helpers.py`.
if __name__ == '__main__':
    unittest.main()
| 3 | 3 |
lucas-kanade.py | mike239x/stereo | 0 | 12758204 | """
An implementation of a stereo algorith based on Lucas-Kanade method
"""
import sys, os, time
import skimage
import numpy as np
def preprocess_image(path):
    """Load *path*, convert it to grayscale and return its Gaussian
    pyramid as a list (levels 0..8, each roughly half the previous size).
    """
    # `import skimage` alone does not guarantee that the io/color/transform
    # submodules are loaded (lazy loading varies across skimage versions),
    # so import them explicitly where they are used.
    import skimage.io
    import skimage.color
    import skimage.transform
    image = skimage.io.imread(path)
    gray = skimage.color.rgb2gray(image)
    return list(skimage.transform.pyramid_gaussian(
        gray,
        max_layer=8,
        multichannel=False,
    ))
# Build grayscale Gaussian pyramids for the left/right stereo pair.
im_prmd_0 = preprocess_image('data/Classroom1-perfect/im0.png')
im_prmd_1 = preprocess_image('data/Classroom1-perfect/im1.png')
# Horizontal image gradient (Scharr) per pyramid level of the left image.
dx = list(map(skimage.filters.scharr_v, im_prmd_0))
# Temporal difference (left - right) per level.
dt = list(a - b for a,b in zip(im_prmd_0, im_prmd_1))
# Show the coarsest level's gradients and the per-pixel dt/dx ratio
# (the Lucas-Kanade brightness-constancy estimate of disparity).
skimage.io.imshow_collection([
    dx[-1],
    dt[-1],
    dt[-1] / dx[-1], # disparity based on the last lvl of image pyramids
])
skimage.io.show()
| 2.984375 | 3 |
grebe/grebe.py | tac0x2a/grebe | 2 | 12758205 | import pika
from lakeweed import clickhouse as d2c
from . import dbms_clickhouse as dbms
from .dbms_clickhouse import TableNotFoundException
class Grebe:
    """AMQP consumer that lands routed messages into ClickHouse.

    For every message it infers column names/types via lakeweed, creates
    or looks up the target table through the schema store, inserts the
    rows, and on failure re-publishes the message to a retry exchange
    with a bounded per-message retry count.
    """
    def __init__(self, client, schema_store, source_settings_store, resend_exchange_name, retry_max: int, tz_str: str, logger):
        self.client = client
        self.schema_store = schema_store
        self.source_settings_store = source_settings_store
        self.resend_exchange_name = resend_exchange_name
        self.retry_max = retry_max
        self.tz_str = tz_str
        self.logger = logger
        # Warm the in-memory caches before consuming.
        self.reload_schema()
        self.logger.info(f"Schemas: {[s for s in self.schema_cache.values()]}")
        self.reload_source_settings()
    def callback(self, channel, method, properties, body):
        # pika delivery callback: insert the message, retry on failure,
        # and always ack so the broker does not redeliver the original.
        self.logger.debug("receive '{}({})'".format(method.routing_key, method.delivery_tag))
        try:
            Grebe.insert_data(
                method, body,
                self.client,
                self.schema_store,
                self.schema_cache,
                self.specified_types_cache,
                self.tz_str,
                self.logger
            )
        except TableNotFoundException as e:
            # Cached table name is stale (e.g. table renamed): refresh the
            # schema cache, then push the message through the retry path.
            self.logger.error(f"Table '{ e.table_name }' is renamed? Update schema table cache.")
            self.reload_schema()
            self.logger.error("Consume failed '{}'. retrying...".format(method.routing_key))
            Grebe.send_retry(
                channel, method, properties, body,
                self.retry_max,
                self.resend_exchange_name,
                self.logger
            )
        except Exception as e:
            # Any other failure: log and retry with a bounded count.
            self.logger.error(e, exc_info=e)
            self.logger.error("Consume failed '{}'. retrying...".format(method.routing_key))
            Grebe.send_retry(
                channel, method, properties, body,
                self.retry_max,
                self.resend_exchange_name,
                self.logger
            )
        finally:
            channel.basic_ack(delivery_tag=method.delivery_tag)
            self.logger.debug("return basic_ack '{}({})'".format(method.routing_key, method.delivery_tag))
    def reload_schema(self):
        # Refresh the schema cache from the store; returns a small summary.
        self.schema_cache = self.schema_store.load_all_schemas()
        self.logger.info(f"Load {len(self.schema_cache)} schemas from {self.schema_store}")
        return {'schema_count': len(self.schema_cache), 'store': str(type(self.schema_store))}
    def reload_source_settings(self):
        # Refresh per-source settings and the derived explicit-type cache
        # (source_id -> column type overrides).
        self.source_settings_cache = self.source_settings_store.load_all_source_settings()
        self.logger.info(f"Loaded source_settings: {self.source_settings_cache}")
        self.specified_types_cache = {source_id: body['types'] for source_id, body in self.source_settings_cache.items() if 'types' in body}
        self.logger.info(f"Specified Types cache: {self.specified_types_cache}")
        return {'store': str(type(self.source_settings_store))}
    @classmethod
    def insert_data(cls, method, body, client, schema_store, schema_cache, specified_types, tz_str, logger):
        """Infer the schema for one message and insert its rows into the
        table derived from the routing key (creating it when the schema
        is new)."""
        # replace delimiter in topic mqtt/amqp.
        topic = method.routing_key
        source_id = topic.replace("/", "_").replace(".", "_")
        payload = str(body.decode('utf-8'))
        # Per-source explicit type overrides, if configured.
        if source_id in specified_types.keys():
            spec_types = specified_types[source_id]
        else:
            spec_types = {}
        (columns, types, values_list) = d2c.data_string2type_value(payload, specified_types=spec_types, tz_str=tz_str)
        serialized = dbms.serialize_schema(columns, types, source_id)
        data_table_name = dbms.get_table_name_with_insert_if_new_schema(client, schema_store, source_id, columns, types, serialized, schema_cache)
        dbms.insert_data(client, data_table_name, columns, values_list)
        logger.debug(serialized)
    @classmethod
    def send_retry(cls, channel, method, properties, body, retry_max, resend_exchange_name, logger):
        """Re-publish *body* to the retry exchange, tracking the attempt
        count in the x-grebe-retry-count header; drop the message once
        *retry_max* attempts are exceeded."""
        RetryCountKey = "x-grebe-retry-count"
        current_retry_count = 0
        if properties.headers and properties.headers.get(RetryCountKey):
            current_retry_count = int(properties.headers.get(RetryCountKey))
        if current_retry_count >= retry_max:
            # Give up: log enough context to replay the message manually.
            logger.error("Retry count exceeded!!({}). Discard Message = [exchange={}, routing_key={}, body={}]".format(retry_max, resend_exchange_name, method.routing_key, body))
            return
        props = pika.BasicProperties(
            headers={RetryCountKey: current_retry_count + 1}
        )
        logger.debug("Re-sending [exchange={}, routing_key={}, props={}, body={}]".format(resend_exchange_name, method.routing_key, props, body))
        channel.basic_publish(exchange=resend_exchange_name, routing_key=method.routing_key, properties=props, body=body)
        logger.warning("Re-send complete. ({})".format(current_retry_count + 1))
| 1.976563 | 2 |
loldib/getratings/models/NA/na_nami/na_nami_mid.py | koliupy/loldib | 0 | 12758206 | from getratings.models.ratings import Ratings
# Auto-generated per-matchup rating placeholder (NA region, Nami, mid lane,
# versus the named champion). All behaviour comes from the Ratings base
# class; the classes that follow in this module repeat the same pattern.
class NA_Nami_Mid_Aatrox(Ratings):
    pass
class NA_Nami_Mid_Ahri(Ratings):
pass
class NA_Nami_Mid_Akali(Ratings):
pass
class NA_Nami_Mid_Alistar(Ratings):
pass
class NA_Nami_Mid_Amumu(Ratings):
pass
class NA_Nami_Mid_Anivia(Ratings):
pass
class NA_Nami_Mid_Annie(Ratings):
pass
class NA_Nami_Mid_Ashe(Ratings):
pass
class NA_Nami_Mid_AurelionSol(Ratings):
pass
class NA_Nami_Mid_Azir(Ratings):
pass
class NA_Nami_Mid_Bard(Ratings):
pass
class NA_Nami_Mid_Blitzcrank(Ratings):
pass
class NA_Nami_Mid_Brand(Ratings):
pass
class NA_Nami_Mid_Braum(Ratings):
pass
class NA_Nami_Mid_Caitlyn(Ratings):
pass
class NA_Nami_Mid_Camille(Ratings):
pass
class NA_Nami_Mid_Cassiopeia(Ratings):
pass
class NA_Nami_Mid_Chogath(Ratings):
pass
# The NA / Nami / Mid matchup table consists of one empty ``Ratings``
# subclass per opposing champion.  Instead of ~240 hand-written,
# copy-pasted class statements, the classes are generated in a loop and
# bound into the module namespace, so external lookups such as
# ``module.NA_Nami_Mid_Corki`` (and ``from module import *``) keep
# working unchanged.
_NA_NAMI_MID_CHAMPIONS = (
    "Corki", "Darius", "Diana", "Draven", "DrMundo", "Ekko", "Elise",
    "Evelynn", "Ezreal", "Fiddlesticks", "Fiora", "Fizz", "Galio",
    "Gangplank", "Garen", "Gnar", "Gragas", "Graves", "Hecarim",
    "Heimerdinger", "Illaoi", "Irelia", "Ivern", "Janna", "JarvanIV",
    "Jax", "Jayce", "Jhin", "Jinx", "Kalista", "Karma", "Karthus",
    "Kassadin", "Katarina", "Kayle", "Kayn", "Kennen", "Khazix",
    "Kindred", "Kled", "KogMaw", "Leblanc", "LeeSin", "Leona",
    "Lissandra", "Lucian", "Lulu", "Lux", "Malphite", "Malzahar",
    "Maokai", "MasterYi", "MissFortune", "MonkeyKing", "Mordekaiser",
    "Morgana", "Nami", "Nasus", "Nautilus", "Nidalee", "Nocturne",
    "Nunu", "Olaf", "Orianna", "Ornn", "Pantheon", "Poppy", "Quinn",
    "Rakan", "Rammus", "RekSai", "Renekton", "Rengar", "Riven",
    "Rumble", "Ryze", "Sejuani", "Shaco", "Shen", "Shyvana", "Singed",
    "Sion", "Sivir", "Skarner", "Sona", "Soraka", "Swain", "Syndra",
    "TahmKench", "Taliyah", "Talon", "Taric", "Teemo", "Thresh",
    "Tristana", "Trundle", "Tryndamere", "TwistedFate", "Twitch",
    "Udyr", "Urgot", "Varus", "Vayne", "Veigar", "Velkoz", "Vi",
    "Viktor", "Vladimir", "Volibear", "Warwick", "Xayah", "Xerath",
    "XinZhao", "Yasuo", "Yorick", "Zac", "Zed", "Ziggs", "Zilean",
    "Zyra",
)

for _champion in _NA_NAMI_MID_CHAMPIONS:
    _class_name = "NA_Nami_Mid_" + _champion
    # type(name, bases, dict) builds an empty subclass identical to a
    # hand-written ``class NA_Nami_Mid_X(Ratings): pass`` statement.
    globals()[_class_name] = type(_class_name, (Ratings,), {})
del _champion, _class_name
| 1.359375 | 1 |
Contents/Server Plugin/weather.py | NSBum/EC | 0 | 12758207 | #! /usr/bin/env python
# -*- coding: utf-8 -*-
import xmltodict
ERROR_DEFAULT = 999.9
class Observation:
    """A weather observation from Environment Canada.

    Wraps a full xmltodict-parsed site-data document and exposes the
    parsed current-day and prior-day condition objects.
    """
    def __init__(self,doc):
        # `doc` is the whole parsed document; `currentConditions` is the
        # fragment the sibling classes below consume.
        self.data = doc['siteData']['currentConditions']
        # First dateTime entry carries the observation timestamp
        # (UTC vs local is not distinguishable from here -- confirm
        # against the feed before relying on it).
        self.timestamp = self.data['dateTime'][0]['timeStamp']
        self.currentConditions = CurrentConditions(self.data)
        self.yesterdayConditions = YesterdayConditions(doc['siteData']['yesterdayConditions'])
class CurrentConditions:
    """Current weather conditions at a station.

    Parses the ``currentConditions`` fragment of an Environment Canada
    site-data document (xmltodict form).  Numeric fields that are absent
    from the feed fall back to the module's ``ERROR_DEFAULT`` sentinel.
    """
    def __init__(self, xml):
        self.xml = xml
        self.condition = self.xml['condition']
        self.temperature = float(self.xml.get('temperature', {}).get('#text', ERROR_DEFAULT))
        self.tempString = "{0}°C".format(self.temperature) if self.temperature != ERROR_DEFAULT else "NA"
        self.dewpoint = float(self.xml.get('dewpoint', {}).get('#text', ERROR_DEFAULT))
        self.dewpointString = "{0}°C".format(self.dewpoint) if self.dewpoint != ERROR_DEFAULT else "NA"
        self.humidity = float(self.xml.get('relativeHumidity', {}).get('#text', ERROR_DEFAULT))
        # Use the shared ERROR_DEFAULT sentinel instead of the hard-coded
        # 999.9 literals the original used here, so every "missing value"
        # in this module agrees on one constant (same numeric value).
        self.pressure = float(self.xml.get('pressure', {}).get('#text', ERROR_DEFAULT))
        self.visibility = float(self.xml.get('visibility', {}).get('#text', ERROR_DEFAULT))
        self.winds = Winds(self.xml['wind'])
class Winds:
    """Wind observations (speed, direction, bearing, gusts) at a station."""

    def __init__(self, xml):
        self.xml = xml
        # Speed and direction are assumed to always be present in the feed.
        self.windSpeed = int(xml['speed']['#text'])
        self.windDirection = xml['direction']
        # A compass bearing is sometimes omitted, so fall back to the
        # 999.9 error sentinel used elsewhere in this module.
        bearing_node = xml.get('bearing', {})
        self.windBearing = float(bearing_node.get('#text', 999.9))
        # No gust report means no gusts: default to 0.
        gust_node = xml.get('gust', {})
        self.windGust = int(gust_node.get('#text', 0))
class YesterdayConditions:
    """Prior-day conditions (high/low temperature and precipitation).

    Parses the ``yesterdayConditions`` fragment of an Environment Canada
    site-data document.  Missing readings fall back to the 999.9 error
    sentinel instead of raising.
    """
    def __init__(self, xml):
        self.xml = xml
        # The feed reports two temperature entries (high and low, in feed
        # order).  The original `.get('temperature', {})[0]` fallback was
        # broken -- indexing the default {} raised KeyError anyway -- so a
        # missing or short list is now handled explicitly with sentinels.
        temps = self.xml.get('temperature') or []
        readings = [float(t.get('#text', 999.9)) for t in temps[:2]]
        while len(readings) < 2:
            readings.append(999.9)
        self.yesterdayHigh = max(readings)
        self.yesterdayLow = min(readings)
        self.yesterdayPrecip = float(self.xml.get('precip', {}).get('#text', 999.9))
# This is still unimplemented
class TodayForecast:
    """Forecast for today"""
    # NOTE(review): placeholder -- the forecast fragment below is located
    # but never stored on the instance or exposed; finish or remove.
    def __init__(self,doc):
        self.xml = doc
        # drill down to today's forecast, the 1st in the fc group
        # (result is currently unused)
        fcxml = self.xml['siteData']['forecastGroup']['forecast'][0]
| 3.0625 | 3 |
remittance/invoice.py | drummonds/remittance | 0 | 12758208 | <filename>remittance/invoice.py
from .remittance import RemittanceException
from .remittance import AbstractInvoiceLineItem
from .utilities import enrich_field
from .utils import p
class Invoice(AbstractInvoiceLineItem):
    """Normal Invoice: one remittance line that pays a single Sage sales
    invoice, optionally taking a prompt-payment (settlement) discount."""

    def __str__(self):
        return ' Invoice {}'.format(repr(self))

    def enrich(self, remittance_doc, sage):
        """Enrich a raw remittance document with data from Sage.

        Uses getField which uses 3 predefined columns:
          - 'Your Ref' is our invoice number
          - 'Member Code' is an AIS specific membership code and defines some exceptions
          - 'Document Type' defines the type of document; only 'Invoice' and
            'Credit Note' are enriched.

        Worked example of the calculations:
          Sage invoiced            2142.24
          discount rate (constant)    2.50%
          -> calc discount           53.56
          -> calc net amount       2088.68
          net discount               44.63
          vat discount                8.93
          Fenwick gross            2133.31
          Fenwick discount           44.63
          -> calc net cash         2088.68
        """
        invoice_number = self.number  # This is the invoice number
        # Pull the customer account and the invoiced amount for this
        # invoice out of the Sage dataset.
        enrich_field(sage, self, 'customer', 'ALT_REF')
        enrich_field(sage, self, 'invoiced', 'AMOUNT')
        if p(self.invoiced) == p(self.gross_amount):  # Paying the full amount
            # Full payment must not also claim a discount.
            # NOTE(review): this branch sets none of the calc_* attributes
            # that create_transactions() later reads (calc_net_amount);
            # presumably the base class provides them -- confirm.
            if self.discount > 0:
                raise RemittanceException('Gross amount on remittance advice ' +
                    '{} matches invoice {} from {} but discount {} is greater than zero'.format(
                        p(self.gross_amount), invoice_number, self.customer, self.discount))
        else:  # Paying less the full amount so assuming should be taking advantage of prompt payment
            # Gross discount = invoiced * rate; split into net + VAT parts
            # (the VAT element of the discount is recoverable).
            self.gross_prompt_payment_discount = p(float(self.invoiced) * remittance_doc.cust_discount_rate)
            self.net_prompt_payment_discount = p(float(self.gross_prompt_payment_discount)
                                                 / (1 + self.vat_rate))
            self.prompt_payment_discount_vat = ((self.gross_prompt_payment_discount)
                                                - p(self.net_prompt_payment_discount))
            self.calc_net_amount = p(self.invoiced) - p(self.gross_prompt_payment_discount)
            self.net_cash_in = p(self.gross_amount - self.discount)
            if self.calc_net_amount != self.net_cash_in:
                # Small rounding error (< 5p): adjust the discount to align
                # with the actual cash received. net_cash_in is assumed to
                # be correct, and invoiced is correct as it is from accounts.
                if abs(self.calc_net_amount - self.net_cash_in) < p(0.05):
                    self.net_prompt_payment_discount = p(self.invoiced
                                                         - self.prompt_payment_discount_vat - self.net_cash_in)
                    self.gross_prompt_payment_discount = (self.net_prompt_payment_discount
                                                          + self.prompt_payment_discount_vat)
                    self.calc_net_amount = p(self.invoiced) - p(self.gross_prompt_payment_discount)
                else:  # Discount >= 0.05
                    raise RemittanceException(
                        'Calculated net payment after prompt payment discount does not match receipt.\n' +
                        ' Invoiced amount : {}'.format(p(self.invoiced)) +
                        ' Prompt payment discount: {}'.format(p(self.gross_prompt_payment_discount)) +
                        ' Calculated net amount : {}'.format(p(self.calc_net_amount)) +
                        ' On remittance:' +
                        ' Gross amount: {}'.format(p(self.gross_amount)) +
                        ' Discount: {}'.format(p(self.discount)) +
                        ' Net Cash: {}'.format(self.net_cash_in)
                    )
        # Guard against posting a payment twice: a zero outstanding balance
        # on a positive invoice means it has already been entered.
        enrich_field(sage, self, 'outstanding', 'OUTSTANDING')
        if self.outstanding == p(0) and self.gross_amount > 0:
            raise RemittanceException(
                'Outstanding amount is zero for invoice {} from {} must have already been entered'.format(
                    invoice_number, self.customer))

    def create_transactions(self, sage_import_file):
        """This creates transactions such as payments and credits at some later date."""
        rd = sage_import_file
        si = sage_import_file.sage_import
        if self.gross_amount > 0:
            comment = ''
            # The VAT on the prompt payment discount is recoverable, so the
            # discount is posted as a sales credit (SC) to nominal 4009.
            if self.discount > 0:
                si.detailed_check_write_row('SC', '4009', rd.remittance.supplier+' '+self.number,
                    rd.tran_date,  # Use the file transaction date as this is when the
                    # transaction takes place. Previously (in error) used self.date which is the
                    # date the invoice was raised. (Referenced from below)
                    'Sales Discount '+self.number,
                    self.net_prompt_payment_discount, 'T1',
                    tax_amount = self.prompt_payment_discount_vat,
                    comment = comment, account = self.customer)
            # No impact on running bank balance
            # NOTE(review): the receipt row posts calc_net_amount while the
            # bank balance is incremented by cash_in (gross - discount);
            # enrich() asserts these agree, but confirm for the full-payment
            # branch where calc_net_amount is not set here.
            cash_in = self.gross_amount - self.discount
            si.detailed_check_write_row('SA', rd.bank, rd.remittance.supplier+' '+self.number,
                rd.tran_date,  # see first check_write_row in Invoice.create_transactions
                'Sales Receipt '+self.number,
                self.calc_net_amount, 'T9', comment = comment, account = self.customer)
            rd.running_bank_balance += cash_in
        elif self.gross_amount == 0:
            print("Invoice has zero amount which is a little odd. {}".format(self))
        else:
            raise RemittanceException("Invoice has negative amount which is definitely odd. {}".format(self))
| 2.875 | 3 |
tests/test_sql_interf_gd_get_tax.py | HobnobMancer/cazy_webscrapper | 0 | 12758209 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
# (c) University of St Andrews 2020-2021
# (c) University of Strathclyde 2020-2021
# (c) James Hutton Institute 2020-2021
#
# Author:
# <NAME>
#
# Contact
# <EMAIL>
#
# <NAME>,
# Biomolecular Sciences Building,
# University of St Andrews,
# North Haugh Campus,
# St Andrews,
# KY16 9ST
# Scotland,
# UK
#
# The MIT License
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
"""Tests sql.sql_interface.get_data.get_taxonomies.py
These test are intened to be run from the root of the repository using:
pytest -v
"""
from argparse import Namespace, ArgumentParser
from datetime import datetime
from pathlib import Path
import pytest
from cazy_webscraper.sql import sql_orm
from cazy_webscraper.sql.sql_interface.get_data import get_selected_gbks, get_taxonomies
from cazy_webscraper.sql.sql_orm import (
Ec,
Genbank,
Session,
Taxonomy,
Uniprot,
)
def test_get_tax_user_acc(monkeypatch):
    """get_taxonomies() with user-supplied GenBank and UniProt accession
    files takes the user-accession lookup path (lookups monkeypatched)."""
    argsdict = {"args": Namespace(
        genbank_accessions="tests/test_inputs/test_inputs_sql_interface/test_accs.txt",
        uniprot_accessions="tests/test_inputs/test_inputs_sql_interface/test_accs.txt",
    )}

    def mock_get_tax(*args, **kwards):
        return [1, 2, 3]

    def mock_get_table_dicts(*args, **kwards):
        return {}, {}

    monkeypatch.setattr(get_taxonomies, "get_taxs_for_user_gbks", mock_get_tax)
    monkeypatch.setattr(get_taxonomies, "get_taxs_for_uniprots", mock_get_tax)
    monkeypatch.setattr(get_taxonomies, "get_uni_gbk_tax_dict", mock_get_table_dicts)

    get_taxonomies.get_taxonomies(set(), set(), {}, set(), set(), 'connection', argsdict['args'])
def test_get_tax_db_tax(monkeypatch):
    """get_taxonomies() with no user accession files falls through to the
    filtered database-taxonomy path (lookups monkeypatched)."""
    argsdict = {"args": Namespace(
        genbank_accessions=None,
        uniprot_accessions=None,
    )}

    def mock_get_tax(*args, **kwards):
        return [1, 2, 3]

    monkeypatch.setattr(get_taxonomies, "get_taxs_for_user_gbks", mock_get_tax)
    monkeypatch.setattr(get_taxonomies, "get_taxs_for_uniprots", mock_get_tax)
    monkeypatch.setattr(get_taxonomies, "get_filtered_taxs", mock_get_tax)

    get_taxonomies.get_taxonomies(set(), set(), {}, set(), set(), 'connection', argsdict['args'])
def test_get_uni_gbk_dict(db_path):
    """get_uni_gbk_tax_dict() returns empty lookup dicts for an empty db."""
    # The original built an argsdict Namespace here that was never used;
    # it has been removed.
    db_connection = sql_orm.get_db_connection(db_path, False, False)
    assert ({}, {}) == get_taxonomies.get_uni_gbk_tax_dict(db_connection)
def test_get_user_gbks_fail():
    """get_taxs_for_user_gbks() exits (SystemExit) when an accession from
    the user's file is missing from the GenBank table dict."""
    argsdict = {"args": Namespace(
        sql_echo=True,
        uniprot_accessions=None,
        genbank_accessions="tests/test_inputs/test_inputs_sql_interface/test_accs_FAIL.txt",
    )}
    gbk_table_dict = {'test_gbk': 1, 'gbk_acc': 2}

    with pytest.raises(SystemExit) as pytest_wrapped_e:
        get_taxonomies.get_taxs_for_user_gbks(gbk_table_dict, argsdict['args'])
    assert pytest_wrapped_e.type == SystemExit
def test_get_user_gbks():
    """get_taxs_for_user_gbks() maps accessions in the user's file to their
    db record ids ('gbk_acc' -> 2)."""
    argsdict = {"args": Namespace(
        sql_echo=True,
        uniprot_accessions=None,
        genbank_accessions="tests/test_inputs/test_inputs_sql_interface/test_accs.txt",
    )}
    gbk_table_dict = {'test_gbk': 1, 'gbk_acc': 2}

    assert [2] == get_taxonomies.get_taxs_for_user_gbks(gbk_table_dict, argsdict['args'])
def test_get_user_uniprot_fail():
    """get_taxs_for_uniprots() exits (SystemExit) when an accession from
    the user's file is missing from the table dict."""
    argsdict = {"args": Namespace(
        sql_echo=True,
        uniprot_accessions="tests/test_inputs/test_inputs_sql_interface/test_accs_FAIL.txt",
    )}
    gbk_table_dict = {'test_gbk': 1, 'gbk_acc': 2}

    with pytest.raises(SystemExit) as pytest_wrapped_e:
        get_taxonomies.get_taxs_for_uniprots(gbk_table_dict, argsdict['args'])
    assert pytest_wrapped_e.type == SystemExit
def test_get_user_uniprot():
    """get_taxs_for_uniprots() maps accessions in the user's file to their
    db record ids ('gbk_acc' -> 2)."""
    argsdict = {"args": Namespace(
        sql_echo=True,
        uniprot_accessions="tests/test_inputs/test_inputs_sql_interface/test_accs.txt",
    )}
    gbk_table_dict = {'test_gbk': 1, 'gbk_acc': 2}

    assert [2] == get_taxonomies.get_taxs_for_uniprots(gbk_table_dict, argsdict['args'])
| 1.210938 | 1 |
tests/functional/custom_singular_tests/test_custom_singular_tests.py | tomasfarias/dbt-core | 799 | 12758210 | <gh_stars>100-1000
import pytest
from pathlib import Path
from dbt.tests.util import run_dbt
# from `test/integration/009_data_test`
#
# Models
#
models__table_copy = """
{{
config(
materialized='table'
)
}}
select * from {{ this.schema }}.seed
"""
#
# Tests
#
tests__fail_email_is_always_null = """
select *
from {{ ref('table_copy') }}
where email is not null
"""
tests__fail_no_ref = """
select 1
"""
tests__dotted_path_pass_id_not_null = """
{# Same as `pass_id_not_null` but with dots in its name #}
select *
from {{ ref('table_copy') }}
where id is null
"""
tests__pass_id_not_null = """
select *
from {{ ref('table_copy') }}
where id is null
"""
tests__pass_no_ref = """
select 1 limit 0
"""
class CustomSingularTestsBase(object):
    """Shared fixtures for the custom singular-test cases below."""

    @pytest.fixture(scope="class", autouse=True)
    def setUp(self, project):
        """Create seed and downstream model tests are to be run on"""
        # Load the expected seed data, then build the single table_copy model.
        project.run_sql_file(project.test_data_dir / Path("seed_expected.sql"))
        results = run_dbt()
        assert len(results) == 1

    @pytest.fixture(scope="class")
    def models(self):
        # Only one model: a table materialisation copying the seed.
        return {"table_copy.sql": models__table_copy}
class TestPassingTests(CustomSingularTestsBase):
    """All singular tests registered here are expected to pass."""

    @pytest.fixture(scope="class")
    def tests(self):
        # Includes a dotted-path filename to prove dots in test names work.
        return {
            "my_db.my_schema.table_copy.pass_id_not_null.sql": tests__dotted_path_pass_id_not_null,
            "tests__pass_id_not_null.sql": tests__pass_id_not_null,
            "tests__pass_no_ref.sql": tests__pass_no_ref,
        }

    def test_data_tests(self, project, tests):
        # Every registered test must run, pass, not be skipped, and report
        # zero failing rows.
        test_results = run_dbt(["test"])
        assert len(test_results) == len(tests)

        for result in test_results:
            assert result.status == "pass"
            assert not result.skipped
            assert result.failures == 0
class TestFailingTests(CustomSingularTestsBase):
    """All singular tests registered here are written to fail."""

    @pytest.fixture(scope="class")
    def tests(self):
        return {
            "tests__fail_email_is_always_null.sql": tests__fail_email_is_always_null,
            "tests__fail_no_ref.sql": tests__fail_no_ref,
        }

    def test_data_tests(self, project, tests):
        """assert that all deliberately failing tests actually fail"""
        # expect_pass=False: the dbt invocation itself is expected to fail.
        test_results = run_dbt(["test"], expect_pass=False)
        assert len(test_results) == len(tests)

        for result in test_results:
            assert result.status == "fail"
            assert not result.skipped
            assert result.failures > 0
| 1.976563 | 2 |
tests/controllers/test_controller.py | drkane/find-that-postcode | 4 | 12758211 | from findthatpostcode.controllers.controller import Controller
from tests.fixtures import MockElasticsearch
def test_controller_class():
    """Controller wraps an (id, data) pair; `found` reflects whether any
    attribute data was supplied, and a missing entity yields one 404 error."""
    id_ = "testentity"
    data = {"code": "testentity", "name": "Test Entity"}

    # test a found entity
    a = Controller(id_, data)
    assert a.id == id_
    assert a.attributes["name"] == data["name"]
    assert a.found is True
    assert len(a.get_errors()) == 0

    # test internal functions (default implementations are pass-throughs)
    assert a.parse_id(id_) == id_
    assert a.process_attributes(data) == data

    # test a non existant object
    a = Controller(id_, {})
    assert a.id == id_
    assert a.attributes.get("name") is None
    assert a.found is False
    assert len(a.get_errors()) == 1
    assert a.get_errors()[0]["status"] == "404"
def test_controller_fetch():
    """Controller.get_from_es() hydrates a controller from a (mocked)
    Elasticsearch postcode document."""
    es = MockElasticsearch()

    a = Controller.get_from_es(
        "EX36 4AT", es, es_config={"es_index": "geo_postcode", "es_type": "_doc"}
    )
    assert isinstance(a.id, str)
    # The mock document carries more than the four core fields.
    assert len(a.attributes) > 4
    assert a.found is True
    assert len(a.get_errors()) == 0
| 2.546875 | 3 |
gamestonk_terminal/cryptocurrency/nft/nft_controller.py | Flodur871/GamestonkTerminal | 1 | 12758212 | import argparse
from typing import List
from prompt_toolkit.completion import NestedCompleter
from gamestonk_terminal import feature_flags as gtff
from gamestonk_terminal.menu import session
from gamestonk_terminal.helper_funcs import (
EXPORT_ONLY_RAW_DATA_ALLOWED,
MENU_GO_BACK,
MENU_QUIT,
MENU_RESET,
try_except,
system_clear,
get_flair,
check_positive,
parse_known_args_and_warn,
)
from gamestonk_terminal.cryptocurrency.nft import nftcalendar_view
class NFTController:
    """NFT Controller class.

    Menu controller for the NFT context: owns the command parser, the
    prompt-toolkit completer, and one ``call_<cmd>`` handler per command.
    The four nftcalendar.io drop commands share one implementation
    (`_show_drops`) instead of four near-identical copies.
    """

    CHOICES = [
        "cls",
        "?",
        "help",
        "q",
        "quit",
        "reset",
    ]

    CHOICES_COMMANDS = [
        "today",
        "upcoming",
        "ongoing",
        "newest",
    ]

    CHOICES += CHOICES_COMMANDS

    def __init__(self):
        """Constructor"""
        self.nft_parser = argparse.ArgumentParser(add_help=False, prog="nft")
        self.nft_parser.add_argument(
            "cmd",
            choices=self.CHOICES,
        )
        self.completer = NestedCompleter.from_nested_dict(
            {c: None for c in self.CHOICES}
        )

    def print_help(self):
        """Print help"""
        help_text = """
What do you want to do?
    cls clear screen
    ?/help show this menu again
    q quit this menu, and shows back to main menu
    quit quit to abandon the program
    reset reset terminal and reload configs
nftcalendar.io:
    today today's NFT drops
    upcoming upcoming NFT drops
    ongoing Ongoing NFT drops
    newest Recently NFTs added
"""
        print(help_text)

    def switch(self, an_input: str):
        """Process and dispatch input

        Returns
        -------
        MENU_GO_BACK, MENU_QUIT, MENU_RESET
            MENU_GO_BACK - Show main context menu again
            MENU_QUIT - Quit terminal
            MENU_RESET - Reset terminal and go back to same previous menu
        """
        # Empty command
        if not an_input:
            print("")
            return None

        (known_args, other_args) = self.nft_parser.parse_known_args(an_input.split())

        # Help menu again
        if known_args.cmd == "?":
            self.print_help()
            return None

        # Clear screen
        if known_args.cmd == "cls":
            system_clear()
            return None

        # Dispatch to the matching call_<cmd> handler.
        return getattr(
            self, "call_" + known_args.cmd, lambda: "command not recognized!"
        )(other_args)

    def call_help(self, _):
        """Process Help Command"""
        self.print_help()

    def call_q(self, _):
        """Process Q command - quit the menu"""
        return MENU_GO_BACK

    def call_quit(self, _):
        """Process Quit command - exit the program"""
        return MENU_QUIT

    def call_reset(self, _):
        """Process Reset command - reset the program"""
        return MENU_RESET

    def _show_drops(self, other_args: List[str], prog: str, description: str, view_function):
        """Shared implementation for the four nftcalendar.io drop commands.

        Builds the common ``-n/--num`` argument parser, parses
        ``other_args``, and forwards the result to ``view_function``.
        """
        parser = argparse.ArgumentParser(
            add_help=False,
            formatter_class=argparse.ArgumentDefaultsHelpFormatter,
            prog=prog,
            description=description,
        )
        parser.add_argument(
            "-n",
            "--num",
            type=check_positive,
            help="Number of NFT collections to display",
            dest="num",
            default=5,
        )
        ns_parser = parse_known_args_and_warn(
            parser, other_args, EXPORT_ONLY_RAW_DATA_ALLOWED
        )
        if not ns_parser:
            return
        view_function(
            num=ns_parser.num,
            export=ns_parser.export,
        )

    @try_except
    def call_today(self, other_args: List[str]):
        """Process today command"""
        self._show_drops(
            other_args,
            "today",
            "Today's NFT drops [Source: nftcalendar.io]",
            nftcalendar_view.display_nft_today_drops,
        )

    @try_except
    def call_upcoming(self, other_args: List[str]):
        """Process upcoming command"""
        self._show_drops(
            other_args,
            "upcoming",
            "Upcoming's NFT drops [Source: nftcalendar.io]",
            nftcalendar_view.display_nft_upcoming_drops,
        )

    @try_except
    def call_ongoing(self, other_args: List[str]):
        """Process ongoing command"""
        self._show_drops(
            other_args,
            "ongoing",
            "Ongoing's NFT drops [Source: nftcalendar.io]",
            nftcalendar_view.display_nft_ongoing_drops,
        )

    @try_except
    def call_newest(self, other_args: List[str]):
        """Process newest command"""
        self._show_drops(
            other_args,
            "newest",
            "Newest's NFT drops [Source: nftcalendar.io]",
            nftcalendar_view.display_nft_newest_drops,
        )
def menu():
    """NFT Menu.

    Runs the interactive read-eval loop for the NFT context until the
    dispatched command asks to go back, quit, or reset.
    """
    nft_controller = NFTController()
    nft_controller.call_help(None)

    while True:
        # Use prompt_toolkit completion when an interactive session exists.
        if session and gtff.USE_PROMPT_TOOLKIT:
            completer = NestedCompleter.from_nested_dict(
                {c: None for c in nft_controller.CHOICES}
            )
            an_input = session.prompt(
                f"{get_flair()} (nft)> ",
                completer=completer,
            )
        else:
            an_input = input(f"{get_flair()} (nft)> ")

        try:
            process_input = nft_controller.switch(an_input)

            if process_input is not None:
                return process_input

        except SystemExit:
            # Fixed typo in the user-facing message ("exit" -> "exist").
            print("The command selected doesn't exist\n")
            continue
| 2.671875 | 3 |
crime.py | camhwilson/Crime | 0 | 12758213 | <filename>crime.py
from dataclasses import dataclass
from hashable_list import HashableList
@dataclass
class Overdose:
    """One overdose-death record, wrapping the raw record dict.

    Exposes the record's columns as attributes; the ten fixed
    ``combined_od1``..``combined_od10`` drug columns are collapsed into
    a single ``drug_list`` with empty (None) slots dropped.
    """
    overdose_dict: dict

    def __post_init__(self):
        record = self.overdose_dict
        self._id = record['_id']
        self.death_date_and_time = record['death_date_and_time']
        self.manner_of_death = record['manner_of_death']
        self.age = record['age']
        self.sex = record['sex']
        self.race = record['race']
        self.case_dispo = record['case_dispo']
        # Collapse the ten numbered drug columns (previously ten
        # hand-written lookups) into one filtered list.
        all_ods = [record['combined_od{}'.format(i)] for i in range(1, 11)]
        self.drug_list = HashableList([od for od in all_ods if od is not None])
        self.incident_zip = record['incident_zip']
        self.decedent_zip = record['decedent_zip']
        # NOTE(review): reads the 'case_year' column but the attribute is
        # named case_time -- looks inconsistent; confirm intent before
        # renaming either side.
        self.case_time = record['case_year']
| 3.015625 | 3 |
004/euler_4.py | kdungs/euler | 0 | 12758214 | #!/usr/bin/env python
import itertools as it
result = it.ifilter(
lambda i: str(i) == str(i)[::-1],
sorted(
map(
lambda x: x[0]*x[1],
it.combinations_with_replacement(range(100, 1000), 2)
),
reverse=True
)
).next()
print(result)
| 3 | 3 |
pickleCache.py | o19s/lazy-semantic-indexing | 32 | 12758215 | <gh_stars>10-100
import os
import pickle
def fetch(key):
    """Return the object pickled under *key* (stored as ``<key>.pickle``).

    Raises KeyError if no cache file exists for the key.
    """
    path = key + '.pickle'
    if os.path.isfile(path):
        # Close the file deterministically instead of leaking the handle
        # returned by a bare open() call, as the original did.
        with open(path, 'rb') as handle:
            return pickle.load(handle)
    raise KeyError("%s not in pickle cache" % path)
def save(key, data):
    """Pickle *data* to ``<key>.pickle``, overwriting any existing entry."""
    path = key + '.pickle'
    # Write through a context manager so the file is flushed and closed
    # even if pickling fails partway through.
    with open(path, 'wb') as handle:
        pickle.dump(data, handle)
| 2.875 | 3 |
lms/services/h_api.py | hypothesis/lms | 38 | 12758216 | """The H API service."""
from h_api.bulk_api import BulkAPI, CommandBuilder
from lms.models import HUser
from lms.services import ExternalRequestError
class HAPIError(ExternalRequestError):
    """
    A problem with an h API request.

    Raised whenever an h API request times out or when an unsuccessful,
    invalid or unexpected response is received from the h API.
    """
class HAPI:
    """
    A client for the "h" API.

    :raise HAPIError: if a call to the "h" API raises an unhandled exception
    """

    def __init__(self, _context, request):
        self._request = request

        settings = request.registry.settings
        # Credentials and endpoint all come from deployment settings; the
        # shared HTTP service performs the actual requests.
        self._authority = settings["h_authority"]
        self._http_auth = (settings["h_client_id"], settings["h_client_secret"])
        self._base_url = settings["h_api_url_private"]
        self._http_service = request.find_service(name="http")

    def execute_bulk(self, commands):
        """
        Send a series of h_api commands to the H bulk API.

        :param commands: Instances of h_api Commands
        """
        commands = list(commands)
        # The bulk API requires a leading "configure" command carrying the
        # effective user and the total instruction count (commands + 1 for
        # the configure command itself).
        commands = [
            CommandBuilder.configure(
                effective_user=HUser(username="lms").userid(self._authority),
                total_instructions=len(commands) + 1,
            )
        ] + commands

        self._api_request(
            "POST",
            path="bulk",
            body=BulkAPI.to_string(commands),
            headers={"Content-Type": "application/vnd.hypothesis.v1+x-ndjson"},
        )

    def get_user(self, username):
        """
        Return the h user for the given username.

        :rtype: HUser
        """
        userid = HUser(username).userid(self._authority)
        user_info = self._api_request("GET", path=f"users/{userid}").json()
        return HUser(username=username, display_name=user_info["display_name"])

    def _api_request(self, method, path, body=None, headers=None):
        """
        Send any kind of HTTP request to the h API and return the response.

        :param method: the HTTP request method to use, (e.g. "GET")
        :param path: the h API path to post to, relative to
            `settings["h_api_url_private"]` (e.g. "users")
        :param body: the body to send as a string (without modification)
        :param headers: extra headers to pass with the request
        :raise HAPIError: if the request fails for any other reason
        :return: the response from the h API
        :rtype: requests.Response
        """
        headers = headers or {}
        # Identify this client to h on every request.
        headers["Hypothesis-Application"] = "lms"

        request_args = {}
        if body is not None:
            request_args["data"] = body

        try:
            response = self._http_service.request(
                method=method,
                url=self._base_url + path.lstrip("/"),
                auth=self._http_auth,
                headers=headers,
                **request_args,
            )
        except ExternalRequestError as err:
            # Re-wrap transport failures in the module's own error type,
            # preserving the original response (if any) and the cause chain.
            raise HAPIError("Connecting to Hypothesis failed", err.response) from err

        return response
| 2.6875 | 3 |
metatags/templatetags/metatags_tags.py | degenhard/django-metatags | 1 | 12758217 | <gh_stars>1-10
from django import template
from django.contrib.contenttypes.models import ContentType
from django.core.cache import cache
from metatags import app_settings, models
register = template.Library()
@register.inclusion_tag('metatags/meta_tags.html')
def meta(obj=None, meta=None):
    """
    Template tag to generate meta tags. Takes an optional parameter of a
    template object to pull the tags from. If not passed, it will return the
    default meta tags, as set in their respective templates.
    """
    if meta is None:
        # The original used a mutable default argument (meta={}); use a
        # None sentinel so calls never share one dict.
        meta = {}
    cachename = (
        'metatags_%s_%s' % (obj.__class__.__name__, obj.id)
        if obj
        else 'meta_default'
    )
    meta_tags = cache.get(cachename)
    if not meta_tags:
        try:
            # The original referenced a bare `MetaTag`, which is never
            # imported (NameError at runtime); the model lives on the
            # imported `models` module.
            meta_tags = models.MetaTag.objects.get(
                content_type=ContentType.objects.get_for_model(obj.__class__),
                object_id=obj.pk
            )
        except (AttributeError, IndexError):
            # AttributeError covers obj=None.  A MetaTag.DoesNotExist from
            # .get() is NOT caught here and would propagate --
            # TODO(review): confirm whether it should be handled.
            meta_tags = None
        if app_settings.METATAGS_CACHE_TTL:
            cache.set(cachename, meta_tags, app_settings.METATAGS_CACHE_TTL)
    # NOTE(review): the fetched/cached meta_tags object is never returned;
    # the template only ever sees the `meta` argument.  This looks like a
    # bug, but it is preserved here to avoid changing template output --
    # confirm intent before returning meta_tags instead.
    return {'meta_tags': meta}
app.py | fogonwater/simple-color-extract | 0 | 12758218 | import os
from pprint import pprint as pp
import colorgram
from flask import Flask
from flask import render_template
app = Flask(__name__)
def get_filenames(f_dir, prefix='', suffix=''):
    """Return 'root/name' paths for files under *f_dir*, optionally
    restricted to names with the given prefix and/or suffix."""
    matches = []
    for root, _dirs, files in os.walk(f_dir):
        matches.extend(
            '{}/{}'.format(root, name)
            for name in files
            if name.startswith(prefix) and name.endswith(suffix)
        )
    return matches
def get_colors(num_cols=6):
    """Extract a colour palette from every image under ./static.

    Scans for jpg/jpeg/png/gif files and, for each one, pulls *num_cols*
    dominant colours with colorgram, returning a list of
    ``{'fname': ..., 'colors': [{'rgb', 'hsl', 'percent'}, ...]}`` dicts.
    """
    imgs = []
    for suffix in ['jpg', 'jpeg', 'png', 'gif']:
        for fname in get_filenames('static', suffix=suffix):
            # colorgram does the heavy lifting: one extraction per image.
            colors = colorgram.extract(fname, num_cols)
            summary = []
            for color in colors:
                summary.append({
                    'rgb':tuple(color.rgb),
                    'hsl':tuple(color.hsl),
                    # Proportion is 0..1; expose it as a percentage, 1 d.p.
                    'percent':round(color.proportion * 100, 1)
                })
            imgs.append({
                'fname':fname,
                'colors':summary
            })
    return imgs
@app.route("/")
def home():
imgs = get_colors()
return render_template('color.html', imgs=imgs)
@app.route("/api")
def api_home():
imgs = get_colors()
return {'images':imgs}
@app.route("/<int:num_cols>")
def number_colors(num_cols):
imgs = get_colors(num_cols)
return render_template('color.html', imgs=imgs)
| 2.890625 | 3 |
core/models.py | Alpeng01d/Qodex | 0 | 12758219 | <reponame>Alpeng01d/Qodex<gh_stars>0
#PACKAGE IMPORTS
from django.db import models
#LOCAL IMPORTS
class Qodex_users(models.Model):
    """A Qodex account record (an organisation/site such as a weighbridge
    operator, given the inn/kpp tax fields and `poligon` FKs elsewhere)."""

    # RELATED FIELDS -- currently disabled, pending the referenced models.
    #qodex_org = models.ForeignKey('Qodex_org', db_column='qodex_org', on_delete=models.SET_NULL, blank=True, null=True)
    #role = models.ForeignKey('User_roles', db_column='user_roles', on_delete=models.SET_NULL, blank=True, null=True)
    #region = models.ForeignKey('Regions', db_column='regions', on_delete=models.SET_NULL, blank=True, null=True)

    # LOCAL FIELDS
    name = models.TextField(blank=True, null=True)
    # NOTE(review): 256 chars suggests a hash is stored, but confirm this
    # is not plaintext.
    password = models.CharField(max_length=256, blank=True, null=True)
    refresh_token = models.TextField(blank=True, null=True)
    # 15 chars fits dotted IPv4 only; IPv6 would not fit.
    last_ip = models.CharField(max_length=15, blank=True, null=True)
    address = models.CharField(max_length=120, blank=True, null=True)
    weight_fault = models.IntegerField(blank=True, null=True)
    connected = models.BooleanField(blank=True, null=True)
    # inn/kpp: Russian tax identifiers.
    inn = models.CharField(max_length=20, blank=True, null=True)
    kpp = models.CharField(max_length=20, blank=True, null=True)
    active = models.BooleanField(blank=True, null=True)

    def __str__(self):
        """ Django-admin UI """
        return '%s-%s' % (self.name, self.inn)

    class Meta:
        """ Model configurations """
        managed = True
        # NOTE(review): table name is singular while the model is plural.
        db_table = 'Qodex_user'
class Weight_types(models.Model):
    """ Weight_types - lookup table of weighing-type names """

    def __str__(self):
        return self.name

    # LOCAL FIELDS
    name = models.TextField(blank=True, null=True)

    class Meta:
        """ Model configurations """
        managed = True
        db_table = 'Weight_types'
class Operators(models.Model):
    """ Operators - operator accounts belonging to a poligon (Qodex user) """

    def __str__(self):
        return self.username

    # LOCAL FIELDS
    username = models.TextField(blank=True, null=True)
    password = models.TextField(blank=True, null=True)
    role = models.CharField(max_length=30, blank=True, null=True)
    fullname = models.CharField(max_length=30, blank=True, null=True)
    # ex_id / upd_time / ar_get look like external-sync bookkeeping fields
    # (id in the source system, last update, last archive fetch) -- confirm.
    ex_id = models.IntegerField(blank=True, null=True)
    upd_time = models.DateTimeField(blank=True, null=True)
    ar_get = models.DateTimeField(blank=True, null=True)
    active = models.BooleanField(blank=True, null=True)

    # RELATED FIELDS
    poligon = models.ForeignKey('Qodex_Users', db_column='poligon', on_delete=models.SET_NULL, blank=True, null=True)

    class Meta:
        """ Model configurations """
        managed = True
        db_table = 'Operators'
class Qodex_achive(models.Model):
    """ Qodex_achive - model of qodex_achive databases """
    def __str__(self):
        # ``user`` is nullable: fall back to '' so str() always returns a str.
        return self.user or ''
    #LOCAL FIELDS
    date = models.DateTimeField(blank=True, null=True)
    user = models.CharField(max_length=20, blank=True, null=True)
    origin = models.TextField(blank=True, null=True)
    new_data = models.TextField(blank=True, null=True)
    restored = models.BooleanField(blank=True, null=True)
    restored_date = models.DateTimeField(blank=True, null=True)
    #RELATED FIELDS
    #table = models.ForeignKey('Qodex_Users', db_column='table', on_delete=models.SET_NULL, blank=True, null=True)
    poligon = models.ForeignKey('Qodex_users', db_column='poligon', on_delete=models.SET_NULL, blank=True, null=True)
    class Meta:
        """ Model configurations """
        managed = True
        db_table = 'Qodex_achive'
class Cm_events_log(models.Model):
    """ Cm_events_log - model of m_events_log databases """
    def __str__(self):
        # ``operator`` is nullable: fall back to '' so str() always returns a str.
        return self.operator or ''
    #LOCAL FIELDS
    datetime = models.DateTimeField(blank=True, null=True)
    event = models.IntegerField(blank=True, null=True)
    operator = models.CharField(max_length=12, blank=True, null=True)
    ex_id = models.IntegerField(blank=True, null=True)
    upd_time = models.DateTimeField(blank=True, null=True)
    #RELATED FIELDS
    poligon = models.ForeignKey('Qodex_users', db_column='poligon', on_delete=models.SET_NULL, blank=True, null=True)
    class Meta:
        """ Model configurations """
        managed = True
        db_table = 'Cm_events_log'
| 2.109375 | 2 |
made/commands/inputs_grp/cmd.py | GuerrillaAnalytics/made-cli | 1 | 12758220 | <reponame>GuerrillaAnalytics/made-cli<filename>made/commands/inputs_grp/cmd.py
import os
import logging
import click
from made.commands.inputs_grp.input_functions import validate_input_version
from made.controllers.config import Config
from made.controllers.inputs import input_manager_factory
from made.commands.project_grp import project_functions
from made.controllers.inputs import inputs_functions
from made import utils
@click.group()
@click.pass_context
def input(ctx):
    # Click command group that namespaces the ``input`` sub-commands below;
    # the body is intentionally empty.  NOTE(review): the name shadows the
    # builtin ``input`` within this module.
    pass
@input.command('create')
# @click.option('--alert', '-a', default=True)
# @click.argument('name')
# @click.argument('url')
@click.pass_obj
def input_create(ctx):
    """Interactively prompt for an input ID and a schema name, then create
    the new input source via the input manager."""
    # NOTE(review): the original called ``validate_input_id``, which is never
    # imported (only ``validate_input_version`` is) and raised a NameError at
    # runtime.  Import the validator the prompt clearly intends here --
    # confirm that ``input_functions`` actually exposes ``validate_input_id``.
    from made.commands.inputs_grp.input_functions import validate_input_id
    # Re-prompt until the ID passes format validation.
    while True:
        user_source_id = click.prompt('Please enter an input ID', type=str)
        if not validate_input_id(user_source_id):
            logging.getLogger('my logger').debug(
                "Input ID has invalid format " + user_source_id)
            continue
        break
    # Schema name: must be non-empty and contain no internal spaces.
    while True:
        user_source_name = click.prompt('Please enter a schema name', type=str)
        if len(user_source_name.strip()) == 0:
            continue
        if " " in user_source_name.strip():
            continue
        break
    config = Config(os.getcwd())
    input_manager = input_manager_factory.InputManagerFactory.create(config)
    input_manager.create_new_source(user_source_id, user_source_name)
@input.command('audit')
@click.pass_obj
def input_audit(ctx):
    """Audit the inputs under the current working directory and
    pretty-print the result."""
    utils.pretty_print(inputs_functions.input_audit_path(os.getcwd()))
| 2.421875 | 2 |
libs/markdown/extensions/extra.py | bbondy/brianbondy.gae | 0 | 12758221 | #!/usr/bin/env python
"""
Python-Markdown Extra Extension
===============================
A compilation of various Python-Markdown extensions that imitates
[PHP Markdown Extra](http://michelf.com/projects/php-markdown/extra/).
Note that each of the individual extensions still need to be available
on your PYTHONPATH. This extension simply wraps them all up as a
convenience so that only one extension needs to be listed when
initiating Markdown. See the documentation for each individual
extension for specifics about that extension.
In the event that one or more of the supported extensions are not
available for import, Markdown will issue a warning and simply continue
without that extension.
There may be additional extensions that are distributed with
Python-Markdown that are not included here in Extra. Those extensions
are not part of PHP Markdown Extra, and therefore, not part of
Python-Markdown Extra. If you really would like Extra to include
additional extensions, we suggest creating your own clone of Extra
under a different name. You could also edit the `extensions` global
variable defined below, but be aware that such changes may be lost
when you upgrade to any future version of Python-Markdown.
"""
import markdown
# Names of the stand-alone Python-Markdown extensions that together emulate
# PHP Markdown Extra; each must be importable when Markdown loads them.
extensions = ['fenced_code',
              'footnotes',
              'headerid',
              'def_list',
              'tables',
              'abbr',
              ]
class ExtraExtension(markdown.Extension):
    """ Add various extensions to Markdown class."""
    def extendMarkdown(self, md, md_globals):
        """ Register extension instances. """
        # Delegates to Markdown's own loader, which imports each named
        # extension from the module-level list and applies this extension's
        # config to it.
        md.registerExtensions(extensions, self.config)
def makeExtension(configs=None):
    """Entry point used by Markdown to build the Extra extension.

    ``configs`` may be a dict, an iterable of ``(key, value)`` pairs, or
    ``None`` for no configuration.
    """
    # Avoids the mutable-default-argument pitfall of the original
    # ``configs={}`` signature; behaviour is otherwise unchanged.
    return ExtraExtension(configs=dict(configs or {}))
| 2.0625 | 2 |
jupiter/domain/projects/project.py | horia141/jupiter | 15 | 12758222 | """The project."""
from dataclasses import dataclass
from jupiter.domain.entity_name import EntityName
from jupiter.domain.projects.project_key import ProjectKey
from jupiter.framework.aggregate_root import AggregateRoot
from jupiter.framework.base.entity_id import BAD_REF_ID
from jupiter.framework.base.timestamp import Timestamp
@dataclass()
class Project(AggregateRoot):
    """The project."""
    @dataclass(frozen=True)
    class Created(AggregateRoot.Created):
        """Created event."""
    @dataclass(frozen=True)
    class Updated(AggregateRoot.Updated):
        """Updated event."""
    # Underscore-prefixed fields are exposed via the read-only properties below.
    _key: ProjectKey
    _name: EntityName
    @staticmethod
    def new_project(key: ProjectKey, name: EntityName, created_time: Timestamp) -> 'Project':
        """Create a project."""
        # BAD_REF_ID marks the aggregate as not yet persisted; storage is
        # expected to assign the real id later.
        project = Project(
            _ref_id=BAD_REF_ID,
            _archived=False,
            _created_time=created_time,
            _archived_time=None,
            _last_modified_time=created_time,
            _events=[],
            _key=key,
            _name=name)
        project.record_event(Project.Created.make_event_from_frame_args(created_time))
        return project
    def change_name(self, name: EntityName, modification_time: Timestamp) -> 'Project':
        """Change the name of the workspace."""
        # NOTE(review): the docstring says "workspace" but this renames the
        # project; returns self to allow call chaining.
        self._name = name
        self.record_event(
            Project.Updated.make_event_from_frame_args(modification_time))
        return self
    @property
    def key(self) -> ProjectKey:
        """The key of the project."""
        return self._key
    @property
    def name(self) -> EntityName:
        """The name of the project."""
        return self._name
| 2.3125 | 2 |
ettus/rfnoc/mult.py | benreynwar/rfgnocchi | 4 | 12758223 | import os
import logging
import testfixtures
from pyvivado import interface, signal, builder
from rfgnocchi import config, ettus
logger = logging.getLogger(__name__)
def get_mult_interface(params):
    """Build the pyvivado Interface description for the Ettus ``mult`` block.

    ``params`` must supply the operand/product widths plus the latency and
    cascade configuration; these are forwarded verbatim as Verilog module
    parameters.
    """
    module_name = 'Mult'
    width_A = params['width_A']
    width_B = params['width_B']
    width_P = params['width_P']
    drop_top_P = params['drop_top_P']
    latency = params['latency']
    cascade_out = params['cascade_out']
    # Renamed from ``builder``: the original local shadowed the imported
    # ``pyvivado.builder`` module.
    mult_builder = ettus.get_builder('mult')
    packages = []
    module_parameters = {
        'WIDTH_A': width_A,
        'WIDTH_B': width_B,
        'WIDTH_P': width_P,
        'DROP_TOP_P': drop_top_P,
        'LATENCY': latency,
        'CASCADE_OUT': cascade_out,
    }
    # AXI-stream style handshake: a/b are the operand streams, p the product.
    wires_in = (
        ('reset', signal.std_logic_type),
        ('a_tdata', signal.StdLogicVector(width=width_A)),
        ('a_tlast', signal.std_logic_type),
        ('a_tvalid', signal.std_logic_type),
        ('b_tdata', signal.StdLogicVector(width=width_B)),
        ('b_tlast', signal.std_logic_type),
        ('b_tvalid', signal.std_logic_type),
        ('p_tready', signal.std_logic_type),
    )
    wires_out = (
        ('a_tready', signal.std_logic_type),
        ('b_tready', signal.std_logic_type),
        ('p_tdata', signal.StdLogicVector(width=width_P)),
        ('p_tlast', signal.std_logic_type),
        ('p_tvalid', signal.std_logic_type),
    )
    iface = interface.Interface(
        wires_in, wires_out, module_name=module_name,
        parameters=params, module_parameters=module_parameters,
        packages=packages, builder=mult_builder, clock_names=['clk'])
    return iface
# Register this module's interface factory exactly once; the assert guards
# against accidental double registration.
assert('Mult' not in interface.module_register)
interface.module_register['Mult'] = get_mult_interface
| 1.929688 | 2 |
examples/mnist_simple/mnist.py | raviqqe/qnd.tf | 69 | 12758224 | ../lib/mnist.py | 1.0625 | 1 |
q17.py | assuncaofelipe/Python-Exercises | 0 | 12758225 | # criar uma matriz identidade de tamanho NxN, onde N é informado pelo usuário.
# Criar uma segunda matriz que é o dobro da primeira.
# 1 0 0
# 0 1 0
# 0 0 1
def criaMatriz(n, m):
    """Create an n x m matrix of zeros (n rows, m columns).

    Each row is an independent list, so mutating one cell never aliases
    another row.  The original implementation allocated ``n`` slots but
    filled only ``m`` of them with rows of length ``n`` -- correct only for
    square matrices; this version handles any n x m.
    """
    return [[0] * m for _ in range(n)]
def criaMatrizEye(n):
    """Return the n x n identity matrix as nested lists of ints."""
    # Built directly so this helper no longer depends on the zero-matrix
    # helper; output is identical (1 on the diagonal, 0 elsewhere).
    return [[1 if i == j else 0 for j in range(n)] for i in range(n)]
def mostraMatriz(mat, n):
    """Print the first n rows of ``mat``, then a blank separator line."""
    for indice in range(n):
        linha = mat[indice]
        print(linha[:])
    print('\n')
# Read the (square) matrix size from the user.
n = int(input('Tamanho matriz: '))
mat1 = criaMatrizEye(n)
mat2 = criaMatriz(n,n)
# call the functions and print
mostraMatriz(mat1, n)
mostraMatriz(mat2, n)
# multiply matrix 1 by two, element-wise: mat2 becomes 2*identity
for i in range(n):
    for j in range(n):
        mat2[i][j] = mat1[i][j] * 2
mostraMatriz(mat2, n)
database/database.py | ethanlindley/lego.py | 4 | 12758226 | <gh_stars>1-10
import os
from mongoengine import *
from .account import Account
class Database(object):
    """Thin wrapper around the local MongoDB 'lego-py' database and its accounts."""
    def __init__(self, salt):
        # Connect to the local MongoDB and preload all Account documents.
        # NOTE(review): ``self.salt`` is stored but never used in this class;
        # presumably intended for the password-hashing TODO below -- confirm.
        if not os.path.exists('./resources/data'):
            os.makedirs('./resources/data/db')
        connect('lego-py', host='localhost', port=27017) # TODO - setup config for server/db specific constants
        self.accounts = []
        self.get_accounts()
        self.salt = salt
    def get_accounts(self):
        # load the database and store account objects in a list
        # that can be dynamically read across server instances
        for account in Account.objects:
            self.accounts.append(account)
    def register_account_db(self, username='', password='', banned=False, is_admin=False):
        # Persist a new Account with the next sequential id and mirror it in
        # the in-memory cache.
        account = Account(
            account_id=Account.objects.count() + 1,
            username=username,
            password=password, # TODO - fix hashing of passwords in db
            banned=banned,
            is_admin=is_admin
        )
        account.save()
        self.accounts.append(account)
| 3.03125 | 3 |
apps/accounts/views/core.py | whytheplatypus/sharemyhealth | 0 | 12758227 | <reponame>whytheplatypus/sharemyhealth
import logging
from django.shortcuts import render
from django.contrib.auth.decorators import login_required
from django.contrib import messages
from django.utils.translation import ugettext_lazy as _
from ..forms import AccountSettingsForm
logger = logging.getLogger('sharemyhealth_.%s' % __name__)
@login_required
def account_settings(request):
    """Display and process the user's account-settings form.

    GET renders the form pre-filled from ``request.user``; POST validates
    and saves username/email/first/last name back onto the user.
    """
    name = _('Account Settings')
    # Surface the user's group memberships as informational flash messages.
    groups = request.user.groups.values_list('name', flat=True)
    for g in groups:
        messages.info(request, _('You are in the group: %s' % (g)))
    if request.method == 'POST':
        form = AccountSettingsForm(request.POST)
        if form.is_valid():
            data = form.cleaned_data
            # update the user info
            request.user.username = data['username']
            request.user.email = data['email']
            request.user.first_name = data['first_name']
            request.user.last_name = data['last_name']
            request.user.save()
            messages.success(request,
                             'Your account settings have been updated.')
            return render(request,
                          'account-settings.html',
                          {'form': form, 'name': name})
        else:
            # the form had errors; re-render with field errors attached
            return render(request,
                          'account-settings.html',
                          {'form': form, 'name': name})
    # this is an HTTP GET: pre-fill the form from the current user
    form = AccountSettingsForm(
        initial={
            'username': request.user.username,
            'email': request.user.email,
            'last_name': request.user.last_name,
            'first_name': request.user.first_name,
        }
    )
    return render(request,
                  'account-settings.html',
                  {'name': name, 'form': form})
| 2.265625 | 2 |
linear_attention_transformer/__init__.py | lucidrains/linear-attention | 361 | 12758228 | from linear_attention_transformer.linear_attention_transformer import LinearAttentionTransformer, LinearAttentionTransformerLM, LinformerSettings, LinformerContextSettings
from linear_attention_transformer.autoregressive_wrapper import AutoregressiveWrapper
from linear_attention_transformer.images import ImageLinearAttention
| 1.148438 | 1 |
Erik-solutions/day10/day.py | NTI-Gymnasieingenjor/AdventOfCode2020 | 1 | 12758229 | import numpy
import math
def joltage_differences(adapters):
    """Return the joltage gaps across the full chain.

    The chain starts at the 0-jolt outlet and ends at the device, which is
    always rated 3 jolts above the highest adapter.
    """
    chain = [0] + sorted(adapters)
    chain.append(chain[-1] + 3)
    return [b - a for a, b in zip(chain, chain[1:])]


def part1(adapters):
    """Product of the number of 1-jolt gaps and the number of 3-jolt gaps."""
    diffs = joltage_differences(adapters)
    return diffs.count(1) * diffs.count(3)


def part2(adapters):
    """Count the distinct valid adapter arrangements via dynamic programming.

    ways[j] = number of ways to reach joltage j; each adapter is reachable
    from a joltage 1, 2 or 3 below it, starting from the outlet at 0.
    """
    ways = {0: 1}
    for jolt in sorted(adapters):
        ways[jolt] = ways.get(jolt - 1, 0) + ways.get(jolt - 2, 0) + ways.get(jolt - 3, 0)
    return ways[max(adapters)]


if __name__ == "__main__":
    # Skip blank lines so a trailing newline in input.txt doesn't crash int().
    with open("input.txt") as inputDoc:
        docLines = [int(line) for line in inputDoc.read().split("\n") if line]

    # PART 1
    print("\nPART 1")
    print(part1(docLines))  # 1890 for the original puzzle input

    # PART 2
    # NOTE(review): the original Part 2 was broken -- it referenced
    # ``itertools.combinations`` without importing itertools (NameError) and
    # its enumeration never counted arrangements correctly; replaced with the
    # standard DP solution.
    print("\nPART 2")
    print(part2(docLines))
| 2.953125 | 3 |
tests/advanced.py | seece/cbpp | 0 | 12758230 | <filename>tests/advanced.py
import sys
import inspect
from tests.testcase import *
from processor import *
# Each case below is a declarative Test subclass: ``setup`` points at a
# fixture source file (and optionally sets a human-readable ``self.name``);
# the shared Test base class drives the preprocessor run and comparison.
class basicIfdef(Test):
    def setup(self):
        self.name = "test normal #ifdef feature"
        self.sourcefile = "tests/cb/ifdef.cb"
class negationIfdef(Test):
    def setup(self):
        self.name = "test #ifndef feature"
        self.sourcefile = "tests/cb/ifndef.cb"
class undef(Test):
    def setup(self):
        self.name = "test #undef feature"
        self.sourcefile = "tests/cb/undef.cb"
class basicConcatenation(Test):
    def setup(self):
        self.sourcefile = "tests/cb/concat.cb"
class lineMacro(Test):
    def setup(self):
        self.sourcefile = "tests/cb/line.cb"
class fileMacro(Test):
    def setup(self):
        self.sourcefile = "tests/cb/filemacro.cb"
class macroOverlap(Test):
    def setup(self):
        self.sourcefile = "tests/cb/substitution_overlap.cb"
class onlyMacroOnLine(Test):
    def setup(self):
        self.sourcefile = "tests/cb/linestart.cb"
class parensInArguments(Test):
    def setup(self):
        self.sourcefile = "tests/cb/parens.cb"
class dumbDebug(Test):
    def setup(self):
        self.sourcefile = "tests/cb/pragma.cb"
class recursion(Test):
    def setup(self):
        self.sourcefile = "tests/cb/recursion.cb"
# Namespace tests are currently disabled (kept as an inert string literal).
"""
class namespaceBasic(Test):
    def setup(self):
        self.sourcefile = "tests/cb/namespace_basic.cb"
class namespaceNested(Test):
    def setup(self):
        self.sourcefile = "tests/cb/namespace_nested.cb"
"""
# These cases additionally override getDefaultArgs() to switch on the
# cull/minify preprocessor options for their fixture.
class culling(Test):
    def setup(self):
        self.sourcefile = "tests/cb/cull.cb"
    def getDefaultArgs(self):
        args = super().getDefaultArgs()
        args.cull = True
        return args
class minify(Test):
    def setup(self):
        self.sourcefile = "tests/cb/minify.cb"
    def getDefaultArgs(self):
        args = super().getDefaultArgs()
        args.minify= True
        return args
class minifyWithCulling(Test):
    def setup(self):
        self.sourcefile = "tests/cb/minify_and_cull.cb"
    def getDefaultArgs(self):
        args = super().getDefaultArgs()
        args.cull = True
        args.minify= True
        return args
# Stringification and multiline-macro fixtures; same declarative pattern.
class stringify(Test):
    def setup(self):
        self.name = "stringify macro parameters"
        self.sourcefile = "tests/cb/stringify.cb"
class multilineParse(Test):
    def setup(self):
        self.name = "multiline macro parsing"
        self.sourcefile = "tests/decorate/multiline_parse.txt"
class multilineExpand(Test):
    def setup(self):
        self.name = "basic multiline macro expansion"
        self.sourcefile = "tests/decorate/multiline_expand_basic.txt"
class multilineExpandNoParams(Test):
    def setup(self):
        self.name = "multiline macro with no parameters"
        self.sourcefile = "tests/decorate/multiline_expand_noparams.txt"
class multilineExpandMultiple(Test):
    def setup(self):
        self.name = "multiple multiline macros"
        self.sourcefile = "tests/decorate/multiline_expand_multiple.txt"
class multilineExpandLambda(Test):
    def setup(self):
        self.name = "basic multiline lambda expansion"
        self.sourcefile = "tests/decorate/multiline_expand_lambda.txt"
# Auto-discover every Test subclass defined in this module (the __module__
# check filters out anything that was merely imported).
testlist = [
    member
    for _, member in inspect.getmembers(sys.modules[__name__], inspect.isclass)
    if member.__module__ == __name__
]
| 2.46875 | 2 |
noxfile.py | gitonthescene/csv-reconcile | 27 | 12758231 | from nox_poetry import session, SDIST
def args(s):
    """Split a shell-style command string into a list of tokens.

    Replaces the original ``args = lambda ...`` binding (PEP 8 discourages
    assigning lambdas to names); behaviour is identical to ``str.split``.
    """
    return s.split()
@session(python=['3.7', '3.8', '3.9'])
def test_main(session):
    # Install the project from an sdist plus pytest, then run the core test
    # suite under each supported interpreter.
    session.poetry.installroot(distribution_format=SDIST)
    session.install('pytest')
    session.run(*args('pytest -v tests/main'))
@session(python=['3.7', '3.8', '3.9'])
def test_geo(session):
    # Same as test_main but with the csv-reconcile-geo plugin installed, so
    # the plugin-specific tests can run.
    session.poetry.installroot(distribution_format=SDIST)
    session.install('csv-reconcile-geo')
    session.install('pytest')
    session.run(*args('pytest -v tests/plugins/geo'))
| 1.59375 | 2 |
nineml/abstraction/componentclass/visitors/__init__.py | INCF/nineml-python | 6 | 12758232 | <gh_stars>1-10
from .queriers import ComponentClassInterfaceInferer
| 1.03125 | 1 |
myenv/lib/python3.7/site-packages/autocorrect/constants.py | theCydonian/AudioEyes | 3 | 12758233 | <gh_stars>1-10
# -*- coding: utf-8 -*-
# Per-language regex matching a single "word" for the spell-checker.
word_regexes = {
    'en': r'[A-Za-z]+',
    'pl': r'[A-Za-zęóąśłżźćń]+',
    'ru': r'[АаБбВвГгДдЕеЁёЖжЗзИиЙйКкЛлМмНнОоПпРрСсТтУуФфХхЦцЧчШшЩщЪъЫыЬьЭэЮюЯя]+',
    'uk': r'[АаБбВвГ㥴ДдЕеЄєЖжЗзИиІіЇїЙйКкЛлМмНнОоПпРрСсТтУуФфХхЦцЧчШшЩщЬЮюЯя]+',
    'tr': r'[a-zA-ZçÇğĞüÜöÖşŞıİ]+',
}
# Per-language alphabet used to generate candidate single-character edits.
alphabets = {
    'en': 'abcdefghijklmnopqrstuvwxyz',
    'pl': 'abcdefghijklmnopqrstuvwxyzęóąśłżźćń',
    'ru': 'шиюынжсяплзухтвкйеобмцьёгдщэарчфъ',
    'uk': 'фагксщроємшплуьцнжхїйювязтибґідеч',
    'tr': 'abcçdefgğhıijklmnoöprsştuüvyzqwxÇĞİÜÖ',
}
| 1.84375 | 2 |
pub_cam.py | ssanatos/turtlebot3_motion_driving | 4 | 12758234 | <gh_stars>1-10
#!/usr/bin/env python
import rclpy
from rclpy.node import Node
from sensor_msgs.msg import CompressedImage
import cv2 as cv
from cv_bridge import CvBridge
import numpy as np
class BotCamPublisher(Node):
    # ROS2 node that publishes CompressedImage frames on the 'botcam' topic.
    def __init__(self):
        super().__init__('bot_cam_publisher')
        # Queue depth 10: older frames are dropped if subscribers lag behind.
        self.publisher_ = self.create_publisher(CompressedImage, 'botcam', 10)
    def img_publisher(self,cv_img):
        # Publish one already-converted CompressedImage message.
        self.publisher_.publish(cv_img)
def main(args=None):
    # Capture frames from the default webcam and publish them until the
    # capture device reports closed.
    rclpy.init(args=args)
    pub = BotCamPublisher()
    bridge = CvBridge()
    cap = cv.VideoCapture(0)
    while cap.isOpened():
        success, img = cap.read()
        if not success:
            # Transient empty frame: skip it and keep streaming.
            print("Ignoring empty camera frame.")
            continue
        cv_img = bridge.cv2_to_compressed_imgmsg(img)
        pub.img_publisher(cv_img)
    cap.release()
    # NOTE(review): rclpy.shutdown() / node destruction is never called --
    # confirm this is the intended lifecycle.
if __name__ == '__main__':
    main()
analysis/lib/online.py | jonhue/bachelors-thesis | 0 | 12758235 | from typing import List, Tuple
from soco.data_center.online import stop
from soco.data_center.online.uni_dimensional import (
lazy_capacity_provisioning,
memoryless,
probabilistic,
randomized,
randomly_biased_greedy,
)
from soco.data_center.online.multi_dimensional import (
horizon_control,
lazy_budgeting,
online_balanced_descent,
online_gradient_descent,
)
from soco.data_center.model import DataCenterModel
from tqdm import tqdm
# Address of the local soco data-center service the algorithms talk to.
ADDR = "127.0.0.1:3449"
def evaluate(
    alg,
    online_inp: List[List[List[List[int]]]],
    fractional,
    integral,
    m,
    initial_runtime,
) -> Tuple[float, float]:
    """Step ``alg`` through every online input and collect cost statistics.

    NOTE(review): despite the annotation, this returns a 10-tuple
    (initial_cost, cost, initial_int_cost, int_cost, energy_cost,
    revenue_loss, switching_cost, initial_runtime, runtimes, ms).  The
    ``m`` parameter is shadowed inside the loop before its first use --
    confirm both against the callers.
    """
    initial_cost = fractional[1][0]
    initial_int_cost = integral[1][0]
    cost = initial_cost
    int_cost = initial_int_cost
    # integral[1][1] carries a cost breakdown when the solver provides one.
    energy_cost = integral[1][1].energy_cost if integral[1][1] is not None else 0
    revenue_loss = integral[1][1].revenue_loss if integral[1][1] is not None else 0
    assert int_cost >= energy_cost + revenue_loss
    runtimes = []
    initial_xs = integral[0]
    xs = initial_xs.copy()
    ms = []
    for i in tqdm(range(len(online_inp))):
        fractional, integral, m, runtime = alg.next(ADDR, online_inp[i])
        cost = fractional[1][0]
        int_cost = integral[1][0]
        if integral[1][1] is not None:
            energy_cost = integral[1][1].energy_cost
            revenue_loss = integral[1][1].revenue_loss
            assert int_cost >= energy_cost + revenue_loss
        xs.append(integral[0])
        ms.append(m)
        runtimes.append(runtime)
    # Tell the service the run is over before deriving summary values.
    stop(ADDR)
    switching_cost = int_cost - energy_cost - revenue_loss
    assert len(xs) - len(initial_xs) == len(online_inp)
    print(f"Resulting schedule: {xs}")
    return (
        initial_cost,
        cost,
        initial_int_cost,
        int_cost,
        energy_cost,
        revenue_loss,
        switching_cost,
        initial_runtime,
        runtimes,
        ms,
    )
# Wrappers below: boot the named algorithm on the offline input, then hand it
# to evaluate().  Each returns evaluate()'s 10-tuple (the Tuple[float, float]
# annotations are stale).
def evaluate_fractional_lazy_capacity_provisioning(
    model: DataCenterModel,
    offline_inp: List[List[int]],
    online_inp: List[List[List[List[int]]]],
    w: int = 0,
) -> Tuple[float, float]:
    # ``w`` is the prediction-window length passed to the algorithm.
    (
        fractional,
        integral,
        m,
        initial_runtime,
    ) = lazy_capacity_provisioning.fractional.start(ADDR, model, offline_inp, w)
    return evaluate(
        lazy_capacity_provisioning.fractional,
        online_inp,
        fractional,
        integral,
        m,
        initial_runtime,
    )
def evaluate_integral_lazy_capacity_provisioning(
    model: DataCenterModel,
    offline_inp: List[List[int]],
    online_inp: List[List[List[List[int]]]],
    w: int = 0,
) -> Tuple[float, float]:
    # Integral variant of lazy capacity provisioning.
    (
        fractional,
        integral,
        m,
        initial_runtime,
    ) = lazy_capacity_provisioning.integral.start(ADDR, model, offline_inp, w)
    return evaluate(
        lazy_capacity_provisioning.integral,
        online_inp,
        fractional,
        integral,
        m,
        initial_runtime,
    )
# Uni-dimensional algorithms; each returns evaluate()'s 10-tuple (the
# Tuple[float, float] annotations are stale).
def evaluate_memoryless(
    model: DataCenterModel,
    offline_inp: List[List[int]],
    online_inp: List[List[List[List[int]]]],
) -> Tuple[float, float]:
    fractional, integral, m, initial_runtime = memoryless.start(
        ADDR, model, offline_inp, 0
    )
    return evaluate(memoryless, online_inp, fractional, integral, m, initial_runtime)
def evaluate_probabilistic(
    model: DataCenterModel,
    offline_inp: List[List[int]],
    online_inp: List[List[List[List[int]]]],
) -> Tuple[float, float]:
    # Starts with an empty breakpoint set.
    options = probabilistic.Options(probabilistic.Breakpoints([]))
    fractional, integral, m, initial_runtime = probabilistic.start(
        ADDR, model, offline_inp, 0, options
    )
    return evaluate(probabilistic, online_inp, fractional, integral, m, initial_runtime)
def evaluate_randomized_probabilistic(
    model: DataCenterModel,
    offline_inp: List[List[int]],
    online_inp: List[List[List[List[int]]]],
) -> Tuple[float, float]:
    fractional, integral, m, initial_runtime = randomized.probabilistic.start(
        ADDR, model, offline_inp, 0
    )
    return evaluate(
        randomized.probabilistic, online_inp, fractional, integral, m, initial_runtime
    )
def evaluate_randomized_randomly_biased_greedy(
    model: DataCenterModel,
    offline_inp: List[List[int]],
    online_inp: List[List[List[List[int]]]],
) -> Tuple[float, float]:
    fractional, integral, m, initial_runtime = randomized.randomly_biased_greedy.start(
        ADDR, model, offline_inp, 0
    )
    return evaluate(
        randomized.randomly_biased_greedy,
        online_inp,
        fractional,
        integral,
        m,
        initial_runtime,
    )
def evaluate_randomly_biased_greedy(
    model: DataCenterModel,
    offline_inp: List[List[int]],
    online_inp: List[List[List[List[int]]]],
    theta: float = 1,
) -> Tuple[float, float]:
    # ``theta`` is the bias parameter of the greedy algorithm.
    options = randomly_biased_greedy.Options(theta)
    fractional, integral, m, initial_runtime = randomly_biased_greedy.start(
        ADDR, model, offline_inp, 0, options
    )
    return evaluate(
        randomly_biased_greedy, online_inp, fractional, integral, m, initial_runtime
    )
# Lazy-budgeting variants; each returns evaluate()'s 10-tuple (annotations
# are stale).
def evaluate_lazy_budgeting_slo(
    model: DataCenterModel,
    offline_inp: List[List[int]],
    online_inp: List[List[List[List[int]]]],
    randomized: bool = False,
) -> Tuple[float, float]:
    # NOTE(review): the ``randomized`` parameter shadows the imported
    # ``randomized`` module inside this function body (harmless here, since
    # the module is not referenced below).
    options = lazy_budgeting.smoothed_load_optimization.Options(randomized)
    (
        fractional,
        integral,
        m,
        initial_runtime,
    ) = lazy_budgeting.smoothed_load_optimization.start(
        ADDR, model, offline_inp, 0, options
    )
    return evaluate(
        lazy_budgeting.smoothed_load_optimization,
        online_inp,
        fractional,
        integral,
        m,
        initial_runtime,
    )
def evaluate_lazy_budgeting_sblo(
    model: DataCenterModel,
    offline_inp: List[List[int]],
    online_inp: List[List[List[List[int]]]],
    epsilon: float = 0.25,
) -> Tuple[float, float]:
    options = lazy_budgeting.smoothed_balanced_load_optimization.Options(epsilon)
    (
        fractional,
        integral,
        m,
        initial_runtime,
    ) = lazy_budgeting.smoothed_balanced_load_optimization.start(
        ADDR, model, offline_inp, 0, options
    )
    return evaluate(
        lazy_budgeting.smoothed_balanced_load_optimization,
        online_inp,
        fractional,
        integral,
        m,
        initial_runtime,
    )
# Online balanced descent (primal/dual) and online gradient descent; each
# returns evaluate()'s 10-tuple (annotations are stale).
def evaluate_pobd(
    model: DataCenterModel,
    offline_inp: List[List[int]],
    online_inp: List[List[List[List[int]]]],
    beta: float = 0.5,
) -> Tuple[float, float]:
    # Primal OBD with a squared-Euclidean mirror map and balance parameter beta.
    options = online_balanced_descent.primal.Options.euclidean_squared(beta)
    (
        fractional,
        integral,
        m,
        initial_runtime,
    ) = online_balanced_descent.primal.start(ADDR, model, offline_inp, 0, options)
    return evaluate(
        online_balanced_descent.primal,
        online_inp,
        fractional,
        integral,
        m,
        initial_runtime,
    )
def evaluate_dobd(
    model: DataCenterModel,
    offline_inp: List[List[int]],
    online_inp: List[List[List[List[int]]]],
    eta: float = 1,
) -> Tuple[float, float]:
    # Dual OBD with a squared-Euclidean mirror map and step size eta.
    options = online_balanced_descent.dual.Options.euclidean_squared(eta)
    (
        fractional,
        integral,
        m,
        initial_runtime,
    ) = online_balanced_descent.dual.start(ADDR, model, offline_inp, 0, options)
    return evaluate(
        online_balanced_descent.dual,
        online_inp,
        fractional,
        integral,
        m,
        initial_runtime,
    )
def evaluate_ogd(
    model: DataCenterModel,
    offline_inp: List[List[int]],
    online_inp: List[List[List[List[int]]]],
) -> Tuple[float, float]:
    # OGD with the sqrt step-size schedule.
    options = online_gradient_descent.Options.sqrt()
    (
        fractional,
        integral,
        m,
        initial_runtime,
    ) = online_gradient_descent.start(ADDR, model, offline_inp, 0, options)
    return evaluate(
        online_gradient_descent,
        online_inp,
        fractional,
        integral,
        m,
        initial_runtime,
    )
# Horizon-control algorithms with prediction window ``w``; each returns
# evaluate()'s 10-tuple (annotations are stale).
def evaluate_receding_horizon_control(
    model: DataCenterModel,
    offline_inp: List[List[int]],
    online_inp: List[List[List[List[int]]]],
    w: int = 0,
) -> Tuple[float, float]:
    (
        fractional,
        integral,
        m,
        initial_runtime,
    ) = horizon_control.receding_horizon_control.start(ADDR, model, offline_inp, w)
    return evaluate(
        horizon_control.receding_horizon_control,
        online_inp,
        fractional,
        integral,
        m,
        initial_runtime,
    )
def evaluate_averaging_fixed_horizon_control(
    model: DataCenterModel,
    offline_inp: List[List[int]],
    online_inp: List[List[List[List[int]]]],
    w: int = 0,
) -> Tuple[float, float]:
    (
        fractional,
        integral,
        m,
        initial_runtime,
    ) = horizon_control.averaging_fixed_horizon_control.start(
        ADDR, model, offline_inp, w
    )
    return evaluate(
        horizon_control.averaging_fixed_horizon_control,
        online_inp,
        fractional,
        integral,
        m,
        initial_runtime,
    )
| 2.109375 | 2 |
scripts/stream_skycam.py | kpant14/multipath_sim | 2 | 12758236 | #!/usr/bin/env python3
from cv_bridge import CvBridge, CvBridgeError
import cv2
import rospy
from sensor_msgs.msg import Image
from gazebo_msgs.srv import GetModelState
from tf.transformations import euler_from_quaternion
import numpy as np
import json
import os
data_dir = os.path.abspath( os.path.join(os.path.dirname(__file__), os.pardir)) + "/data/"
def main():
    # Boot the node, load the per-satellite statistics from JSON, and wire
    # the image callback up with everything it needs via cb_args.
    rospy.init_node("satview_streamer")
    bridge = CvBridge()
    sv_filename = data_dir + "hk_data_sv_mean.json"
    sv_data = {}
    with open(sv_filename, "rb") as fstream:
        sv_data = json.load(fstream)
    print(sv_data.keys())
    # Global maximum carrier-to-noise value, used to normalise marker colours.
    cno_max = np.max([sv_data[key]["cno_max"] for key in sv_data.keys()])
    gms = rospy.ServiceProxy("/gazebo/get_model_state", GetModelState)
    img_pub = rospy.Publisher("skycam/satview", Image, queue_size=1)
    start_time = rospy.get_rostime()
    # Order matters: img_sub_cb unpacks cb_args positionally.
    cb_args = [bridge, sv_data, gms, img_pub, start_time, cno_max]
    img_sub = rospy.Subscriber("skycam/image_raw", Image, img_sub_cb, cb_args)
def img_sub_cb(data, cb_args):
    """Overlay satellite positions and compass labels on each sky image.

    Called per incoming frame; publishes the annotated image on
    ``skycam/satview``.
    """
    # Unpack the context packed by main(); positions must match cb_args there.
    bridge = cb_args[0]
    sv_data = cb_args[1]
    gms = cb_args[2]
    img_pub = cb_args[3]
    start_time = cb_args[4]
    cno_max = cb_args[5]
    model_pose = gms("laser_0", "world")
    model_euler = euler_from_quaternion(
        [model_pose.pose.orientation.x,
         model_pose.pose.orientation.y,
         model_pose.pose.orientation.z,
         model_pose.pose.orientation.w,]
    )
    # ENU (gzb) to NED
    heading = np.pi/2 - model_euler[2]
    cv_img = bridge.imgmsg_to_cv2(data, "bgr8")
    # Vertical flip -- presumably to orient the sky view; confirm against
    # the camera mounting.
    cv_img = np.array(np.flip(cv_img, axis=0))
    img_height = cv_img.shape[0]
    img_width = cv_img.shape[1]
    img_center = np.array([img_height/2.0, img_width/2.0]) # [250 250]
    r_max = np.min(img_center)
    green = (0, 255, 0)
    red = (0, 0, 255)
    blue = (255, 0, 0)
    now = rospy.get_rostime()
    elapsed = (now-start_time).to_sec()
    for sv_id in sv_data.keys():
        elev = sv_data[sv_id]["mean"][0]
        azim = sv_data[sv_id]["mean"][1]
        # Replay the recorded C/N0 trace at 10 samples per second, looping.
        index = int(elapsed*10 % len(sv_data[sv_id]["cno"]))
        cno = sv_data[sv_id]["cno"][index]
        # print(sv_id+" cno: ", cno)
        # print(sv_id+" color: ", int((cno)/cno_max*255), int((cno_max - cno)/cno_max*255))
        # Polar projection: zenith (elev 90) at the centre, horizon at r_max;
        # theta is rotated by the vehicle heading so the view stays aligned.
        r = (90.0 - elev)/90.0 * r_max
        theta = np.deg2rad(azim) - np.pi/2 - heading
        x = int(r*np.cos(theta) + img_center[0])
        y = int(r*np.sin(theta) + img_center[1])
        # Marker colour fades green->red as C/N0 drops relative to cno_max.
        cv2.circle(cv_img, (x, y), 10, (0, int((cno)/cno_max*255), int((cno_max-cno)/cno_max*255)/2), -1)
        cv2.circle(cv_img, (x, y), 11, (0, 0, 255), 2)
        cv2.putText(cv_img, sv_id, (x-10, y-15), cv2.FONT_HERSHEY_SIMPLEX, 0.3, green, 1)
    # Compass labels, rotated with the vehicle heading.
    nesw = ["N", "E", "S", "W"]
    for i in range(4):
        theta = i*np.pi/2 - np.pi/2 - heading
        r = 235
        x = int(r*np.cos(theta) + img_center[0])
        y = int(r*np.sin(theta) + img_center[1])
        font = cv2.FONT_HERSHEY_SIMPLEX
        cv2.putText(cv_img, nesw[i], (x,y), font, 0.5, green, 2)
    ros_img = bridge.cv2_to_imgmsg(cv_img, "bgr8")
    img_pub.publish(ros_img)
    # cv2.imshow("skycam", cv_img)
    # k = cv.waitKey(3) & 0xff
if __name__ == "__main__":
    # main() only registers the subscriber; rospy.spin() keeps the process
    # alive servicing callbacks until interrupted.
    main()
    try:
        rospy.spin()
    except KeyboardInterrupt:
        print("Shutting down...")
    cv2.destroyAllWindows()
tests/test_array_fields.py | Chocorean/infi.clickhouse_orm | 0 | 12758237 | from __future__ import unicode_literals
import unittest
from datetime import date
from infi.clickhouse_orm.database import Database
from infi.clickhouse_orm.models import Model
from infi.clickhouse_orm.fields import *
from infi.clickhouse_orm.engines import *
class ArrayFieldsTest(unittest.TestCase):
    # Exercises insertion, selection, conversion and parsing of ClickHouse
    # Array-typed model fields against a throwaway test database.
    def setUp(self):
        self.database = Database('test-db')
        self.database.create_table(ModelWithArrays)
    def tearDown(self):
        self.database.drop_database()
    def test_insert_and_select(self):
        instance = ModelWithArrays(
            date_field='2016-08-30',
            arr_str=['goodbye,', 'cruel', 'world', 'special chars: ,"\\\'` \n\t\\[]'],
            arr_date=['2010-01-01'],
        )
        self.database.insert([instance])
        query = 'SELECT * from $db.modelwitharrays ORDER BY date_field'
        # Round-trip once into model instances and once into ad-hoc rows.
        for model_cls in (ModelWithArrays, None):
            results = list(self.database.select(query, model_cls))
            self.assertEqual(len(results), 1)
            self.assertEqual(results[0].arr_str, instance.arr_str)
            self.assertEqual(results[0].arr_int, instance.arr_int)
            self.assertEqual(results[0].arr_date, instance.arr_date)
    def test_conversion(self):
        # Values are coerced on assignment: numeric strings to ints, ISO
        # strings to dates; unset array fields default to [].
        instance = ModelWithArrays(
            arr_int=('1', '2', '3'),
            arr_date=['2010-01-01']
        )
        self.assertEqual(instance.arr_str, [])
        self.assertEqual(instance.arr_int, [1, 2, 3])
        self.assertEqual(instance.arr_date, [date(2010, 1, 1)])
    def test_assignment_error(self):
        # Non-list values and wrong element types must be rejected.
        instance = ModelWithArrays()
        for value in (7, 'x', [date.today()], ['aaa'], [None]):
            with self.assertRaises(ValueError):
                instance.arr_int = value
    def test_parse_array(self):
        from infi.clickhouse_orm.utils import parse_array, unescape
        self.assertEqual(parse_array("[]"), [])
        self.assertEqual(parse_array("[1, 2, 395, -44]"), ["1", "2", "395", "-44"])
        self.assertEqual(parse_array("['big','mouse','','!']"), ["big", "mouse", "", "!"])
        self.assertEqual(parse_array(unescape("['\\r\\n\\0\\t\\b']")), ["\r\n\0\t\b"])
        # Malformed literals must raise rather than return partial data.
        for s in ("",
                  "[",
                  "]",
                  "[1, 2",
                  "3, 4]",
                  "['aaa', 'aaa]"):
            with self.assertRaises(ValueError):
                parse_array(s)
    def test_invalid_inner_field(self):
        # ArrayField requires an *instance* of a non-array field.
        for x in (DateField, None, "", ArrayField(Int32Field())):
            with self.assertRaises(AssertionError):
                ArrayField(x)
class ModelWithArrays(Model):
    # Fixture model: one Array column per element type under test.
    date_field = DateField()
    arr_str = ArrayField(StringField())
    arr_int = ArrayField(Int32Field())
    arr_date = ArrayField(DateField())
    # MergeTree needs a date column and an ordering key.
    engine = MergeTree('date_field', ('date_field',))
| 2.53125 | 3 |
controllers/ZombieController.py | Ctalk3r/NotTowerDefence | 0 | 12758238 | import Utils
from config.Resources import UnitTypes
from controllers.Controller import Controller
class ZombieController(Controller):
    # Steers zombie units toward a target, remembering positions reached by
    # earlier zombies ("good points") to bias future pathing.
    def __init__(self):
        # (position, remaining-hp) tuples recorded by earlier zombies.
        self.good_points = []
    def do(self, unit, all_units):
        target = list(all_units)[0]
        #target = self.find_nearest_obj(unit, all_units, {UnitTypes.core})
        better_target = target.pos
        # Priority threshold a remembered point must beat when it is only
        # slightly farther (within 40 units) from the target.
        priority = unit.hp - 6
        # Prefer a remembered point that is closer to the target, or nearly
        # as close but recorded with higher hp.
        for point in self.good_points:
            if Utils.classical_dist(better_target, target.pos) > Utils.classical_dist(point[0], target.pos) \
                    or (Utils.classical_dist(better_target, target.pos) > Utils.classical_dist(point[0], target.pos) - 40 and point[1] > priority):
                better_target = point[0]
                priority = point[1]
        if target is None:
            # NOTE(review): unreachable -- ``target`` is assigned above and
            # never None; hard-exiting the process here looks like leftover
            # debugging.  Confirm before removing.
            exit(0)
        unit.reload()
        if unit.move(better_target) is False:
            # Movement blocked: if the nearest object is a wall, attack it
            # instead of the original target.
            obj = self.find_nearest_obj(unit, all_units)
            if obj.unit_type is UnitTypes.wall:
                target = obj
        if target:
            unit.attack(target)
        # Record where this zombie got to, and cap the memory so the list
        # cannot grow without bound.
        self.good_points.append((unit.pos, unit.hp))
        if len(self.good_points) > 1000:
            self.good_points.clear()
| 2.8125 | 3 |
Example/ProjectionMappingDemo.py | davidsoncolin/IMS | 0 | 12758239 | <gh_stars>0
#!/usr/bin/env python
import ISCV
import numpy as np
from IO import ViconReader, MovieReader, MAReader, IO, C3D, ASFReader
from UI import QGLViewer, QApp # gui
from GCore import Label, Recon, Retarget, SolveIK, Calibrate, Character, Opengl
from UI.GLMeshes import GLMeshes
#from UI.GLSkel import GLSkel
#from UI.GLSkeleton import GLSkeleton
def intersectRaysCB(fi):
    """Per-frame viewer callback (Python 2): label 2D detections, reconstruct
    or track 3D points, advance the movie and re-texture the projection-mapped
    character meshes.

    NOTE(review): relies on module globals initialised in main(); fi is the
    x2d frame index supplied by the viewer timeline.
    """
    global x2d_frames,mats,Ps,c3d_frames,view,primitives,primitives2D,track3d,prev_frame,track_orn,orn_graph,boot,g_all_skels,md,orn_mapper,mar_mapper
    # Re-boot the 3D tracker after a large jump on the timeline.
    skipping = prev_frame is None or np.abs(fi - prev_frame) > 10
    prev_frame = fi
    view = QApp.view()
    points,altpoints = primitives
    g2d = primitives2D[0]
    frame = x2d_frames[fi]
    x2ds_data,x2ds_splits = ViconReader.frameCentroidsToDets(frame,mats)
    g2d.setData(x2ds_data,x2ds_splits)
    if skipping:
        x3ds,x3ds_labels = track3d.boot(x2ds_data, x2ds_splits)
        #trackGraph = Label.TrackGraph()
        boot = -10
    else:
        x3ds,x3ds_labels = track3d.push(x2ds_data, x2ds_splits)
    if False:  # disabled model-booting branch, kept for reference
        boot = boot+1
        if boot == 0:
            x2d_threshold_hash = 0.01
            penalty = 10.0 # the penalty for unlabelled points. this number should be about 10. to force more complete labellings, set it higher.
            maxHyps = 500 # the number of hypotheses to maintain.
            print "booting:"
            numLabels = len(orn_graph[0])
            l2x = -np.ones(numLabels, dtype=np.int32)
            label_score = ISCV.label_from_graph(x3ds, orn_graph[0], orn_graph[1], orn_graph[2], orn_graph[3], maxHyps, penalty, l2x)
            clouds = ISCV.HashCloud2DList(x2ds_data, x2ds_splits, x2d_threshold_hash)
            which = np.array(np.where(l2x != -1)[0],dtype=np.int32)
            pras_score, x2d_labels, vels = Label.project_assign(clouds, x3ds[l2x[which]], which, Ps, x2d_threshold=x2d_threshold_hash)
            print fi, label_score, pras_score
            labelled_x3ds = x3ds[l2x[which]]
            print track_orn.bootPose(x2ds_data, x2ds_splits, x2d_labels)
        if boot > 0:
            track_orn.push(x2ds_data, x2ds_splits, its=4)
    #x3ds,x2ds_labels = Recon.intersect_rays(x2ds_data, x2ds_splits, Ps, mats, seed_x3ds = None)
    points.setData(x3ds)
    if c3d_frames != None:
        # Show ground-truth C3D markers alongside the reconstruction.
        # NOTE(review): the frame offset/stride (fi-832)/2 looks shot-specific.
        c3ds = c3d_frames[(fi-832)/2]
        true_labels = np.array(np.where(c3ds[:,3]==0)[0],dtype=np.int32)
        x3ds_true = c3ds[true_labels,:3]
        altpoints.setData(x3ds_true)
    ci = view.cameraIndex()-1
    if True: #ci == -1:
        MovieReader.readFrame(md, seekFrame=max((fi - 14)/4,0))
        QApp.app.refreshImageData()
    # Pose both characters from their per-frame dof animations.
    (orn_skel_dict, orn_t) = g_all_skels['orn']
    orn_mesh_dict, orn_skel_mesh, orn_geom_mesh = orn_t
    orn_anim_dict = orn_skel_dict['anim_dict']
    orn_skel_dict['chanValues'][:] = orn_anim_dict['dofData'][fi]
    Character.updatePoseAndMeshes(orn_skel_dict, orn_skel_mesh, orn_geom_mesh)
    (mar_skel_dict, mar_t) = g_all_skels['mar']
    mar_anim_dict = mar_skel_dict['anim_dict']
    mar_mesh_dict, mar_skel_mesh, mar_geom_mesh = mar_t
    Character.updatePoseAndMeshes(mar_skel_dict, mar_skel_mesh, mar_geom_mesh, mar_anim_dict['dofData'][fi])
    from PIL import Image
    #orn_geom_mesh.setImage((md['vbuffer'],(md['vheight'],md['vwidth'],3)))
    #orn_geom_mesh.refreshImage()
    # Project the current movie frame through the calibrated camera onto
    # each character mesh and bake the result into the mesh texture.
    w,h = 1024,1024
    cam = view.cameras[0]
    cam.refreshImageData(view)
    aspect = float(max(1,cam.bindImage.width()))/float(cam.bindImage.height()) if cam.bindImage is not None else 1.0
    orn_mapper.project(orn_skel_dict['geom_Vs'], aspect)
    data = Opengl.renderGL(w, h, orn_mapper.render, cam.bindId)
    orn_geom_mesh.setImage(data)
    mar_mapper.project(mar_skel_dict['geom_Vs'], aspect)
    data = Opengl.renderGL(w, h, mar_mapper.render, cam.bindId)
    mar_geom_mesh.setImage(data)
    #image = Image.fromstring(mode='RGB', size=(w, h), data=data)
    #image = image.transpose(Image.FLIP_TOP_BOTTOM)
    #image.save('screenshot.png')
    if 0:  # debug: show the distortion-mesh render with pylab
        global g_screen
        image = Opengl.renderGL(1920, 1080, Opengl.quad_render, (cam.bindId, g_screen))
        import pylab as pl
        pl.imshow(image)
        pl.show()
    view.updateGL()
def load_skels(directory):
    """Load the Ted and Orn skeletons and transfer Ted's geometry onto Orn.

    Poses Ted onto Orn via IK on name-matched joints, then copies Ted's
    geometry/skin-weight data into Orn's skel dict.

    Returns:
        (ted_skel_dict, orn_skel_dict) tuple of skeleton dictionaries.
    """
    ted_dir = os.path.join(os.environ['GRIP_DATA'],'ted')
    _,ted_skel_dict = IO.load(os.path.join(ted_dir,'ted_body6.skel'))
    asf_filename = os.path.join(directory,'Orn.asf')
    amc_filename = os.path.join(directory,'Orn.amc')
    asf_dict = ASFReader.read_ASF(asf_filename)
    orn_anim_dict = ASFReader.read_AMC(amc_filename, asf_dict)
    orn_skel_dict = ASFReader.asfDict_to_skelDict(asf_dict)
    orn_skel_dict['anim_dict'] = orn_anim_dict
    # we are going to try to transfer the geometry from ted to orn
    # the challenge here is that the joints/bones are not the same
    # because the rigging is rather different, our best chance is to pose the characters and transfer using joint names
    # orn's joint names begin 'VSS_'
    orn = [t[4:] for t in orn_skel_dict['jointNames']]; orn[0] = 'root'
    # ted's joint names begin 'GenTed:'. the first joint is 'Reference' (on the floor) and the second 'Hips'.
    ted = [t[7:] for t in ted_skel_dict['jointNames']]; ted[0] = 'root'; ted[1] = 'root'
    #ted_skel_dict['Ls'][0][:3,:3] = 1.03 * np.eye(3,3) # apparently, ted is 3% smaller than orn?
    # ted has extra joints compared to orn
    ted_extras = ['RightUpLeg_Roll','LeftUpLeg_Roll','RightArm_Roll','LeftArm_Roll','Neck1','Neck2',\
                  'LeftHand1','LeftInHandIndex','LeftInHandMiddle','LeftInHandRing','LeftInHandPinky',\
                  'RightHand1','RightInHandIndex','RightInHandMiddle','RightInHandRing','RightInHandPinky',\
                  'HeadEnd','LeftToeBaseEnd','RightToeBaseEnd',\
                  'LeftHandThumb4','LeftHandIndex4','LeftHandMiddle4','LeftHandRing4','LeftHandPinky4',\
                  'RightHandThumb4','RightHandIndex4','RightHandMiddle4','RightHandRing4','RightHandPinky4'
                  ]
    ted_extras = sorted([ted.index(n) for n in ted_extras])
    # map ted's extra joints to their parent
    for ji in ted_extras:
        jpi = ted_skel_dict['jointParents'][ji]
        ted[ji] = ted[jpi]
    # some of ted's names differ
    name_mapping = dict([(ot,orn_skel_dict['jointNames'][orn.index(t.replace('Spine3','Chest').replace('_Roll','Roll'))]) for t,ot in zip(ted,ted_skel_dict['jointNames'])])
    print zip(ted_skel_dict['jointNames'],[name_mapping[t] for t in ted_skel_dict['jointNames']])
    print list(enumerate(ted_skel_dict['jointNames']))
    print list(enumerate(orn_skel_dict['jointNames']))
    orn_indices = np.array([orn_skel_dict['jointIndex'][name_mapping[t]] for t in ted_skel_dict['jointNames']],dtype=np.int32)
    # solve ted into orn's position.
    # we generate one constraint per joint and zero the weights of those that aren't constrained
    numJoints = ted_skel_dict['numJoints']
    markerParents = np.arange(numJoints,dtype=np.int32)
    markerOffsets = np.zeros((numJoints,3),dtype=np.float32)
    markerWeights = np.ones(numJoints,dtype=np.float32)
    # Only the FIRST ted joint mapped to each orn joint keeps its weight.
    once = set(orn_indices)
    for mi,oi in enumerate(orn_indices):
        if oi in once: once.remove(oi)
        else: markerWeights[mi] = 0; print 'weighting zero',mi,ted_skel_dict['jointNames'][mi]
    # don't fit the shoulders, to avoid ted's head leaning back
    markerWeights[ted_skel_dict['jointIndex']['GenTed:LeftShoulder']] = 0
    markerWeights[ted_skel_dict['jointIndex']['GenTed:RightShoulder']] = 0
    markerWeights[0] = 0
    markerWeights[1] = 1
    p_o_w = markerParents, markerOffsets, markerWeights
    effectorData = SolveIK.make_effectorData(ted_skel_dict, p_o_w = p_o_w)
    effectorTargets = np.zeros_like(ted_skel_dict['Gs'])
    effectorTargets[:,:,3] = orn_skel_dict['Gs'][:,:,3][orn_indices]
    jps = ted_skel_dict['jointParents']
    cvs,jcss = ted_skel_dict['chanValues'],ted_skel_dict['jointChanSplits']
    def kill_joint(ji):
        # Zero every dof channel of joint ji.
        cvs[jcss[2*ji]:jcss[2*ji+2]] = 0
        #if jcss[2*ji] == jcss[2*ji+2]: kill_joint(jps[ji])
    for it in range(20):
        SolveIK.solveIK(ted_skel_dict, ted_skel_dict['chanValues'], effectorData, effectorTargets, outerIts = 4)
        print it,SolveIK.scoreIK(ted_skel_dict, ted_skel_dict['chanValues'], effectorData, effectorTargets)
    # zero all joints that are only in ted
    for ji in ted_extras: kill_joint(ji)
    # for some reason, the Head and Hands wander off: keep them straight
    nji = ted_skel_dict['jointIndex']['GenTed:Neck']
    cvs[jcss[2*nji]:jcss[2*nji+2]] *= [0,0,1] # zero first two channels only...
    #kill_joint(nji)
    hji = ted_skel_dict['jointIndex']['GenTed:Head']
    cvs[jcss[2*hji]:jcss[2*hji+2]] = -cvs[jcss[2*nji]:jcss[2*nji+2]]
    #kill_joint(hji)
    for jn,ji in ted_skel_dict['jointIndex'].iteritems():
        if 'Hand' in jn: kill_joint(ji)
    print it,SolveIK.scoreIK(ted_skel_dict, ted_skel_dict['chanValues'], effectorData, effectorTargets)
    # kill all end effectors' parents
    #for ji in xrange(len(jps)):
    #   if ji not in list(jps): kill_joint(jps[ji])
    print SolveIK.scoreIK(ted_skel_dict, ted_skel_dict['chanValues'], effectorData, effectorTargets)
    # Copy ted's geometry onto orn and remap the skin weights by joint name.
    orn_skel_dict['geom_dict'] = ted_skel_dict['geom_dict']
    orn_skel_dict['geom_Vs'] = ted_skel_dict['geom_Vs'].copy()
    orn_skel_dict['geom_vsplits'] = ted_skel_dict['geom_vsplits'].copy()
    orn_skel_dict['geom_Gs'] = ted_skel_dict['geom_Gs'].copy()
    orn_skel_dict['shape_weights'] = Character.shape_weights_mapping(ted_skel_dict, orn_skel_dict, name_mapping)
    return ted_skel_dict, orn_skel_dict
def main(x2d_filename, xcp_filename, c3d_filename = None):
    '''Generate a 3D view of an x2d file, using the calibration.

    Loads camera calibration (xcp), 2D centroid data (x2d) and optionally
    ground-truth C3D markers, builds the Orn and Martha characters with
    projection-mapped textures from the session movie, and launches the
    QGLViewer with intersectRaysCB as the per-frame callback.

    NOTE(review): paths under $GRIP_DATA/151110 are hard-coded to one shoot.
    '''
    global x2d_frames,mats,Ps,c3d_frames,primitives,primitives2D,track3d,prev_frame,track_orn,orn_graph,boot,orn_mapper,mar_mapper
    prev_frame = None
    c3d_frames = None
    if c3d_filename != None:
        c3d_dict = C3D.read(c3d_filename)
        c3d_frames, c3d_fps, c3d_labels = c3d_dict['frames'],c3d_dict['fps'],c3d_dict['labels']
    mats,xcp_data = ViconReader.loadXCP(xcp_filename)
    camera_ids = [int(x['DEVICEID']) for x in xcp_data]
    print 'loading 2d'
    x2d_dict = ViconReader.loadX2D(x2d_filename)
    x2d_frames = x2d_dict['frames']
    cameras_info = ViconReader.extractCameraInfo(x2d_dict)
    print 'num frames', len(x2d_frames)
    # Normalised projection matrices (divided by focal length).
    Ps = [m[2]/(m[0][0,0]) for m in mats]
    track3d = Label.Track3D(mats)
    primitives = QGLViewer.makePrimitives(vertices = [], altVertices=[])
    primitives2D = QGLViewer.makePrimitives2D(([],[0]))
    global g_all_skels,md
    directory = os.path.join(os.environ['GRIP_DATA'],'151110')
    _,orn_skel_dict = IO.load(os.path.join(directory,'orn.skel'))
    movie_fn = os.path.join(directory,'50_Grip_RoomCont_AA_02.v2.mov')
    md = MovieReader.open_file(movie_fn, audio=True, frame_offset=0, volume_ups=10)
    # Martha: skeleton/animation from ASF/AMC, geometry borrowed from orn.
    asf_filename = os.path.join(directory,'Martha.asf')
    amc_filename = os.path.join(directory,'Martha.amc')
    asf_dict = ASFReader.read_ASF(asf_filename)
    mar_skel_dict = ASFReader.asfDict_to_skelDict(asf_dict)
    mar_skel_dict['anim_dict'] = ASFReader.read_AMC(amc_filename, asf_dict)
    for k in ('geom_Vs','geom_vsplits','geom_Gs'): mar_skel_dict[k] = orn_skel_dict[k].copy()
    mar_skel_dict['shape_weights'] = orn_skel_dict['shape_weights']
    mar_skel_dict['geom_dict'] = orn_skel_dict['geom_dict']
    # Re-map orn's animation channels into the VSS channel ordering.
    orn_vss = ViconReader.loadVSS(os.path.join(directory,'Orn.vss'))
    orn_vss_chan_mapping = [orn_vss['chanNames'].index(n) for n in orn_skel_dict['chanNames']]
    orn_anim_dict = orn_skel_dict['anim_dict']
    orn_vss_anim = np.zeros((orn_anim_dict['dofData'].shape[0],orn_vss['numChans']),dtype=np.float32)
    orn_vss_anim[:,orn_vss_chan_mapping] = orn_anim_dict['dofData']
    orn_anim_dict['dofData'] = orn_vss_anim
    orn_vss['anim_dict'] = orn_anim_dict
    for x in ['geom_dict','geom_Vs','geom_vsplits','geom_Gs','shape_weights']: orn_vss[x] = orn_skel_dict[x]
    orn_skel_dict = orn_vss
    g_all_skels = {}
    orn_mesh_dict, orn_skel_mesh, orn_geom_mesh = orn_t = Character.make_geos(orn_skel_dict)
    g_all_skels['orn'] = (orn_skel_dict, orn_t)
    orn_skel_dict['chanValues'][:] = 0
    Character.updatePoseAndMeshes(orn_skel_dict, orn_skel_mesh, orn_geom_mesh)
    mar_mesh_dict, mar_skel_mesh, mar_geom_mesh = mar_t = Character.make_geos(mar_skel_dict)
    g_all_skels['mar'] = (mar_skel_dict, mar_t)
    #ted_mesh_dict, ted_skel_mesh, ted_geom_mesh = ted_t = Character.make_geos(ted_skel_dict)
    #g_all_skels['ted'] = (ted_skel_dict, ted_t)
    #ted_skel_dict['chanValues'][0] += 1000
    #Character.updatePoseAndMeshes(ted_skel_dict, ted_skel_mesh, ted_geom_mesh)
    mnu = orn_skel_dict['markerNamesUnq']
    mns = orn_skel_dict['markerNames']
    effectorLabels = np.array([mnu.index(n) for n in mns], dtype=np.int32)
    orn_graph = Label.graph_from_skel(orn_skel_dict, mnu)
    boot = -10
    track_orn = Label.TrackModel(orn_skel_dict, effectorLabels, mats)
    #ted = GLSkel(ted_skel_dict['Bs'], ted_skel_dict['Gs']) #, mvs=ted_skel_dict['markerOffsets'], mvis=ted_skel_dict['markerParents'])
    #ted = GLSkeleton(ted_skel_dict['jointNames'],ted_skel_dict['jointParents'], ted_skel_dict['Gs'][:,:,3])
    #ted.setName('ted')
    #ted.color = (1,1,0)
    #orn = GLSkeleton(orn_skel_dict['jointNames'],orn_skel_dict['jointParents'], orn_skel_dict['Gs'][:,:,3])
    #orn.setName('orn')
    #orn.color = (0,1,1)
    #square = GLMeshes(names=['square'],verts=[[[0,0,0],[1000,0,0],[1000,1000,0],[0,1000,0]]],vts=[[[0,0],[1,0],[1,1],[0,1]]],faces=[[[0,1,2,3]]],fts=[[[0,1,2,3]]])
    #square.setImageData(np.array([[[0,0,0],[255,255,255]],[[255,255,255],[0,0,0]]],dtype=np.uint8))
    #orn_geom_mesh.setImageData(np.array([[[0,0,0],[255,255,255]],[[255,255,255],[0,0,0]]],dtype=np.uint8))
    # Roughed-in projector camera and distortion for this shoot.
    P = Calibrate.composeP_fromData((60.8,),(-51.4,14.7,3.2),(6880,2860,5000),0) # roughed in camera for 151110
    ks = (0.06,0.0)
    mat = Calibrate.makeMat(P, ks, (1080, 1920))
    orn_mapper = Opengl.ProjectionMapper(mat)
    orn_mapper.setGLMeshes(orn_geom_mesh)
    orn_geom_mesh.setImage((md['vbuffer'],(md['vheight'],md['vwidth'],3)))
    mar_mapper = Opengl.ProjectionMapper(mat)
    mar_mapper.setGLMeshes(mar_geom_mesh)
    mar_geom_mesh.setImage((md['vbuffer'],(md['vheight'],md['vwidth'],3)))
    global g_screen
    g_screen = Opengl.make_quad_distortion_mesh()
    QGLViewer.makeViewer(mat=mat,md=md,layers = {\
        #'ted':ted, 'orn':orn,
        #'ted_skel':ted_skel_mesh,'ted_geom':ted_geom_mesh,\
        #'square':square,
        'orn_skel':orn_skel_mesh,'orn_geom':orn_geom_mesh,\
        'mar_skel':mar_skel_mesh,'mar_geom':mar_geom_mesh,\
        },
        primitives=primitives, primitives2D=primitives2D, timeRange=(0, len(x2d_frames) - 1, 4, 25.0), callback=intersectRaysCB, mats=mats,camera_ids=camera_ids)
if __name__=='__main__':
    import sys, os
    print sys.argv
    # Usage: [x2d_file xcp_file [c3d_file]]; with no arguments fall back to
    # the default 151110 capture under $GRIP_DATA.
    if len(sys.argv) == 1:
        directory = os.path.join(os.environ['GRIP_DATA'],'151110')
        x2d_filename = os.path.join(directory,'50_Grip_RoomCont_AA_02.x2d')
        xcp_filename = os.path.join(directory,'50_Grip_RoomCont_AA_02.xcp')
        main(x2d_filename, xcp_filename)
    elif len(sys.argv) == 3:
        x2d_filename, xcp_filename = sys.argv[1:]
        main(x2d_filename, xcp_filename)
    elif len(sys.argv) == 4:
        x2d_filename, xcp_filename, c3d_filename = sys.argv[1:]
        main(x2d_filename, xcp_filename, c3d_filename)
| 1.851563 | 2 |
core/models/__init__.py | samlowe106/Saved-Sorter-For-Reddit | 1 | 12758240 | from .parsers import IParser, ImgurParser, SingleImageParser, FlickrParser, GfycatParser, PARSERS
from ..utils.strings import *
from .submission_wrapper import SubmissionWrapper
from ..utils.urls import *
from .submission_source import SubmissionSource, SortOption
| 0.964844 | 1 |
MargalottiH/tests/KNN.py | imharrymargalotti/MachineLearningFinal | 0 | 12758241 | import csv
import numpy as np
import os
import sklearn
from sklearn import datasets
from sklearn import neighbors, datasets, preprocessing
from sklearn.decomposition import PCA
from sklearn.ensemble import RandomForestClassifier
from sklearn.linear_model import LogisticRegression
from sklearn.model_selection import train_test_split
from sklearn.metrics import accuracy_score, auc
from sklearn.neighbors import KNeighborsClassifier
import math
from sklearn.multioutput import MultiOutputClassifier
'''
3 6
1 4
2 5
'''
def logistic(X_train, y_train, X_test):
    """Train one-vs-rest logistic-regression classifiers, one per genre
    column, and write the per-genre probabilities for X_test to
    ``logistic.csv``.

    Args:
        X_train: (n_train, n_features) training feature matrix.
        y_train: (n_train, >=147) binary genre-label matrix.
        X_test:  (n_test, n_features) test feature matrix.
    """
    n_test = X_test.shape[0]  # was hard-coded 2705 ("2705 or 1967")
    n_genres = 147
    output = np.zeros((n_test, n_genres))
    clf = LogisticRegression(random_state=0, solver='saga', n_jobs=-1, max_iter=250)
    # BUG FIX: the loop ran range(148) and wrote output[:, 147], one past the
    # last of the 147 columns, raising IndexError on the final iteration.
    for k in range(n_genres):
        clf.fit(X_train, y_train[:, k])
        print(k + 1, "/", n_genres)
        # Probability of the positive class for genre k.
        output[:, k] = clf.predict_proba(X_test)[:, 1]
    np.savetxt("logistic.csv", output, delimiter=",")
def RandForest(X_train, y_train, X_test):
    """Train one random-forest classifier per genre column and write the
    per-genre probabilities for X_test to ``RandomForest.csv``.

    Args:
        X_train: (n_train, n_features) training feature matrix.
        y_train: (n_train, >=147) binary genre-label matrix.
        X_test:  (n_test, n_features) test feature matrix.
    """
    n_test = X_test.shape[0]  # was hard-coded 2705
    n_genres = 147
    output = np.zeros((n_test, n_genres))
    for k in range(n_genres):
        # A fresh estimator per genre: fit() would otherwise re-use state.
        clf = RandomForestClassifier(n_estimators=100, max_depth=60,
                                     random_state=0)
        clf.fit(X_train, y_train[:, k])
        # FIX: progress message said "/75" although the loop runs 147 times.
        print(k + 1, "/", n_genres)
        # Probability of the positive class for genre k.
        output[:, k] = clf.predict_proba(X_test)[:, 1]
    np.savetxt("RandomForest.csv", output, delimiter=",")
    print("done")
def KNN(X_train, x_test, y_train, y_test):
    """Fit a multi-output K-nearest-neighbours classifier and print a score.

    NOTE(review): the output array and loops are hard-coded to 1967x147, so
    this only works for that exact split size. Also, ``classifier.score`` is
    called with the probability matrix as X rather than the test features —
    presumably unintended; verify against sklearn's score() signature.
    """
    knn = KNeighborsClassifier(algorithm='auto', metric='minkowski', metric_params=None, n_jobs=-1,
                               n_neighbors=147, p=2, weights='distance')
    print("poopf")
    knn.fit(X_train, y_train)
    # Wrap in MultiOutputClassifier to predict all 147 genre columns at once.
    classifier = MultiOutputClassifier(knn, n_jobs=-1)
    classifier.fit(X_train, y_train)
    y_predict = (classifier.predict_proba(x_test))
    output = np.zeros((1967,147)) #2597
    # predict_proba returns one (n_samples, 2) array per output; collect the
    # positive-class probability for each sample/genre pair.
    for x in range(1967):
        for y in range(147):
            output[x][y] = y_predict[y][x][1]
    # print(output)
    # np.savetxt("sub.csv", output, delimiter=",")
    print(classifier.score(output,y_test))
def main():
    """Load the Cal10k train/test summary features and labels, then run the
    logistic-regression pipeline.

    NOTE(review): file locations are absolute paths on the author's machine;
    the train (7868) and test (2705) counts are hard-coded.
    """
    Trainfiles = 7868
    TrainList = np.zeros((7868, 76))
    # Each npz holds a 76-dim 'summary' audio feature vector per track.
    for x in range(Trainfiles):
        filename = "/Users/harrymargalotti/MLfinal/MachineLearningFinal/Kaggle_Final/train_feature_files/" + str(
            x) + ".npz"
        data = np.load(filename)
        TrainList[x] = data['summary']
    X = TrainList
    X = np.nan_to_num(X)  # features contain NaNs; zero them out
    tesetfile = 2705
    testList = np.zeros((2705, 76))
    for x in range(tesetfile):
        filename = "/Users/harrymargalotti/MLfinal/MachineLearningFinal/Kaggle_Final/test_feature_files/" + str(
            x) + ".npz"
        data = np.load(filename)
        testList[x] = data['summary']
    xtest = testList
    xtest= np.nan_to_num(xtest)
    # Genre-label matrix for the training tracks.
    file = '/Users/harrymargalotti/MLfinal/MachineLearningFinal/Kaggle_Final/cal10k_train_data.csv'
    y = np.array(list(csv.reader(open(file, "r"), delimiter=","))).astype("float")
    # X_train, X_test, y_train, y_test = train_test_split(X, y)
    # print("Xtrain: ", X_train.shape)
    # print("Xtest: ",X_test.shape)
    # print("ytrain: ", y_train.shape)
    # print("ytest: ", y_test.shape)
    # KNN(X_train, X_test, y_train, y_test)
    print("data load done")
    # RandForest(X, y, xtest)
    # NOTE(review): PCA is fitted but its transform is never used below.
    pca = PCA()
    pca.fit(X)
    logistic(X, y, xtest)


main()
'''
Xtrain: (5901, 76)
Xtest: (1967, 76)
ytrain: (5901, 148)
ytest: (1967, 148)
X_train, X_test, y_train, y_test = train_test_split(
X, y, test_size=0.33, random_state=42)
create a data matrix X_train from the 74-dimensional summary audio feature vectors for each of the 7868 training tracks: done
Load the cal10k_train_date matrix Y_train for the 147 genres and 7868 training tracks
Using scikit-learn, train 147 logistic regression classifiers with one for each genre
Iterate through the list of test npz files and create a data matrix X_test from the 74-dimensional summary audio feature vectors for each of the 2705 test tracks
Predict the probability of each test track and each genre. This should be a 2705-by-147 dimensional matrix called Y_predict
Format Y_predict so that it match the file format that is given in the cal10k_test_random_submission.csv
Upload your submission csv to Kaggle and check out the leaderboard
------------------------------------------------------------------------------------------------------------------------------
The training set is a subset of the data set used to train a model.
x_train is the training data set.
y_train is the set of labels to all the data in x_train.
The test set is a subset of the data set that you use to test your model after the model has gone through initial vetting by the validation set.
x_test is the test data set.
y_test is the set of labels to all the data in x_test.
''' | 2.859375 | 3 |
servo/ftdiuart.py | mmind/servo-hdctools | 2 | 12758242 | <gh_stars>1-10
# Copyright (c) 2011 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Allow creation of uart interface via libftdiuart library for FTDI devices."""
import ctypes
import logging
import errno
import os
import sys
import termios
import threading
import time
import tty
import ftdi_utils
import ftdi_common
import uart
# TODO(tbroch) need some way to xref these to values in ftdiuart.h
FUART_NAME_SIZE = 128
FUART_BUF_SIZE = 128
FUART_USECS_SLEEP = 5000
class FuartError(Exception):
  """Error raised for failures in Fuart operations.

  Attributes:
    msg: human-readable description of the failure.
    value: non-zero integer status returned by the library, or 0 when no
      status applies.
  """
  def __init__(self, msg, value=0):
    """Record the message and status code, then initialize Exception.

    Args:
      msg: string, message describing error in detail
      value: integer, value of error when non-zero status returned. Default=0
    """
    self.msg = msg
    self.value = value
    super(FuartError, self).__init__(msg, value)
class UartCfg(ctypes.Structure):
  """Defines uart configuration values.

  These values are supplied to the libftdi API:
    ftdi_set_line_property
    ftdi_set_baudrate

  Declared in ftdi_common.h and named uart_cfg.
  """
  # Field order must match the C struct layout exactly.
  _fields_ = [('baudrate', ctypes.c_uint),  # baud rate in bits/sec
              ('bits', ctypes.c_uint),      # data bits per character
              ('parity', ctypes.c_uint),    # 0=none, 1=odd, 2=even
              ('sbits', ctypes.c_uint)]     # 0=1 stop bit, 1=1.5, 2=2
class FuartContext(ctypes.Structure):
  """Defines primary context structure for libftdiuart.

  Declared in ftdiuart.h and named fuart_context.
  """
  # Field order must match the C struct layout exactly.
  _fields_ = [('fc', ctypes.POINTER(ftdi_common.FtdiContext)),  # libftdi context
              ('gpio', ftdi_common.Gpio),                       # uart-side gpio state
              ('name', ctypes.c_char * FUART_NAME_SIZE),        # pty device path
              ('cfg', UartCfg),                                 # current line settings
              ('is_open', ctypes.c_int),
              ('usecs_to_sleep', ctypes.c_int),
              ('fd', ctypes.c_int),                             # pty file descriptor
              ('buf', ctypes.c_ubyte * FUART_BUF_SIZE),
              ('error', ctypes.c_int),
              ('lock', ctypes.POINTER(ctypes.c_int))]
class Fuart(uart.Uart):
  """Provide interface to libftdiuart c-library via python ctypes module."""
  def __init__(self, vendor=ftdi_common.DEFAULT_VID,
               product=ftdi_common.DEFAULT_PID, interface=3,
               serialname=None,
               ftdi_context=None):
    """Fuart constructor.

    Loads libraries for libftdi, libftdiuart. Creates instance objects
    (Structures), FuartContext, FtdiContext and Gpio to iteract with the
    library and intializes them.

    Args:
      vendor: usb vendor id of FTDI device
      product: usb product id of FTDI device
      interface: interface number of FTDI device to use
      serialname: string of device serialname/number as defined in FTDI eeprom.
      ftdi_context: ftdi context created previously or None if one should be
        allocated here. This shared context functionality is seen in miniservo
        which has a uart + 4 gpios

    Raises:
      FuartError: If either ftdi or fuart inits fail
    """
    super(Fuart, self).__init__()
    self._logger = logging.getLogger('Fuart')
    self._logger.debug('')
    (self._flib, self._lib) = ftdi_utils.load_libs('ftdi', 'ftdiuart')
    self._fargs = ftdi_common.FtdiCommonArgs(vendor_id=vendor,
                                             product_id=product,
                                             interface=interface,
                                             serialname=serialname,
                                             speed=115200,
                                             bits=8,  # BITS_8 in ftdi.h
                                             partity=0,  # NONE in ftdi.h
                                             sbits=0  # STOP_BIT_1 in ftdi.h
                                             )
    self._is_closed = True
    self._fuartc = FuartContext()
    if ftdi_context:
      # Reuse a context shared with another interface (e.g. gpio).
      self._fc = ftdi_context
    else:
      self._fc = ftdi_common.FtdiContext()
      err = self._flib.ftdi_init(ctypes.byref(self._fc))
      if err:
        raise FuartError('doing ftdi_init', err)
    err = self._lib.fuart_init(ctypes.byref(self._fuartc),
                               ctypes.byref(self._fc))
    if err:
      raise FuartError('doing fuart_init', err)

  def __del__(self):
    """Fuart destructor."""
    self._logger.debug('')
    if not self._is_closed:
      self.close()

  def open(self):
    """Opens access to FTDI uart interface.

    Raises:
      FuartError: If open fails
    """
    self._logger.debug('')
    err = self._lib.fuart_open(ctypes.byref(self._fuartc),
                               ctypes.byref(self._fargs))
    if err:
      raise FuartError('doing fuart_open', err)
    self._is_closed = False

  def close(self):
    """Closes connection to FTDI uart interface.

    Raises:
      FuartError: If close fails
    """
    self._logger.debug('')
    err = self._lib.fuart_close(ctypes.byref(self._fuartc))
    if err:
      raise FuartError('doing fuart_close', err)
    self._is_closed = True

  def run(self):
    """Creates a pthread to poll FTDI & PTY for data.

    Raises:
      FuartError: If thread creation fails
    """
    self._logger.debug('')
    if self._is_closed:
      self.open()
    err = self._lib.fuart_run(ctypes.byref(self._fuartc), FUART_USECS_SLEEP)
    if err:
      raise FuartError('Failure with fuart_run', err)

  def get_uart_props(self):
    """Get the uart's properties.

    Returns:
      dict where:
        baudrate: integer of uarts baudrate
        bits: integer, number of bits of data Can be 5|6|7|8 inclusive
        parity: integer, parity of 0-2 inclusive where:
          0: no parity
          1: odd parity
          2: even parity
        sbits: integer, number of stop bits. Can be 0|1|2 inclusive where:
          0: 1 stop bit
          1: 1.5 stop bits
          2: 2 stop bits
    """
    self._logger.debug('')
    # Values come straight from the C-side context structure.
    return {'baudrate': self._fuartc.cfg.baudrate,
            'bits': self._fuartc.cfg.bits,
            'parity': self._fuartc.cfg.parity,
            'sbits': self._fuartc.cfg.sbits}

  def set_uart_props(self, line_props):
    """Set the uart's properties.

    Args:
      line_props: dict where:
        baudrate: integer of uarts baudrate
        bits: integer, number of bits of data ( prior to stop bit)
        parity: integer, parity of 0-2 inclusive where
          0: no parity
          1: odd parity
          2: even parity
        sbits: integer, number of stop bits. Can be 0|1|2 inclusive where:
          0: 1 stop bit
          1: 1.5 stop bits
          2: 2 stop bits

    Raises:
      FuartError: If failed to set line properties
    """
    self._logger.debug('')
    self._uart_props_validation(line_props, exception_type=FuartError)
    cfg = UartCfg()
    cfg.baudrate = line_props['baudrate']
    cfg.bits = line_props['bits']
    cfg.parity = line_props['parity']
    cfg.sbits = line_props['sbits']
    if self._lib.fuart_stty(ctypes.byref(self._fuartc), ctypes.byref(cfg)):
      raise FuartError('Failed to set line properties requested')

  def get_pty(self):
    """Gets path to pty for communication to/from uart.

    Returns:
      String path to the pty connected to the uart
    """
    self._logger.debug('')
    return self._fuartc.name
def test():
  """Run the FTDI uart bridge stand-alone for manual testing.

  Parses the common FTDI command-line arguments, configures logging,
  starts the uart polling thread, prints the pty path, then idles
  forever so the background thread keeps servicing the device.
  """
  (options, args) = ftdi_utils.parse_common_args(interface=3)
  format='%(asctime)s - %(name)s - %(levelname)s'
  loglevel = logging.INFO
  if options.debug:
    loglevel = logging.DEBUG
    format += ' - %(filename)s:%(lineno)d:%(funcName)s'
  format += ' - %(message)s'
  logging.basicConfig(level=loglevel, format=format)
  logger = logging.getLogger(os.path.basename(sys.argv[0]))
  logger.info('Start')
  fobj = Fuart(options.vendor, options.product, options.interface)
  fobj.run()
  logging.info('%s' % fobj.get_pty())

  # run() is a thread so just busy wait to mimic server
  while True:
    # ours sleeps to eleven!
    time.sleep(11)
if __name__ == '__main__':
  try:
    test()
  except KeyboardInterrupt:
    # Exit quietly on Ctrl-C.
    sys.exit(0)
| 2.1875 | 2 |
health_check.py | mcooper44/pi-utils | 0 | 12758243 | #!/usr/bin/python3.7
import time
from gpiozero import CPUTemperature
import psutil
import sys
from datetime import datetime
def check_temp(threshold=70):
    '''
    Measure the Pi CPU temperature with gpiozero.

    Returns a dict containing the reading ('result'), an alert flag
    ('alert_status') set when the truncated temperature reaches
    threshold, and a printable 'message'.
    '''
    reading = CPUTemperature().temperature
    return {
        'result': reading,
        # truncate to int, matching the original threshold comparison
        'alert_status': int(reading) >= threshold,
        'message': f'The temp is: {reading}',
    }
def get_memory(threshold=95):
    '''
    Report system memory usage with psutil.

    Flags an alert when the used-memory percentage exceeds *threshold*
    (a percent). 'result' is the used percentage as a string; 'message'
    also reports the zeroed ("free") memory in GB.
    '''
    stats = psutil.virtual_memory()
    gb_free = stats.free / 1073741824  # bytes -> GiB
    message = f'% of memory used: {stats.percent}\nGB free is: {gb_free:.2f}'
    return {
        'message': message,
        'alert_status': stats.percent > threshold,
        'result': f'{stats.percent}%',
    }
def get_cpu(duration=4, all_cpu=True, threshold=90):
    '''
    using psutil determine how much cpu is being used over duration.
    uses .cpu_percent method which takes two parameters:
    interval (duration) and percpu (all_cpu).
    With all_cpu=True it returns a list of per-CPU percentages; with
    all_cpu=False it returns a single system-wide float.
    Flags an alert when the (average) usage exceeds threshold.
    '''
    usage = psutil.cpu_percent(duration, all_cpu)
    # BUG FIX: psutil returns a plain float when percpu=False, so the
    # unconditional sum()/len() crashed with all_cpu=False.
    if all_cpu:
        avg = sum(usage) / len(usage)
    else:
        avg = usage
    return {'result': usage,
            'alert_status': avg > threshold,
            'message': f'cpu usage over {duration} seconds is {usage}'}
def parse_arg_number(arg):
    '''
    helper to parse the command line argument
    which should provide the number of times to cycle through
    the tests
    returns (True, <number of cycles as int>)
    or if the input is invalid (None, None)
    '''
    try:
        # The original computed `type(int(arg)) == int` — always True when
        # the conversion succeeds — and converted twice; one conversion
        # suffices.
        return (True, int(arg))
    except (ValueError, TypeError):
        # Narrowed from a bare Exception: int() only raises these here.
        print('please provide a number as an argument for the number of tests that you wish to run')
        return (None, None)
def parse_all_arg(arg):
    '''
    helper function to make sure additional functions are run when desired

    Returns True when the third CLI argument (arg[2]) is 'all'
    (case-insensitive); returns None when it is absent or different,
    matching the truthiness check done by the caller.
    '''
    try:
        if arg[2].lower() == 'all':
            return True
    except (IndexError, AttributeError):
        # Narrowed from a bare Exception: missing argument or a
        # non-string element are the only expected failures here.
        return None
    # arg[2] exists but is not 'all' — the original fell through to an
    # implicit None; made explicit.
    return None
# Default check set; replaced by all_funcs when 'all' is passed on the CLI.
health_functions = [check_temp]
all_funcs = [check_temp, get_memory, get_cpu]

if __name__ == '__main__':
    # argv[1]: number of cycles to run; argv[2] == 'all': run every check.
    repeat = 1
    count = 0
    if len(sys.argv) >= 2:
        valid, arg = parse_arg_number(sys.argv[1])
        if valid:
            repeat = arg
        invoke_all = parse_all_arg(sys.argv)
        if invoke_all:
            health_functions = all_funcs
    while count < repeat:
        print('\n')
        for f in health_functions:
            print(datetime.now())
            check = f()
            m_str = f"{check['message']}"
            if check['alert_status']:
                print('ALERT!')
                print(m_str)
            else:
                print(m_str)
        count +=1
        # Pause between health-check cycles.
        time.sleep(30)
| 3.59375 | 4 |
debayer/debayer.py | tasptz/pytorch-debayer | 0 | 12758244 | <reponame>tasptz/pytorch-debayer
import torch
import torch.nn
import torch.nn.functional
class Debayer3x3(torch.nn.Module):
    '''Demosaicing of Bayer images using 3x3 convolutions.

    Requires BG-Bayer color filter array layout. That is,
    the image[1,1]='B', image[1,2]='G'. This corresponds
    to OpenCV naming conventions.

    Compared to Debayer2x2 this method does not use upsampling.
    Instead, we identify five 3x3 interpolation kernels that
    are sufficient to reconstruct every color channel at every
    pixel location.

    We convolve the image with these 5 kernels using stride=1
    and a one pixel replication padding. Finally, we gather
    the correct channel values for each pixel location. Todo so,
    we recognize that the Bayer pattern repeats horizontally and
    vertically every 2 pixels. Therefore, we define the correct
    index lookups for a 2x2 grid cell and then repeat to image
    dimensions.

    Note, in every 2x2 grid cell we have red, blue and two greens
    (G1,G2). The lookups for the two greens differ.
    '''

    def __init__(self):
        super(Debayer3x3, self).__init__()
        # Five 3x3 interpolation kernels stacked as a (5,1,3,3) conv weight:
        #   0: identity (take the pixel itself)
        #   1: mean of the 4 edge neighbours
        #   2: mean of the 4 diagonal neighbours
        #   3: mean of the 2 horizontal neighbours
        #   4: mean of the 2 vertical neighbours
        self.kernels = torch.nn.Parameter(
            torch.tensor([
                [0,0,0],
                [0,1,0],
                [0,0,0],

                [0, 0.25, 0],
                [0.25, 0, 0.25],
                [0, 0.25, 0],

                [0.25, 0, 0.25],
                [0, 0, 0],
                [0.25, 0, 0.25],

                [0, 0, 0],
                [0.5, 0, 0.5],
                [0, 0, 0],

                [0, 0.5, 0],
                [0, 0, 0],
                [0, 0.5, 0],
            ]).view(5,1,3,3), requires_grad=False
        )

        # For each destination channel (R,G,B) and each position in the
        # repeating 2x2 Bayer cell, which of the 5 kernel responses to gather.
        self.index = torch.nn.Parameter(
            torch.tensor([
                # dest channel r
                [0, 3], # pixel is R,G1
                [4, 2], # pixel is G2,B
                # dest channel g
                [1, 0], # pixel is R,G1
                [0, 1], # pixel is G2,B
                # dest channel b
                [2, 4], # pixel is R,G1
                [3, 0], # pixel is G2,B
            ]).view(1,3,2,2), requires_grad=False
        )

    def forward(self, x):
        '''Debayer image.

        Parameters
        ----------
        x : Bx1xHxW tensor
            Images to debayer

        Returns
        -------
        rgb : Bx3xHxW tensor
            Color images in RGB channel order.
        '''
        B,C,H,W = x.shape

        # One-pixel replication padding keeps the output the same size as
        # the input and avoids dark borders.
        x = torch.nn.functional.pad(x, (1,1,1,1), mode='replicate')
        c = torch.nn.functional.conv2d(x, self.kernels, stride=1)
        # Tile the per-cell lookup table over the image and gather the
        # correct interpolation for each channel/pixel.
        rgb = torch.gather(c, 1, self.index.repeat(B,1,H//2,W//2))
        return rgb
class Debayer2x2(torch.nn.Module):
    '''Demosaicing of Bayer images using 2x2 convolutions.

    Requires BG-Bayer color filter array layout. That is,
    the image[1,1]='B', image[1,2]='G'. This corresponds
    to OpenCV naming conventions.

    Each 2x2 Bayer cell is reduced to one RGB pixel by a strided
    convolution with fixed kernels, then the result is upsampled
    bilinearly back to the input resolution.
    '''

    def __init__(self):
        super(Debayer2x2, self).__init__()
        # One 2x2 kernel per output channel: R from the top-left sample,
        # G as the mean of the two green samples, B from the bottom-right.
        weights = torch.tensor([
            1.0, 0.0, 0.0, 0.0,   # red
            0.0, 0.5, 0.5, 0.0,   # green
            0.0, 0.0, 0.0, 1.0,   # blue
        ]).view(3, 1, 2, 2)
        self.kernels = torch.nn.Parameter(weights, requires_grad=False)

    def forward(self, x):
        '''Debayer image.

        Parameters
        ----------
        x : Bx1xHxW tensor
            Images to debayer

        Returns
        -------
        rgb : Bx3xHxW tensor
            Color images in RGB channel order.
        '''
        rgb = torch.nn.functional.conv2d(x, self.kernels, stride=2)
        return torch.nn.functional.interpolate(
            rgb, scale_factor=2, mode='bilinear', align_corners=False)
google-logo.py | RaviTeja-12/redesigned-chainsaw | 0 | 12758245 | <gh_stars>0
import turtle
# Draw a Google "G" logo with turtle graphics: pen setup, four colored ring
# arcs, then filled segments for each color plus the horizontal bar, and a
# caption. NOTE(review): the drawing is a single order-dependent command
# sequence; every command below is kept byte-identical.
turtle.speed(100)
turtle.bgcolor('black')
turtle.width(3)
# arc in #4285F4 (blue), preceded by the straight lead-in stroke
turtle.color('#4285F4', '#4285F4')
turtle.forward(120)
turtle.right(90)
turtle.circle(-150, 50)
# arc in #39A853 (green)
turtle.color('#39A853')
turtle.circle(-150, 100)
# arc in #FBBC05 (yellow)
turtle.color('#FBBC05')
turtle.circle(-150, 60)
# filled segment in #EA4335 (red)
turtle.color('#EA4335', '#EA4335')
turtle.begin_fill()
turtle.circle(-150, 100)
turtle.right(90)
turtle.forward(50)
turtle.right(90)
turtle.circle(100, 100)
turtle.right(90)
turtle.fd(50)
turtle.end_fill()
# filled segment in #FBBC05 (yellow)
turtle.color('#FBBC05')
turtle.begin_fill()
turtle.right(180)
turtle.fd(50)
turtle.right(90)
turtle.circle(100, 60)
turtle.right(90)
turtle.fd(50)
turtle.right(90)
turtle.circle(-150, 60)
turtle.end_fill()
# reposition along the inner arc (no fill)
turtle.right(90)
turtle.fd(50)
turtle.right(90)
turtle.circle(100, 60)
turtle.right(90)
turtle.fd(50)
# filled segment in #39A853 (green)
turtle.color('#39A853')
turtle.begin_fill()
turtle.right(180)
turtle.fd(50)
turtle.right(90)
turtle.circle(100, 100)
turtle.right(90)
turtle.fd(50)
turtle.right(90)
turtle.circle(-150, 100)
turtle.end_fill()
# reposition along the inner arc again (no fill)
turtle.right(90)
turtle.fd(50)
turtle.right(90)
turtle.circle(100, 100)
turtle.right(90)
turtle.fd(50)
# filled segment in #4285F4 (blue) including the horizontal bar of the "G"
turtle.color('#4285F4')
turtle.begin_fill()
turtle.right(180)
turtle.fd(50)
turtle.right(90)
turtle.circle(100, 25)
turtle.left(115)
turtle.forward(60)
turtle.right(90)
turtle.fd(50)
turtle.right(90)
turtle.fd(120)
turtle.right(90)
turtle.circle(-160, 50)
turtle.end_fill()
# caption: lift the pen, move inside the logo, and write the signature
turtle.hideturtle()
turtle.penup()
turtle.goto(-15, 20)
turtle.pendown()
turtle.color('gold')
turtle.pensize(10)
turtle.write("Ravitej")
turtle.done()
| 2.453125 | 2 |
conbench/tests/api/test_benchmarks.py | ursa-labs/conbench | 31 | 12758246 | <gh_stars>10-100
import copy
import decimal
import pytest
from ...api._examples import _api_benchmark_entity
from ...entities._entity import NotFound
from ...entities.distribution import Distribution
from ...entities.summary import Summary
from ...tests.api import _asserts, _fixtures
from ...tests.helpers import _uuid
# Canonical repository URLs that the commit-context tests below expect the
# API to normalize shorthand forms (org/repo, git@ URLs) into.
ARROW_REPO = "https://github.com/apache/arrow"
CONBENCH_REPO = "https://github.com/conbench/conbench"
def _expected_entity(summary):
    """Build the expected API representation of *summary*."""
    fields = (
        summary.id,
        summary.case_id,
        summary.info_id,
        summary.context_id,
        summary.batch_id,
        summary.run_id,
        summary.case.name,
    )
    return _api_benchmark_entity(*fields)
class TestBenchmarkGet(_asserts.GetEnforcer):
    """Endpoint tests for GET /api/benchmarks/{id}/."""

    url = "/api/benchmarks/{}/"
    public = True

    def _create(self, name=None, results=None, unit=None, sha=None):
        """Create one benchmark summary fixture."""
        return _fixtures.summary(
            name=name,
            results=results,
            unit=unit,
            sha=sha,
        )

    def _create_history(self, name, results, unit):
        """Create a three-commit distribution history for *name* and return
        the summary recorded on the latest commit.

        *results* is a sequence of three per-commit result lists
        (grandparent, parent, current).
        """
        self._create(
            name=name, results=results[0], unit=unit, sha=_fixtures.GRANDPARENT
        )
        self._create(name=name, results=results[1], unit=unit, sha=_fixtures.PARENT)
        return self._create(name=name, results=results[2], unit=unit)

    @staticmethod
    def _expected_stats(scale, z_score, z_key, unit=None):
        """Build the expected "stats" payload for results [1, 2, 3] * scale.

        *z_key* is "z_regression" or "z_improvement". *unit* is only added
        to the payload when given (the small-scale "s" cases in the original
        expectations omit it).
        """
        fmt = "{:.6f}".format
        stats = {
            "data": [fmt(scale * v) for v in (1, 2, 3)],
            "iqr": fmt(scale * 1),
            "iterations": 3,
            "max": fmt(scale * 3),
            "mean": fmt(scale * 2),
            "median": fmt(scale * 2),
            "min": fmt(scale * 1),
            "q1": fmt(scale * 1.5),
            "q3": fmt(scale * 2.5),
            "stdev": fmt(scale * 1),
            "times": [],
            "z_score": z_score,
            z_key: True,
        }
        if unit is not None:
            stats["unit"] = unit
        return stats

    def test_get_benchmark(self, client):
        """A created benchmark can be fetched by id."""
        self.authenticate(client)
        summary = self._create()
        response = client.get(f"/api/benchmarks/{summary.id}/")
        self.assert_200_ok(response, _expected_entity(summary))

    def test_get_benchmark_regression(self, client):
        """More-is-better unit (i/s): decreasing results are a regression."""
        self.authenticate(client)
        summary = self._create_history(_uuid(), _fixtures.RESULTS_DOWN, "i/s")
        expected = _expected_entity(summary)
        expected["stats"].update(
            self._expected_stats(
                1,
                "-{:.6f}".format(abs(_fixtures.Z_SCORE_DOWN)),
                "z_regression",
                unit="i/s",
            )
        )
        response = client.get(f"/api/benchmarks/{summary.id}/")
        self.assert_200_ok(response, expected)

    def test_get_benchmark_regression_less_is_better(self, client):
        """Less-is-better unit (s): increasing results are a regression."""
        self.authenticate(client)
        summary = self._create_history(_uuid(), _fixtures.RESULTS_UP, "s")
        expected = _expected_entity(summary)
        expected["stats"].update(
            self._expected_stats(
                10,
                "-{:.6f}".format(abs(_fixtures.Z_SCORE_UP)),
                "z_regression",
            )
        )
        response = client.get(f"/api/benchmarks/{summary.id}/")
        self.assert_200_ok(response, expected)

    def test_get_benchmark_improvement(self, client):
        """More-is-better unit (i/s): increasing results are an improvement."""
        self.authenticate(client)
        summary = self._create_history(_uuid(), _fixtures.RESULTS_UP, "i/s")
        expected = _expected_entity(summary)
        expected["stats"].update(
            self._expected_stats(
                10,
                "{:.6f}".format(abs(_fixtures.Z_SCORE_UP)),
                "z_improvement",
                unit="i/s",
            )
        )
        response = client.get(f"/api/benchmarks/{summary.id}/")
        self.assert_200_ok(response, expected)

    def test_get_benchmark_improvement_less_is_better(self, client):
        """Less-is-better unit (s): decreasing results are an improvement."""
        self.authenticate(client)
        summary = self._create_history(_uuid(), _fixtures.RESULTS_DOWN, "s")
        expected = _expected_entity(summary)
        expected["stats"].update(
            self._expected_stats(
                1,
                "{:.6f}".format(abs(_fixtures.Z_SCORE_DOWN)),
                "z_improvement",
            )
        )
        response = client.get(f"/api/benchmarks/{summary.id}/")
        self.assert_200_ok(response, expected)
class TestBenchmarkDelete(_asserts.DeleteEnforcer):
    """Endpoint tests for DELETE /api/benchmarks/{id}/."""

    url = "/api/benchmarks/{}/"

    def test_delete_benchmark(self, client):
        """Deleting a benchmark makes it unfetchable afterwards."""
        self.authenticate(client)
        summary = _fixtures.summary()

        # the summary exists before the delete
        Summary.one(id=summary.id)

        # delete it
        response = client.delete(f"/api/benchmarks/{summary.id}/")
        self.assert_204_no_content(response)

        # the summary is gone afterwards
        with pytest.raises(NotFound):
            Summary.one(id=summary.id)
class TestBenchmarkList(_asserts.ListEnforcer):
    """Endpoint tests for GET /api/benchmarks/ (listing and filtering)."""

    url = "/api/benchmarks/"
    public = True

    def test_benchmark_list(self, client):
        """The unfiltered list contains a created benchmark."""
        self.authenticate(client)
        summary = _fixtures.summary()
        response = client.get("/api/benchmarks/")
        self.assert_200_ok(response, contains=_expected_entity(summary))

    def test_benchmark_list_filter_by_name(self, client):
        """?name= returns only the benchmark with that name."""
        self.authenticate(client)
        _fixtures.summary(name="aaa")
        match = _fixtures.summary(name="bbb")
        _fixtures.summary(name="ccc")
        response = client.get("/api/benchmarks/?name=bbb")
        self.assert_200_ok(response, [_expected_entity(match)])

    def test_benchmark_list_filter_by_batch_id(self, client):
        """?batch_id= returns only the benchmark in that batch."""
        self.authenticate(client)
        _fixtures.summary(batch_id="10")
        match = _fixtures.summary(batch_id="20")
        _fixtures.summary(batch_id="30")
        response = client.get("/api/benchmarks/?batch_id=20")
        self.assert_200_ok(response, [_expected_entity(match)])

    def test_benchmark_list_filter_by_run_id(self, client):
        """?run_id= returns only the benchmark in that run."""
        self.authenticate(client)
        _fixtures.summary(run_id="100")
        match = _fixtures.summary(run_id="200")
        _fixtures.summary(run_id="300")
        response = client.get("/api/benchmarks/?run_id=200")
        self.assert_200_ok(response, [_expected_entity(match)])
class TestBenchmarkPost(_asserts.PostEnforcer):
    """Endpoint tests for POST /api/benchmarks/."""

    url = "/api/benchmarks/"
    valid_payload = _fixtures.VALID_PAYLOAD
    required_fields = [
        "batch_id",
        "context",
        "info",
        "machine_info",
        "run_id",
        "stats",
        "tags",
        "timestamp",
    ]

    def _assert_created(self, response, sha=None, repository=None):
        """Assert a 201 response with the expected entity and location and
        return the newly created summary.

        When *sha* is given, also assert the stored commit context has that
        sha, the given *repository*, and no parent.
        """
        new_id = response.json["id"]
        summary = Summary.one(id=new_id)
        if sha is not None:
            assert summary.run.commit.sha == sha
            assert summary.run.commit.repository == repository
            assert summary.run.commit.parent is None
        location = "http://localhost/api/benchmarks/%s/" % new_id
        self.assert_201_created(response, _expected_entity(summary), location)
        return summary

    def _assert_none_commit(self, response):
        """Assert the created summary has an empty commit context."""
        summary = self._assert_created(response, sha="", repository="")
        return summary, summary.id

    def _assert_required_field(self, client, field):
        """Posting with *field* omitted or null must be a 400."""
        data = copy.deepcopy(self.valid_payload)
        # omitted
        del data[field]
        response = client.post("/api/benchmarks/", json=data)
        self.assert_400_bad_request(
            response, {field: ["Missing data for required field."]}
        )
        # null
        data[field] = None
        response = client.post("/api/benchmarks/", json=data)
        self.assert_400_bad_request(response, {field: ["Field may not be null."]})

    def _assert_distribution(self, case_id, observations, sd):
        """Assert the single distribution row for *case_id* has the expected
        aggregate means and, for every aggregate, a stdev equal to *sd*
        (``None`` meaning "no spread recorded yet")."""
        distributions = Distribution.all(case_id=case_id)
        assert len(distributions) == 1
        distribution = distributions[0]
        assert distribution.unit == "s"
        assert distribution.observations == observations
        expected_means = {
            "mean": decimal.Decimal("0.03636900000000000000"),
            "min": decimal.Decimal("0.00473300000000000000"),
            "max": decimal.Decimal("0.14889600000000000000"),
            "median": decimal.Decimal("0.00898800000000000000"),
        }
        for field, mean in expected_means.items():
            assert getattr(distribution, field + "_mean") == mean
            actual_sd = getattr(distribution, field + "_sd")
            if sd is None:
                assert actual_sd is None
            else:
                assert actual_sd == sd

    def test_create_benchmark(self, client):
        """Posting a valid payload creates a benchmark."""
        self.authenticate(client)
        response = client.post("/api/benchmarks/", json=self.valid_payload)
        self._assert_created(response)

    def test_create_benchmark_normalizes_data(self, client):
        """Two posts differing only in run_id share case/info/context/machine."""
        self.authenticate(client)
        response = client.post("/api/benchmarks/", json=self.valid_payload)
        summary_1 = Summary.one(id=response.json["id"])
        data = copy.deepcopy(self.valid_payload)
        data["run_id"] = data["run_id"] + "_X"
        response = client.post("/api/benchmarks/", json=data)
        summary_2 = Summary.one(id=response.json["id"])
        assert summary_1.id != summary_2.id
        assert summary_1.case_id == summary_2.case_id
        assert summary_1.info_id == summary_2.info_id
        assert summary_1.context_id == summary_2.context_id
        assert summary_1.run.machine_id == summary_2.run.machine_id
        assert summary_1.run_id != summary_2.run_id
        assert summary_1.run.commit_id == summary_2.run.commit_id

    def test_create_benchmark_can_specify_run_and_batch_id(self, client):
        """Client-chosen run_id and batch_id are stored verbatim."""
        self.authenticate(client)
        data = copy.deepcopy(self.valid_payload)
        run_id, batch_id = _uuid(), _uuid()
        data["run_id"] = run_id
        data["batch_id"] = batch_id
        response = client.post("/api/benchmarks/", json=data)
        summary = Summary.one(id=response.json["id"])
        assert summary.run_id == run_id
        assert summary.batch_id == batch_id

    def test_create_benchmark_cannot_omit_batch_id(self, client):
        """batch_id is required and may not be null."""
        self.authenticate(client)
        self._assert_required_field(client, "batch_id")

    def test_create_benchmark_cannot_omit_run_id(self, client):
        """run_id is required and may not be null."""
        self.authenticate(client)
        self._assert_required_field(client, "run_id")

    def test_nested_schema_validation(self, client):
        """Validation errors are reported per nested schema field."""
        self.authenticate(client)
        data = copy.deepcopy(self.valid_payload)
        del data["stats"]["iterations"]
        del data["github"]["commit"]
        del data["machine_info"]["os_name"]
        data["machine_info"]["os_version"] = None
        data["stats"]["extra"] = "field"
        data["github"]["extra"] = "field"
        data["machine_info"]["extra"] = "field"
        response = client.post("/api/benchmarks/", json=data)
        message = {
            "github": {
                "extra": ["Unknown field."],
                "commit": ["Missing data for required field."],
            },
            "machine_info": {
                "extra": ["Unknown field."],
                "os_name": ["Missing data for required field."],
                "os_version": ["Field may not be null."],
            },
            "stats": {
                "extra": ["Unknown field."],
                "iterations": ["Missing data for required field."],
            },
        }
        self.assert_400_bad_request(response, message)

    def test_create_no_commit_context(self, client):
        """Posting without a github key stores an empty commit context."""
        self.authenticate(client)
        data = copy.deepcopy(self.valid_payload)
        data["run_id"] = _uuid()
        del data["github"]
        # posted twice to also exercise the duplicate-key path (commit_index)
        for _ in range(2):
            response = client.post("/api/benchmarks/", json=data)
            self._assert_created(response, sha="", repository="")

    def test_create_empty_commit_context(self, client):
        """Posting empty commit/repository stores an empty commit context."""
        self.authenticate(client)
        data = copy.deepcopy(self.valid_payload)
        data["run_id"] = _uuid()
        data["github"]["commit"] = ""
        data["github"]["repository"] = ""
        # posted twice to also exercise the duplicate-key path (commit_index)
        for _ in range(2):
            response = client.post("/api/benchmarks/", json=data)
            self._assert_created(response, sha="", repository="")

    def test_create_unknown_commit_context(self, client):
        """A sha the backend cannot resolve is stored as-is, parentless."""
        self.authenticate(client)
        data = copy.deepcopy(self.valid_payload)
        data["run_id"] = _uuid()
        data["github"]["commit"] = "unknown commit"
        data["github"]["repository"] = ARROW_REPO
        # posted twice to also exercise the duplicate-key path (commit_index)
        for _ in range(2):
            response = client.post("/api/benchmarks/", json=data)
            self._assert_created(response, sha="unknown commit", repository=ARROW_REPO)

    def test_create_different_git_repo_format(self, client):
        """A git@-style repository URL is normalized to the https form."""
        self.authenticate(client)
        data = copy.deepcopy(self.valid_payload)
        data["run_id"] = _uuid()
        data["github"]["commit"] = "testing repository with git@g"
        data["github"]["repository"] = "<EMAIL>:apache/arrow"
        response = client.post("/api/benchmarks/", json=data)
        self._assert_created(
            response, sha="testing repository with git@g", repository=ARROW_REPO
        )

    def test_create_repo_not_full_url(self, client):
        """An org/repo shorthand is normalized to the full https URL."""
        self.authenticate(client)
        data = copy.deepcopy(self.valid_payload)
        data["run_id"] = _uuid()
        data["github"]["commit"] = "testing repository with just org/repo"
        data["github"]["repository"] = "apache/arrow"
        response = client.post("/api/benchmarks/", json=data)
        self._assert_created(
            response,
            sha="testing repository with just org/repo",
            repository=ARROW_REPO,
        )

    def test_create_allow_just_repository(self, client):
        """A repository with an empty sha is accepted, for any repository."""
        self.authenticate(client)
        data = copy.deepcopy(self.valid_payload)
        for repository in (ARROW_REPO, CONBENCH_REPO):
            data["run_id"] = _uuid()
            data["github"]["commit"] = ""
            data["github"]["repository"] = repository
            response = client.post("/api/benchmarks/", json=data)
            self._assert_created(response, sha="", repository=repository)

    def test_create_allow_just_sha(self, client):
        """A sha with an empty repository is accepted."""
        self.authenticate(client)
        data = copy.deepcopy(self.valid_payload)
        data["run_id"] = _uuid()
        data["github"]["commit"] = "something something"
        data["github"]["repository"] = ""
        response = client.post("/api/benchmarks/", json=data)
        self._assert_created(response, sha="something something", repository="")

    def test_create_benchmark_distribution(self, client):
        """Each posted result updates the distribution of its case."""
        self.authenticate(client)
        data = copy.deepcopy(self.valid_payload)
        data["tags"]["name"] = _uuid()

        # first result: one observation, no spread recorded yet
        response = client.post("/api/benchmarks/", json=data)
        summary_1 = self._assert_created(response)
        case_id = summary_1.case_id
        self._assert_distribution(case_id, 1, None)

        # second identical result: two observations with zero spread
        response = client.post("/api/benchmarks/", json=data)
        summary_2 = self._assert_created(response)
        assert summary_1.case_id == summary_2.case_id
        assert summary_1.context_id == summary_2.context_id
        assert summary_1.run.machine_id == summary_2.run.machine_id
        assert summary_1.run.commit_id == summary_2.run.commit_id
        self._assert_distribution(case_id, 2, decimal.Decimal("0"))
| 2.046875 | 2 |
stac_fastapi/pgstac/tests/api/test_api.py | borism/stac-fastapi | 64 | 12758247 | <reponame>borism/stac-fastapi<filename>stac_fastapi/pgstac/tests/api/test_api.py
from datetime import datetime, timedelta
import pytest
# Routes that the STAC API core must register on the application.
STAC_CORE_ROUTES = [
    "GET /",
    "GET /collections",
    "GET /collections/{collection_id}",
    "GET /collections/{collection_id}/items",
    "GET /collections/{collection_id}/items/{item_id}",
    "GET /conformance",
    "GET /search",
    "POST /search",
]
# Routes added by the transaction extension.
STAC_TRANSACTION_ROUTES = [
    "DELETE /collections/{collection_id}",
    "DELETE /collections/{collection_id}/items/{item_id}",
    "POST /collections",
    "POST /collections/{collection_id}/items",
    "PUT /collections",
    "PUT /collections/{collection_id}/items",
]
@pytest.mark.asyncio
async def test_post_search_content_type(app_client):
    """POST /search responds with the GeoJSON media type."""
    response = await app_client.post("search", json={"limit": 1})
    assert response.headers["content-type"] == "application/geo+json"
@pytest.mark.asyncio
async def test_get_search_content_type(app_client):
    """GET /search responds with the GeoJSON media type."""
    response = await app_client.get("search")
    assert response.headers["content-type"] == "application/geo+json"
@pytest.mark.asyncio
async def test_api_headers(app_client):
    """GET /api serves the OpenAPI document with its versioned media type."""
    response = await app_client.get("/api")
    assert response.status_code == 200
    expected_type = "application/vnd.oai.openapi+json;version=3.0"
    assert response.headers["content-type"] == expected_type
@pytest.mark.asyncio
async def test_core_router(api_client):
    """Every STAC core route is registered on the application."""
    registered = {
        f"{list(route.methods)[0]} {route.path}" for route in api_client.app.routes
    }
    assert not set(STAC_CORE_ROUTES) - registered
@pytest.mark.asyncio
async def test_transactions_router(api_client):
    """Every transaction-extension route is registered on the application."""
    registered = {
        f"{list(route.methods)[0]} {route.path}" for route in api_client.app.routes
    }
    assert not set(STAC_TRANSACTION_ROUTES) - registered
@pytest.mark.asyncio
async def test_app_transaction_extension(
    app_client, load_test_data, load_test_collection
):
    """An item can be created through the transaction extension."""
    collection = load_test_collection
    item = load_test_data("test_item.json")
    response = await app_client.post(f"/collections/{collection.id}/items", json=item)
    assert response.status_code == 200
@pytest.mark.asyncio
async def test_app_query_extension(load_test_data, app_client, load_test_collection):
    """An "eq" query on a property matches the ingested item."""
    collection = load_test_collection
    item = load_test_data("test_item.json")
    response = await app_client.post(f"/collections/{collection.id}/items", json=item)
    assert response.status_code == 200

    query = {"query": {"proj:epsg": {"eq": item["properties"]["proj:epsg"]}}}
    response = await app_client.post("/search", json=query)
    assert response.status_code == 200
    assert len(response.json()["features"]) == 1
@pytest.mark.asyncio
async def test_app_query_extension_limit_1(
    load_test_data, app_client, load_test_collection
):
    """limit=1 caps the number of returned features at one."""
    collection = load_test_collection
    item = load_test_data("test_item.json")
    response = await app_client.post(f"/collections/{collection.id}/items", json=item)
    assert response.status_code == 200

    response = await app_client.post("/search", json={"limit": 1})
    assert response.status_code == 200
    assert len(response.json()["features"]) == 1
@pytest.mark.asyncio
async def test_app_query_extension_limit_eq0(app_client):
    """A zero limit is rejected with 400."""
    response = await app_client.post("/search", json={"limit": 0})
    assert response.status_code == 400
@pytest.mark.asyncio
async def test_app_query_extension_limit_lt0(
    load_test_data, app_client, load_test_collection
):
    """A negative limit is rejected with 400."""
    collection = load_test_collection
    item = load_test_data("test_item.json")
    response = await app_client.post(f"/collections/{collection.id}/items", json=item)
    assert response.status_code == 200

    response = await app_client.post("/search", json={"limit": -1})
    assert response.status_code == 400
@pytest.mark.asyncio
async def test_app_query_extension_limit_gt10000(
    load_test_data, app_client, load_test_collection
):
    """A limit above 10000 is rejected with 400."""
    collection = load_test_collection
    item = load_test_data("test_item.json")
    response = await app_client.post(f"/collections/{collection.id}/items", json=item)
    assert response.status_code == 200

    response = await app_client.post("/search", json={"limit": 10001})
    assert response.status_code == 400
@pytest.mark.asyncio
async def test_app_query_extension_gt(load_test_data, app_client, load_test_collection):
    """A strict "gt" query on the item's own value matches nothing."""
    collection = load_test_collection
    item = load_test_data("test_item.json")
    response = await app_client.post(f"/collections/{collection.id}/items", json=item)
    assert response.status_code == 200

    query = {"query": {"proj:epsg": {"gt": item["properties"]["proj:epsg"]}}}
    response = await app_client.post("/search", json=query)
    assert response.status_code == 200
    assert len(response.json()["features"]) == 0
@pytest.mark.asyncio
async def test_app_query_extension_gte(
    load_test_data, app_client, load_test_collection
):
    """A "gte" query on the item's own value matches the item."""
    collection = load_test_collection
    item = load_test_data("test_item.json")
    response = await app_client.post(f"/collections/{collection.id}/items", json=item)
    assert response.status_code == 200

    query = {"query": {"proj:epsg": {"gte": item["properties"]["proj:epsg"]}}}
    response = await app_client.post("/search", json=query)
    assert response.status_code == 200
    assert len(response.json()["features"]) == 1
@pytest.mark.asyncio
async def test_app_sort_extension(load_test_data, app_client, load_test_collection):
    """Results honor the requested sortby direction on datetime."""
    collection = load_test_collection

    newer_item = load_test_data("test_item.json")
    newer_dt = datetime.strptime(
        newer_item["properties"]["datetime"], "%Y-%m-%dT%H:%M:%SZ"
    )
    response = await app_client.post(
        f"/collections/{collection.id}/items", json=newer_item
    )
    assert response.status_code == 200

    older_item = load_test_data("test_item.json")
    older_item["id"] = "another-item"
    older_item["properties"]["datetime"] = (newer_dt - timedelta(days=1)).strftime(
        "%Y-%m-%dT%H:%M:%SZ"
    )
    response = await app_client.post(
        f"/collections/{collection.id}/items", json=older_item
    )
    assert response.status_code == 200

    # descending: the newer item comes first
    response = await app_client.post(
        "/search",
        json={
            "collections": [collection.id],
            "sortby": [{"field": "datetime", "direction": "desc"}],
        },
    )
    assert response.status_code == 200
    features = response.json()["features"]
    assert features[0]["id"] == newer_item["id"]
    assert features[1]["id"] == older_item["id"]

    # ascending: the older item comes first
    response = await app_client.post(
        "/search",
        json={
            "collections": [collection.id],
            "sortby": [{"field": "datetime", "direction": "asc"}],
        },
    )
    assert response.status_code == 200
    features = response.json()["features"]
    assert features[0]["id"] == older_item["id"]
    assert features[1]["id"] == newer_item["id"]
@pytest.mark.asyncio
async def test_search_invalid_date(load_test_data, app_client, load_test_collection):
    """A malformed datetime range is rejected with 400."""
    collection = load_test_collection
    item = load_test_data("test_item.json")
    response = await app_client.post(f"/collections/{collection.id}/items", json=item)
    assert response.status_code == 200

    response = await app_client.post(
        "/search",
        json={"datetime": "2020-XX-01/2020-10-30", "collections": [collection.id]},
    )
    assert response.status_code == 400
@pytest.mark.asyncio
async def test_bbox_3d(load_test_data, app_client, load_test_collection):
    """A bbox with elevation values still matches the ingested item."""
    collection = load_test_collection
    item = load_test_data("test_item.json")
    response = await app_client.post(f"/collections/{collection.id}/items", json=item)
    assert response.status_code == 200

    australia_bbox = [106.343365, -47.199523, 0.1, 168.218365, -19.437288, 0.1]
    response = await app_client.post(
        "/search", json={"bbox": australia_bbox, "collections": [collection.id]}
    )
    assert response.status_code == 200
    assert len(response.json()["features"]) == 1
@pytest.mark.asyncio
async def test_app_search_response(load_test_data, app_client, load_test_collection):
    """Search returns a FeatureCollection without top-level STAC fields."""
    collection = load_test_collection
    response = await app_client.post("/search", json={"collections": [collection.id]})
    assert response.status_code == 200
    body = response.json()
    assert body.get("type") == "FeatureCollection"
    # stac_version and stac_extensions were removed in v1.0.0-beta.3
    assert body.get("stac_version") is None
    assert body.get("stac_extensions") is None
@pytest.mark.asyncio
async def test_search_point_intersects(
    load_test_data, app_client, load_test_collection
):
    """A point inside the item footprint matches via intersects."""
    collection = load_test_collection
    item = load_test_data("test_item.json")
    response = await app_client.post(f"/collections/{collection.id}/items", json=item)
    assert response.status_code == 200

    geometry = {"type": "Point", "coordinates": [150.04, -33.14]}
    response = await app_client.post(
        "/search",
        json={"intersects": geometry, "collections": [item["collection"]]},
    )
    assert response.status_code == 200
    assert len(response.json()["features"]) == 1
@pytest.mark.asyncio
async def test_search_line_string_intersects(
    load_test_data, app_client, load_test_collection
):
    """A line crossing the item footprint matches via intersects."""
    collection = load_test_collection
    item = load_test_data("test_item.json")
    response = await app_client.post(f"/collections/{collection.id}/items", json=item)
    assert response.status_code == 200

    geometry = {
        "type": "LineString",
        "coordinates": [[150.04, -33.14], [150.22, -33.89]],
    }
    response = await app_client.post(
        "/search",
        json={"intersects": geometry, "collections": [item["collection"]]},
    )
    assert response.status_code == 200
    assert len(response.json()["features"]) == 1
| 2.0625 | 2 |
aedes_server/api/urls.py | henriquenogueira/aedes | 0 | 12758248 | <gh_stars>0
from django.conf.urls import url, include
from rest_framework import routers
from .viewsets import ReportViewSet, ClusterViewSet
# DRF default router: generates the standard list/detail URL patterns (and a
# browsable API root) for each registered viewset.
router = routers.DefaultRouter()
router.register(r'reports', ReportViewSet)
router.register(r'clusters', ClusterViewSet)
# Mount every router-generated URL at the root of this app's URL space.
urlpatterns = [
    url(r'^', include(router.urls)),
]
| 1.507813 | 2 |
_2015/elevator/test_elevator.py | dcsparkes/adventofcode | 0 | 12758249 | import elevator
import unittest
class TestElevator(unittest.TestCase):
    """Checks elevator.parenthesesCount / parenthesesMismatch against known
    answers for the bundled puzzle input file (presumably Advent of Code
    2015 day 1, given the directory name — confirm)."""
    def test_parenthesesCount_inputFile(self):
        # Expected answer 74 for the stored "input1.1.txt" input.
        self.assertEqual(74, elevator.parenthesesCount("input1.1.txt"))
    def test_parenthesesMismatch_inputFile(self):
        # Expected answer 1795 for the stored "input1.1.txt" input.
        self.assertEqual(1795, elevator.parenthesesMismatch("input1.1.txt"))
# Allow running this test module directly: python test_elevator.py
if __name__ == '__main__':
    unittest.main()
| 2.84375 | 3 |
files/Output_Script.py | caltechlibrary/2019-01-22-Python-Workshop | 2 | 12758250 |
# coding: utf-8
# In[ ]:
import numpy
import sys
import glob
import matplotlib.pyplot
def analyze(filename):
    """Plot the per-column mean, min and max of the CSV *filename* and save
    the three-panel figure next to the data as "<filename>_fig.eps"."""
    data = numpy.loadtxt(fname=filename, delimiter=',')

    fig = matplotlib.pyplot.figure(figsize=(10.0, 3.0))
    axes1 = fig.add_subplot(1, 3, 1)
    axes2 = fig.add_subplot(1, 3, 2)
    axes3 = fig.add_subplot(1, 3, 3)

    axes1.set_ylabel("average")
    axes1.plot(numpy.mean(data, axis=0))
    axes2.set_ylabel("min")
    axes2.plot(numpy.min(data, axis=0))
    axes3.set_ylabel("max")
    axes3.plot(numpy.max(data, axis=0))

    fig.tight_layout()
    matplotlib.pyplot.savefig(filename + '_fig.eps')
    # Close the figure: the driver loop calls analyze() once per input file,
    # and matplotlib keeps unclosed figures alive (memory leak).
    matplotlib.pyplot.close(fig)
def detect_problems(f_name):
    """Print a one-line sanity verdict for the CSV file *f_name*.

    Flags data whose per-column maxima look artificially linear
    (max[0] == 0 and max[20] == 20) or whose per-column minima sum to zero;
    otherwise prints "OK".
    """
    data = numpy.loadtxt(fname=f_name, delimiter=',')
    # Compute the column maxima once instead of twice.
    maxima = numpy.max(data, axis=0)
    # NOTE(review): the [0]/[20] probe assumes at least 21 columns whenever
    # maxima[0] == 0; kept as-is to preserve behavior.
    if maxima[0] == 0 and maxima[20] == 20:
        # Fixed typo in the user-facing message ("Suspicous" -> "Suspicious").
        print("Suspicious looking maxima")
    elif numpy.sum(numpy.min(data, axis=0)) == 0:
        print("Suspicious looking minima")
    else:
        print("OK")
# Command-line entry point: process every CSV file whose name starts with
# the prefix given as the first argument.
prefix = sys.argv[1]
for csv_file in sorted(glob.glob(prefix + '*.csv')):
    print(csv_file)
    analyze(csv_file)
    detect_problems(csv_file)
| 2.953125 | 3 |