max_stars_repo_path stringlengths 3 269 | max_stars_repo_name stringlengths 4 119 | max_stars_count int64 0 191k | id stringlengths 1 7 | content stringlengths 6 1.05M | score float64 0.23 5.13 | int_score int64 0 5 |
|---|---|---|---|---|---|---|
modules/points.py | LouisBluhm/PyBot | 3 | 12760551 | <gh_stars>1-10
import logging, coloredlogs
from modules.config import *
from modules.timer import Timer
from modules.database import Database
# Shared module-level database connection used by the Points handler below;
# credentials/settings come from modules.config (star-imported above).
database = Database(db_host, db_user, db_pass, db_name, db_autocommit)
database.database_connection()
class Points:
    """Handler for the channel-currency ("points") chat commands.

    Sub-commands: show your own balance, show another user's balance,
    show a rank, or give points to another user.  Uses the module-level
    ``database`` connection defined above.
    """

    # Populated elsewhere; kept for interface compatibility.
    CommandMain = ''
    # Index 0: the currency command itself, 1: rank lookup, 2: give points.
    CommandMainOptions = [CURRENCY, 'rank', 'give']
    CommandResponses = []

    def __init__(self, user, user_to_check):
        # user: the chat user who issued the command.
        self.user = user
        self.currency = CURRENCY
        # user_to_check: optional target user of the command, or None when
        # the command refers to the issuer themselves.
        self.user_to_check = user_to_check
        # self.cooldown_timer = Timer(self.user, 120, RouletteCooldown, "Roulette")

    def execute_command(self, command):
        """Dispatch *command* to the matching handler."""
        if command == Points.CommandMainOptions[0]:
            if self.user_to_check is None:
                self.get_user_points_self()
            else:
                self.get_user_points()
        if command == Points.CommandMainOptions[1]:
            if self.user_to_check is None:
                self.get_user_rank(self.user)
            else:
                self.get_user_rank(self.user_to_check)
        if command == Points.CommandMainOptions[2]:
            # NOTE(review): givepoints() is declared as (reciever, amount) but
            # is called with (user_to_check, user), so the issuer's name lands
            # in the ``amount`` parameter and int(amount) would fail — confirm
            # where the intended amount is supposed to come from.
            self.givepoints(self.user_to_check, self.user)

    def get_user_points_self(self):
        """Whisper the issuer their own point balance."""
        from modules.bot import bot_msg_whsp
        bot_msg_whsp(database.db_get_points_user(self.user, self.user), self.user)

    def get_user_points(self):
        """Announce another user's point balance in chat."""
        from modules.bot import bot_msg
        bot_msg(database.db_get_points_user(self.user_to_check, self.user))

    def get_user_rank(self, user):
        """Announce *user*'s rank in chat."""
        from modules.bot import bot_msg
        bot_msg(database.db_get_user_rank(user))

    def givepoints(self, reciever, amount):
        """Transfer *amount* points from the issuer to *reciever*.

        Returns a chat message string; '' means nothing should be said
        (self-transfer, non-positive amount, or unknown receiver).
        """
        if self.user == reciever:
            return ""
        if(database.db_check_user_exists(reciever)):
            get_user_points = database.db_get_user_points_int(self.user)
            if(int(amount) > get_user_points):
                return "You don't have {} {} {} FailFish".format(amount, CURRENCY, self.user)
            if(int(amount) <= 0):
                return ""
            else:
                database.db_add_points_user(reciever, amount)
                database.db_minus_points_user(self.user, amount)
                return "{} gave {} {} to {}! <3".format(self.user, amount, CURRENCY, reciever)
        else:
            return ""
"""Apriori association-rule mining on the Market Basket dataset."""
import numpy as np
import matplotlib.pyplot as plt
import pandas as pd

# Load the transaction data; the CSV has no header row.
dataset = pd.read_csv('Market_Basket_Optimisation.csv', header=None)

# Build one list of item strings per transaction.  Sizes are taken from the
# data itself instead of the hard-coded 7501 rows x 20 columns, so the script
# also works on differently-sized basket files.
n_rows, n_cols = dataset.shape
transactions = [
    [str(dataset.values[i, j]) for j in range(n_cols)]
    for i in range(n_rows)
]

from apyori import apriori

# Mine rules: item(set)s in >=0.3% of baskets, confidence >=20%, lift >=3.
rules = apriori(transactions, min_support=0.003, min_confidence=0.2,
                min_lift=3, min_length=2)
results = list(rules)
mkt/collections/filters.py | spasovski/zamboni | 1 | 12760553 | <filename>mkt/collections/filters.py
from django import forms
from django.core.validators import EMPTY_VALUES
from django_filters.filters import ChoiceFilter, ModelChoiceFilter
from django_filters.filterset import FilterSet
import amo
import mkt
from addons.models import Category
from mkt.api.forms import SluggableModelChoiceField
from mkt.collections.models import Collection
class SlugChoiceFilter(ChoiceFilter):
    """ChoiceFilter that accepts a slug, a numeric id, or an empty value.

    Built from ``choices_dict``, a mapping of slug -> object where each
    object carries an ``id`` attribute.
    """

    def __init__(self, *args, **kwargs):
        self.choices_dict = kwargs.pop('choices_dict')
        # Create a choice dynamically to allow None, slugs and ids.
        # NOTE(review): concatenating ``.items()`` with a list only works on
        # Python 2 (where items() returns a list) — this codebase appears to
        # target that era; confirm before porting.
        slugs_choices = self.choices_dict.items()
        ids_choices = [(v.id, v) for v in self.choices_dict.values()]
        kwargs['choices'] = [(None, None)] + slugs_choices + ids_choices
        return super(SlugChoiceFilter, self).__init__(*args, **kwargs)

    def filter(self, qs, value):
        """Filter *qs* on self.name, translating slugs to numeric ids first.

        Empty string / None filter on NULL; unknown slugs also fall back to
        NULL rather than raising.
        """
        if value == '' or value is None:
            value = None
        elif not value.isdigit():
            # We are passed a slug, get the id by looking at the choices
            # dict, defaulting to None if no corresponding value is found.
            value = self.choices_dict.get(value, None)
            if value is not None:
                value = value.id
        return qs.filter(**{self.name: value})
class SlugModelChoiceFilter(ModelChoiceFilter):
    """ModelChoiceFilter whose form field resolves values by slug."""

    field_class = SluggableModelChoiceField

    def filter(self, qs, value):
        # Always apply the lookup, even for empty values (unlike the parent,
        # which skips them) — this is what allows filtering on NULL.
        return qs.filter(**{'%s__%s' % (self.name, self.lookup_type): value})
class CollectionFilterSet(FilterSet):
    """FilterSet for Collection, filtering on carrier, region and category.

    Carrier and region accept slugs or numeric ids; category is matched by
    slug via SluggableModelChoiceField.
    """

    # Note: the filter names must match what ApiSearchForm and CategoryViewSet
    # are using.
    carrier = SlugChoiceFilter(name='carrier',
                               choices_dict=mkt.carriers.CARRIER_MAP)
    region = SlugChoiceFilter(name='region',
                              choices_dict=mkt.regions.REGION_LOOKUP)
    cat = SlugModelChoiceFilter(name='category',
        queryset=Category.objects.filter(type=amo.ADDON_WEBAPP),
        sluggable_to_field_name='slug',)

    class Meta:
        model = Collection
        # All fields are provided above, but django-filter needs Meta.field to
        # exist.
        fields = []

    def get_queryset(self):
        """
        Return the queryset to use for the filterset.

        Copied from django-filter qs property, modified to support filtering on
        'empty' values, at the expense of multi-lookups like 'x < 4 and x > 2'.
        """
        valid = self.is_bound and self.form.is_valid()

        if self.strict and self.is_bound and not valid:
            # Strict mode with invalid data: return nothing, but attach the
            # form errors so the caller can report why.
            qs = self.queryset.none()
            qs.filter_errors = self.form.errors
            return qs

        # Start with all the results and filter from there.
        qs = self.queryset.all()
        for name, filter_ in self.filters.items():
            if valid:
                if name in self.form.data:
                    value = self.form.cleaned_data[name]
                else:
                    # Field absent from the submitted data: skip this filter
                    # entirely rather than filtering on an empty value.
                    continue
            else:
                raw_value = self.form[name].value()
                try:
                    value = self.form.fields[name].clean(raw_value)
                except forms.ValidationError:
                    if self.strict:
                        return self.queryset.none()
                    else:
                        continue

            # At this point we should have valid & clean data.
            qs = filter_.filter(qs, value)

        # Optional ordering.
        if self._meta.order_by:
            order_field = self.form.fields[self.order_by_field]
            data = self.form[self.order_by_field].data
            ordered = None
            try:
                ordered = order_field.clean(data)
            except forms.ValidationError:
                pass

            # Fall back to the first ordering choice when none was supplied
            # and we are in strict mode.
            if ordered in EMPTY_VALUES and self.strict:
                ordered = self.form.fields[self.order_by_field].choices[0][0]

            if ordered:
                qs = qs.order_by(*self.get_order_by(ordered))

        return qs

    @property
    def qs(self):
        # Cache the filtered queryset on first access.
        if hasattr(self, '_qs'):
            return self._qs
        self._qs = self.get_queryset()
        return self._qs
class CollectionFilterSetWithFallback(CollectionFilterSet):
    """
    FilterSet with a fallback mechanism, dropping filters in a certain order
    if no results are found.
    """

    # Combinations of fields to try to set to NULL, in order, when no results
    # are found. See `next_fallback()`.
    fields_fallback_order = (
        ('region',),
        ('carrier',),
        ('region', 'carrier',)
    )

    def next_fallback(self):
        """
        Yield the next set of filters to set to NULL when refiltering the
        queryset to find results. See `refilter_queryset()`.
        """
        for f in self.fields_fallback_order:
            yield f

    def refilter_queryset(self):
        """
        Reset self.data, then override fields yield by the `fallback` generator
        to NULL. Then recall the `qs` property and return it.

        When we are using this FilterSet, we really want to return something,
        even if it's less relevant to the original query. When the `qs`
        property is evaluated, if no results are found, it will call this
        method to refilter the queryset in order to find /something/.

        Can raise StopIteration if the fallback generator is exhausted.
        """
        self.data = self.original_data.copy()
        self.fields_to_null = next(self.fallback)
        for field in self.fields_to_null:
            if field in self.data:
                self.data[field] = None
        # Drop the cached bound form so it is rebuilt from the modified data.
        del self._form
        # Re-entering `qs` may recurse back here until results are found or
        # the fallback generator runs dry.
        return self.qs

    def __init__(self, *args, **kwargs):
        super(CollectionFilterSetWithFallback, self).__init__(*args, **kwargs)
        # Pristine copy of the query data, restored before each retry.
        self.original_data = self.data.copy()
        self.fallback = self.next_fallback()
        self.fields_to_null = None

    @property
    def qs(self):
        if hasattr(self, '_qs'):
            return self._qs
        qs = self.get_queryset()
        if hasattr(qs, 'filter_errors'):
            # Immediately return if there was an error.
            self._qs = qs
            return self._qs
        elif not qs.exists():
            # No results: progressively relax filters until something comes
            # back or the fallback order is exhausted.
            try:
                qs = self.refilter_queryset()
            except StopIteration:
                pass
        self._qs = qs
        # Expose which fields were nulled (if any) to API consumers.
        self._qs.filter_fallback = self.fields_to_null
        return self._qs
| 2.1875 | 2 |
HackerP/introduction/Write_a_function.py | JKChang2015/hackerrank | 0 | 12760554 | <filename>HackerP/introduction/Write_a_function.py
# Write_a_function
# Created by JKChang
# 14/08/2018, 10:58
# Tag:
# Description: https://www.hackerrank.com/challenges/write-a-function/problem
# In the Gregorian calendar three criteria must be taken into account to identify leap years:
# The year can be evenly divided by 4, is a leap year, unless:
# The year can be evenly divided by 100, it is NOT a leap year, unless:
# The year is also evenly divisible by 400. Then it is a leap year.
def is_leap(year):
    """Return True when *year* is a leap year in the Gregorian calendar."""
    # Century rule first: multiples of 400 are leap, other centuries are not.
    if year % 400 == 0:
        return True
    if year % 100 == 0:
        return False
    # Everything else: leap exactly when divisible by 4.
    return year % 4 == 0
# Entry-point guard: reading stdin at import time would block any module
# that imports this file; only prompt when run as a script.
if __name__ == "__main__":
    year = int(input())
    print(is_leap(year))
| 4.15625 | 4 |
MysqlConnection.py | GhazaleZe/Secure-Banking-System | 2 | 12760555 | import mysql.connector
from mysql.connector import errorcode
import time
import calendar
import datetime
class MysqlConnection:
    """Data-access layer for the secure_banking_system MySQL database.

    Opens a single connection on construction and exposes helpers for user
    management, accounts, transactions and audit logging.

    Security: every query that includes caller-supplied values (usernames,
    account numbers, amounts...) is parameterised via the connector's
    placeholder mechanism instead of %-formatting the SQL string, closing
    the SQL-injection holes of the original implementation.
    """

    def __init__(self):
        # Establish the connection and buffered cursor immediately.
        self.mysql_connection()

    def mysql_connection(self):
        """Connect to the local database and create a buffered cursor."""
        self.cnx = mysql.connector.connect(user='root', password='<PASSWORD>',
                                           host='127.0.0.1',
                                           database='secure_banking_system')
        self.cursor = self.cnx.cursor(buffered=True)

    def check_username(self, username):
        """Return 1 when *username* is free, 0 when it is already taken.

        NOTE: intentionally inverted from a boolean "exists" check; callers
        rely on 1 meaning "available".
        """
        self.cursor.execute(
            "SELECT * FROM users WHERE username = %s",
            (username,)
        )
        self.cursor.fetchall()
        return 1 if self.cursor.rowcount == 0 else 0

    def insert_into_table(self, username, password_hash, salt, confidentiality_level,
                          integrity_level, number_of_attempts, is_block):
        """Create a new user row.

        The trailing parameters are accepted for interface compatibility but,
        as in the original implementation, every new user starts at
        confidentiality/integrity level 1 with zeroed attempt/block counters.
        """
        self.cursor.execute(
            "INSERT INTO users (username, password_hash, salt, confidentiality_level,"
            " integrity_level, number_of_attempts, is_block)"
            " VALUES (%s, %s, %s, 1, 1, 0, 0)",
            (username, password_hash, salt))
        self.cnx.commit()

    def fetch_hash_and_salt(self, username):
        """Return [(password_hash, salt)] for *username*; empty list if absent."""
        self.cursor.execute(
            "select password_hash, salt from users where username = %s",
            (username,)
        )
        return self.cursor.fetchall()

    def fetch_block_information(self, username):
        """Return [(number_of_attempts, is_block)] for *username*."""
        self.cursor.execute(
            "select number_of_attempts, is_block from users where username = %s",
            (username,)
        )
        return self.cursor.fetchall()

    def increase_number_of_attempts_and_is_block(self, username):
        """Bump the failed-login counter; set the block flag on the 3rd failure."""
        self.cursor.execute(
            "select number_of_attempts, is_block from users where username = %s",
            (username,)
        )
        results = self.cursor.fetchall()
        for row in results:
            result = row
        number_of_attempts, is_block = result
        if number_of_attempts == 2:
            # Third consecutive failure: increment the counter and block.
            self.cursor.execute(
                "update users set number_of_attempts = number_of_attempts + 1,"
                " is_block = is_block + 1 where username = %s",
                (username,))
        else:
            self.cursor.execute(
                "update users set number_of_attempts = number_of_attempts + 1"
                " where username = %s",
                (username,))
        self.cnx.commit()

    def reset_number_of_attempts_and_is_block(self, username):
        """Clear the failed-attempt counter and block flag (successful login)."""
        self.cursor.execute(
            "update users set number_of_attempts = 0 where username = %s",
            (username,))
        self.cnx.commit()
        self.cursor.execute(
            "update users set is_block = 0 where username = %s",
            (username,))
        self.cnx.commit()

    def close_connection(self):
        """Release the cursor and close the database connection."""
        self.cursor.close()
        self.cnx.close()

    def create_new_account(self, username, account_type, amount, conf_label, integrity_label):
        """Create *username*'s bank account (one per user); return a message."""
        self.cursor.execute("select ID from users where username = %s", (username,))
        user_id = self.cursor.fetchone()[0]
        self.cursor.execute("select account_no from accounts where owner_id = %s",
                            (user_id,))
        acids = self.cursor.fetchone()
        if acids is None:
            self.cursor.execute(
                "INSERT INTO accounts (owner_id, account_type_id, amount,"
                " confidentiality_level, integrity_level)"
                " VALUES (%s, %s, %s, %s, %s)",
                (user_id, account_type, amount, conf_label, integrity_label))
            self.cnx.commit()
            # Re-select to discover the auto-generated account number.
            # NOTE(review): self.cursor.lastrowid would be race-free here.
            self.cursor.execute(
                "select account_no from accounts where owner_id = %s and"
                " account_type_id = %s and amount = %s and"
                " confidentiality_level = %s and integrity_level = %s",
                (user_id, account_type, amount, conf_label, integrity_label))
            account_no = self.cursor.fetchone()[0]
            return f"Account Created Successfully. Your Account Number is: {account_no}"
        else:
            acc_id = acids[0]
            return f"You have already created account {acc_id}."

    def add_join_request(self, username, account_no):
        """File a request to join an existing account; return a status message."""
        self.cursor.execute("select ID from users where username = %s", (username,))
        user_id = self.cursor.fetchone()[0]
        self.cursor.execute(
            "select accept_status from account_user where account_no = %s and user_id = %s",
            (account_no, user_id))
        prev = self.cursor.fetchall()
        if len(prev) != 0:
            # BUGFIX: prev[0] is a one-element row tuple; the original compared
            # the tuple itself to 1, so accepted members were always reported
            # as merely "requested".
            if prev[0][0] == 1:
                response = "You Have Already Joint This Account."
            else:
                response = "You Have Already Requested to Join This Account."
        else:
            self.cursor.execute("select amount from accounts where account_no = %s",
                                (account_no,))
            if self.cursor.fetchone() is not None:
                self.cursor.execute(
                    "Insert into account_user (account_no, user_id) VALUES (%s, %s)",
                    (account_no, user_id))
                self.cnx.commit()
                response = "Join Request Sent to Account Owner."
            else:
                response = "Account Not Found"
        return response

    def accept_join_request(self, owner, username, conf_label, integrity_label):
        """Owner accepts *username*'s pending join request on their account."""
        self.cursor.execute("select ID from users where username = %s", (owner,))
        owner_id = self.cursor.fetchone()[0]
        self.cursor.execute("select ID from users where username = %s", (username,))
        uids = self.cursor.fetchone()
        if uids is not None:
            user_id = uids[0]
            self.cursor.execute("select account_no from accounts where owner_id = %s",
                                (owner_id,))
            nos = self.cursor.fetchone()
            if nos is not None:
                account_no = nos[0]
                self.cursor.execute(
                    "select * from account_user where account_no = %s and user_id = %s",
                    (account_no, user_id))
                if self.cursor.fetchone() is not None:
                    # Mark the request accepted and record the security labels
                    # granted to the joining user on this account.
                    self.cursor.execute(
                        "update account_user set accept_status = 1,"
                        " confidentiality_level = %s, integrity_level = %s"
                        " where account_no = %s and user_id = %s",
                        (conf_label, integrity_label, account_no, user_id))
                    self.cnx.commit()
                    response = f"User \033[1m{username}\033[0m Joint to Account \033[1m{account_no}\033[0m. "
                else:
                    response = "Account Not Found. This user didn't send any Join rq."
            else:
                response = "Account Not Found. This user didn't send any Join rq."
        else:
            response = "Account Not Found"
        return response

    def show_list_of_account(self, username):
        """Return (owned account number, joint-account rows) for *username*."""
        self.cursor.execute("select ID from users where username = %s", (username,))
        user_id = self.cursor.fetchone()[0]
        self.cursor.execute("select account_no from accounts where owner_id = %s",
                            (user_id,))
        nos = self.cursor.fetchone()
        if nos is not None:
            account_no = nos[0]
            self.cursor.execute(
                "select account_no from account_user where user_id = %s and accept_status = 1",
                (user_id,))
            joints = self.cursor.fetchall()
            return account_no, joints
        else:
            return "No Accounts Found.", ''

    def account_info(self, username, account_no):
        """Return (summary, joint users, last 5 outgoing, last 5 incoming)."""
        self.cursor.execute("select amount from accounts where account_no = %s",
                            (account_no,))
        if self.cursor.fetchone() is None:
            return 'Account Not Found', '', '', ''
        # Owner name, creation date, balance and account-type title.
        query1 = """select users.username,accounts.DateCreated,accounts.amount,account_type.title
                from accounts inner join users on accounts.owner_id = users.ID
                inner join account_type on account_type.ID = accounts.account_type_id
                where accounts.account_no = %s"""
        self.cursor.execute(query1, (account_no,))
        account_info = self.cursor.fetchone()
        # Users who have been accepted onto this account.
        query2 = """select users.username
                from account_user inner join users on account_user.user_id = users.ID
                where account_user.account_no = %s and account_user.accept_status = 1"""
        self.cursor.execute(query2, (account_no,))
        owners = self.cursor.fetchall()
        # Five most recent transactions sent from / received by the account.
        query3 = """select *
                from transactions
                where from_account = %s order by transaction_date DESC limit 5"""
        self.cursor.execute(query3, (account_no,))
        last5_deposits = self.cursor.fetchall()
        query4 = """select *
                from transactions
                where to_account = %s order by transaction_date DESC limit 5"""
        self.cursor.execute(query4, (account_no,))
        last5_withdraw = self.cursor.fetchall()
        return account_info, owners, last5_deposits, last5_withdraw

    def deposit_to_account(self, owner, to_account, amount):
        """Transfer *amount* from *owner*'s own account to *to_account*."""
        self.cursor.execute("select ID from users where username = %s", (owner,))
        user_id = self.cursor.fetchone()[0]
        self.cursor.execute("select account_no from accounts where owner_id = %s",
                            (user_id,))
        nos = self.cursor.fetchone()
        if nos is None:
            return "You Don't Have Any Account. First of All, Create an Account."
        account_no = nos[0]
        self.cursor.execute("select amount from accounts where account_no = %s",
                            (account_no,))
        cur_amount = self.cursor.fetchone()[0]
        if float(cur_amount) < float(amount):
            return 'Your account balance is not enough.'
        self.cursor.execute("select amount from accounts where account_no = %s",
                            (to_account,))
        tms = self.cursor.fetchone()
        if tms is None:
            return "Destination Account Number Not Found."
        to_cur_amount = tms[0]
        # Debit source, credit destination, then record the transaction.
        self.cursor.execute("update accounts set amount = %s where account_no = %s",
                            (float(cur_amount) - float(amount), account_no))
        self.cnx.commit()
        self.cursor.execute("update accounts set amount = %s where account_no = %s",
                            (float(to_cur_amount) + float(amount), to_account))
        self.cnx.commit()
        self.cursor.execute(
            "Insert into transactions (to_account, from_account, amount)"
            " VALUES (%s, %s, %s)",
            (to_account, account_no, amount))
        self.cnx.commit()
        return f"Successful Transaction. Current Balance = {float(cur_amount) - float(amount)}"

    def withdraw(self, username, from_account, to_account, amount):
        """Transfer between accounts; *username* must be joint on *from_account*.

        NOTE(review): only account_user membership is checked, so an account
        OWNER with no account_user row cannot withdraw — confirm intended.
        """
        self.cursor.execute("select ID from users where username = %s", (username,))
        user_id = self.cursor.fetchone()[0]
        self.cursor.execute(
            "select account_no from account_user where account_no = %s and user_id = %s",
            (from_account, user_id))
        if self.cursor.fetchone() is None:
            return "You Are Not Joint To This Account"
        self.cursor.execute("select amount from accounts where account_no = %s",
                            (from_account,))
        ams = self.cursor.fetchone()
        if ams is None:
            return "Source Account Number Not Found."
        cur_amount = ams[0]
        if float(cur_amount) < float(amount):
            return 'Source account balance is not enough.'
        self.cursor.execute("select amount from accounts where account_no = %s",
                            (to_account,))
        tms = self.cursor.fetchone()
        if tms is None:
            return "Destination Account Number Not Found."
        to_cur_amount = tms[0]
        self.cursor.execute("update accounts set amount = %s where account_no = %s",
                            (float(cur_amount) - float(amount), from_account))
        self.cnx.commit()
        self.cursor.execute("update accounts set amount = %s where account_no = %s",
                            (float(to_cur_amount) + float(amount), to_account))
        self.cnx.commit()
        self.cursor.execute(
            "Insert into transactions (to_account, from_account, amount)"
            " VALUES (%s, %s, %s)",
            (to_account, from_account, amount))
        self.cnx.commit()
        return f"Successful Transaction. Current Balance = {float(cur_amount) - float(amount)}"

    def get_security_labels(self, username, account_no):
        """Return (status, user_int, user_conf, acc_int, acc_conf).

        Status is "OK" on success; on failure all four labels are -1.  The
        account owner implicitly inherits the account's own labels; joint
        users get the labels recorded when their join request was accepted.
        """
        self.cursor.execute("select ID from users where username = %s", (username,))
        user_id = self.cursor.fetchone()[0]
        self.cursor.execute("select owner_id from accounts where account_no = %s",
                            (account_no,))
        tms = self.cursor.fetchone()
        if tms is None:
            return "Account Not Found.", -1, -1, -1, -1
        owner_id = tms[0]
        self.cursor.execute(
            "select confidentiality_level, integrity_level from accounts"
            " where account_no = %s",
            (account_no,))
        acc_levels = self.cursor.fetchone()
        acc_integrity_label = acc_levels[1]
        acc_confidentiality_label = acc_levels[0]
        if owner_id == user_id:
            return ("OK", acc_integrity_label, acc_confidentiality_label,
                    acc_integrity_label, acc_confidentiality_label)
        self.cursor.execute(
            "select confidentiality_level, integrity_level from account_user"
            " where account_no = %s and user_id = %s and accept_status = 1",
            (account_no, user_id))
        levels = self.cursor.fetchone()
        if levels is not None:
            return ("OK", levels[1], levels[0],
                    acc_integrity_label, acc_confidentiality_label)
        return "Not joint to this account.", -1, -1, -1, -1

    def record_log(self, who, action, status, amount=None, from_account=None, to_account=None):
        """Append an audit-log row; omitted optional fields are stored as NULL.

        The connector converts Python None to SQL NULL, which collapses the
        original four near-duplicate INSERT branches (one of which was
        unreachable and wrote from_account into the amount column).
        """
        readable_time = datetime.datetime.fromtimestamp(
            calendar.timegm(time.gmtime())).isoformat()
        self.cursor.execute(
            "INSERT INTO log (username, time_of_action, action, status,"
            " amount, from_account, to_account)"
            " VALUES (%s, %s, %s, %s, %s, %s, %s)",
            (who, readable_time, action, status, amount, from_account, to_account))
        self.cnx.commit()
| 3.15625 | 3 |
src/modeling/meta_arch/build.py | Julienbeaulieu/kaggle-computer-vision-competition | 14 | 12760556 | <filename>src/modeling/meta_arch/build.py<gh_stars>10-100
from torch import nn
from yacs.config import CfgNode
from src.tools.registry import Registry
from src.modeling.layers.sync_batchnorm import convert_model
# Global registry mapping meta-architecture names to model constructors.
META_ARCH_REGISTRY = Registry()


def build_model(model_cfg: CfgNode) -> nn.Module:
    """
    Instantiate the model named by ``model_cfg.META_ARCHITECTURE``.

    :param model_cfg: model config blob
    :return: the constructed (and possibly SyncBN-converted) model
    """
    arch_name = model_cfg.META_ARCHITECTURE
    network = META_ARCH_REGISTRY.get(arch_name)(model_cfg)

    # Optionally swap BatchNorm layers for synchronised ones.
    # This is VERY SLOW.
    if model_cfg.NORMALIZATION_FN == 'SYNC_BN':
        network = convert_model(network)

    return network
| 2.0625 | 2 |
tests/fixtures/job.py | chaosiq/chaosiq-agent | 0 | 12760557 | import uuid
from typing import Literal
from chaosiqagent.types import Job
def create_job(target_type: Optional[Literal["experiment", "verification"]] = None):
    """Build a Job fixture populated with random identifiers.

    :param target_type: "experiment" (default) or "verification".  The
        annotation is Optional because the default is None — the original
        bare ``Literal[...] = None`` annotation was invalid typing.
    :return: a Job pointing at a fake console asset URL for *target_type*.
    """
    target_id = uuid.uuid4()
    if not target_type:
        target_type = "experiment"
    return Job(
        id=uuid.uuid4(),
        agent_id=uuid.uuid4(),
        org_id=uuid.uuid4(),
        team_id=uuid.uuid4(),
        target_id=target_id,
        target_type=target_type,
        target_url=f"https://console.example.com/assets/{target_type}s/{target_id}",
        access_token="<PASSWORD>",
        payload={},
    )
| 2.546875 | 3 |
lib/trackers/__init__.py | BestSonny/open-vot-1 | 95 | 12760558 | from __future__ import absolute_import, division
import cv2
import numpy as np
import time
from ..utils.viz import show_frame
class Tracker(object):
    """Base class for visual object trackers.

    Subclasses implement ``init()`` (first frame) and ``update()``
    (subsequent frames); ``track()`` drives a whole image sequence and
    collects bounding boxes plus per-frame speed.
    """

    def __init__(self, name):
        # Human-readable tracker name (e.g. for results / plots).
        self.name = name

    def init(self, image, init_rect):
        """Initialise the tracker on the first frame with the given box."""
        raise NotImplementedError()

    def update(self, image):
        """Track the target in *image* and return the new bounding box."""
        raise NotImplementedError()

    def track(self, img_files, init_rect, visualize=False):
        """Run the tracker over a list of image file paths.

        Returns ``(bndboxes, speed_fps)``: an (N, 4) array of boxes (row 0
        is the given *init_rect*) and per-frame processing speed in FPS.
        """
        frame_num = len(img_files)
        bndboxes = np.zeros((frame_num, 4))
        bndboxes[0, :] = init_rect
        speed_fps = np.zeros(frame_num)

        for f, img_file in enumerate(img_files):
            image = cv2.imread(img_file)
            # Normalise the decoded frame to 3-channel RGB.
            if image.ndim == 2:
                image = cv2.cvtColor(image, cv2.COLOR_GRAY2RGB)
            elif image.ndim == 3:
                image = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)

            start_time = time.time()
            if f == 0:
                # First frame only initialises; bndboxes[0] stays init_rect.
                self.init(image, init_rect)
            else:
                bndboxes[f, :] = self.update(image)
            elapsed_time = time.time() - start_time
            # NOTE(review): raises ZeroDivisionError if the frame was
            # processed faster than the clock resolution — confirm acceptable.
            speed_fps[f] = 1. / elapsed_time

            if visualize:
                show_frame(image, bndboxes[f, :], fig_n=1)

        return bndboxes, speed_fps
from .siamfc import TrackerSiamFC
from .goturn import TrackerGOTURN
from .csk import TrackerCSK
from .kcf import TrackerKCF
from .dcf import TrackerDCF
from .dcfnet import TrackerDCFNet
from .mosse import TrackerMOSSE
from .dsst import TrackerDSST
| 2.4375 | 2 |
src/webserver/displayer/migrations/0001_initial.py | MarcinAman/AGH-GoogleSheets-Validator | 0 | 12760559 | <gh_stars>0
# Generated by Django 2.0.6 on 2018-06-20 12:32
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Initial schema for the displayer app.

    Auto-generated by Django 2.0.6 (makemigrations): creates the ClassRoom,
    Record, Row, SingleClass and Teachers models, then wires up the foreign
    keys that reference models created later in the list.  Avoid hand-editing
    the operations below.
    """

    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='ClassRoom',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('class_room_id', models.CharField(max_length=50)),
            ],
        ),
        migrations.CreateModel(
            name='Record',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('column', models.CharField(max_length=50)),
                ('value', models.CharField(max_length=100)),
            ],
        ),
        migrations.CreateModel(
            name='Row',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
            ],
        ),
        migrations.CreateModel(
            name='SingleClass',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(max_length=50)),
                ('start_time', models.DateTimeField(verbose_name='start time')),
                ('end_time', models.DateTimeField(verbose_name='end time')),
                ('class_room', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='displayer.ClassRoom')),
            ],
        ),
        migrations.CreateModel(
            name='Teachers',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=100)),
            ],
        ),
        # Deferred FK additions (target models are created above).
        migrations.AddField(
            model_name='singleclass',
            name='teacher',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='displayer.Teachers'),
        ),
        migrations.AddField(
            model_name='record',
            name='row_id',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='displayer.Row'),
        ),
    ]
| 1.820313 | 2 |
pyfibre/tests/test_pyfibre_runner.py | franklongford/ImageCol | 2 | 12760560 | from unittest import TestCase
from pyfibre.model.objects.segments import (
FibreSegment, CellSegment
)
from pyfibre.tests.probe_classes.objects import (
ProbeFibreNetwork, generate_probe_segment)
from pyfibre.pyfibre_runner import PyFibreRunner
# Dotted paths for network / JSON / region (de)serialisation helpers.
# Presumably targets for unittest.mock.patch in the tests below — confirm
# against the test modules that import them.
LOAD_NETWORK_PATH = "networkx.read_gpickle"
SAVE_NETWORK_PATH = "networkx.write_gpickle"

LOAD_JSON_PATH = "json.load"
SAVE_JSON_PATH = "json.dump"

LOAD_REGION_PATH = "numpy.load"
SAVE_REGION_PATH = "numpy.save"
def mock_load(*args, klass=None, **kwargs):
    """Stand-in loader: ignore all arguments and return a fresh *klass* instance."""
    print('mock_load called')
    instance = klass()
    return instance
class TestPyFibreRunner(TestCase):
    """Tests for the default configuration of PyFibreRunner."""

    def setUp(self):
        # Fresh runner plus probe fixtures for fibre networks and segments.
        self.runner = PyFibreRunner()
        self.fibre_networks = [ProbeFibreNetwork()]
        self.fibre_segments = [
            generate_probe_segment(FibreSegment)]
        self.cell_segments = [
            generate_probe_segment(CellSegment)]

    def test_defaults(self):
        # Default denoise parameter pair expected by the image pipeline.
        self.assertEqual((5, 35), self.runner.p_denoise)
| 2.484375 | 2 |
dags/sampledatasource_etl.py | mavx/docker-airflow | 1 | 12760561 | """
Code that goes along with the Airflow located at:
http://airflow.readthedocs.org/en/latest/tutorial.html
"""
from datetime import datetime, timedelta
from airflow import DAG
from airflow.contrib.kubernetes.pod import Port
from airflow.contrib.operators.kubernetes_pod_operator import KubernetesPodOperator
from airflow.operators.bash_operator import BashOperator
from airflow.operators.docker_operator import DockerOperator
# Container port exposed by the ETL task pod (used by t6 below).
port = Port('http', 80)

# In-cluster Kubernetes service endpoint variables.
# NOTE(review): defined but never passed to an operator below — confirm
# whether they should be wired into the KubernetesPodOperator.
env_vars = {
    "KUBERNETES_SERVICE_HOST": "10.0.0.1",
    "KUBERNETES_SERVICE_PORT": "443",
    "KUBERNETES_SERVICE_PORT_HTTPS": "443"
}

# Arguments inherited by every task in this DAG.
default_args = {
    "owner": "airflow",
    "depends_on_past": False,
    "start_date": datetime(2020, 6, 20),
    "email": ["<EMAIL>"],
    "email_on_failure": False,
    "email_on_retry": False,
    "retries": 1,
    "retry_delay": timedelta(minutes=1),
    # 'queue': 'bash_queue',
    # 'pool': 'backfill',
    # 'priority_weight': 10,
    # 'end_date': datetime(2016, 1, 1),
}

# ETL pipeline: runs every 5 minutes, limited to one active run and one
# concurrent task so runs cannot overlap.
dag = DAG(
    "sampledatasource_etl",
    default_args=default_args,
    schedule_interval=timedelta(minutes=5),
    concurrency=1,
    max_active_runs=1
)

# t1, t2 and t3 are examples of tasks created by instantiating operators
t1 = BashOperator(task_id="print_date", bash_command="date", dag=dag)
# t2 = BashOperator(task_id="sleep", bash_command="sleep 5", retries=3, dag=dag)
# templated_command = """
# {% for i in range(5) %}
# echo "{{ ds }}"
# echo "{{ macros.ds_add(ds, 7)}}"
# echo "{{ params.my_param }}"
# {% endfor %}
# """
#
# t3 = BashOperator(
# task_id="templated",
# bash_command=templated_command,
# params={"my_param": "Parameter I passed in"},
# dag=dag,
# )
# t4 = DockerOperator(
# task_id="docker_run",
# image="mavx/sampledatasource_etl:latest",
# api_version="auto",
# auto_remove=True,
# command=["python", "etl.py"],
# network_mode="host",
# tty=True,
# docker_conn_id="dockerhub",
# dag=dag,
# )
#
# t5 = DockerOperator(
# task_id="docker_run_echo",
# image="python:3.7-slim",
# api_version="auto",
# auto_remove=True,
# command=["echo", "1"],
# network_mode="host",
# tty=True,
# docker_conn_id="dockerhub",
# dag=dag,
# )
# Run the ETL container as a pod in the local docker-desktop cluster.
t6 = KubernetesPodOperator(
    task_id="k8s_run",
    in_cluster=False,  # connect via the local kubeconfig context
    namespace="default",
    image="mavx/sampledatasource_etl:latest",
    cmds=["python", "etl.py"],
    name="k8s_task_pod",
    cluster_context="docker-desktop",
    is_delete_operator_pod=True,  # clean up the pod after each run
    ports=[port],
    dag=dag,
)

# t5 = BashOperator(
#     task_id="docker_run_echo",
#     bash_command="docker ps",
#     # params={"my_param": "Parameter I passed in"},
#     dag=dag,
# )

# t2.set_upstream(t1)
# t3.set_upstream(t1)
# t4.set_upstream(t3)
# t5.set_upstream(t3)

# Dependency chain: print the date, then launch the ETL pod.
t1 >> t6

# Manual smoke test: executes the pod task once when run directly,
# outside the scheduler.
if __name__ == '__main__':
    t6.execute("ds")
| 2.546875 | 3 |
amoebaelib/align_to_profile_iter.py | laelbarlow/amoebae | 8 | 12760562 | <reponame>laelbarlow/amoebae<gh_stars>1-10
#!/usr/bin/env python3
# Copyright 2018 <NAME>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Module for iteratively aligning sequences from a fasta file one-by-one with
an existing alignment using MUSCLE's -profile option.
********** Ideally, the fastas should be added to the alignment in decreasing order of
how well they align to the HMM (based on score).
"""
import sys
import os
sys.path.append(os.path.join(os.path.dirname(sys.path[0]),'amoebaelib'))
import glob
import subprocess
import argparse
from Bio import SeqIO
from afa_to_nex import nex_to_afa, afa_to_nex
import shutil
def split_fasta(infilepath, temp_subdirname):
    """Write each record of a FASTA file to its own numbered file.

    Every sequence in ``infilepath`` is written to ``<n>.fa`` inside
    ``temp_subdirname``, numbered 1, 2, ... in input order.
    """
    with open(infilepath) as infile:
        for num, record in enumerate(SeqIO.parse(infile, 'fasta'), start=1):
            outpath = os.path.join(temp_subdirname, str(num) + '.fa')
            with open(outpath, 'w') as outfile:
                SeqIO.write([record], outfile, 'fasta')
def align_iteratively(inalignfilepath, temp_subdirname, outfilepath):
    """Profile-align single-sequence files to an alignment one at a time.

    The ``<n>.fa`` files in ``temp_subdirname`` are added to the alignment in
    numerical filename order using MUSCLE's -profile mode; the accumulated
    alignment is written to ``outfilepath`` (aligned FASTA) after each step.

    Args:
        inalignfilepath: path to the starting alignment (aligned FASTA).
        temp_subdirname: directory containing numbered ``<n>.fa`` files.
        outfilepath: path for the output alignment (aligned FASTA).
    """
    # Sort input files numerically by basename so sequences are added in the
    # intended order (e.g. 2.fa before 10.fa, which a plain sort would break).
    def numeric_key(path):
        return int(os.path.basename(path).split('.')[0])

    file_list_sorted = sorted(glob.glob(temp_subdirname + '/*.fa'),
                              key=numeric_key)

    # Align each sequence to the *current* alignment. After the first
    # iteration the input profile must be the accumulated output; the
    # previous code always reused inalignfilepath, which silently discarded
    # all but the last sequence whenever inalignfilepath != outfilepath.
    # (Behavior is unchanged for the existing caller, which passes the same
    # path for both.)
    current_alignment = inalignfilepath
    for f in file_list_sorted:
        with open(os.path.join(temp_subdirname, 'temp_muscle stdout.txt'), 'w') as o:
            subprocess.call(["muscle", "-profile",
                             "-in1", current_alignment, "-in2", f,
                             "-out", outfilepath],
                            stdout=o, stderr=subprocess.STDOUT)
        current_alignment = outfilepath
def do_align_iteratively(innexpath, infapath, outnexpath=None):
    """Iteratively add the sequences of a FASTA file to a nexus alignment.

    Args:
        innexpath: path to the input alignment in nexus format.
        infapath: path to a FASTA file of sequences to add one-by-one.
        outnexpath: optional path for the output nexus alignment. If None, a
            name component is requested interactively and the output is
            written next to the input alignment.
    """
    # Make a temporary subdirectory for storing individual sequence files.
    temp_subdirname = infapath.rsplit('.', 1)[0] + '_temp'
    os.mkdir(temp_subdirname)

    # Split the input fasta file into one file per sequence.
    split_fasta(infapath, temp_subdirname)

    # Name the intermediate aligned-FASTA file. (The previous version also
    # computed an unused os.path.basename(infapath) local; removed.)
    if outnexpath is None:
        outafapath = innexpath.rsplit('.', 1)[0] + '+' + input('Name: ') + '.afaa'
    else:
        outafapath = outnexpath.rsplit('.', 1)[0] + '.afaa'

    # Convert nexus to aligned FASTA, then add the individual sequences to it
    # iteratively (the alignment file is updated in place).
    nex_to_afa(innexpath, outafapath)
    align_iteratively(outafapath, temp_subdirname, outafapath)

    # Delete the temporary directory of single-sequence files.
    shutil.rmtree(temp_subdirname)

    # Convert the final alignment back to nexus format and remove the
    # intermediate aligned-FASTA file.
    if outnexpath is None:
        outnexpath = outafapath.rsplit('.', 1)[0] + '.nex'
    afa_to_nex(outafapath, outnexpath)
    os.remove(outafapath)
| 2.484375 | 2 |
examples/aimsun/figure_eight.py | syuntoku14/flow | 0 | 12760563 | <reponame>syuntoku14/flow
"""Example of a figure 8 network with human-driven vehicles.
Right-of-way dynamics near the intersection causes vehicles to queue up on
either side of the intersection, leading to a significant reduction in the
average speed of vehicles in the network.
"""
from flow.controllers import IDMController
from flow.core.experiment import Experiment
from flow.core.params import AimsunParams, EnvParams, NetParams
from flow.core.params import VehicleParams
from flow.envs import TestEnv
from flow.scenarios.figure_eight import Figure8Scenario, ADDITIONAL_NET_PARAMS
def figure_eight_example(render=None):
    """Build a non-RL figure-eight simulation with human-driven vehicles.

    Parameters
    ----------
    render: bool, optional
        specifies whether to use the gui during execution

    Returns
    -------
    exp: flow.core.experiment.Experiment
        A non-rl experiment demonstrating the performance of human-driven
        vehicles on a figure eight.
    """
    # Aimsun back-end parameters; the caller may override rendering.
    sim_params = AimsunParams(sim_step=0.5, render=False, emission_path='data')
    if render is not None:
        sim_params.render = render

    # A single fleet of 14 IDM-controlled human vehicles.
    vehicles = VehicleParams()
    vehicles.add(veh_id="human",
                 acceleration_controller=(IDMController, {}),
                 num_vehicles=14)

    # Environment and network configuration using the default net parameters.
    env_params = EnvParams()
    net_params = NetParams(additional_params=ADDITIONAL_NET_PARAMS.copy())
    scenario = Figure8Scenario(name="figure8",
                               vehicles=vehicles,
                               net_params=net_params)

    env = TestEnv(env_params, sim_params, scenario, simulator='aimsun')
    return Experiment(env)
if __name__ == "__main__":
# import the experiment variable
exp = figure_eight_example()
# run for a set number of rollouts / time steps
exp.run(1, 1500)
| 2.859375 | 3 |
scripts/snmptrap.py | gfidder/snmpclitools | 32 | 12760564 | #!/usr/bin/env python
#
# This file is part of snmpclitools software.
#
# Copyright (c) 2005-2019, <NAME> <<EMAIL>>
# License: http://snmplabs.com/snmpclitools/license.html
#
# Notificaton Originator
#
import os
import socket
import sys
import traceback
from pysnmp import error
from pysnmp.entity import engine
from pysnmp.entity.rfc3413 import ntforg
from pysnmp.proto.api import v1, v2c
from pysnmp.proto.proxy import rfc2576
from snmpclitools.cli import base
from snmpclitools.cli import main
from snmpclitools.cli import mibview
from snmpclitools.cli import msgmod
from snmpclitools.cli import pdu
from snmpclitools.cli import secmod
from snmpclitools.cli import target
def getUsage():
    # Assemble the command-line usage text for snmptrap by stitching together
    # the usage fragments of the shared CLI modules with the trap-specific
    # options and the SNMP v1 / v2c / v3 management-parameter syntax.
    return """\
Usage: %s [OPTIONS] <MANAGER> <PARAMETERS>
%s%s%s%s
TRAP options:
   -C<TRAPOPT>: set various application specific behaviours:
              i:  send INFORM-PDU, expect a response
%s
SNMPv1 TRAP management parameters:
   enterprise-oid agent generic-trap specific-trap uptime <management-params>
   where:
      generic-trap: coldStart|warmStart|linkDown|linkUp|authenticationFailure
                    |egpNeighborLoss|enterpriseSpecific
SNMPv2/SNMPv3 management parameters:
   uptime trap-oid <management-params>
%s\
""" % (os.path.basename(sys.argv[0]),
       main.getUsage(),
       msgmod.getUsage(),
       secmod.getUsage(),
       mibview.getUsage(),
       target.getUsage(),
       pdu.getWriteUsage())
# Construct c/l interpreter for this app
class Scanner(msgmod.MPScannerMixIn,
              secmod.SMScannerMixIn,
              mibview.MibViewScannerMixIn,
              target.TargetScannerMixIn,
              pdu.ReadPduScannerMixIn,
              main.MainScannerMixIn,
              base.ScannerTemplate):
    # NOTE: this is a SPARK-style scanner -- each t_* method's docstring IS
    # the token's regular expression. The docstrings below are functional,
    # not documentation, and must be left exactly as written.

    def t_appopts(self, s):
        """ -C """
        # Application-specific option flag (-C<opts>).
        self.rv.append(base.ConfigToken('appopts'))

    def t_genericTrap(self, s):
        """ coldStart|warmStart|linkDown|linkUp|authenticationFailure|egpNeighborLoss|enterpriseSpecific """
        # SNMPv1 generic-trap keyword; the matched text is kept in the token.
        self.rv.append(base.ConfigToken('genericTrap', s))
class Parser(msgmod.MPParserMixIn,
             secmod.SMParserMixIn,
             mibview.MibViewParserMixIn,
             target.TargetParserMixIn,
             pdu.WritePduParserMixIn,
             main.MainParserMixIn,
             base.ParserTemplate):
    # NOTE: this is a SPARK-style parser -- each p_* method's docstring holds
    # the BNF grammar productions it contributes. The docstrings are
    # functional, which is why the method bodies are intentionally empty.

    def p_trapParams(self, args):
        """
        TrapV1Params ::= EnterpriseOid whitespace AgentName whitespace GenericTrap whitespace SpecificTrap whitespace Uptime whitespace VarBinds
        EnterpriseOid ::= string
        AgentName ::= string
        GenericTrap ::= genericTrap
        SpecificTrap ::= string
        Uptime ::= string

        TrapV2cParams ::= Uptime whitespace TrapOid whitespace VarBinds
        TrapOid ::= string
        """

    def p_paramsSpec(self, args):
        """
        Params ::= TrapV1Params
        Params ::= TrapV2cParams
        """

    def p_appOptions(self, args):
        """
        Option ::= ApplicationOption
        ApplicationOption ::= appopts whitespace string
        ApplicationOption ::= appopts string
        """
class __Generator(base.GeneratorTemplate):
    """AST visitor that fills the command context (ctx) from parsed options.

    Each n_<Node> method handles the correspondingly named grammar node; the
    *_exit methods fire after a node's children have been visited and build
    the final PDU into ctx['pdu'].
    """

    def n_ApplicationOption(self, cbCtx, node):
        snmpEngine, ctx = cbCtx
        # The option string may or may not be separated from -C by
        # whitespace, so it sits in either the third or the second child.
        if len(node) > 2:
            opt = node[2].attr
        else:
            opt = node[1].attr
        for c in opt:
            if c == 'i':
                # Send an INFORM-PDU (expects a response) instead of a TRAP.
                ctx['informMode'] = 1
            else:
                raise error.PySnmpError('bad -C option - "%s"' % c)

    def n_EnterpriseOid(self, cbCtx, node):
        snmpEngine, ctx = cbCtx
        ctx['EnterpriseOid'] = node[0].attr

    def n_AgentName(self, cbCtx, node):
        snmpEngine, ctx = cbCtx
        # Resolve the agent host name to an IP address up front.
        try:
            ctx['AgentName'] = socket.gethostbyname(node[0].attr)
        except socket.error:
            raise error.PySnmpError(
                'Bad agent name %s: %s' % (node[0].attr, sys.exc_info()[1])
            )

    def n_GenericTrap(self, cbCtx, node):
        snmpEngine, ctx = cbCtx
        ctx['GenericTrap'] = node[0].attr

    def n_SpecificTrap(self, cbCtx, node):
        snmpEngine, ctx = cbCtx
        ctx['SpecificTrap'] = node[0].attr

    def n_Uptime(self, cbCtx, node):
        snmpEngine, ctx = cbCtx
        ctx['Uptime'] = int(node[0].attr)

    def n_TrapOid(self, cbCtx, node):
        snmpEngine, ctx = cbCtx
        ctx['TrapOid'] = node[0].attr

    def n_TrapV1Params_exit(self, cbCtx, node):
        snmpEngine, ctx = cbCtx
        # Initialize v1 PDU with passed params, then proxy it into v2c PDU
        # (the rest of the pipeline works on v2c PDUs only).
        v1Pdu = v1.TrapPDU()
        v1.apiTrapPDU.setDefaults(v1Pdu)
        if 'EnterpriseOid' in ctx:
            v1.apiTrapPDU.setEnterprise(v1Pdu, ctx['EnterpriseOid'])
        if 'AgentName' in ctx:
            v1.apiTrapPDU.setAgentAddr(v1Pdu, ctx['AgentName'])
        if 'GenericTrap' in ctx:
            v1.apiTrapPDU.setGenericTrap(v1Pdu, ctx['GenericTrap'])
        if 'SpecificTrap' in ctx:
            v1.apiTrapPDU.setSpecificTrap(v1Pdu, ctx['SpecificTrap'])
        if 'Uptime' in ctx:
            v1.apiTrapPDU.setTimeStamp(v1Pdu, ctx['Uptime'])
        ctx['pdu'] = rfc2576.v1ToV2(v1Pdu)

    def n_TrapV2cParams_exit(self, cbCtx, node):
        snmpEngine, ctx = cbCtx
        # INFORM (confirmed) vs TRAP (unconfirmed), per the -Ci option.
        if 'informMode' in ctx:
            pdu = v2c.InformRequestPDU()
            v2c.apiPDU.setDefaults(pdu)
        else:
            pdu = v2c.TrapPDU()
            v2c.apiTrapPDU.setDefaults(pdu)
        # Mandatory leading var-binds: sysUpTime.0 and snmpTrapOID.0.
        v2c.apiPDU.setVarBinds(
            pdu,
            [(v2c.ObjectIdentifier('1.3.6.1.2.1.1.3.0'), v2c.TimeTicks(ctx['Uptime'])),
             (v2c.ObjectIdentifier('1.3.6.1.6.3.1.1.4.1.0'), v2c.ObjectIdentifier(ctx['TrapOid']))]
        )
        ctx['pdu'] = pdu
def generator(cbCtx, ast):
    """Walk the parsed command-line AST and populate the trap context."""
    engine_obj, ctx = cbCtx
    return __Generator().preorder((engine_obj, ctx), ast)
# Run SNMP engine
def cbFun(snmpEngine, notificationHandle, errorIndication, pdu, cbCtx):
    """Report the outcome of a sent notification to stdout/stderr."""
    # Transport/engine-level failure: report and stop.
    if errorIndication:
        sys.stderr.write('%s\n' % errorIndication)
        return

    status = v2c.apiPDU.getErrorStatus(pdu)
    var_binds = v2c.apiPDU.getVarBinds(pdu)

    # SNMP-level error: point at the offending variable binding if known.
    if status:
        index = v2c.apiPDU.getErrorIndex(pdu)
        offender = var_binds[int(index) - 1] if index else '?'
        sys.stderr.write('%s at %s\n' % (status.prettyPrint(), offender))
        return

    # Success: pretty-print every returned variable binding.
    proxy = cbCtx['mibViewProxy']
    controller = cbCtx['mibViewController']
    for oid, val in var_binds:
        sys.stdout.write('%s\n' % proxy.getPrettyOidVal(controller, oid, val))
snmpEngine = engine.SnmpEngine()

try:
    # Parse c/l into AST
    ast = Parser().parse(
        Scanner().tokenize(' '.join(sys.argv[1:]))
    )

    ctx = {}

    # Apply configuration to SNMP entity: each module's generator walks the
    # AST and configures the engine / fills ctx from its own options.
    main.generator((snmpEngine, ctx), ast)
    msgmod.generator((snmpEngine, ctx), ast)
    secmod.generator((snmpEngine, ctx), ast)
    mibview.generator((snmpEngine, ctx), ast)
    target.generatorTrap((snmpEngine, ctx), ast)
    pdu.writePduGenerator((snmpEngine, ctx), ast)
    generator((snmpEngine, ctx), ast)

    # Append the user-supplied var-binds after the mandatory ones already in
    # the PDU (sysUpTime.0, snmpTrapOID.0).
    v2c.apiPDU.setVarBinds(
        ctx['pdu'], v2c.apiPDU.getVarBinds(ctx['pdu']) + ctx['varBinds']
    )

    # Send the TRAP/INFORM; cbFun reports the response (INFORMs only).
    ntforg.NotificationOriginator().sendPdu(
        snmpEngine,
        ctx['addrName'],
        ctx.get('contextEngineId'),
        ctx.get('contextName', ''),
        ctx['pdu'],
        cbFun, ctx
    )

    # Block until the exchange completes (or there is nothing left to do).
    snmpEngine.transportDispatcher.runDispatcher()

except KeyboardInterrupt:
    sys.stderr.write('Shutting down...\n')

except error.PySnmpError:
    # Expected, user-facing errors: show the message plus usage help.
    sys.stderr.write('Error: %s\n%s' % (sys.exc_info()[1], getUsage()))
    sys.exit(1)

except Exception:
    # Unexpected failure: emit the traceback on a single line for logging.
    sys.stderr.write('Process terminated: %s\n' % sys.exc_info()[1])
    for line in traceback.format_exception(*sys.exc_info()):
        sys.stderr.write(line.replace('\n', ';'))
    sys.exit(1)
| 1.960938 | 2 |
rubber.py | flexarts/giprouter-qgis-plugin | 0 | 12760565 | <reponame>flexarts/giprouter-qgis-plugin
from qgis.core import *
from qgis.gui import *
from qgis.PyQt.QtCore import *
from qgis.PyQt.QtGui import *
class RectangleMapTool(QgsMapToolEmitPoint):
    """QGIS map tool that lets the user drag a rectangle on the canvas.

    While the mouse button is held down a red rubber-band outline follows
    the cursor; on release the dragged extent is transformed to EPSG:4326
    and emitted through ``onExtentChanged``.
    """

    # Emitted on mouse release with the dragged extent in EPSG:4326.
    onExtentChanged = pyqtSignal(QgsRectangle)

    def __init__(self, canvas):
        self.canvas = canvas
        QgsMapToolEmitPoint.__init__(self, self.canvas)
        # Outline drawn as a closed red line while dragging.
        self.rubberBand = QgsRubberBand(self.canvas, QgsWkbTypes.LineGeometry)
        self.rubberBand.setColor(Qt.red)
        self.rubberBand.setWidth(1)
        self.reset()

    def reset(self):
        """Clear the drag state and hide the rubber band."""
        self.startPoint = self.endPoint = None
        self.isEmittingPoint = False
        # NOTE(review): QgsRubberBand.reset() expects a geometry type;
        # passing True relies on bool->int coercion (1 == LineGeometry) --
        # confirm and prefer QgsWkbTypes.LineGeometry as used in hideRect().
        self.rubberBand.reset(True)

    def canvasPressEvent(self, e):
        # Start a new drag at the pressed map coordinate.
        self.startPoint = self.toMapCoordinates(e.pos())
        self.endPoint = self.startPoint
        self.isEmittingPoint = True
        self.showRect(self.startPoint, self.endPoint)

    def canvasReleaseEvent(self, e):
        # Finish the drag and, if a non-degenerate rectangle was drawn,
        # announce it to listeners.
        self.isEmittingPoint = False
        r = self.rectangle()
        if r is not None:
            # Debug trace of the final extent.
            print("Rectangle:", r.xMinimum(), r.yMinimum(), r.xMaximum(), r.yMaximum())
            self.onExtentChanged.emit(r)

    def canvasMoveEvent(self, e):
        # Track the cursor only while a drag is in progress.
        if not self.isEmittingPoint:
            return

        self.endPoint = self.toMapCoordinates(e.pos())
        self.showRect(self.startPoint, self.endPoint)

    def hideRect(self):
        """Reset the tool and clear any drawn outline."""
        self.reset()
        self.rubberBand.reset(QgsWkbTypes.LineGeometry)

    def showRect(self, startPoint, endPoint):
        """Redraw the outline for the rectangle spanned by the two points."""
        self.rubberBand.reset(QgsWkbTypes.LineGeometry)
        # Skip degenerate (zero width/height) rectangles.
        if startPoint.x() == endPoint.x() or startPoint.y() == endPoint.y():
            return

        point1 = QgsPointXY(startPoint.x(), startPoint.y())
        point2 = QgsPointXY(startPoint.x(), endPoint.y())
        point3 = QgsPointXY(endPoint.x(), endPoint.y())
        point4 = QgsPointXY(endPoint.x(), startPoint.y())
        point5 = QgsPointXY(startPoint.x(), startPoint.y())

        self.rubberBand.addPoint(point1, False)
        self.rubberBand.addPoint(point2, False)
        self.rubberBand.addPoint(point3, False)
        self.rubberBand.addPoint(point4, False)
        self.rubberBand.addPoint(point5, True)  # true to update canvas
        self.rubberBand.show()

    def rectangle(self):
        """Return the dragged extent in EPSG:4326, or None if degenerate."""
        if self.startPoint is None or self.endPoint is None:
            return None
        elif self.startPoint.x() == self.endPoint.x() or self.startPoint.y() == self.endPoint.y():
            return None

        # Transform both corners from the canvas CRS to WGS84.
        sourceCrs = self.canvas.mapSettings().destinationCrs()
        destCrs = QgsCoordinateReferenceSystem(4326)
        tr = QgsCoordinateTransform(sourceCrs, destCrs, QgsProject.instance())
        p1 = QgsGeometry.fromPointXY(self.startPoint)
        p1.transform(tr)
        p2 = QgsGeometry.fromPointXY(self.endPoint)
        p2.transform(tr)
        return QgsRectangle(p1.asPoint(), p2.asPoint())

    def deactivate(self):
        # Do whatever you want here
        QgsMapTool.deactivate(self)
| 2.15625 | 2 |
examples/app3/test_basic.py | ocervell/Flask-Flash | 5 | 12760566 | <reponame>ocervell/Flask-Flash
import unittest
from flask import url_for
from faker import Factory
from faker.providers import person, internet
from manage import flash
import logging
import random
from models import UserModel
faker = Factory.create()
faker.add_provider(person)
faker.add_provider(internet)
log = logging.getLogger('tests')
import subprocess
class FlaskTestCase(unittest.TestCase):
    """Integration tests for the Flash API client against a fresh database."""

    def setUp(self):
        # Grab the shared client/db/app handles from the app under test and
        # start every test from a freshly created, empty schema.
        self.client = flash.client
        self.db = flash.db
        self.app = flash.app
        with self.app.app_context():
            self.db.create_all()

    def tearDown(self):
        # Drop all tables so tests do not leak state into one another.
        with self.app.app_context():
            self.db.session.remove()
            self.db.drop_all()

    # def test_scenario_1(self):
    #     nusers = random.randint(5, 100)
    #     nperms = random.randint(5, 10)
    #     uids = []
    #     pids = []
    #     for n in range(nusers):
    #         u = self.client.users.create(
    #             first_name=faker.first_name(),
    #             last_name=faker.last_name(),
    #             country=faker.country()
    #         )
    #         uids.append(u['id'])
    #     for n in range(nperms):
    #         puids = random.sample(uids, random.randint(0, 5))
    #         p = self.client.permissions.create(
    #             name=faker.uri_path(deep=1),
    #             users=['/api/user/%s' % id for id in puids]
    #         )
    #         pids.append(p['name'])
    #
    #     print "Adding %s user... OK" % nusers
    #     self.client.permissions.delete()
    #     self.client.users.delete()
    #     self.assertEqual(self.client.users.count(), 0)
    #     self.assertEqual(self.client.permissions.count(), 0)

    def test_scenario_2(self):
        # Bulk-create many fake users to smoke-test create throughput; no
        # assertion -- the test passes if no create call raises.
        nusers = 2500
        for n in range(nusers):
            kwargs = {
                'first_name': faker.first_name(),
                'last_name': faker.last_name(),
                'country': faker.country()
            }
            self.client.users.create(**kwargs)
| 2.375 | 2 |
ingenico/connect/sdk/domain/payment/definitions/non_sepa_direct_debit_payment_method_specific_input.py | festicket/connect-sdk-python3 | 12 | 12760567 | <reponame>festicket/connect-sdk-python3<filename>ingenico/connect/sdk/domain/payment/definitions/non_sepa_direct_debit_payment_method_specific_input.py
# -*- coding: utf-8 -*-
#
# This class was auto-generated from the API references found at
# https://epayments-api.developer-ingenico.com/s2sapi/v1/
#
from ingenico.connect.sdk.domain.definitions.abstract_payment_method_specific_input import AbstractPaymentMethodSpecificInput
from ingenico.connect.sdk.domain.payment.definitions.non_sepa_direct_debit_payment_product705_specific_input import NonSepaDirectDebitPaymentProduct705SpecificInput
from ingenico.connect.sdk.domain.payment.definitions.non_sepa_direct_debit_payment_product730_specific_input import NonSepaDirectDebitPaymentProduct730SpecificInput
class NonSepaDirectDebitPaymentMethodSpecificInput(AbstractPaymentMethodSpecificInput):
    """Input fields specific to non-SEPA direct debit payments.

    Auto-generated data-transfer object (see the file header): plain
    properties over private backing fields, plus symmetric
    to_dictionary/from_dictionary (de)serialization helpers. Do not edit the
    generated structure by hand.
    """

    # Backing fields for the properties below; None means "not set" and the
    # field is omitted from the serialized dictionary.
    __date_collect = None
    __direct_debit_text = None
    __is_recurring = None
    __payment_product705_specific_input = None
    __payment_product730_specific_input = None
    __recurring_payment_sequence_indicator = None
    __requires_approval = None
    __token = None
    __tokenize = None

    @property
    def date_collect(self):
        """
        | Direct Debit payment collection date
        | Format: YYYYMMDD

        Type: str
        """
        return self.__date_collect

    @date_collect.setter
    def date_collect(self, value):
        self.__date_collect = value

    @property
    def direct_debit_text(self):
        """
        | Descriptor intended to identify the transaction on the customer's bank statement

        Type: str
        """
        return self.__direct_debit_text

    @direct_debit_text.setter
    def direct_debit_text(self, value):
        self.__direct_debit_text = value

    @property
    def is_recurring(self):
        """
        | Indicates if this transaction is of a one-off or a recurring type

        * true - This is recurring
        * false - This is one-off

        Type: bool
        """
        return self.__is_recurring

    @is_recurring.setter
    def is_recurring(self, value):
        self.__is_recurring = value

    @property
    def payment_product705_specific_input(self):
        """
        | Object containing UK Direct Debit specific details

        Type: :class:`ingenico.connect.sdk.domain.payment.definitions.non_sepa_direct_debit_payment_product705_specific_input.NonSepaDirectDebitPaymentProduct705SpecificInput`
        """
        return self.__payment_product705_specific_input

    @payment_product705_specific_input.setter
    def payment_product705_specific_input(self, value):
        self.__payment_product705_specific_input = value

    @property
    def payment_product730_specific_input(self):
        """
        | Object containing ACH specific details

        Type: :class:`ingenico.connect.sdk.domain.payment.definitions.non_sepa_direct_debit_payment_product730_specific_input.NonSepaDirectDebitPaymentProduct730SpecificInput`
        """
        return self.__payment_product730_specific_input

    @payment_product730_specific_input.setter
    def payment_product730_specific_input(self, value):
        self.__payment_product730_specific_input = value

    @property
    def recurring_payment_sequence_indicator(self):
        """
        * first = This transaction is the first of a series of recurring transactions
        * recurring = This transaction is a subsequent transaction in a series of recurring transactions
        * last = This transaction is the last transaction of a series of recurring transactions

        Type: str
        """
        return self.__recurring_payment_sequence_indicator

    @recurring_payment_sequence_indicator.setter
    def recurring_payment_sequence_indicator(self, value):
        self.__recurring_payment_sequence_indicator = value

    @property
    def requires_approval(self):
        """
        * true - The payment requires approval before the funds will be captured using the Approve payment or Capture payment API.
        * false - The payment does not require approval, and the funds will be captured automatically.

        Type: bool
        """
        return self.__requires_approval

    @requires_approval.setter
    def requires_approval(self, value):
        self.__requires_approval = value

    @property
    def token(self):
        """
        | ID of the stored token that contains the bank account details to be debited

        Type: str
        """
        return self.__token

    @token.setter
    def token(self, value):
        self.__token = value

    @property
    def tokenize(self):
        """
        | Indicates if this transaction should be tokenized

        * true - Tokenize the transaction
        * false - Do not tokenize the transaction, unless it would be tokenized by other means such as auto-tokenization of recurring payments.

        Type: bool
        """
        return self.__tokenize

    @tokenize.setter
    def tokenize(self, value):
        self.__tokenize = value

    def to_dictionary(self):
        # Serialize to a camelCase dict; only explicitly set (non-None)
        # fields are included, and nested objects serialize recursively.
        dictionary = super(NonSepaDirectDebitPaymentMethodSpecificInput, self).to_dictionary()
        if self.date_collect is not None:
            dictionary['dateCollect'] = self.date_collect
        if self.direct_debit_text is not None:
            dictionary['directDebitText'] = self.direct_debit_text
        if self.is_recurring is not None:
            dictionary['isRecurring'] = self.is_recurring
        if self.payment_product705_specific_input is not None:
            dictionary['paymentProduct705SpecificInput'] = self.payment_product705_specific_input.to_dictionary()
        if self.payment_product730_specific_input is not None:
            dictionary['paymentProduct730SpecificInput'] = self.payment_product730_specific_input.to_dictionary()
        if self.recurring_payment_sequence_indicator is not None:
            dictionary['recurringPaymentSequenceIndicator'] = self.recurring_payment_sequence_indicator
        if self.requires_approval is not None:
            dictionary['requiresApproval'] = self.requires_approval
        if self.token is not None:
            dictionary['token'] = self.token
        if self.tokenize is not None:
            dictionary['tokenize'] = self.tokenize
        return dictionary

    def from_dictionary(self, dictionary):
        # Inverse of to_dictionary; nested objects must be dicts and are
        # deserialized into their dedicated input classes.
        super(NonSepaDirectDebitPaymentMethodSpecificInput, self).from_dictionary(dictionary)
        if 'dateCollect' in dictionary:
            self.date_collect = dictionary['dateCollect']
        if 'directDebitText' in dictionary:
            self.direct_debit_text = dictionary['directDebitText']
        if 'isRecurring' in dictionary:
            self.is_recurring = dictionary['isRecurring']
        if 'paymentProduct705SpecificInput' in dictionary:
            if not isinstance(dictionary['paymentProduct705SpecificInput'], dict):
                raise TypeError('value \'{}\' is not a dictionary'.format(dictionary['paymentProduct705SpecificInput']))
            value = NonSepaDirectDebitPaymentProduct705SpecificInput()
            self.payment_product705_specific_input = value.from_dictionary(dictionary['paymentProduct705SpecificInput'])
        if 'paymentProduct730SpecificInput' in dictionary:
            if not isinstance(dictionary['paymentProduct730SpecificInput'], dict):
                raise TypeError('value \'{}\' is not a dictionary'.format(dictionary['paymentProduct730SpecificInput']))
            value = NonSepaDirectDebitPaymentProduct730SpecificInput()
            self.payment_product730_specific_input = value.from_dictionary(dictionary['paymentProduct730SpecificInput'])
        if 'recurringPaymentSequenceIndicator' in dictionary:
            self.recurring_payment_sequence_indicator = dictionary['recurringPaymentSequenceIndicator']
        if 'requiresApproval' in dictionary:
            self.requires_approval = dictionary['requiresApproval']
        if 'token' in dictionary:
            self.token = dictionary['token']
        if 'tokenize' in dictionary:
            self.tokenize = dictionary['tokenize']
        return self
| 2.265625 | 2 |
conftest.py | omri374/openvino-textspotting-docker | 0 | 12760568 | <filename>conftest.py
import pytest
import os
@pytest.fixture(scope='session')
def app(request):
    """Session-wide Flask application backed by the text-spotting server."""
    from text_spotting import Server
    return Server().app
haiku/_src/batch_norm_test.py | pierricklee/dm-haiku | 1,647 | 12760569 | # Copyright 2019 DeepMind Technologies Limited. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for haiku._src.batch_norm."""
import os
from absl.testing import absltest
from haiku._src import batch_norm
from haiku._src import test_utils
from haiku._src import transform
import jax
import jax.numpy as jnp
import numpy as np
class BatchNormTest(absltest.TestCase):
  """Tests for haiku BatchNorm: training/test modes, axes, pmap, dtypes."""

  @test_utils.transform_and_run
  def test_basic(self):
    # Channels have identical per-channel statistics, so every channel of
    # the normalized output should be identical as well.
    data = jnp.arange(2 * 3 * 4, dtype=jnp.float32).reshape([2, 3, 4])
    norm = batch_norm.BatchNorm(True, True, 0.9)

    result = norm(data, is_training=True)
    result_0_replicated = jnp.broadcast_to(result[:, :, :1], result.shape)

    # Input data is symmetrical variance per-channel.
    np.testing.assert_allclose(result, result_0_replicated)
    # Running through again in test mode produces same output.
    np.testing.assert_allclose(norm(data, is_training=False), result, rtol=2e-2)

  @test_utils.transform_and_run
  def test_simple_training(self):
    # Constant input normalizes to zero; applying scale/offset externally
    # should therefore yield exactly the offset value everywhere.
    layer = batch_norm.BatchNorm(
        create_scale=False, create_offset=False, decay_rate=0.9)

    inputs = np.ones([2, 3, 3, 5])
    scale = np.full((5,), 0.5)
    offset = np.full((5,), 2.0)

    result = layer(inputs, True, scale=scale, offset=offset)
    np.testing.assert_equal(result, np.full(inputs.shape, 2.0))

  @test_utils.transform_and_run
  def test_simple_training_nchw(self):
    # Same as above but with channels-first data layout.
    layer = batch_norm.BatchNorm(
        create_scale=False,
        create_offset=False,
        decay_rate=0.9,
        data_format="NCHW")

    inputs = np.ones([2, 5, 3, 3])
    scale = np.full((5, 1, 1), 0.5)
    offset = np.full((5, 1, 1), 2.0)

    result = layer(inputs, True, scale=scale, offset=offset)
    np.testing.assert_equal(result, np.full(inputs.shape, 2.0))

  @test_utils.transform_and_run
  def test_simple_training_normalized_axes(self):
    # Normalizing over all axes except the second treats each slice of that
    # axis independently.
    layer = batch_norm.BatchNorm(
        create_scale=False,
        create_offset=False,
        decay_rate=0.9,
        axis=[0, 2, 3])  # Not the second axis.

    # This differs only in the second axis.
    inputs = np.stack([2.0 * np.ones([5, 3, 3]), np.ones([5, 3, 3])], 1)

    result = layer(inputs, True)

    # Despite not all values being identical, treating slices from the first
    # axis separately leads to a fully normalized = equal array.
    np.testing.assert_equal(result, np.zeros(inputs.shape))

  def test_simple_training_cross_replica_axis(self):
    # With cross_replica_axis set, statistics are computed across all pmap
    # replicas, matching a plain normalization over the device axis.
    ldc = jax.local_device_count()

    def f(x, is_training=True):
      return batch_norm.BatchNorm(
          create_scale=False,
          create_offset=False,
          decay_rate=0.9,
          cross_replica_axis="i",
      )(x, is_training=is_training)

    f = transform.transform_with_state(f)

    inputs = np.arange(ldc * 4).reshape(ldc, 4)
    key = np.broadcast_to(jax.random.PRNGKey(42), (ldc, 2))
    params, state = jax.pmap(f.init, axis_name="i")(key, inputs)
    result, _ = jax.pmap(f.apply, axis_name="i")(params, state, key, inputs)

    mean = np.mean(inputs, axis=0)
    std = np.std(inputs, axis=0) + 1e-10
    expected = (inputs - mean) / std

    np.testing.assert_array_almost_equal(result, expected)

  def test_simple_training_cross_replica_axis_index_groups(self):
    # Replicas are partitioned into pairs; statistics should be shared only
    # within each group.
    ldc = jax.local_device_count()
    if ldc < 2:
      self.skipTest("Cross-replica test requires at least 2 devices.")
    num_groups = ldc // 2
    num_group_devices = ldc // num_groups
    # for 8 devices this produces [[0, 1], [2, 3], [4, 5], [6, 7]] groups.
    groups = np.arange(ldc).reshape(num_groups, num_group_devices).tolist()

    def f(x, is_training=True):
      return batch_norm.BatchNorm(
          create_scale=False,
          create_offset=False,
          decay_rate=0.9,
          cross_replica_axis="i",
          cross_replica_axis_index_groups=groups,
      )(x, is_training=is_training)

    f = transform.transform_with_state(f)

    inputs = np.arange(ldc * 4).reshape(ldc, 4).astype(np.float32)
    key = np.broadcast_to(jax.random.PRNGKey(42), (ldc, 2))
    params, state = jax.pmap(f.init, axis_name="i")(key, inputs)
    result, _ = jax.pmap(f.apply, axis_name="i")(params, state, key, inputs)

    # Compute the per-group reference normalization on the host.
    expected = np.empty_like(inputs)
    for g in range(num_groups):
      group_inputs = inputs[num_group_devices*g:num_group_devices*(g + 1)]
      group_mean = np.mean(group_inputs, axis=0)
      group_std = np.std(group_inputs, axis=0) + 1e-10
      group_inputs = (group_inputs - group_mean) / group_std
      expected[num_group_devices*g:num_group_devices*(g + 1)] = group_inputs

    np.testing.assert_array_almost_equal(result, expected)

  @test_utils.transform_and_run
  def test_no_scale_and_offset(self):
    # Without scale/offset, constant input normalizes exactly to zero.
    layer = batch_norm.BatchNorm(
        create_scale=False, create_offset=False, decay_rate=0.9)

    inputs = jnp.ones([2, 5, 3, 3, 3])
    result = layer(inputs, True)
    np.testing.assert_equal(result, np.zeros_like(inputs))

  @test_utils.transform_and_run
  def test_no_scale_and_init_provided(self):
    # scale_init is meaningless when no scale parameter is created.
    with self.assertRaisesRegex(
        ValueError, "Cannot set `scale_init` if `create_scale=False`"):
      batch_norm.BatchNorm(
          create_scale=False,
          create_offset=True,
          decay_rate=0.9,
          scale_init=jnp.ones)

  @test_utils.transform_and_run
  def test_no_offset_beta_init_provided(self):
    # offset_init is meaningless when no offset parameter is created.
    with self.assertRaisesRegex(
        ValueError, "Cannot set `offset_init` if `create_offset=False`"):
      batch_norm.BatchNorm(
          create_scale=True,
          create_offset=False,
          decay_rate=0.9,
          offset_init=jnp.zeros)

  def test_eps_cast_to_var_dtype(self):
    # See https://github.com/google/jax/issues/4718 for more info. In the
    # context of this test we need to assert NumPy bf16 params/state and a
    # Python float for eps preserve bf16 output.
    def f(x, is_training):
      return batch_norm.BatchNorm(True, True, 0.9, eps=0.1)(x, is_training)

    f = transform.transform_with_state(f)

    x = np.ones([], jnp.bfloat16)
    key = jax.random.PRNGKey(42)
    params, state = jax.device_get(f.init(key, x, True))
    y, _ = f.apply(params, state, None, x, False)
    self.assertEqual(y.dtype, jnp.bfloat16)
if __name__ == "__main__":
_xla_flags = os.environ.get("XLA_FLAGS", "")
os.environ["XLA_FLAGS"] = (_xla_flags +
" --xla_force_host_platform_device_count=8")
absltest.main()
os.environ["XLA_FLAGS"] = _xla_flags
| 1.945313 | 2 |
bsst/optimization.py | deflaux/metis | 0 | 12760570 | <reponame>deflaux/metis
# Copyright 2020 Verily Life Sciences LLC
#
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file or at
# https://developers.google.com/open-source/licenses/bsd
"""Optimization of vaccine trials."""
from absl import logging
from flax import nn
from flax import optim
import jax
import jax.numpy as jnp
import numpy as np
import pandas as pd
import xarray as xr
import bsst.sim as sim
class Parameterizer:
  """An interface for parameterizing whatever the trial planner can control."""

  def init_params(self):
    """Returns an xr.DataArray of initial parameters of the right shape."""
    raise NotImplementedError

  def apply_params(self, c: sim.JaxDataset, params: jnp.array):
    """Updates a jax-ified trial in place with the given parameters.

    This method must be differentiable, as it's used for optimization.

    Args:
      c: a jax-ified dataset to be modified in place.
      params: a jnp.array of parameters to be applied to c.
    """
    raise NotImplementedError

  def xr_apply_params(self, c: xr.Dataset, params: xr.DataArray):
    """Like apply_params, but operating on xarray objects instead of jax."""
    jax_c = sim.JaxDataset(c)
    self.apply_params(jax_c, params.values)
    # Note: We assume here that site_activation is the only thing being
    # wiggled by apply_params. If you write a Parameterizer which messes
    # with other fields, those values should be copied over too.
    c.site_activation.values = jax_c.site_activation
class StaticSiteActivation(Parameterizer):
  """A Parameterizer which allows the planner to choose fixed site activations.

  One parameter per location; the sigmoid of each parameter gives that
  location's (time-constant) activation level.
  """

  def __init__(self, c):
    self.location = c.location

  def init_params(self):
    initial = np.random.normal(size=self.location.size)
    return xr.DataArray(initial, coords=(self.location,))

  def apply_params(self, c, params):
    # Replicate the per-location parameter across all days, then squash.
    per_day = jnp.broadcast_to(params[:, jnp.newaxis], c.site_capacity.shape)
    c.site_activation = nn.sigmoid(per_day)
class DynamicSiteActivation(Parameterizer):
  """A Parameterizer which allows the planner to change site activation freely.

  One parameter per (location, day) pair; the sigmoid of each parameter is
  the location's activation level on that day.
  """

  def __init__(self, c):
    self.location = c.location
    self.time = c.time

  def init_params(self):
    shape = (self.location.size, self.time.size)
    return xr.DataArray(np.random.normal(size=shape),
                        coords=(self.location, self.time))

  def apply_params(self, c, params):
    c.site_activation = nn.sigmoid(params)
class PivotTableActivation(Parameterizer):
    """A class for capping the number of sites activated from certain groups.
    Each site is assigned to a group (e.g. US vs ex-US), specified by the
    `site_to_group` series. We suppose there are several dates on which site
    activations are allowed to change, and that a given number of sites from each
    group can be activated. The [group, decision_day]-shaped dataframe
    `allowed_activations` specifies a cap on how much site activation can be
    *increased* on any given decision date. If can_deactivate is true, there is no
    limit to how much site activation can be *decreased* (aside from the limit
    where everything is turned off).
    Notes:
    * When can_deactivate is True, deactivating a site does not give you "extra
      credits" which you can use to activate more sites.
    * If you later decide you want to re-activate a deactivated site again,
      it'll cost you credits, since that's an increase in activation.
    * Any locations not in a group are unconstrained. They can be turned on or
      off "for free" on each decision day.
    """
    def __init__(self,
                 c: xr.Dataset,
                 site_to_group: pd.Series,
                 allowed_activations: pd.DataFrame,
                 force_hit_cap: bool = False,
                 can_deactivate: bool = True):
        groups = allowed_activations.index.values
        # Restrict the site->group mapping to the locations present in c.
        self.site_to_group = site_to_group.loc[c.location.values]
        given_decision_days = allowed_activations.columns.values
        # Indices into c.time of (the nearest time >=) each decision day.
        self.decision_day_idx = np.searchsorted(c.time.values, given_decision_days)
        self.group_to_idx = {
            group: np.where(self.site_to_group == group)[0] for group in groups
        }
        self.allowed_activations = allowed_activations.copy()
        used_decision_days = c.time.values[self.decision_day_idx]
        # Re-key the caps by the decision days actually present in c.time.
        self.allowed_activations.columns = used_decision_days
        if not (used_decision_days == given_decision_days).all():
            logging.warning(
                'Some decision dates not found in c.time. Using decision dates\n%s\n'
                'instead of\n%s', used_decision_days, given_decision_days)
        self.can_deactivate = can_deactivate
        self.force_hit_cap = force_hit_cap
        if force_hit_cap and can_deactivate:
            raise ValueError(
                'The combination force_hit_cap=True and can_deactivate=True is not '
                'supported. The "squash up" logic to enforce hitting the cap gets '
                'annoying because it could require turning negative activations into '
                'positive activations.')

    def init_params(self):
        """Return N(0, 1) params of shape [decision_day, location]."""
        decision_days = self.allowed_activations.columns.values
        locations = self.site_to_group.index.values
        params = xr.DataArray(
            np.random.normal(size=(len(decision_days), len(locations))),
            dims=('decision_day', 'location'),
            coords=(decision_days, locations))
        return params

    def apply_params(self, c, params):
        # Make an activation array of shape [decision_day, location] which satisfies
        # the pivot table constraint.
        site_activation = nn.sigmoid(params)
        for i in range(len(self.decision_day_idx)):
            new_activation, persistence_and_deactivation = (
                self._new_and_old_activation(site_activation[:i + 1]))
            for group, group_idx in self.group_to_idx.items():
                num_allowed = self.allowed_activations.loc[group].iloc[i]
                num_activated = new_activation[group_idx].sum()
                if num_activated > num_allowed:
                    # Over the cap: scale this group's *new* activation down so
                    # it sums exactly to the allowance.
                    squashed = (num_allowed / num_activated) * new_activation[group_idx]
                    new_activation = new_activation.at[group_idx].set(squashed)
                elif self.force_hit_cap:
                    # Same idea as above, but squash towards 1 instead of towards 0.
                    max_new_possible = 1 - persistence_and_deactivation[group_idx]
                    excess_available = max_new_possible - new_activation[group_idx]
                    total_excess_available = excess_available.sum()
                    total_excess_required = num_allowed - num_activated
                    scaler = 1.0
                    if total_excess_available > total_excess_required:
                        scaler = total_excess_required / total_excess_available
                    squashed = new_activation[group_idx] + scaler * excess_available
                    new_activation = new_activation.at[group_idx].set(squashed)
            adjusted_activation = persistence_and_deactivation + new_activation
            site_activation = site_activation.at[i, :].set(adjusted_activation)
        # Expand from [decision_day, location] to [time, location]: each
        # decision day's activation persists until the next decision day.
        c.site_activation = jnp.zeros(c.site_activation.shape)
        for i in range(len(self.decision_day_idx) - 1):
            day_idx = self.decision_day_idx[i]
            next_day_idx = self.decision_day_idx[i + 1]
            c.site_activation = c.site_activation.at[:, day_idx:next_day_idx].set(
                site_activation[i, :, jnp.newaxis])
        c.site_activation = c.site_activation.at[:, self.decision_day_idx[-1]:].set(
            site_activation[-1, :, jnp.newaxis])

    def _new_and_old_activation(self, site_activation: jnp.array):
        """Returns new activation on the most recent day, and everything else.
        Args:
          site_activation: jnp.array of shape [decision_day, location], the
            activation of each site on each decision day.
        Returns:
          A pair of jnp.arrays of the same shape as site_activation[-1]. The first
          is the new activation of each site on the last decision day (i.e. how much
          more activated it is relative to the previous decision day, or zero if it
          is not more active). The second is everything else, so the sum of the two
          is equal to site_activation[-1] (or if can_deactivate is False,
          maximum(*site_activation[-2, -1]) to reflect the impossibility of
          deactivation).
        """
        # Would softening the maximum to allow gradients to flow help?
        new_activation = site_activation[-1]
        if len(site_activation) > 1:
            new_activation = new_activation - site_activation[-2]
        new_activation = jnp.maximum(new_activation, 0.0)
        total = site_activation[-1]
        if not self.can_deactivate and len(site_activation) > 1:
            total = jnp.maximum(total, site_activation[-2])
        persistence_and_deactivation = total - new_activation
        return new_activation, persistence_and_deactivation

    def greedy_activation(self, c: xr.Dataset,
                          incidence_scenarios: xr.DataArray) -> xr.DataArray:
        """The site activation plan which turns sites on in order of incidence."""
        site_activation = xr.zeros_like(c.site_activation)
        already_activated = []
        obs_delay = np.timedelta64(c.observation_delay.item(), 'D')
        for date in self.allowed_activations.columns:
            # Mean future incidence (after the observation delay) per location.
            mean_inc = incidence_scenarios.mean('scenario').sel(
                time=slice(date + obs_delay, None)).mean('time')
            # Already activated sites can't be activated further, so zero out their
            # incidence to avoid picking them.
            mean_inc.loc[dict(location=already_activated)] = 0.0
            for group, group_idx in self.group_to_idx.items():
                num_to_activate = self.allowed_activations[date][group]
                x = mean_inc.isel(location=group_idx)
                x = x.sortby(x, ascending=False)
                locations = x.location.values[:num_to_activate]
                slice_to_activate = dict(location=locations, time=slice(date, None))
                site_activation.loc[slice_to_activate] = 1.0
                already_activated.extend(locations)
        return site_activation
def optimize_params(c,
                    incidence_scenarios,
                    parameterizer,
                    loss_fn=None,
                    optimization_params=None,
                    verbose=True):
    """Modifies c in place subject to parameterizer to minimize a loss function.
    Picks params and runs parameterizer.xr_apply_params(c, params) to minimize
    loss_fn on the resulting trial given incidence_scenarios. The final optimized
    params are stored in c['final_params'].
    Args:
      c: xr.Dataset specifying the trial with all data_vars required to call
        sim.recruitment and sim.control_arm_events.
      incidence_scenarios: xr.DataArray of shape [scenario, location, time], the
        forecast incidence.
      parameterizer: a Parameterizer specifying what the trial planner can
        control.
      loss_fn: optional function which takes a jax-ified trial object and returns
        a jnp.array of losses of shape [scenario]. Defaults to
        negative_mean_successiness.
      optimization_params: optional dict of stuff related to how to do the
        optimization (min_steps, max_steps, epsilon, smoothing_window,
        learning_rate, optimizer).
      verbose: if True, print some stuff.
    """
    # Run non-differentiable simulation once, because it does more error checking.
    participants = sim.recruitment(c)
    sim.control_arm_events(c, participants, incidence_scenarios)
    c_ = sim.JaxDataset(c)
    incidence_scenarios_ = jnp.asarray(
        incidence_scenarios.transpose('scenario', 'location', 'time'))
    _, recruitment_fn, _ = sim.RECRUITMENT_REGISTRY[sim.get_recruitment_type(c)]
    historical_events = c.historical_control_arm_events
    if 'location' in historical_events.dims:
        historical_events = c.historical_control_arm_events.sum('location')
    historical_events_ = jnp.array(historical_events.values)
    if loss_fn is None:
        loss_fn = negative_mean_successiness

    def loss(params):
        # Differentiable re-simulation of the trial under the given params.
        parameterizer.apply_params(c_, params)
        c_.participants = recruitment_fn(c_)
        control_arm_events_ = sim.differentiable_control_arm_events(
            c_, c_.participants, incidence_scenarios_)
        historical_ = jnp.broadcast_to(
            historical_events_,
            control_arm_events_.shape[:1] + historical_events_.shape)
        c_.control_arm_events = jnp.concatenate([historical_, control_arm_events_],
                                                axis=1)
        return loss_fn(c_).mean()

    if optimization_params is None:
        optimization_params = dict()
    min_steps = optimization_params.get('min_steps', 40)
    max_steps = optimization_params.get('max_steps', 200)
    epsilon = optimization_params.get('epsilon', 1e-3)
    smoothing_window = optimization_params.get('smoothing_window', 20)
    learning_rate = optimization_params.get('learning_rate', 0.1)
    optimizer = optimization_params.get('optimizer', optim.Adam(learning_rate))
    initial_params = parameterizer.init_params()
    optimizer = optimizer.create(jnp.array(initial_params.values))
    loss_curve = []
    while True:
        loss_value, grad = jax.value_and_grad(loss)(optimizer.target)
        loss_curve.append(float(loss_value))
        optimizer = optimizer.apply_gradient(grad)
        step = len(loss_curve)
        # Guard step >= smoothing_window so loss_curve[-smoothing_window] indexes
        # a real element: previously this raised IndexError whenever a caller
        # set min_steps < smoothing_window.
        if (step > min_steps and step >= smoothing_window and
            (loss_curve[-smoothing_window] - loss_curve[-1]) < epsilon):
            # Not much progress recently. Call it a day.
            break
        if step >= max_steps:
            # Fixed message: the parameter is named optimization_params.
            print('Hit max step limit. You can control this exit condition by '
                  'setting max_steps in optimization_params.')
            break
        if verbose and (step % 10 == 0):
            print(f'step {step}, loss value {loss_curve[-1]}')
    final_params = np.array(optimizer.target)
    c['final_params'] = xr.DataArray(final_params, coords=initial_params.coords)
    parameterizer.xr_apply_params(c, c.final_params)
def optimize_dynamic_activation(c,
                                incidence_scenarios,
                                loss_fn=None,
                                optimization_params=None):
    """Deprecated wrapper: optimize a per-day activation plan, then binarize.

    Kept for backwards compatibility; prefer calling optimize_params directly.
    """
    optimize_params(c, incidence_scenarios, DynamicSiteActivation(c),
                    loss_fn, optimization_params)
    # Snap the sigmoid outputs to hard 0/1 activations.
    c['site_activation'] = c.site_activation.round()
def optimize_static_activation(c,
                               incidence_scenarios,
                               loss_fn=None,
                               optimization_params=None):
    """Deprecated wrapper: optimize a fixed activation plan, then binarize.

    Kept for backwards compatibility; prefer calling optimize_params directly.
    """
    optimize_params(c, incidence_scenarios, StaticSiteActivation(c),
                    loss_fn, optimization_params)
    # Snap the sigmoid outputs to hard 0/1 activations.
    c['site_activation'] = c.site_activation.round()
def negative_mean_successiness(c):
    """Negative mean successiness over the course of the trial."""
    needed = c.needed_control_arm_events
    if needed.size > 1:
        center = float(needed.mean())
        width = float(needed.std())
    else:
        center = float(needed)
        width = center / 3
    # Sigmoid of how far cumulative events have progressed toward the target.
    cumulative = c.control_arm_events.cumsum(axis=-1)
    return -nn.sigmoid((cumulative - center) / width).mean(axis=-1)
| 2.03125 | 2 |
examples/others/generate_audio.py | mhilmiasyrofi/CrossASRv2 | 0 | 12760571 | <reponame>mhilmiasyrofi/CrossASRv2
import os
import time
import random
from crossasr.constant import DATA_DIR, AUDIO_DIR, EXECUTION_TIME_DIR
from crossasr.utils import save_execution_time, make_dir, read_json, read_corpus
from utils import create_tts_by_name, set_seed
def generate(tts_name: str, corpus_path: str, data_dir: str, execution_time_dir: str):
    """Synthesize audio for the first few corpus entries with the given TTS.

    For each text the audio is written under <data_dir>/<AUDIO_DIR>, and the
    synthesis wall-clock time is saved under
    <execution_time_dir>/<AUDIO_DIR>/<tts_name>/<id>.txt.
    """
    tts = create_tts_by_name(tts_name)
    audio_dir = os.path.join(data_dir, AUDIO_DIR)
    execution_time_dir = os.path.join(execution_time_dir, AUDIO_DIR, tts_name)
    make_dir(execution_time_dir)
    corpus = read_corpus(corpus_path)
    # NOTE(review): only the first 3 corpus entries are processed; this looks
    # like a debugging limit -- confirm before running on a full corpus.
    for i in range(0, 3):
        c = corpus[i]
        text = c.getText()
        filename = c.getId()
        start = time.time()
        tts.generateAudio(text=text, audio_dir=audio_dir, filename=filename)
        end = time.time()
        execution_time = end - start
        # Persist per-utterance synthesis time for later cost analysis.
        fpath = os.path.join(execution_time_dir, filename + ".txt")
        save_execution_time(fpath=fpath, execution_time=execution_time)
        print(f"Generate {i}")
        # (Removed dead `i += 1`: the for-loop reassigns i every iteration,
        # so the manual increment had no effect.)
        if tts_name in ["google"]:
            # Randomized back-off to avoid hitting the Google TTS rate limiter.
            random_number = float(random.randint(15, 40)) / 10.
            time.sleep(random_number)
if __name__ == "__main__" :
json_config_path = "config.json"
config = read_json(json_config_path)
set_seed(config["seed"])
corpus_path = os.path.join(config["output_dir"], config["corpus_fpath"])
output_dir = config["output_dir"]
data_dir = os.path.join(output_dir, DATA_DIR)
execution_time_dir = os.path.join(output_dir, EXECUTION_TIME_DIR)
tts_name = config["tts"]
generate(tts_name, corpus_path, data_dir, execution_time_dir)
| 2.65625 | 3 |
from basic_utils.utils import *
class ConcatFixedStd(nn.Module):
    """
    Append a (learnable, initially fixed) standard deviation to the input.

    Given a [batch, ishp] mean tensor, returns [batch, 2*ishp]: the means
    followed by exp(log_var / 2) broadcast over the batch.
    """
    def __init__(self, ishp):
        super(ConcatFixedStd, self).__init__()
        # log-variance initialized to -1 => std = exp(-0.5).
        self.log_var = nn.Parameter(torch.zeros(1, ishp) - 1.0)

    def forward(self, x):
        mean = x
        # expand_as keeps the std on x's device/dtype; the old
        # Variable(torch.ones(x.size())) was deprecated and always CPU.
        std = torch.exp(self.log_var * 0.5).expand_as(x)
        return torch.cat((mean, std), dim=1)
class Flatten(nn.Module):
    """
    Collapse every dimension after the batch dimension into one.
    """
    def __init__(self):
        super(Flatten, self).__init__()

    def forward(self, x):
        batch_size = x.size(0)
        # view (not reshape) preserves the original's contiguity requirement.
        return x.view(batch_size, -1)
class Add_One(nn.Module):
    """
    Identity shift: returns its input incremented by one.
    """
    def __init__(self):
        super(Add_One, self).__init__()

    def forward(self, x):
        shifted = x + 1
        return shifted
class Softplus(nn.Module):
    """
    Numerically stable softplus: log(1 + exp(x)).
    """
    def __init__(self):
        super(Softplus, self).__init__()

    def forward(self, x):
        # nn.functional.softplus avoids overflow of exp(x) for large x,
        # unlike the naive (1 + x.exp()).log() which returns inf.
        return nn.functional.softplus(x)
def get_layer(des, inshp):
    """
    Get a torch layer according to the description.
    Args:
        des: the description of the layer (dict with a 'kind' key and
             kind-specific options).
        inshp: input shape (channel count for conv, feature count for dense)
    Return:
        layer: the corresponding torch network
        inshp: the output shape
    Note:
        Implicitly returns None for unknown kinds -- callers must pass a
        recognized 'kind'.
    """
    if des['kind'] == 'conv':
        return nn.Conv2d(in_channels=inshp, out_channels=des["filters"], kernel_size=des["ker_size"],
                         stride=des["stride"]), des["filters"]
    if des['kind'] == 'flatten':
        # NOTE(review): 3136 (= 64 * 7 * 7) is hard-coded for one specific conv
        # stack (e.g. Atari 84x84 input); confirm before reusing with a
        # different architecture.
        return Flatten(), 3136
    if des["kind"] == 'dense':
        return nn.Linear(in_features=inshp, out_features=des["units"]), des["units"]
    if des['kind'] == 'ReLU':
        return nn.ReLU(), inshp
    if des['kind'] == 'Tanh':
        return nn.Tanh(), inshp
    if des['kind'] == 'Dropout':
        return nn.Dropout(p=des['p']), inshp
def mujoco_layer_designer(ob_space, ac_space):
    """
    Design the network structure for the mujoco games.
    Args:
        ob_space: the observation space (must be 1-D)
        ac_space: the action space
    Return:
        net_topology_pol_vec: structure of the policy network
        lr_updater: learning rate for the policy network
        net_topology_v_vec: structure of the value network
        lr_optimizer: learning rate for the value network
    """
    assert len(ob_space.shape) == 1

    def dense_tanh_stack(sizes):
        # Alternate dense layers of the given widths with Tanh activations.
        spec = []
        for units in sizes:
            spec.append({'kind': 'dense', 'units': units})
            spec.append({'kind': 'Tanh'})
        return spec

    # Policy network: widths derived from action/observation dimensions.
    pol_hid1 = ac_space.shape[0] * 10
    pol_hid3 = ob_space.shape[0] * 10
    pol_hid2 = int(np.sqrt(pol_hid1 * pol_hid3))
    net_topology_pol_vec = dense_tanh_stack([pol_hid1, pol_hid2, pol_hid3])
    lr_updater = 9e-4 / np.sqrt(pol_hid2)

    # Value network: narrower final hidden layer.
    v_hid1 = ac_space.shape[0] * 10
    v_hid3 = 5
    v_hid2 = int(np.sqrt(v_hid1 * v_hid3))
    net_topology_v_vec = dense_tanh_stack([v_hid1, v_hid2, v_hid3])
    lr_optimizer = 1e-2 / np.sqrt(v_hid2)

    return net_topology_pol_vec, lr_updater, net_topology_v_vec, lr_optimizer
| 2.9375 | 3 |
piccel/commands/piccel_import_logic.py | lesca-research/piccel | 2 | 12760573 | <reponame>lesca-research/piccel
import sys
import os
import os.path as op
import logging
from datetime import datetime
import shutil
from optparse import OptionParser
import piccel
logger = logging.getLogger('piccel')
def main():
    """CLI entry point: back up a piccel workbook, then import forms/plugins.

    Positional args: WORKBOOK_FILE LOGIC_FOLDER BACKUP_DIR. A timestamped copy
    of the workbook data is made in BACKUP_DIR before any file is overwritten.
    Returns 1 on bad argument count; parser.error exits on a missing backup dir.
    """
    min_args = 3
    max_args = 3
    usage = 'usage: %prog [options] WORKBOOK_FILE LOGIC_FOLDER BACKUP_DIR'
    description = 'Import forms and plugins into a workbook'
    parser = OptionParser(usage=usage, description=description)
    parser.add_option('-v', '--verbose', dest='verbose',
                      metavar='VERBOSELEVEL',
                      type='int', default=0,
                      help='Amount of verbose: '\
                      '0 (NOTSET: quiet, default), '\
                      '50 (CRITICAL), ' \
                      '40 (ERROR), ' \
                      '30 (WARNING), '\
                      '20 (INFO), '\
                      '10 (DEBUG)')
    parser.add_option('-u', '--user', help='User for workbook login')
    parser.add_option('-p', '--access-password',
                      help='Access password for workbook login')
    parser.add_option('-r', '--role-password',
                      help='Role-specific password for workbook login')
    (options, args) = parser.parse_args()
    logger.setLevel(options.verbose)
    nba = len(args)
    if nba < min_args or (max_args >= 0 and nba > max_args):
        parser.print_help()
        return 1
    cfg_fn, logic_dir, backup_dir = args
    # Explicit validation instead of `assert`, which is stripped under
    # `python -O` and would let a missing directory fail much later.
    if not op.exists(backup_dir):
        parser.error('Backup directory does not exist: %s' % backup_dir)
    filesystem = piccel.LocalFileSystem(op.dirname(cfg_fn))
    wb = piccel.WorkBook.from_configuration_file(cfg_fn, filesystem)
    wb.decrypt(options.access_password)
    wb.user_login(options.user, options.role_password, load_sheets=False)
    # Timestamped snapshot of the workbook data before touching anything.
    shutil.copytree(op.join(wb.filesystem.root_folder, wb.data_folder),
                    op.join(backup_dir, '%s_%s' % \
                            (wb.label,
                             datetime.now().strftime('%Y_%m_%d_%Hh%M.%S'))))
    sheets_folder = op.join(wb.data_folder, piccel.WorkBook.SHEET_FOLDER)
    for sheet_name in wb.filesystem.listdir(sheets_folder):
        master_rfn = op.join(sheets_folder, sheet_name, 'master.form')
        plugin_rfn = op.join(sheets_folder, sheet_name,
                             'plugin_%s.py' % sheet_name)
        for rfn in [master_rfn, plugin_rfn]:
            # Overwrite the workbook's copy with the curated logic version.
            wb.filesystem.import_file(op.join(logic_dir, rfn), rfn,
                                      overwrite=True)
| 2.15625 | 2 |
import cv2
import numpy as np
def canny(image):
    """Return the Canny edge map of an RGB image.

    Fix: operates on the `image` argument instead of the module-level
    `lane_image` global, which previously made the parameter a no-op.
    """
    gray = cv2.cvtColor(image, cv2.COLOR_RGB2GRAY)
    blur = cv2.GaussianBlur(gray, (5, 5), 0)
    edges = cv2.Canny(blur, 50, 150)
    return edges
def display_lines(image, lines):
    """Draw each detected Hough segment in blue on a black canvas of image's shape."""
    canvas = np.zeros_like(image)
    if lines is None:
        return canvas
    for segment in lines:
        x1, y1, x2, y2 = segment.reshape(4)
        cv2.line(canvas, (x1, y1), (x2, y2), (255, 0, 0), 10)
    return canvas
def region_of_interest(image):
    """Mask everything outside a fixed triangular road-ahead region."""
    rows = image.shape[0]
    polygon = np.array([[(200, rows), (1100, rows), (550, 250)]])
    mask = np.zeros_like(image)
    cv2.fillPoly(mask, polygon, 255)
    return cv2.bitwise_and(image, mask)
# Demo: detect lane lines on a single test image and display the overlay.
image = cv2.imread('test_image.jpg')
lane_image = np.copy(image)
# NOTE: rebinding `canny` shadows the function of the same name; kept as-is
# since the function is not called again afterwards.
canny = canny(lane_image)
cropped_image = region_of_interest(canny)
lines = cv2.HoughLinesP(cropped_image, 2, np.pi/180, 100, np.array([]), minLineLength=40, maxLineGap=5)
line_image = display_lines(lane_image, lines)
combo_image = cv2.addWeighted(lane_image, 0.8, line_image, 1, 1)
cv2.imshow('result', combo_image)
cv2.waitKey(0)
config.py | cedar-technologies/skpcrm | 1 | 12760575 | <reponame>cedar-technologies/skpcrm<filename>config.py
# pylint: disable=too-few-public-methods,invalid-name,missing-docstring
import os
class BaseConfig(object):
    """Default Flask-RESTPlus / SQLAlchemy settings shared by all environments."""
    PROJECT_ROOT = os.path.abspath(os.path.dirname(__file__))
    SERVER_NAME = "localhost:8888"
    # POSTGRESQL
    # DB_USER = 'user'
    # DB_PASSWORD = 'password'
    # DB_NAME = 'restplusdb'
    # DB_HOST = 'localhost'
    # DB_PORT = 5432
    # SQLALCHEMY_DATABASE_URI = 'postgresql://{user}:{password}@{host}:{port}/{name}'.format(
    #     user=DB_USER,
    #     password=<PASSWORD>,
    #     host=DB_HOST,
    #     port=DB_PORT,
    #     name=DB_NAME,
    # )
    # DB_USER = 'root'
    # DB_PASSWORD = '<PASSWORD>'
    # DB_NAME = 'oscrm'
    # DB_HOST = 'localhost'
    # DB_PORT = 3306
    # DB_CONNECTOR = 'mysql+mysqlconnector'
    # SQLALCHEMY_DATABASE_URI = '{connector}://{user}:{password}@{host}:{port}/{name}'.format(
    #     connector=DB_CONNECTOR,
    #     user=DB_USER,
    #     password=<PASSWORD>,
    #     host=DB_HOST,
    #     port=DB_PORT,
    #     name=DB_NAME,
    # )
    # RESTPLUS configuration
    RESTPLUS_SWAGGER_UI_DOC_EXPANSION = 'list'
    RESTPLUS_VALIDATE = True
    RESTPLUS_MASK_SWAGGER = False
    RESTPLUS_ERROR_404_HELP = False
    # SQLITE
    # Default storage is a local SQLite file next to this module.
    SQLALCHEMY_DATABASE_URI = 'sqlite:///%s' % (os.path.join(PROJECT_ROOT, "example.db"))
    DEBUG = True
    ERROR_404_HELP = False
    STATIC_ROOT = os.path.join(PROJECT_ROOT, 'static')
    SWAGGER_UI_JSONEDITOR = True
    SQLALCHEMY_TRACK_MODIFICATIONS = True
    CSRF_ENABLED = True
class ProductionConfig(BaseConfig):
    """Production settings: database URI comes from the environment."""
    SQLALCHEMY_DATABASE_URI = os.getenv('SQLALCHEMY_DATABASE_URI')
class DevelopmentConfig(BaseConfig):
    """Development settings: debug mode on, SQLite file storage inherited."""
    DEBUG = True
class TestingConfig(BaseConfig):
    """Test settings: enables testing mode with an in-memory database."""
    TESTING = True
    # Use in-memory SQLite database for testing
    SQLALCHEMY_DATABASE_URI = 'sqlite://'
| 1.976563 | 2 |
celery_repro/repro/__init__.py | Evzdrop/celery-2619-repro | 0 | 12760576 | <gh_stars>0
from __future__ import absolute_import
from .celeryApp import app as celery_app
import unittest
from src.pipeline.question_category import QuestionCategory, GetCategoryFromQuestionStructure
from src.unpack_data import QuestionAnswerRecipe, Recipe, Q_A
class TestQuestionCategory(unittest.TestCase):
    """Checks that QuestionCategory maps category ids to human-readable descriptions."""

    def test_determine_description(self):
        qc = QuestionCategory("counting_times")
        self.assertIn("Counting times? A: Number", qc.description)

    def test_determine_description_str(self):
        qc = QuestionCategory("event_ordering")
        self.assertIn("X, Y which comes first?", qc.description)

    def test_no_description(self):
        # Unknown categories fall back to the "N/A" placeholder.
        qc = QuestionCategory("whatever")
        self.assertEqual("N/A", qc.description)
class TestGetCategoryFromQuestionStructure(unittest.TestCase):
    """Characterization tests for the regex-based question-category classifier.

    Each test feeds one literal question through predict_category and pins the
    expected category label. The question strings are deliberate fixtures; do
    not reformat them.
    """

    def test_regex_classifier_class_counting_times(self):
        engine = GetCategoryFromQuestionStructure()
        question = QuestionAnswerRecipe(
            qa=Q_A("# question 20-9 = How many times is the bowl used?", "answer = a"), recipe=None)
        a_class = engine.predict_category(question)
        self.assertIsNotNone(a_class)
        self.assertEqual("counting_times", a_class.category)

    def test_regex_classifier_class_counting_actions(self):
        engine = GetCategoryFromQuestionStructure()
        question = QuestionAnswerRecipe(
            qa=Q_A("# question 20-9 = How many actions does it take to process the tomato?", "answer = a"), recipe=None)
        a_class = engine.predict_category(question)
        self.assertIsNotNone(a_class)
        self.assertEqual("counting_actions", a_class.category)

    def test_regex_classifier_class_counting_uses(self):
        engine = GetCategoryFromQuestionStructure()
        question = QuestionAnswerRecipe(
            qa=Q_A("# question 20-9 = How many spoons are used?", "answer = a"), recipe=None)
        a_class = engine.predict_category(question)
        self.assertIsNotNone(a_class)
        self.assertEqual("counting_uses", a_class.category)

    def test_regex_classifier_class_ellipsis(self):
        engine = GetCategoryFromQuestionStructure()
        question = QuestionAnswerRecipe(qa=Q_A("# question 20-9 = What should be served?", "answer = a"), recipe=None)
        a_class = engine.predict_category(question)
        self.assertIsNotNone(a_class)
        self.assertEqual("ellipsis", a_class.category)

    def test_regex_classifier_class_location_crl(self):
        engine = GetCategoryFromQuestionStructure()
        question = QuestionAnswerRecipe(
            qa=Q_A("# question 20-9 = Where should you add the chopped vegetables?", "answer = a"), recipe=None)
        a_class = engine.predict_category(question)
        self.assertIsNotNone(a_class)
        self.assertEqual("location_crl", a_class.category)

    def test_regex_classifier_class_how_1(self):
        engine = GetCategoryFromQuestionStructure()
        question = QuestionAnswerRecipe(
            qa=Q_A("# question 20-9 = How do you brush the salad dressing?", "answer = a"), recipe=None)
        a_class = engine.predict_category(question)
        self.assertIsNotNone(a_class)
        self.assertEqual("method", a_class.category)

    def test_regex_classifier_class_how_2(self):
        # "How did you get ..." maps to lifespan_how, unlike the "How do you"
        # variants which map to method.
        engine = GetCategoryFromQuestionStructure()
        question = QuestionAnswerRecipe(qa=Q_A("# question 20-9 = How did you get the cooked vegetable?", "answer = a"),
                                        recipe=None)
        a_class = engine.predict_category(question)
        self.assertIsNotNone(a_class)
        self.assertEqual("lifespan_how", a_class.category)

    def test_regex_classifier_class_lifespan_what(self):
        engine = GetCategoryFromQuestionStructure()
        question = QuestionAnswerRecipe(qa=Q_A("# question 20-9 = What's in the lentil salad?", "answer = a"),
                                        recipe=None)
        a_class = engine.predict_category(question)
        self.assertIsNotNone(a_class)
        self.assertEqual("lifespan_what", a_class.category)

    def test_regex_classifier_class_event_ordering(self):
        engine = GetCategoryFromQuestionStructure()
        question = QuestionAnswerRecipe(qa=Q_A(
            "# question 20-9 = Cutting the stem into bite - size pieces into bite - size pieces and sauting minced "
            "meat in a separate pan, which comes first?",
            "answer = a"), recipe=None)
        a_class = engine.predict_category(question)
        self.assertIsNotNone(a_class)
        self.assertEqual("event_ordering", a_class.category)

    def test_regex_classifier_class_result(self):
        engine = GetCategoryFromQuestionStructure()
        question = QuestionAnswerRecipe(
            qa=Q_A("# question 20-9 = To what extent do you cut carrots and zucchini?", "answer = a"), recipe=None)
        a_class = engine.predict_category(question)
        self.assertIsNotNone(a_class)
        self.assertEqual("result", a_class.category)

    def test_regex_classifier_class_how_3(self):
        engine = GetCategoryFromQuestionStructure()
        question = QuestionAnswerRecipe(qa=Q_A("# question 20-9 = How do you prick the dough slightly?", "answer = a"),
                                        recipe=None)
        a_class = engine.predict_category(question)
        self.assertIsNotNone(a_class)
        self.assertEqual("method", a_class.category)

    def test_regex_classifier_class_time(self):
        engine = GetCategoryFromQuestionStructure()
        question = QuestionAnswerRecipe(
            qa=Q_A("# question 20-9 = For how long do you boil the potatoes until cooked?", "answer = a"), recipe=None)
        a_class = engine.predict_category(question)
        self.assertIsNotNone(a_class)
        self.assertEqual("time", a_class.category)

    def test_regex_classifier_class_location_srl(self):
        engine = GetCategoryFromQuestionStructure()
        question = QuestionAnswerRecipe(
            qa=Q_A("# question 20-9 = Where do you season the trout with salt and pepper?", "answer = a"), recipe=None)
        a_class = engine.predict_category(question)
        self.assertIsNotNone(a_class)
        self.assertEqual("location_srl", a_class.category)

    def test_regex_classifier_class_extent(self):
        engine = GetCategoryFromQuestionStructure()
        question = QuestionAnswerRecipe(
            qa=Q_A("# question 20-9 = By how much do you cover the beans with water in a pot?", "answer = a"),
            recipe=None)
        a_class = engine.predict_category(question)
        self.assertIsNotNone(a_class)
        self.assertEqual("extent", a_class.category)

    def test_regex_classifier_class_how_4(self):
        engine = GetCategoryFromQuestionStructure()
        question = QuestionAnswerRecipe(
            qa=Q_A("# question 20-9 = How do you coat hot syrup mixture the popcorn nut mixture?", "answer = a"),
            recipe=None)
        a_class = engine.predict_category(question)
        self.assertIsNotNone(a_class)
        self.assertEqual("method", a_class.category)

    def test_regex_classifier_class_purpose1(self):
        engine = GetCategoryFromQuestionStructure()
        question = QuestionAnswerRecipe(qa=Q_A("# question 20-9 = Why do you use gas?", "answer = a"), recipe=None)
        a_class = engine.predict_category(question)
        self.assertIsNotNone(a_class)
        self.assertEqual("purpose", a_class.category)

    def test_regex_classifier_class_copatient1(self):
        engine = GetCategoryFromQuestionStructure()
        question = QuestionAnswerRecipe(
            qa=Q_A("# question 20-9 = What do you mix the oil in a small bowl with?", "answer = a"), recipe=None)
        a_class = engine.predict_category(question)
        self.assertIsNotNone(a_class)
        self.assertEqual("copatient", a_class.category)

    def test_regex_classifier_class_copatient2(self):
        engine = GetCategoryFromQuestionStructure()
        question = QuestionAnswerRecipe(
            qa=Q_A("# question 20-9 = What do you put the raspberries into a liqudizer with?", "answer = a"),
            recipe=None)
        a_class = engine.predict_category(question)
        self.assertIsNotNone(a_class)
        self.assertEqual("copatient", a_class.category)

    def test_regex_classifier_class_how_5(self):
        engine = GetCategoryFromQuestionStructure()
        # NOTE(review): this fixture has a doubled "??" and no answer argument,
        # unlike the sibling tests -- confirm this is intentional.
        question = QuestionAnswerRecipe(qa=Q_A("# question 20-9 = How do you use the same pot of water??"),
                                        recipe=None)
        a_class = engine.predict_category(question)
        self.assertIsNotNone(a_class)
        self.assertEqual("method", a_class.category)

    def test_regex_classifier_class_purpose2(self):
        engine = GetCategoryFromQuestionStructure()
        question = QuestionAnswerRecipe(qa=Q_A("# question 20-9 = Why do you pinch the pizza dough?", "answer = a"),
                                        recipe=None)
        a_class = engine.predict_category(question)
        self.assertIsNotNone(a_class)
        self.assertEqual("purpose", a_class.category)

    def test_regex_classifier_class_source(self):
        engine = GetCategoryFromQuestionStructure()
        question = QuestionAnswerRecipe(
            qa=Q_A("# question 20-9 = From where do you remove the spinach and shallots mix?", "answer = a"),
            recipe=None)
        a_class = engine.predict_category(question)
        self.assertIsNotNone(a_class)
        self.assertEqual("source", a_class.category)

    def test_regex_classifier_class_location_change(self):
        engine = GetCategoryFromQuestionStructure()
        question = QuestionAnswerRecipe(
            qa=Q_A("# question 20-9 = Where was the stuffed mushroom before it was garnished?", "answer = a"),
            recipe=None)
        a_class = engine.predict_category(question)
        self.assertIsNotNone(a_class)
        self.assertEqual("location_change", a_class.category)

    def test_regex_classifier_class_result_na(self):
        engine = GetCategoryFromQuestionStructure()
        question = QuestionAnswerRecipe(
            qa=Q_A("# question 20-9 = To what extent do you cut the shortening in?", "answer = a"), recipe=None)
        a_class = engine.predict_category(question)
        self.assertIsNotNone(a_class)
        self.assertEqual("result", a_class.category)

    def test_regex_classifier_class_how_preheat_1(self):
        engine = GetCategoryFromQuestionStructure()
        question = QuestionAnswerRecipe(Q_A("# question 20-9 = How do you preheat your oven?"), None)
        a_class = engine.predict_category(question)
        self.assertIsNotNone(a_class)
        self.assertEqual("method_preheat", a_class.category)

    def test_regex_classifier_class_how_preheat__alt_spelling(self):
        # Tokenized "pre - heat" spelling must also be recognized.
        engine = GetCategoryFromQuestionStructure()
        question = QuestionAnswerRecipe(Q_A("# question 20-9 = How do you pre - heat the oven?", "answer = a"), None)
        a_class = engine.predict_category(question)
        self.assertIsNotNone(a_class)
        self.assertEqual("method_preheat", a_class.category)

    def test_regex_classifier_not_recognized(self):
        engine = GetCategoryFromQuestionStructure()
        question = QuestionAnswerRecipe(Q_A("# question XYZ-ABC = Is this question recognizable?", "answer = No"), None)
        a_class = engine.predict_category(question)
        self.assertIsNotNone(a_class)
        self.assertEqual("not_recognized", a_class.category)
| 3.046875 | 3 |
import math
# Read an angle in degrees and print its sine, cosine and tangent
# (prompts/output intentionally in Portuguese).
ang = float(input('Digite o ângulo:'))
# math trig functions expect radians, so convert first.
angr = math.radians(ang)
sen = math.sin(angr)
cos = math.cos(angr)
tan = math.tan(angr)
print('Dado o ângulo {}\nO Seno é {:.2f} \nO Cosseno é {:.2f}\nA Tangente é {:.2f}'.format(ang, sen, cos, tan))
| 3.75 | 4 |
tools/SDKTool/libs/WrappedDeviceAPI/plugin/Platform_plugin/PlatformDefault/PlatformTuring.py | BernhardRiemann/GameAISDK | 3 | 12760579 | <reponame>BernhardRiemann/GameAISDK
from devicePlatform.IPlatformProxy import *
from platformdefult.minitouch.Minitouch import MinitouchAction
from platformdefult.screen.ScreenMinicap import ScreenMinicap
from platformdefult.adbkit.ADBClient import ADBClient
from APIDefine import *
class PlatformTuring(IPlatformProxy):
    def __init__(self):
        # Wraps minicap (screen capture) and minitouch (input injection)
        # behind the IPlatformProxy interface for one Android device.
        IPlatformProxy.__init__(self)
        self.__deviceInfo = DeviceInfo()
        self.__device = None
        self.__config_ori = UI_SCREEN_ORI_LANDSCAPE
        # Physical screen short/long edge in pixels; filled in by init().
        self.__short_side = None
        self.__long_side = None
        # NOTE(review): the scale/rate fields below are initialized but not
        # used within the visible methods -- confirm whether they are dead state.
        self.__deviceHeightScale = None
        self.__deviceWidthScale = None
        self.__convertRate = 1.
        self.__height = -1
        self.__width = -1
    def init(self, serial=None, is_portrait=True, long_edge=720, **kwargs):
        """Connect to the device and start the minicap/minitouch backends.

        Args:
            serial: adb device serial; None selects the default device/logger.
            is_portrait: orientation used to derive the virtual screen size.
            long_edge: length of the virtual screen's long edge in pixels.
            kwargs: minicapPort (default 1111), minitouchPort (default 1313),
                showRawScreen (default False).
        Returns:
            (True, '') on success, or (False, reason) on failure.
        """
        if serial is None:
            self.__logger = logging.getLogger(LOG_DEFAULT)
        else:
            self.__logger = logging.getLogger(serial)
        adb = ADBClient()
        self.__device = adb.device(serial)
        self.__short_side, self.__long_side = self.__device.wmsize()
        # Ratio between the physical long edge and the virtual long edge.
        self.__CoordinateScale = float(self.__long_side) / long_edge
        if is_portrait:
            height = long_edge
            # NOTE(review): the short edge is set to long_edge - 1 rather than
            # a real aspect ratio -- presumably a minicap sizing quirk; confirm.
            width = long_edge - 1
            self.__config_ori = UI_SCREEN_ORI_PORTRAIT
        else:
            width = long_edge
            height = width - 1
            self.__config_ori = UI_SCREEN_ORI_LANDSCAPE
        minicapPort = self._GetValuesInkwargs('minicapPort', 1111, kwargs)
        showscreen = self._GetValuesInkwargs('showRawScreen', False, kwargs)
        self.__minicap = ScreenMinicap(self.__device, showscreen, serial)
        if not self.__minicap.Initialize(height, width, minicapPort=minicapPort):
            self.__logger.error('init minicap failed')
            return False, "init minicap failed"
        # minicap may adjust the requested size; record what it actually uses.
        height, width = self.__minicap.GetScreenSize()
        self.__height, self.__width = height, width
        minitouchPort = self._GetValuesInkwargs('minitouchPort', 1313, kwargs)
        self.__minitouch = MinitouchAction()
        if not self.__minitouch.init(serial, self.__device, width, height, minitouchPort=minitouchPort):
            self.__logger.error('init minitouch failed')
            return False, "init minitouch failed"
        # Publish display/touch geometry for callers of get_device_info().
        self.__deviceInfo.display_width, self.__deviceInfo.display_height = self.__minicap.GetVMSize()
        self.__deviceInfo.touch_width, self.__deviceInfo.touch_height = self.__minitouch.GetVMSize()
        self.__deviceInfo.touch_slot_number = self.__minitouch.GetMaxContacts()
        return True, str()
def deinit(self):
return True
def get_image(self):
ScreenImg = self.__minicap.GetScreen()
return PP_RET_OK, ScreenImg
def touch_up(self, contact=0):
self.__minitouch.touch_up(contact=contact)
self.__logger.info('touch up, contact:{}'.format(contact))
def touch_down(self, px, py, contact=0, pressure=50):
self.__minitouch.touch_down(px=px, py=py, contact=contact, pressure=pressure)
self.__logger.info('touch down, x:{}, y:{}, contact:{}'.format(px, py, contact))
def touch_move(self, px, py, contact=0, pressure=50):
self.__minitouch.touch_move(px=px, py=py, contact=contact, pressure=pressure)
self.__logger.info('touch move, x:{}, y:{}, contact:{}'.format(px, py, contact))
def touch_wait(self, milliseconds):
self.__minitouch.touch_wait(milliseconds)
self.__logger.info('touch wait:{0}'.format(milliseconds))
def touch_reset(self):
self.__minitouch.touch_reset()
self.__logger.info('touch reset')
def touch_finish(self):
self.__minitouch.touch_finish()
self.__logger.info('touch finish')
def get_device_info(self):
return self.__deviceInfo, str()
def get_rotation(self):
pass
def install_app(self, apk_path):
return self.__device.install(apk_path)
def launch_app(self, package_name, activity_name):
self.__logger.debug('Launch Game:[{0}/{1}]'.format(package_name, activity_name))
self.wake()
# self.__device.clear_app_data(package_name=MOBILEQQ_PACKAGE_NAME)
self.exit_app(package_name)
self.__device.launch_app(package_name=package_name, activity_name=activity_name)
def exit_app(self, package_name):
self.__device.kill_app(package_name=package_name)
def current_app(self):
return self.__device.current_app()
def clear_app_data(self, app_package_name):
self.__device.clear_app_data(package_name=app_package_name)
def key(self, key):
self.__device.keyevent(key)
def text(self, text):
self.__device.input_text(text)
def sleep(self):
self.__device.close_screen()
def wake(self):
self.__device.wake()
self.__device.unlock()
self.__device.keyevent('HOME')
def vm_size(self):
return self.__short_side, self.__long_side
def take_screen_shot(self, target_path):
self.__device.screenshot()
self.__device.pull('/data/local/tmp/screenshot.png', target_path)
def get_screen_ori(self):
rot = self.__minicap.GetRotation()
if rot in [0, 2]:
return UI_SCREEN_ORI_PORTRAIT
else:
return UI_SCREEN_ORI_LANDSCAPE
def adb_click(self, px, py):
if self.get_screen_ori() != self.__config_ori:
px, py = self._ConvertPos(px, py)
px, py = self._ConvertCoordinates(px, py)
self.__device.click(px, py)
def adb_swipe(self, sx, sy, ex, ey, duration_ms=50):
if self.get_screen_ori() != self.__config_ori:
sx, sy = self._ConvertPos(sx, sy)
ex, ey = self._ConvertPos(ex, ey)
sx, sy = self._ConvertCoordinates(sx, sy)
ex, ey = self._ConvertCoordinates(ex, ey)
self.__device.swipe(sx, sy, ex, ey, durationMS=duration_ms)
def device_param(self, packageName):
deviceParam = dict()
deviceParam['cpu'], deviceParam['mem'] = self.__device.cpu_mem_usage_with_top(packageName)
deviceParam['temperature'] = self.__device.temperature()
deviceParam['battery'] = self.__device.battery()
if deviceParam['cpu'] == -1:
self.__logger.error('get cpu param failed')
return False
if deviceParam['mem'] == -1:
self.__logger.error('get mem param failed')
return False
if deviceParam['temperature'] == -1:
self.__logger.error('get temperature param failed')
return False
if deviceParam['battery'] == -1:
self.__logger.error('get battery param failed')
return False
return deviceParam
def _ConvertPos(self, px, py):
if self.__config_ori == UI_SCREEN_ORI_PORTRAIT:
newPx = py
newPy = self.__width - px
else:
newPx = self.__height - py
newPy = px
return newPx, newPy
def _ConvertCoordinates(self, px, py):
nx = px * self.__CoordinateScale
ny = py * self.__CoordinateScale
return int(nx), int(ny)
def _GetValuesInkwargs(self, key, defaultValue, kwargs):
if key not in kwargs:
return defaultValue
else:
return kwargs[key]
| 1.96875 | 2 |
apps/user/admin.py | JuanJoseStone/shoe-app | 0 | 12760580 | from django.contrib import admin
from .models import User
from django.contrib.auth.admin import UserAdmin
from django.contrib.auth.forms import UserCreationForm, UserChangeForm
class CustomUser(UserAdmin):
add_form = UserCreationForm
form = UserChangeForm
model = User
add_fieldsets = UserAdmin.add_fieldsets + (
(None, {'fields': ('phone', 'direction', 'date_of_birth')}),
)
fieldsets = UserAdmin.fieldsets + (
(None, {'fields': ('phone', 'direction', 'date_of_birth')}),
)
admin.site.register(User, CustomUser)
| 2.09375 | 2 |
app/worker_hello.py | CedricCabessa/techallhand_celery | 0 | 12760581 | <reponame>CedricCabessa/techallhand_celery<gh_stars>0
import time
from celery import Celery
def init_celery():
config = {"broker_url": "amqp://guest:guest@rabbitmq:5672/"}
celery = Celery()
celery.config_from_object(config)
return celery
celery = init_celery()
@celery.task(name="say_hello", bind=True)
def hello(self, name, *args, **kwargs):
time.sleep(5)
print(f"hello {name}")
| 2.53125 | 3 |
cs15211/XofaKindinaDeckofCards.py | JulyKikuAkita/PythonPrac | 1 | 12760582 | <reponame>JulyKikuAkita/PythonPrac
# coding=utf-8
__source__ = 'https://leetcode.com/problems/x-of-a-kind-in-a-deck-of-cards/'
# Time: O()
# Space: O()
#
# Description: Leetcode # 914. X of a Kind in a Deck of Cards
#
# In a deck of cards, each card has an integer written on it.
#
# Return true if and only if you can choose X >= 2 such that
# it is possible to split the entire deck into 1 or more groups of cards, where:
#
# Each group has exactly X cards.
# All the cards in each group have the same integer.
#
#
# Example 1:
#
# Input: [1,2,3,4,4,3,2,1]
# Output: true
# Explanation: Possible partition [1,1],[2,2],[3,3],[4,4]
# Example 2:
#
# Input: [1,1,1,2,2,2,3,3]
# Output: false
# Explanation: No possible partition.
# Example 3:
#
# Input: [1]
# Output: false
# Explanation: No possible partition.
# Example 4:
#
# Input: [1,1]
# Output: true
# Explanation: Possible partition [1,1]
# Example 5:
#
# Input: [1,1,2,2,2,2]
# Output: true
# Explanation: Possible partition [1,1],[2,2],[2,2]
#
# Note:
#
# 1 <= deck.length <= 10000
# 0 <= deck[i] < 10000
#
import unittest
import collections
# 100% 28ms
class Solution(object):
def hasGroupsSizeX(self, deck):
"""
:type deck: List[int]
:rtype: bool
"""
dic = {}
for d in deck:
if d in dic:
dic[d] += 1
else:
dic[d] = 1
x = None
for value in dic.values():
if value > 1:
a, b = value, x
while b:
a, b = b, a % b
if a > 1:
x = a
else:
return False
else:
return False
return True
class SolutionGCD(object):
def hasGroupsSizeX(self, deck):
"""
:type deck: List[int]
:rtype: bool
"""
from fractions import gcd
vals = collections.Counter(deck).values()
return reduce(gcd, vals) >= 2
class SolutionGCD2(object):
def hasGroupsSizeX(self, deck):
"""
:type deck: List[int]
:rtype: bool
"""
dict = {}
for d in deck:
if d in dict:
dict[d] += 1
else:
dict[d] = 1
x = None
for value in dict.values():
if value > 1:
a, b = value, x
while b:
a, b = b, a % b
if a > 1:
x = a
else:
return False
else:
return False
return True
class TestMethods(unittest.TestCase):
def test_Local(self):
self.assertEqual(1, 1)
if __name__ == '__main__':
unittest.main()
Java = '''
# Thought: https://leetcode.com/problems/x-of-a-kind-in-a-deck-of-cards/solution/
# Approach 1: Brute Force
# Complexity Analysis
#
# Time Complexity: O(N^2log(logN)), where N is the number of cards.
# It is outside the scope of this article to prove that the number of divisors of N is bounded by O(Nlog(logN)).
# Space Complexity: O(N).
#6ms 98.67%
class Solution {
public boolean hasGroupsSizeX(int[] deck) {
int[] count = new int[10000];
for(int card : deck) {
count[card]++;
}
List<Integer> numbers = new ArrayList<>();
for(int c : count) {
if(c!=0) {
numbers.add(c);
}
}
searchX: for(int x=2;x<=deck.length;x++){
if(deck.length%x>0){
continue;
}
for(int number : numbers){
if(number%x>0){
continue searchX;
}
}
return true;
}
return false;
}
}
# Approach 2: Greatest Common Divisor
# Complexity Analysis
# Time Complexity: O(N(log^ 2 N)), where N is the number of votes. If there are C_i
# cards with number i, then each gcd operation is naively O(log^2 C_i).
# Better bounds exist, but are outside the scope of this article to develop.
# Space Complexity: O(N).
# 5ms 99.65%
class Solution {
public boolean hasGroupsSizeX(int[] deck) {
int[] count = new int[10000]; // 0 <= deck[i] < 10000
for (int card : deck) {
count[card]++;
}
int cardNumEachGroup = -1;
for (int i = 0; i < 10000; i++) {
if (count[i] > 0) {
if (cardNumEachGroup == -1) cardNumEachGroup = count[i];
else {
cardNumEachGroup = gcd(cardNumEachGroup, count[i]);
}
}
}
return cardNumEachGroup >= 2;
}
public int gcd(int a, int b) {
return b == 0 ? a : gcd(b, a % b);
}
}
'''
| 3.765625 | 4 |
spacy/lang/ga/__init__.py | snosrap/spaCy | 1 | 12760583 | <gh_stars>1-10
from typing import Optional
from thinc.api import Model
from .tokenizer_exceptions import TOKENIZER_EXCEPTIONS
from .stop_words import STOP_WORDS
from ...language import Language, BaseDefaults
from .lemmatizer import IrishLemmatizer
class IrishDefaults(BaseDefaults):
tokenizer_exceptions = TOKENIZER_EXCEPTIONS
stop_words = STOP_WORDS
class Irish(Language):
lang = "ga"
Defaults = IrishDefaults
@Irish.factory(
"lemmatizer",
assigns=["token.lemma"],
default_config={"model": None, "mode": "pos_lookup", "overwrite": False},
default_score_weights={"lemma_acc": 1.0},
)
def make_lemmatizer(
nlp: Language, model: Optional[Model], name: str, mode: str, overwrite: bool
):
return IrishLemmatizer(nlp.vocab, model, name, mode=mode, overwrite=overwrite)
__all__ = ["Irish"]
| 2.484375 | 2 |
nutrition.py | reedcwilson/nutrition | 0 | 12760584 | import myfitnesspal
import time
import flask
import sys
import argparse
import os
class Nutrition:
def __init__(self):
# create client
self.client = myfitnesspal.Client('reedcwilson', 'myfitnesspal')
def format_name(self, name):
return name.strip().replace("'", "@`")
def create_json(self, day):
entries = { self.format_name(entry.name): entry.totals for entry in day.entries }
meals = { self.format_name(meal.name): meal.totals for meal in day.meals}
result = {'goals': day.goals, 'current': day.totals, 'meals': meals, 'entries': entries}
return str(result).replace("'", '"').replace("@`", "'")
def get_day(self, year, month, day):
return self.client.get_date(year, month, day)
def get_data(self, year, month, day):
d = self.client.get_date(year, month, day)
return self.create_json(d)
app = flask.Flask(__name__)
nutrition = Nutrition()
@app.route("/")
def get_today():
year, month, day = time.strftime("%Y:%m:%d").split(':')
resp = flask.make_response(nutrition.get_data(year, month, day))
resp.headers['Content-Type'] = 'application/json'
return resp
@app.route("/day")
def get_day():
cur_year, cur_month, cur_day = time.strftime("%Y:%m:%d").split(':')
year = flask.request.args.get('year')
month = flask.request.args.get('month')
day = flask.request.args.get('day')
year = year if year else cur_year
month = month if month else cur_month
day = day if day else cur_day
print year, month, day
resp = flask.make_response(nutrition.get_data(year, month, day))
resp.headers['Content-Type'] = 'application/json'
return resp
def main():
parser = argparse.ArgumentParser(prog='nutrition', description='An API for MyFitnessPal', add_help=True)
parser.add_argument('-d', '--debug', action='store_true')
args = parser.parse_args()
port = int(os.environ.get('PORT', 5000))
app.run(debug=args.debug, port=port, host='0.0.0.0')
if __name__ == "__main__":
main()
| 2.890625 | 3 |
smart_contracts/helpers/addresses.py | flashbake/registry-contract | 0 | 12760585 | ####################################################################################
# Default addresses for use in testing.
#
# These addresses are unique, and providing them names makes tests easier to read
# and reason about.
####################################################################################
import smartpy as sp
# An address which acts as a Administrator.
ADMINISTRATOR_ADDRESS = sp.address("tz1VmiY38m3y95HqQLjMwqnMS7sdMfGomzKi")
# An address that represents a baker that will regisster / unregister in the registry
BAKER_ADDRESS = sp.address("tz1abmz7jiCV2GH2u81LRrGgAFFgvQgiDiaf")
# An address will be rotated to
ROTATED_ADDRESS = sp.address("tz1W5VkdB5s7ENMESVBtwyt9kyvLqPcUczRT")
# An address which is never used. This is a `null` value for addresses.
NULL_ADDRESS = sp.address("tz1bTpviNnyx2PXsNmGpCQTMQsGoYordkUoA") | 2.375 | 2 |
OOPS/section 4/exp-menting/tempdata.py | Advik-B/Learn-Python | 6 | 12760586 | # importing required modules
from zipfile import ZipFile
import os
def get_all_file_paths(directory):
# initializing empty file paths list
file_paths = []
# crawling through directory and subdirectories
for root, directories, files in os.walk(directory):
for filename in files:
# join the two strings in order to form the full filepath.
filepath = os.path.join(root, filename)
file_paths.append(filepath)
# returning all file paths
return file_paths
def main():
# path to folder which needs to be zipped
directory = './'
# calling function to get all file paths in the directory
file_paths = get_all_file_paths(directory)
# writing files to a zipfile
with ZipFile('All files.zip','w') as zip:
# writing each file one by one
for file in file_paths:
zip.write(file)
print('All files zipped successfully!')
if __name__ == "__main__":
main()
| 3.9375 | 4 |
hybrid/dispatch/power_storage/simple_battery_dispatch.py | jmartin4nrel/HOPP-1 | 0 | 12760587 | <filename>hybrid/dispatch/power_storage/simple_battery_dispatch.py
import pyomo.environ as pyomo
from pyomo.environ import units as u
import PySAM.BatteryStateful as BatteryModel
import PySAM.Singleowner as Singleowner
from hybrid.dispatch.power_storage.power_storage_dispatch import PowerStorageDispatch
class SimpleBatteryDispatch(PowerStorageDispatch):
_system_model: BatteryModel.BatteryStateful
_financial_model: Singleowner.Singleowner
"""
"""
def __init__(self,
pyomo_model: pyomo.ConcreteModel,
index_set: pyomo.Set,
system_model: BatteryModel.BatteryStateful,
financial_model: Singleowner.Singleowner,
block_set_name: str = 'battery',
include_lifecycle_count: bool = True):
super().__init__(pyomo_model,
index_set,
system_model,
financial_model,
block_set_name=block_set_name,
include_lifecycle_count=include_lifecycle_count)
def initialize_dispatch_model_parameters(self):
if self.include_lifecycle_count:
self.lifecycle_cost = 0.01 * self._system_model.value('nominal_energy') # TODO: update value
om_cost = self._financial_model.value("om_batt_variable_cost")[0] # [$/MWh]
self.cost_per_charge = om_cost / 2
self.cost_per_discharge = om_cost / 2
self.minimum_power = 0.0
# FIXME: Change C_rate call to user set system_capacity_kw
self.maximum_power = self._system_model.value('nominal_energy') * self._system_model.value('C_rate') / 1e3
self.minimum_soc = self._system_model.value('minimum_SOC')
self.maximum_soc = self._system_model.value('maximum_SOC')
self.initial_soc = self._system_model.value('initial_SOC')
self._set_control_mode()
self._set_model_specific_parameters()
def _set_control_mode(self):
self._system_model.value("control_mode", 1.0) # Power control
self._system_model.value("input_power", 0.)
self.control_variable = "input_power"
def _set_model_specific_parameters(self):
self.round_trip_efficiency = 95.0 # 90
self.capacity = self._system_model.value('nominal_energy') / 1e3 # [MWh]
def update_time_series_dispatch_model_parameters(self, start_time: int):
# TODO: provide more control
self.time_duration = [1.0] * len(self.blocks.index_set())
def update_dispatch_initial_soc(self, initial_soc: float = None):
if initial_soc is not None:
self._system_model.value("initial_SOC", initial_soc)
self._system_model.setup() # TODO: Do I need to re-setup stateful battery?
self.initial_soc = self._system_model.value('SOC')
| 2.40625 | 2 |
test/hlt/pytest/python/com/huawei/iotplatform/client/dto/NorthInDTO.py | yuanyi-thu/AIOT- | 128 | 12760588 | class NorthInDTO(object):
def __init__(self):
self.platformIp = None
self.platformPort = None
def getPlatformIp(self):
return self.platformIp
def setPlatformIp(self, platformIp):
self.platformIp = platformIp
def getPlatformPort(self):
return self.platformPort
def setPlatformPort(self, platformPort):
self.platformPort = platformPort
| 2.609375 | 3 |
Test_address.py | KaivnD/Noa-Core | 0 | 12760589 | from AdressConvertApiTasker.AdressConvertApiTasker import *
import_data={'location':'31.225696563611,121.49884033194',}
output_data={'file_name':'test.txt'}
a=AdressConvertApiTasker(import_data,output_data)
print(a.run())
| 1.890625 | 2 |
create_tables.py | derekrbracy/dend-project-1 | 0 | 12760590 | <reponame>derekrbracy/dend-project-1
"""
This module connects to and creates the sparkifydb if it does not already exists. It also drops
and creates tables in the sparkifydb as specified by the queries imported from 'sql_queries.py'
"""
from typing import Tuple
from psycopg2.extensions import connection, cursor
import psycopg2
from sql_queries import create_table_queries, drop_table_queries
def create_database() -> Tuple[cursor, connection]:
"""
Connects to and creates the sparkifydb if it does not already exist.
Parameters
----------
None
Returns
-------
output: Tuple[cursor, connection]
"""
# connect to default database
conn = psycopg2.connect(
"host=127.0.0.1 dbname=postgres user=postgres password=<PASSWORD>"
)
conn.set_session(autocommit=True)
cur = conn.cursor()
# create sparkify database with UTF8 encoding
cur.execute("DROP DATABASE IF EXISTS sparkifydb")
cur.execute("CREATE DATABASE sparkifydb WITH ENCODING 'utf8' TEMPLATE template0")
# close connection to default database
conn.close()
# connect to sparkify database
conn = psycopg2.connect(
"host=127.0.0.1 dbname=sparkifydb user=postgres password=<PASSWORD>"
)
cur = conn.cursor()
return cur, conn
def drop_tables(cur, conn) -> None:
"""
Drops tables in the sparkify database as specified by the queries in 'drop_table_queries'
list.
"""
for query in drop_table_queries:
cur.execute(query)
conn.commit()
def create_tables(cur, conn) -> None:
"""
Creates tables in the sparkify database as specified by the queries in 'drop_table_queries'
list.
"""
for query in create_table_queries:
cur.execute(query)
conn.commit()
def main():
cur, conn = create_database()
drop_tables(cur, conn)
create_tables(cur, conn)
conn.close()
if __name__ == "__main__":
main()
| 3.296875 | 3 |
minique/enums.py | valohai/minique | 7 | 12760591 | from enum import Enum
class JobStatus(Enum):
NONE = "none"
ACQUIRED = "acquired"
SUCCESS = "success"
FAILED = "failed"
CANCELLED = "cancelled"
| 2.6875 | 3 |
Kattis/spavanac.py | ruidazeng/online-judge | 0 | 12760592 | h, m = map(int, input().split())
if h - 1 < 0:
h = 23
m += 15
print(h,m)
elif m - 45 < 0:
h -= 1
m += 15
print(h, m)
else:
m -= 45
print(h, m) | 3 | 3 |
buttons.py | mahimaa333/Lambda_project | 0 | 12760593 | <gh_stars>0
import pygame
class Button:
def __init__(self,x,y,width,height,text=None,color=(73,73,73),
highlightedcolor=(189,189,189),function=None,params=None):
self.image = pygame.Surface((width,height))
self.pos = (x,y)
self.rect = self.image.get_rect()
self.rect.topleft = self.pos
self.text = text
self.color = color
self.highlightedcolor = highlightedcolor
self.highlighted = False
self.function = function
self.params = params
self.width = width
self.height = height
def update(self,mousepos):
if self.rect.collidepoint(mousepos):
self.highlighted = True
else:
self.highlighted = False
def draw(self,window):
if self.highlighted:
self.image.fill(self.highlightedcolor)
else:
self.image.fill(self.color)
if self.text:
self.makeText(self.text)
window.blit(self.image,self.pos)
def click(self):
if self.params:
self.function(self.params)
else:
self.function()
def makeText(self,text):
font = pygame.font.Font('freesansbold.ttf', 20)
text = font.render(text,False,(0,0,0))
width = text.get_width()
height = text.get_height()
x = (self.width-width)//2
y = (self.height-height)//2
self.image.blit(text,(x,y))
| 3.0625 | 3 |
tests/odm/documents/test_validation_on_save.py | paul-finary/beanie | 0 | 12760594 | import pytest
from pydantic import ValidationError
from tests.odm.models import DocumentWithValidationOnSave
async def test_validate_on_insert():
doc = DocumentWithValidationOnSave(num_1=1, num_2=2)
doc.num_1 = "wrong_value"
with pytest.raises(ValidationError):
await doc.insert()
async def test_validate_on_replace():
doc = DocumentWithValidationOnSave(num_1=1, num_2=2)
await doc.insert()
doc.num_1 = "wrong_value"
with pytest.raises(ValidationError):
await doc.replace()
async def test_validate_on_save_changes():
doc = DocumentWithValidationOnSave(num_1=1, num_2=2)
await doc.insert()
doc.num_1 = "wrong_value"
with pytest.raises(ValidationError):
await doc.save_changes()
async def test_validate_on_save_action():
doc = DocumentWithValidationOnSave(num_1=1, num_2=2)
await doc.insert()
assert doc.num_2 == 3
async def test_validate_on_save_skip_action():
doc = DocumentWithValidationOnSave(num_1=1, num_2=2)
await doc.insert(skip_actions=["num_2_plus_1"])
assert doc.num_2 == 2
| 2.484375 | 2 |
upcfcardsearch/c216.py | ProfessorSean/Kasutamaiza | 0 | 12760595 | import discord
from discord.ext import commands
from discord.utils import get
class c216(commands.Cog, name="c216"):
def __init__(self, bot: commands.Bot):
self.bot = bot
@commands.command(name='Automaton_Doll_&_Winter_Lights', aliases=['c216'])
async def example_embed(self, ctx):
embed = discord.Embed(title='Automaton Doll & Winter Lights',
color=0xff8b53)
embed.set_thumbnail(url='https://www.duelingbook.com/images/custom-pics/2300000/2356400.jpg')
embed.add_field(name='Status (Archetype)', value='Casual:3/Tournament:3', inline=True)
embed.add_field(name='Type (Attribute)', value='Machine/Tuner/Effect (LIGHT)', inline=False)
embed.add_field(name='Level (ATK/DEF)', value='3 (0/1800)', inline=False)
embed.add_field(name='Monster Effect', value='During damage calculation, if your monster is battling an opponent\'s monster with higher ATK (Quick Effect): You can discard this card; your battling monster gains ATK/DEF equal to the ATK of the opponent\'s monster it is battling, until the end of the turn.', inline=False)
embed.set_footer(text='Set Code: ANCF')
await ctx.send(embed=embed)
def setup(bot: commands.Bot):
bot.add_cog(c216(bot)) | 2.796875 | 3 |
setup.py | philipperemy/bitmex-liquidations | 30 | 12760596 | from setuptools import setup
setup(
name='bitmex-liquidation',
version='1.0.1',
description='BitMEX Liquidation Python Wrapper',
author='<NAME>',
license='MIT',
long_description_content_type='text/markdown',
long_description=open('README.md').read(),
packages=['bitmex_liquidation'],
install_requires=['websocket-client==0.47.0']
)
| 1.226563 | 1 |
samples/long-running-compute/hello-4-90.py | Algorab/examples | 9 | 12760597 | <reponame>Algorab/examples<gh_stars>1-10
from datetime import datetime
import math
import time
def main():
start = datetime.now()
i = 10
while (datetime.now() - start).seconds <= 90:
fac = math.factorial(i)
time.sleep(0.5)
i += 10
return "Success" | 2.984375 | 3 |
salt/_modules/metalk8s_network.py | zarumaru/metalk8s | 0 | 12760598 | <reponame>zarumaru/metalk8s
'''Metalk8s network related utilities.'''
import itertools
import logging
from salt._compat import ipaddress
from salt.exceptions import CommandExecutionError
K8S_CLUSTER_ADDRESS_NUMBER = 0
COREDNS_ADDRESS_NUMBER = 9
log = logging.getLogger(__name__)
__virtualname__ = 'metalk8s_network'
def __virtual__():
return __virtualname__
def _pick_nth_service_ip(n):
'''
Pick the nth IP address from a network range, based on pillar
configurations provided in CIDR notation.
'''
cidr = __pillar__.get('networks', {}).get('service')
if cidr is None:
raise CommandExecutionError(
'Pillar key "networks:service" must be set.'
)
network = ipaddress.IPv4Network(unicode(cidr))
# NOTE: hosts() method below returns usable hosts in a network.
# The usable hosts are all the IP addresses that belong to the network,
# except the network address itself and the network broadcast address.
ip = next(
itertools.islice(network.hosts(), n, None),
None
)
if ip is None:
raise CommandExecutionError(
'Could not obtain an IP in the network range {}'.format(
cidr
)
)
return str(ip)
def get_kubernetes_service_ip():
'''
Return the Kubernetes service cluster IP.
This IP is arbitrarily selected as the first IP from the usable hosts
range.
Note:
In kube-apiserver Pod manifest, we define a service-cluster-ip-range which
sets the kube-api address to the first usable host.
'''
return _pick_nth_service_ip(K8S_CLUSTER_ADDRESS_NUMBER)
def get_cluster_dns_ip():
'''
Return the CoreDNS cluster IP.
This IP is arbitrarily selected as the tenth IP from the usable hosts
range.
'''
return _pick_nth_service_ip(COREDNS_ADDRESS_NUMBER)
| 2.59375 | 3 |
notify.py | huanzhang/zzf-notifier | 0 | 12760599 | <filename>notify.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import urllib2
from bs4 import BeautifulSoup
from datetime import date
from datetime import datetime
import yaml
import sqlite3
import argparse
import smtplib
from email.MIMEMultipart import MIMEMultipart
from email.MIMEText import MIMEText
import logging
logger = logging.getLogger('zzf-notifier')
hdlr = logging.FileHandler('notifier.log')
formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s')
hdlr.setFormatter(formatter)
logger.addHandler(hdlr)
logger.setLevel(logging.INFO)
CONFIG = yaml.load(file("config.yml", "r"))
CONN = sqlite3.connect(CONFIG["db_file"], detect_types=sqlite3.PARSE_COLNAMES)
def fetch_all_tzgg():
tzgg_url = CONFIG["tzgg_url"]
r = urllib2.urlopen(tzgg_url).read()
soup = BeautifulSoup(r, "lxml")
tzgg_board = soup.find("ul", {"opentype": "page"})
return tzgg_board.find_all("li")
def parse_data_from_tzgg_html(tzgg_tag):
link = tzgg_tag.find("a")
span = tzgg_tag.find("span")
title = link.contents[0].strip() if link and link.contents else None
url = CONFIG["tzgg_host"] + link["href"].strip() if link and link.has_attr("href") else None
publish_date = datetime.strptime(span.contents[0].strip(), "%Y-%m-%d").date() if span and span.contents else None
return {"title": title, "url": url, "publish_date": publish_date}
def is_tzgg_date_after_target(tzgg, target_date):
return tzgg["publish_date"] and tzgg["publish_date"] >= target_date
def find_tzgg(title, publish_date):
cur = CONN.cursor()
cur.execute("SELECT * FROM tzggs WHERE title = ? AND publish_date = ?", (title, publish_date))
return cur.fetchone()
def create_tzgg(title, url, publish_date):
cur = CONN.cursor()
cur.execute("INSERT INTO tzggs(title, url, publish_date) values(?, ?, ?)", (title, url, publish_date))
CONN.commit()
return True
def send_mail(tzgg):
fromaddr = CONFIG["smtp"]["user_name"]
toaddr = ",".join(CONFIG["recipients"])
msg = MIMEMultipart()
msg['From'] = fromaddr
msg['To'] = toaddr
msg['Subject'] = u"[自住房公告] %s %s" % (tzgg["title"], tzgg["publish_date"].strftime("%Y-%m-%d"))
body = u"%s\n%s" % (tzgg["title"], tzgg["url"])
msg.attach(MIMEText(body, CONFIG["smtp"]["authentication"], 'utf-8'))
server = smtplib.SMTP(CONFIG["smtp"]["address"], CONFIG["smtp"]["port"])
if CONFIG["smtp"]["ssl"]:
server.starttls()
server.login(fromaddr, CONFIG["smtp"]["password"])
text = msg.as_string()
server.sendmail(fromaddr, toaddr, text)
server.quit()
def main():
parser = argparse.ArgumentParser(description='Notify new zzf news')
parser.add_argument('--init', dest='initialized', action='store_true',
default=False,
help='initialize from the beginning')
args = parser.parse_args()
fetch_date = date.today()
if args.initialized:
fetch_date = date(2016, 1, 1)
logger.info("Start to fetch tzggs")
tzggs = fetch_all_tzgg()
tzggs = map(parse_data_from_tzgg_html, tzggs)
tzggs = filter(lambda x: is_tzgg_date_after_target(x, fetch_date), tzggs)
tzggs.reverse()
logger.info("%s tzgg(s) to process" % len(tzggs))
for tzgg in tzggs:
tg = find_tzgg(tzgg["title"], tzgg["publish_date"])
if not tg:
logger.info(u"Found new tzgg: %s" % tzgg["title"])
send_mail(tzgg)
logger.info("Email sent successfully")
create_tzgg(tzgg["title"], tzgg["url"], tzgg["publish_date"])
CONN.close()
if __name__ == "__main__":
main()
| 2.453125 | 2 |
epos/epos.py | solvire/epos-xml | 6 | 12760600 | <reponame>solvire/epos-xml
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import xml.etree.ElementTree as ET
# from django.utils.translation import ugettext as _
class ePOSDocument(object):
"""
The main document for the ePOS XML printing
"""
line_items = []
def __init__(self):
self.line_items = []
def append_row(self, element):
self.line_items.append(element)
def xml(self):
"""
Take the main document object and return an XML rendering
"""
exml = ET.Element("epos-print", {"xmlns":"http://www.epson-pos.com/schemas/2011/03/epos-print"})
for obj in self.line_items:
o = ET.SubElement(exml, tag=obj.get_tag(), attrib=obj.get_attr())
txt = obj.get_text()
if txt is not None:
o.text = obj.get_text()
return ET.tostring(exml)
| 2.5 | 2 |
tests/orbital_space/test_orbital_space.py | fevangelista/pyWicked | 0 | 12760601 | import wicked as w
import pytest
def test_orbital_space():
w.reset_space()
w.add_space("c", "fermion", "occupied", ["m", "n"])
w.add_space("a", "fermion", "general", ["u", "v", "w", "x", "y", "z"])
w.add_space("v", "fermion", "unoccupied", ["e", "f"])
assert w.num_spaces() == 3
def test_orbital_space_exceptions():
w.reset_space()
w.add_space("c", "fermion", "occupied", ["m", "n"])
with pytest.raises(Exception):
w.add_space("c", "fermion", "occupied", ["m", "n"])
with pytest.raises(Exception):
w.add_space("v", "fermion", "occupied", ["m", "n"])
if __name__ == "__main__":
test_orbital_space()
test_orbital_space_exceptions()
| 2.5 | 2 |
bitbots_animation_server/src/bitbots_animation_server/spline_animator.py | Zcyyy/bitbots_motion | 0 | 12760602 | import math
import rospy
from bitbots_animation_server.animation import Keyframe, Animation
from bitbots_splines.smooth_spline import SmoothSpline
class SplineAnimator:
def __init__(self, animation, current_joint_states):
self.anim = animation
self.start_time = rospy.get_time()
self.animation_duration = 0
self.current_point_time = 0
self.spline_dict = {}
#add current joint positions as start
if current_joint_states is not None:
i = 0
for joint in current_joint_states.name:
if joint not in self.spline_dict:
self.spline_dict[joint] = SmoothSpline()
self.spline_dict[joint].add_point(0, math.degrees(current_joint_states.position[i]))
i+=1
else:
rospy.logwarn("No current joint positions. Will play animation starting from first keyframe.")
for joint in self.anim.keyframes[0].goals:
if joint not in self.spline_dict:
self.spline_dict[joint] = SmoothSpline()
self.spline_dict[joint].add_point(0, self.anim.keyframes[0].goals[joint])
#load keyframe positions into the splines
for keyframe in self.anim.keyframes:
self.animation_duration += keyframe.duration + keyframe.pause
self.current_point_time += keyframe.duration
for joint in keyframe.goals:
if joint not in self.spline_dict:
self.spline_dict[joint] = SmoothSpline()
self.spline_dict[joint].add_point(self.current_point_time, keyframe.goals[joint])
self.spline_dict[joint].add_point(self.current_point_time + keyframe.pause, keyframe.goals[joint])
self.current_point_time += keyframe.pause
# compute the splines
for joint in self.spline_dict:
self.spline_dict[joint].compute_spline()
def get_positions_deg(self, time):
if time < 0 or time > self.animation_duration:
return None
ret_dict = {}
for joint in self.spline_dict:
ret_dict[joint] = self.spline_dict[joint].pos(time)
return ret_dict
def get_positions_rad(self, time):
if time < 0 or time > self.animation_duration:
return None
ret_dict = {}
for joint in self.spline_dict:
ret_dict[joint] = math.radians(self.spline_dict[joint].pos(time))
return ret_dict
def get_start_time(self):
return self.start_time
def get_duration(self):
return self.animation_duration | 2.671875 | 3 |
environments/frozen_lake.py | roshan213/reinforcement-learning | 0 | 12760603 | <filename>environments/frozen_lake.py
import sys
import numpy as np
from gym import utils
from gym.envs.toy_text import discrete
from six import StringIO
# Action indices used throughout the environment's transition table.
LEFT = 0
DOWN = 1
RIGHT = 2
UP = 3

# Pre-defined lake layouts.  S = start, F = frozen (safe), H = hole, G = goal.
MAPS = {
    "4x4": [  # Simple balanced paths
        "SFFH",
        "FHFF",
        "FHFH",
        "FFFG"
    ],
    "11x11": [  # Almost balanced with down path one less than left path
        "SFFFFHFFFFF",
        "FHHHFHFHHHF",
        "FHFHFHFHFHF",
        "FHFHFFFHFHF",
        "FFFFHFHFFFF",
        "HFHFHFHFHFF",
        "FFFFHFHFFFF",
        "FHFHFFFHFHF",
        "FHFHFFFFFHF",
        "FHFHFHFHFHF",
        "FFFFFFFFFFG"
    ],
    "15x15": [  # Single path to goal
        "SFFFFFFFFHFFFFF",  # "S##|### | "
        "HHFHFHFHHHFHHHF",  # "-+#+#+#+-+ +-+ "
        "FFFFFHFFFHFFFHF",  # "###|#|###| | "
        "FHHFFHFHFHHHFHF",  # "#+-+#+ +#+-+ + "
        "FFFFFHFHFFFHFHF",  # "#####| |###| | "
        "HHHFHHHHHHFHFHH",  # "-+-+-+-+-+#+ +-"
        "FFFFFHFFFHFHFFF",  # " | |#| "
        "HHFHHHFHFHFHHHF",  # "-+ +-+ + +#+-+ "
        "FFFHFFFHFHFFFHF",  # " | | |###| "
        "FHHHFHFHHHHHFHF",  # " +-+ + +-+-+#+ "
        "FHFFFHFFFFFHFFF",  # " | | |###"
        "FHFHHHHHHHFHHHF",  # " + +-+-+-+ +-+#"
        "FHFHFFFHFHFFFHF",  # " | | | | |#"
        "FHFHFHFHFHHHFHF",  # " + + + + +-+ +#"
        "FFFFFHFFFHFFFFG"   # " | | G"
    ]
}
# Updated from:
# https://github.com/openai/gym/blob/master/gym/envs/toy_text/frozen_lake.py
def generate_random_map(size=8, p=0.8):
    """Generates a random valid map (one that has a path from start to goal)

    :param size: size of each side of the grid
    :param p: probability that a tile is frozen
    :return: list of row strings made of 'S', 'F', 'H', 'G'
    """
    valid = False

    def is_valid(arr, r=0, c=0):
        """Return True when the goal 'G' is reachable from (r, c).

        Iterative depth-first search with an explicit stack and a visited
        set.  Unlike the previous recursive mark-and-restore version this
        cannot hit the interpreter recursion limit on large maps and
        visits each cell at most once (the restore made the recursion
        exponential in the worst case).  The reachability result is
        identical.
        """
        stack = [(r, c)]
        seen = {(r, c)}
        while stack:
            row, col = stack.pop()
            if arr[row][col] == 'G':
                return True
            for dr, dc in ((1, 0), (0, 1), (-1, 0), (0, -1)):
                r_new, c_new = row + dr, col + dc
                if not (0 <= r_new < size and 0 <= c_new < size):
                    continue
                if (r_new, c_new) in seen:
                    continue
                # Holes block the path; everything else is walkable.
                if arr[r_new][c_new] != 'H':
                    seen.add((r_new, c_new))
                    stack.append((r_new, c_new))
        return False

    while not valid:
        p = min(1, p)  # clamp so the probability vector stays valid
        res = np.random.choice(['F', 'H'], (size, size), p=[p, 1 - p])
        res[0][0] = 'S'
        res[-1][-1] = 'G'
        valid = is_valid(res)
    return ["".join(x) for x in res]
# Adapted from https://github.com/openai/gym/blob/master/gym/envs/toy_text/frozen_lake.py
# This is a modified environment where rewards are given for every step in addition to on finding a goal.
# This reward shaping makes the problem easier for the learner to solve.
class RewardingFrozenLakeEnv(discrete.DiscreteEnv):
    """
    Winter is here. You and your friends were tossing around a frisbee at the park
    when you made a wild throw that left the frisbee out in the middle of the lake.
    The water is mostly frozen, but there are a few holes where the ice has melted.
    If you step into one of those holes, you'll fall into the freezing water.
    At this time, there's an international frisbee shortage, so it's absolutely imperative that
    you navigate across the lake and retrieve the disc.
    However, the ice is slippery, so you won't always move in the direction you intend.
    The surface is described using a grid like the following

        SFFF
        FHFH
        FFFH
        HFFG

    S : starting point, safe
    F : frozen surface, safe
    H : hole, fall to your doom
    G : goal, where the frisbee is located

    The episode ends when you reach the goal or fall in a hole.
    You receive a reward of 1 if you reach the goal, -1 for falling in a hole, and a small negative reward otherwise.
    The hole and step rewards are configurable when creating an instance of the problem.
    """
    metadata = {'render.modes': ['human', 'ansi']}

    def __init__(self, desc=None, map_name="4x4", rewarding=True, step_reward=-0.1, hole_reward=-1, is_slippery=True):
        # desc: explicit map (list of strings) -- takes precedence over map_name.
        # map_name: key into MAPS; if both are None a random map is generated.
        # rewarding: when True, apply step_reward/hole_reward shaping.
        # is_slippery: when True, each move may slip to a perpendicular direction.
        if desc is None and map_name is None:
            #raise ValueError('Must provide either desc or map_name')
            desc = generate_random_map()
        elif desc is None:
            desc = MAPS[map_name]
        self.desc = desc = np.asarray(desc, dtype='c')
        self.nrow, self.ncol = nrow, ncol = desc.shape
        # NOTE(review): reward_range stays (0, 1) even though shaped rewards can
        # be negative -- confirm whether it should reflect step/hole rewards.
        self.reward_range = (0, 1)
        self.step_reward = step_reward
        self.hole_reward = hole_reward
        self.rewarding = rewarding
        self.is_slippery = is_slippery

        nA = 4
        nS = nrow * ncol

        # Initial state distribution: uniform over all 'S' tiles.
        isd = np.array(desc == b'S').astype('float64').ravel()
        isd /= isd.sum()

        # P[state][action] -> list of (probability, next_state, reward, done).
        P = {s: {a: [] for a in range(nA)} for s in range(nS)}

        def to_s(row, col):
            # Flatten (row, col) into a single state index.
            return row * ncol + col

        def inc(row, col, a):
            # Apply action a, clamping at the grid border.
            if a == 0:  # left
                col = max(col - 1, 0)
            elif a == 1:  # down
                row = min(row + 1, nrow - 1)
            elif a == 2:  # right
                col = min(col + 1, ncol - 1)
            elif a == 3:  # up
                row = max(row - 1, 0)
            return (row, col)

        for row in range(nrow):
            for col in range(ncol):
                s = to_s(row, col)
                for a in range(4):
                    li = P[s][a]
                    letter = desc[row, col]
                    if letter in b'GH':
                        # Terminal tiles self-loop with zero reward.
                        li.append((1.0, s, 0, True))
                    else:
                        if is_slippery:
                            # Equal 1/3 chance of the intended action and the
                            # two perpendicular ones.
                            for b in [(a - 1) % 4, a, (a + 1) % 4]:
                                newrow, newcol = inc(row, col, b)
                                newstate = to_s(newrow, newcol)
                                newletter = desc[newrow, newcol]
                                done = bytes(newletter) in b'GH'
                                rew = float(newletter == b'G')
                                if self.rewarding:
                                    if newletter == b'F':
                                        rew = self.step_reward
                                    elif newletter == b'H':
                                        rew = self.hole_reward
                                li.append((1.0/3.0, newstate, rew, done))
                        else:
                            # Deterministic transition.
                            newrow, newcol = inc(row, col, a)
                            newstate = to_s(newrow, newcol)
                            newletter = desc[newrow, newcol]
                            done = bytes(newletter) in b'GH'
                            rew = float(newletter == b'G')
                            if self.rewarding:
                                if newletter == b'F':
                                    rew = self.step_reward
                                elif newletter == b'H':
                                    rew = self.hole_reward
                            li.append((1.0, newstate, rew, done))

        super(RewardingFrozenLakeEnv, self).__init__(nS, nA, P, isd)

    def render(self, mode='human'):
        # Print (or return, for 'ansi' mode) the grid with the agent's
        # current tile highlighted in red.
        outfile = StringIO() if mode == 'ansi' else sys.stdout

        row, col = self.s // self.ncol, self.s % self.ncol
        desc = self.desc.tolist()
        desc = [[c.decode('utf-8') for c in line] for line in desc]
        desc[row][col] = utils.colorize(desc[row][col], "red", highlight=True)
        if self.lastaction is not None:
            outfile.write(" ({})\n".format(["Left", "Down", "Right", "Up"][self.lastaction]))
        else:
            outfile.write("\n")
        outfile.write("\n".join(''.join(line) for line in desc) + "\n")

        if mode != 'human':
            return outfile

    def colors(self):
        # Tile byte -> colour name.  Presumably consumed by an external
        # visualisation helper -- not used within this file.
        return {
            b'S': 'green',
            b'F': 'skyblue',
            b'H': 'black',
            b'G': 'gold',
        }

    def directions(self):
        # Action index -> arrow glyph.  Presumably consumed by an external
        # visualisation helper -- not used within this file.
        return {
            3: '⬆',
            2: '➡',
            1: '⬇',
            0: '⬅'
        }

    def new_instance(self):
        # Fresh environment with the same map and reward configuration.
        return RewardingFrozenLakeEnv(desc=self.desc, rewarding=self.rewarding, step_reward=self.step_reward,
                                      hole_reward=self.hole_reward, is_slippery=self.is_slippery)
# --- end of environments/frozen_lake.py ---
# --- src/anaplan_api/File.py ---
from .AnaplanConnection import AnaplanConnection
from .Resources import Resources
from .ResourceParserFile import ResourceParserFile
from .AnaplanResourceFile import AnaplanResourceFile
from .util.AnaplanVersion import AnaplanVersion
from .util.Util import ResourceNotFoundError
class File:
    """A class representing an Anaplan file.

    :param _conn: Object with authentication, workspace, and model details.
    :type _conn: AnaplanConnection
    :param _file_resources: Object with file metadata
    :type _file_resources: AnaplanResourceFile
    :param _base_url: Anaplan API URL
    :type _base_url: str
    :param _file_id: ID of file in Anaplan
    :type _file_id: str
    :param _workspace: ID of workspace that contains the specified Anaplan model
    :type _workspace: str
    :param _model: ID of the Anaplan model hosting specified file.
    :type _model: str
    :param _url: Appended _base_url to interact with the specified file.
    :type _url: str
    :param _chunk_count: Number of chunk segments of the specified file.
    :type _chunk_count: int
    """

    _conn: AnaplanConnection
    _file_resources: AnaplanResourceFile
    _base_url: str = f"https://api.anaplan.com/{AnaplanVersion.major()}/{AnaplanVersion.minor()}/workspaces/"
    _file_id: str
    _workspace: str
    _model: str
    _url: str
    _chunk_count: int

    def __init__(self, conn: AnaplanConnection, file_id: str):
        """
        :param conn: Object with authentication, workspace, and model details
        :type conn: AnaplanConnection
        :param file_id: ID of the specified file in the Anaplan model
        :type file_id: str
        """
        self._conn = conn
        self._file_id = file_id
        self._workspace = conn.get_workspace()
        self._model = conn.get_model()
        self._url = self._build_url()

        self.get_metadata()
        self.set_file_details()

    def _build_url(self) -> str:
        """Compose the API URL for the current workspace/model/file ids."""
        return ''.join([self._base_url, self._workspace, "/models/",
                        self._model, "/files/", self._file_id])

    def get_metadata(self):
        """
        Instantiates Resources class to fetch files list from Anaplan, then
        parses to build an AnaplanResource object
        """
        conn = self._conn  # Reading class variable to pass to external class
        get_files = Resources(conn, "files")
        file_list = get_files.get_resources()
        file_parser = ResourceParserFile()
        self._file_resources = file_parser.get_parser(file_list)

    def set_file_details(self):
        """Sets _chunk_count corresponding to the matching Anaplan file in the
        AnaplanResource object.

        :raises ResourceNotFoundError: If specified file ID is not found in
            AnaplanResource object
        """
        try:
            self._chunk_count = self._file_resources[self._file_id]
        except ResourceNotFoundError:
            raise ResourceNotFoundError(f"{self._file_id} not found.")

    def get_base_url(self) -> str:
        """Return the base URL for Anaplan API.

        :return: Base URL for Anaplan API.
        :rtype: str
        """
        return self._base_url

    def get_connection(self) -> AnaplanConnection:
        """Returns the AnaplanConnection object.

        :return: Object with connection details for a specified model
        :rtype: AnaplanConnection
        """
        return self._conn

    def get_file_id(self) -> str:
        """Returns the ID of the specified Anaplan file.

        :return: Anaplan file file_id
        :rtype: str
        """
        return self._file_id

    def get_workspace(self) -> str:
        """Returns the workspace ID.

        :return: Workspace ID
        :rtype: str
        """
        return self._workspace

    def get_model(self) -> str:
        """Returns the model ID.

        :return: Anaplan model ID
        :rtype: str
        """
        return self._model

    def get_resource(self) -> AnaplanResourceFile:
        """Returns the AnaplanResource object.

        :return: AnaplanResource object with files in specified Anaplan model
        :rtype: AnaplanResourceFile
        """
        return self._file_resources

    def get_chunk_count(self) -> int:
        """Returns _chunk_count of the specified file.

        :return: Chunk count of specified file.
        :rtype: int
        """
        return self._chunk_count

    def get_url(self) -> str:
        """Returns the URL of the specified Anaplan file.

        :return: API URL of specified file
        :rtype: str
        """
        return self._url

    def set_file_id(self, file_id: str):
        """Updates the specified file ID and updates _chunk_count to match.

        Delegates the chunk-count lookup to set_file_details() instead of
        duplicating it, and -- unlike the previous implementation -- also
        refreshes _url so get_url() no longer points at the old file.

        :param file_id: ID of file in Anaplan.
        :type file_id: str
        :raises ResourceNotFoundError: If specified file ID is not found in
            AnaplanResource object
        """
        self._file_id = file_id
        self._url = self._build_url()
        self.set_file_details()
# --- end of src/anaplan_api/File.py ---
# --- euca2ools/commands/s3/__init__.py ---
# Copyright (c) 2013-2016 Hewlett Packard Enterprise Development LP
#
# Redistribution and use of this software in source and binary forms,
# with or without modification, are permitted provided that the following
# conditions are met:
#
# Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import os
import string
import sys
import urlparse
from requestbuilder import Arg
import requestbuilder.auth.aws
import requestbuilder.exceptions
import requestbuilder.request
import requestbuilder.service
import requests
import six
from euca2ools.commands import Euca2ools
from euca2ools.exceptions import AWSError
class S3(requestbuilder.service.BaseService):
    """Requestbuilder service definition for an S3-compatible object store."""

    NAME = 's3'
    DESCRIPTION = 'Object storage service'
    REGION_ENVVAR = ('EUCA_DEFAULT_REGION', 'AWS_DEFAULT_REGION')
    URL_ENVVAR = 'S3_URL'

    ARGS = [Arg('-U', '--url', metavar='URL',
                help='object storage service endpoint URL')]

    # pylint: disable=no-self-use
    def handle_http_error(self, response):
        # Convert an HTTP error response into an AWSError exception.
        raise AWSError(response)
    # pylint: enable=no-self-use

    def resolve_url_to_location(self, url):
        """
        Given a URL, try to return its associated region, bucket, and
        key names based on this object's endpoint info as well as all
        S3 endpoints given in the configuration.
        """
        parsed_url = six.moves.urllib.parse.urlparse(url)
        if not parsed_url.scheme:
            # Bare "host/bucket/key" form -- assume http so urlparse splits it.
            parsed_url = six.moves.urllib.parse.urlparse('http://' + url)
        parsed_own_url = six.moves.urllib.parse.urlparse(self.endpoint)
        # First try this service's own endpoint.
        bucket, key = self.__match_path(parsed_url, parsed_own_url)
        if bucket:
            return self.region_name, bucket, key
        else:
            # Try to look it up in the config
            s3_urls = self.config.get_all_region_options('s3-url')
            for section, conf_url in s3_urls.items():
                parsed_conf_url = six.moves.urllib.parse.urlparse(conf_url)
                bucket, key = self.__match_path(parsed_url, parsed_conf_url)
                if bucket:
                    region = self.config.get_region_option('name',
                                                           region=section)
                    return region or section, bucket, key
        raise ValueError("URL '{0}' matches no known object storage "
                         "endpoints. Supply one via the command line or "
                         "configuration.".format(url))

    def __match_path(self, given, service):
        """Return (bucket, key) if *given* matches the *service* endpoint in
        either path style or virtual-host style; (None, None) otherwise.

        NOTE(review): the path-style branch assumes the service path is a
        prefix of the given path and that a '/' follows the bucket name;
        malformed input raises IndexError/ValueError here -- confirm callers
        only pass URLs of that shape.
        """
        if given.netloc == service.netloc:
            # path-style: http://endpoint/service-path/bucket/key
            service_path = service.path
            if not service_path.endswith('/'):
                service_path += '/'
            cpath = given.path.split(service_path, 1)[1]
            bucket, key = cpath.split('/', 1)
            self.log.debug('URL path match: %s://%s%s + %s://%s%s -> %s/%s',
                           given.scheme, given.netloc, given.path,
                           service.scheme, service.netloc, service.path,
                           bucket, key)
        elif given.netloc.endswith(service.netloc):
            # vhost-style: http://bucket.endpoint/key
            bucket = given.netloc.rsplit('.' + service.netloc, 1)[0]
            bucket = bucket.lstrip('/')
            if given.path.startswith('/'):
                key = given.path[1:]
            else:
                key = given.path
            self.log.debug('URL vhost match: %s://%s%s + %s://%s%s -> %s/%s',
                           given.scheme, given.netloc, given.path,
                           service.scheme, service.netloc, service.path,
                           bucket, key)
        else:
            # No relationship between the URL and this endpoint.
            bucket = None
            key = None
        return bucket, key
class S3Request(requestbuilder.request.BaseRequest):
    """Base class for S3 requests: selects SigV1/SigV4 auth and handles
    inter-region redirect errors."""

    SUITE = Euca2ools
    SERVICE_CLASS = S3
    AUTH_CLASS = requestbuilder.auth.aws.HmacV1Auth

    def __init__(self, **kwargs):
        requestbuilder.request.BaseRequest.__init__(self, **kwargs)
        # NOTE(review): redirects_left is initialised here but not read in
        # this class -- presumably consumed by subclasses; confirm.
        self.redirects_left = 3

    def configure(self):
        # Swap the default V1 signer for V4 when the region config demands it.
        requestbuilder.request.BaseRequest.configure(self)
        if self.__should_use_sigv4():
            self.log.info('switching to HmacV4Auth')
            # pylint: disable=access-member-before-definition
            auth = requestbuilder.auth.aws.HmacV4Auth.from_other(self.auth)
            # pylint: enable=access-member-before-definition
            auth.configure()
            self.auth = auth

    def __should_use_sigv4(self):
        # True when the region option 's3-force-sigv4' is set to a truthy value.
        return self.config.convert_to_bool(
            self.config.get_region_option('s3-force-sigv4'))

    def get_presigned_url2(self, timeout):
        """
        Get a pre-signed URL for this request that expires after a given
        number of seconds.
        """
        # requestbuilder 0.3
        self.preprocess()
        if self.__should_use_sigv4():
            # UNSIGNED-PAYLOAD is a magical string used for S3 V4 query auth.
            auth = requestbuilder.auth.aws.QueryHmacV4Auth.from_other(
                self.auth, timeout=timeout, payload_hash='UNSIGNED-PAYLOAD')
        else:
            auth = requestbuilder.auth.aws.QueryHmacV1Auth.from_other(
                self.auth, timeout=timeout)
        return self.service.get_request_url(
            method=self.method, path=self.path, params=self.params,
            auth=auth)

    def handle_server_error(self, err):
        # A 301 from S3 means the bucket lives in another region; abort with
        # a helpful message instead of following the redirect.
        if err.status_code == 301:
            self.log.debug('-- response content --\n',
                           extra={'append': True})
            self.log.debug(self.response.text, extra={'append': True})
            self.log.debug('-- end of response content --')
            self.log.error('result: inter-region redirect')
            msg = 'Aborting due to inter-region redirect'
            if 'Endpoint' in err.elements:
                msg += ' to {0}'.format(err.elements['Endpoint'])
            self.log.debug(msg, exc_info=True)
            if 'Bucket' in err.elements:
                bucket = '"{0}" '.format(err.elements['Bucket'])
            else:
                bucket = ''
            parsed = six.moves.urllib.parse.urlparse(self.service.endpoint)
            msg = ('Bucket {0}is not available from endpoint "{1}". Ensure '
                   "the object storage service URL matches the bucket's "
                   'location.'.format(bucket, parsed.netloc))
            raise requestbuilder.exceptions.ClientError(msg)
        else:
            # Everything else is handled by the generic base implementation.
            return requestbuilder.request.BaseRequest.handle_server_error(
                self, err)
def validate_generic_bucket_name(bucket):
    """Raise ValueError unless *bucket* is a legal generic bucket name.

    Rules: 1-255 characters, each a letter, digit, dot, hyphen, or
    underscore.  Returns None on success.
    """
    if not bucket:
        raise ValueError('name is too short')
    if len(bucket) > 255:
        raise ValueError('name is too long')
    allowed = frozenset(string.ascii_letters + string.digits + '.-_')
    for char in bucket:
        if char not in allowed:
            raise ValueError('invalid character \'{0}\''.format(char))
def validate_dns_bucket_name(bucket):
    """Raise ValueError unless *bucket* is a valid DNS-compatible
    (virtual-host style) bucket name.

    Rules: 3-63 characters; lowercase letters, digits, dots and hyphens
    only; no leading or trailing dot; no empty dot-separated label;
    labels may not begin or end with '-'; the name may not be formatted
    like an IPv4 address.  Returns None on success.
    """
    if len(bucket) < 3:
        raise ValueError('name is too short')
    if len(bucket) > 63:
        raise ValueError('name is too long')
    if bucket.startswith('.'):
        raise ValueError('name may not start with \'.\'')
    if bucket.endswith('.'):
        raise ValueError('name may not end with \'.\'')
    label_chars = string.ascii_lowercase + string.digits + '-'
    edge_chars = string.ascii_lowercase + string.digits
    labels = bucket.split('.')
    for label in labels:
        if not label:
            raise ValueError('name may not contain \'..\'')
        for char in label:
            if char not in label_chars:
                raise ValueError('invalid character \'{0}\''.format(char))
        if label[0] not in edge_chars:
            raise ValueError(('character \'{0}\' may not begin part of a '
                              'bucket name').format(label[0]))
        if label[-1] not in edge_chars:
            raise ValueError(('character \'{0}\' may not end part of a '
                              'bucket name').format(label[-1]))
    if len(labels) == 4:
        try:
            [int(chunk) for chunk in bucket.split('.')]
        except ValueError:
            # At least one label is not purely numeric; that is fine.
            pass
        else:
            raise ValueError('name must not be formatted like an IP address')
# --- end of euca2ools/commands/s3/__init__.py ---
# --- preprocess/prepare/kwic.py ---
# -*- coding: utf-8 -*-
import sys
import os
import re
from xml_parser import XMLParser
import xml.etree.ElementTree as ET
from collections import OrderedDict
# TODO
# [DONE] only critical
# [DONE] xml format
# [DONE] grouped by keywords
# [DONE] sorted by token
# [DONE] ids
# [DONE] ' missing from keywords
# [DONE] why aal?, aaler?
# [DONE] uppercase
# [DONE] attach punctuation to words
# () add punctuation only if actually attached?
# . <choice>..<seg subtype="toUpper" type="crit">n</seg></choice><choice><orig /><reg>'</reg></choice>
# . only 7 occurrences, so tempted to ignore it for now
# [DONE] check wellformedness
# [DONE] XML output
# [DONE] 7 tokens on each side ! punctuation are also token
# [DONE] same order as legacy kwic
# [DONE] stopwords
# [DONE] duplicates: <item type="seg_item" location="edfr20125_00589" n="5"><string>a</string></item>
# [DONE] remove []
class KWICList(XMLParser):
    """Build a KWIC (keyword-in-context) list from a TEI XML document.

    Reduces the input XML to its 'critical' version, collects every <w>
    token found under rubric heads and numbered <seg> elements, and emits
    a <kwiclist> XML document grouped by the keyword's first letter.

    NOTE: the ``ur''`` string literals make this module Python 2 only.
    """

    # those elements will have the first letter capitalised in critical version
    # ac-332.4 & ac-337.3
    # see also: is_element_titalised()
    elements_to_titles = ['persName', 'placeName', 'name', 'geogName']

    # ac-276: spaces directly under those elements will be removed
    elements_remove_spaces = ['mod']

    # stop words
    # stop_words = ['de', 'le', 'o']
    stop_words = []

    # 'context' size: the number of words on each side of a keyword that are
    # part of its context.
    context_radius = 7

    default_output = u'kwic.xml'

    # Elements stripped from the tree before keyword collection.
    elements_to_remove = [
        ur'note',
        # no longer used
        ur'add[@type="annotation"]',
        'pb', 'cb',
        # All below are SEMI-DIPLOMATIC version
        'del', 'orig', 'seg[@type="semi-dip"]', 'sic',
        # ac-329
        'figure',
    ]

    def __init__(self):
        super(KWICList, self).__init__()

    def run_custom(self, input_path_list, output_path):
        """Entry point called by the framework; only a single input file
        is supported."""
        if len(input_path_list) != 1:
            print('ERROR: please provide a single input file')
        else:
            for input_path in input_path_list:
                self.read_xml(input_path)
                self.generate_list()
                break

    def generate_list(self):
        """Produce the critical version, collect keywords and emit the XML."""
        self.make_version('critical')
        kwics = self.collect_keywords()
        self.generate_xml(kwics)

    def make_version(self, version='critical'):
        """Transform self.xml in place into the requested text version.

        Only 'critical' is supported: removes diplomatic-only elements,
        applies toUpper/toLower case markers and collapses whitespace
        under the elements listed in elements_remove_spaces.
        """
        print('Create version %s' % version)
        if version != 'critical':
            raise Exception('Unsupported version %s' % version)

        self.remove_elements(self.elements_to_remove)
        # self.remove_elements(filters)

        print('\t capitalise toUpper')
        for element in self.xml.findall('.//*[@subtype="toUpper"]'):
            element.text = (element.text or ur'').upper()
            for el in element.findall('.//*'):
                el.text = (el.text or ur'').upper()
                el.tail = (el.tail or ur'').upper()

        print('\t lowercase toLower')
        for element in self.xml.findall('.//*[@subtype="toLower"]'):
            element.text = (element.text or ur'').lower()
            for el in element.findall('.//*'):
                el.text = (el.text or ur'').lower()
                el.tail = (el.tail or ur'').lower()

        # remove spaces directly under control elements
        # ac-276:
        # e.g. <w><mod> <add>a</add> <del>b</del> <mod>c</w>
        # => <w><mod><add>a</add><del>b</del><mod>c</w>
        def collapse_spaces(element, is_tail=False):
            ''' this will remove element.text
            if element.text contains only spaces or line breaks;
            If is_tail= True, apply to element.tail instead
            '''
            part = 'tail' if is_tail else 'text'
            val = getattr(element, part, None)
            if val and not re.search(r'\S', val):
                setattr(element, part, '')

        for tag in self.elements_remove_spaces:
            for element in self.xml.findall('.//' + tag):
                before = self.get_unicode_from_xml(element)
                collapse_spaces(element)
                for child in element:
                    collapse_spaces(child, True)
                after = self.get_unicode_from_xml(element)
                # only enabled when debugging
                if 0 and before != after:
                    print(u'\t`{}` -> `{}`'.format(before, after))

    def collect_keywords(self):
        """Return kwic dicts for every <div> with a <head>, restricted to
        the configured paragraph range."""
        kwics = []
        for div in self.xml.findall('.//div[head]'):
            paraid = div.attrib.get(self.expand_prefix('xml:id'))
            if not self.is_para_in_range(paraid):
                continue
            # for element in div.findall('head[@type="rubric"]',
            #                            './/seg[@type]'):
            for filter in ['head[@type="rubric"]', './/seg[@type]']:
                # add all <w> to the kwic list
                kwics.extend(
                    self.collect_keywords_under_elements(
                        div.findall(filter), paraid
                    )
                )
        return kwics

    # Collect all the tokens
    def collect_keywords_under_elements(self, elements, paraid):
        """Collect one kwic dict per <w> token found under *elements*, then
        attach the surrounding context words and trailing punctuation."""
        kwics = []
        for element in elements:
            # only process seg type="1" .. "9"
            # also case for 3a, 4a (e.g. Royal)
            if element.tag == 'seg' and\
                    not re.match(ur'\d+[a-z]?$', element.attrib.get('type') or ur''):
                continue
            # get ID from element if available
            elementid = element.attrib.get(
                self.expand_prefix('xml:id')) or paraid
            tokens = element.findall('.//w')
            # collect all the tokens under <element>
            for token in tokens:
                keyword = self.get_element_text(token, True)
                # print parentid, token.attrib.get('n'), repr(self.get_unicode_from_xml(token))
                # print repr(keyword)
                if keyword:
                    # Strip editorial brackets from the keyword.
                    keyword = re.sub(ur'[\[\]]', ur'', keyword)
                    # keywords.append(keyword)
                    # keyword_tokens.append(token)
                    kwic = {
                        'kw': keyword,                      # the token itself
                        'sl': keyword.lower()[0:1],         # sublist key (first letter)
                        'lc': elementid,                    # location id
                        'nb': token.attrib.get('n'),        # token number
                        'tp': self.get_token_type_from_parent(element),
                        'sp': self.get_speech_type_from_token(token),
                    }
                    kwics.append(kwic)
                else:
                    # A <w> without extractable text is unexpected: report and stop.
                    print('NONE')
                    print(token.attrib.get('n'))
                    print(paraid)
                    exit()

        # add context (prev/next words) to the new keywords in the list
        radius = self.context_radius + 1
        for i in range(len(kwics)):
            kwic = kwics[i]
            # keyword = keywords[i]
            # token = keyword_tokens[i]
            kwic.update({
                'pr': ' '.join([kw['kw'] for kw in kwics[max(0, i - radius):i]]),
                'fo': ' '.join([kw['kw'] for kw in kwics[i + 1:min(len(kwics), i + radius)]]),
            })
            # Add ending punctuation mark
            # TODO: detect punctuation before? (actual exceptional)
            # TODO: detect attachment (no space between the two tokens)
            # But again that may be exceptional and not necessarily useful.
            if (i + 1) < len(kwics) and re.match(ur'[\.,]', kwics[i + 1]['kw']):
                kwic['pe'] = kwics[i + 1]['kw']

        return kwics

    def get_speech_type_from_token(self, token):
        """Return 'direct', 'indirect' or '' for a <w> token, based on the
        <said direct="..."> ancestors found in the document.

        NOTE(review): on the very first call the lookup tables are built but
        the classification itself is skipped (the else branch), so the first
        token always gets '' -- confirm this is intended.
        """
        ret = ''
        speech_elements = getattr(self, 'speech_elements', None)
        if speech_elements is None:
            # <said direct="false">de tout
            speech_elements = {
                'direct': {id(e): 1 for e in self.xml.findall(".//said[@direct='true']//w")},
                'indirect': {id(e): 1 for e in self.xml.findall(".//said[@direct='false']//w")},
            }
            self.speech_elements = speech_elements
        else:
            if id(token) in speech_elements['direct']:
                ret = 'direct'
            elif id(token) in speech_elements['indirect']:
                ret = 'indirect'
        return ret

    def get_token_type_from_parent(self, parent):
        '''Returns the token type from the @type of the parent of the token'''
        ret = 'seg_item'
        el_type = parent.attrib.get('type')
        if el_type == 'rubric':
            ret = 'rubric_item'
        if el_type == '6':
            ret = 'verse_item'
        return ret

    def generate_xml(self, kwics):
        """Serialise the kwic dicts into a <kwiclist> document grouped by the
        keyword's first letter, store it via set_xml_from_unicode() and
        return the unicode string."""
        print('Generate XML')
        kwic_count = 0
        ret = u''
        ret += u'<kwiclist>'
        sublist = None
        parts = []
        invalids = {}
        for kwic in sorted(kwics, key=lambda k: [k['kw'].lower(), k['tp'], k['lc'], int(k['nb'])]):
            # Tokens with whitespace, over-long tokens, or empty ones are
            # almost certainly extraction errors: warn once and skip.
            is_token_invalid = (
                re.search(ur'\s', kwic['kw'])
                or len(kwic['kw']) > 20
                or not(kwic['kw'])
            )
            if is_token_invalid:
                if kwic['kw'] not in invalids:
                    print('WARNING: probably invalid token in %s, "%s".' %
                          (kwic['lc'], repr(kwic['kw'])))
                    invalids[kwic['kw']] = 1
                continue

            # Escape double quotes so the values are safe in XML attributes.
            for k in ['sl', 'pr', 'fo', 'kw', 'pe']:
                v = kwic.get(k, None)
                if v is not None:
                    kwic[k] = v.replace('"', '&quot;')

            part = u''
            # print u'{lc} {nb} {kw}'.format(**kwic)
            if kwic['sl'] != sublist:
                # New first letter: close the previous sublist, open a new one.
                if sublist:
                    part += ur'</sublist>'.format(sublist)
                part += u'\n<sublist key="{}">'.format(kwic['sl'])
                sublist = kwic['sl']
            if not self.is_stop_word(kwic['kw']):
                part += u'\n\t<item type="{tp}" location="{lc}" n="{nb}" preceding="{pr}" following="{fo}" sp="{sp}">'.format(
                    **kwic)
                part += u'\n\t\t<string>{kw}</string>'.format(**kwic)
                if kwic.get('pe'):
                    part += u'\n\t\t<punctuation type="end">{pe}</punctuation>'.format(
                        **kwic)
                part += u'\n\t</item>'
                kwic_count += 1
            if part:
                parts.append(part)

        # for kwic in sorted(set([k['kw'] for k in self.kwics])):
        #     print repr(kwic)

        ret += ''.join(parts)
        if sublist:
            ret += u'</sublist>'
        ret += u'</kwiclist>'

        if 0:
            # for debugging only: output notwelformedfile
            file_path = 'notwellformed.xml'
            encoding = 'utf-8'
            f = open(file_path, 'wb')
            content = u'<?xml version="1.0" encoding="{}"?>\n'.format(encoding)
            content += ret
            content = content.encode(encoding)
            f.write(content)
            f.close()

        self.set_xml_from_unicode(ret)
        print('%s keywords' % kwic_count)
        return ret

    def is_element_titalised(self, element):
        # see ac-343
        # NOTE: the early return deliberately disables the logic below
        # (kept for reference per ticket ac-343).
        return True
        ret = True
        # ac-332.4 removed <name type="building">
        # print(element.tag, element.attrib.get('type', ''))
        if element.attrib.get('type', '').lower() in ['building']:
            ret = False
        return ret

    def is_stop_word(self, token):
        '''Return True if <token> in self.stop_words or doesn't contain a letter'''
        return not re.search(ur'(?musi)\w', token) or token in self.stop_words
# Module entry point: run() is inherited (presumably from XMLParser -- it is
# not defined in this class; TODO confirm) and dispatches to run_custom().
KWICList.run()
# --- end of preprocess/prepare/kwic.py ---
# --- Rain Sensor/rain.py ---
#!/usr/bin/env python3
# Author: <NAME>
import time
import RPi.GPIO as GPIO
# Global Pin Variable: BCM channel number of the rain sensor; set by startRain().
pin = None
def askPin():
    """Prompt on stdin until the user enters a valid integer pin number.

    :return: the pin number entered by the user
    :raises SystemExit: when the user presses Ctrl-C
    :raises EOFError: when stdin is closed (the previous bare ``except``
        swallowed this and looped forever)
    """
    while True:
        try:
            return int(input("Please tell me the pin the rain sensor is connected to: "))
        except KeyboardInterrupt:
            # Preserve the original behaviour: Ctrl-C aborts the program.
            raise SystemExit
        except ValueError:
            # Not an integer -- ask again.
            continue
def startRain():
    """Ask the user for the sensor pin and configure it as a digital input."""
    global pin
    pin = askPin()
    GPIO.setmode(GPIO.BCM)  # BCM numbering: pin refers to the Broadcom channel
    GPIO.setup(pin, GPIO.IN)
def readRain():
    """Return True when the sensor reports rain (its line reads low)."""
    rain = GPIO.input(pin) #pin will be high if not wet
    return not rain
def stopRain():
    """Release all GPIO channels claimed by this script."""
    GPIO.cleanup()
# Script entry: poll the sensor once per second until Ctrl-C, then
# release the GPIO pins.
try:
    startRain()
    while True:
        if readRain():
            print("It's raining !")
        time.sleep(1)
except KeyboardInterrupt:
    stopRain()
# --- functions/utils.py ---
import os
import sys
import time
from torch import save,load, device
import pickle
# Largest object (per sys.getsizeof) that savePy() will pickle: ~3.8 GiB.
LIMIT_4G = 3.8 * 1024 ** 3
def timeFormated() -> str:
    """Return the current UTC time formatted as 'HH-MM_DD-Mon-YY'."""
    return time.strftime("%H-%M_%d-%b-%y", time.gmtime())
def timeFormatedS() -> str:
    """Return the current UTC time formatted as 'HH-MM-SS_DD-Mon-YY'
    (second resolution, unlike timeFormated)."""
    return time.strftime("%H-%M-%S_%d-%b-%y", time.gmtime())
def timeDiffFormated(start):
    """Format the time elapsed since ``start`` (an epoch timestamp).

    :param start: timestamp to measure from, as returned by time.time()
    :return: tuple (text, now) where ``text`` is e.g. '1h 2m 3s' ('< 0s'
        when under one second) and ``now`` is the current timestamp,
        handy for chaining measurements.
    """
    now = time.time()
    elapsed = now - start
    hours, rem = divmod(elapsed, 3600)
    mins, secs = divmod(rem, 60)
    hours, mins, secs = int(hours), int(mins), int(secs)
    if hours > 0:
        text = "{}h {}m {}s".format(hours, mins, secs)
    elif mins > 0:
        text = "{}m {}s".format(mins, secs)
    elif secs > 0:
        text = "{}s".format(secs)
    else:
        text = "< 0s"
    return text, now
def goToDir(path):
    """chdir into ``path`` relative to $HOME, creating it first if needed.

    :param path: directory path relative to the user's home directory
    :return: absolute path of the working directory after the change
    """
    home = os.getenv('HOME')
    target = os.path.join(home, path)
    try:
        os.chdir(target)
    except OSError:
        # Narrowed from a bare ``except:``: only a missing/inaccessible
        # directory should trigger creation, not arbitrary errors.
        os.chdir(home)
        os.makedirs(path)
        os.chdir(path)
    return os.getcwd()
def createFolder(path: str, mod: str):
    """Create (and chdir into) ``<path>/<mod>_<timestamp>`` under $HOME.

    :param path: parent directory, relative to the user's home directory
    :param mod: name prefix for the new folder
    :return: tuple (folder basename, absolute path of the new folder)
    """
    folder_name = "{}_{}".format(mod, timeFormated())
    new_dir = goToDir(os.path.join(path, folder_name))
    return folder_name, new_dir
class Tocker:
    """Small stopwatch built on time.time(): records a start timestamp and
    reports elapsed time in various formats."""

    def __init__(self):
        # Property access stores the initial timestamp in self.start.
        self.tick

    @property
    def tick(self):
        """Restart the stopwatch and return the new start timestamp."""
        self.start = time.time()
        return self.start

    @property
    def tock(self):
        """Formatted elapsed time since the last tick; also restarts the
        clock (delegates to timeDiffFormated)."""
        text, self.start = timeDiffFormated(self.start)
        return text

    @property
    def tocktock(self):
        """
        Returns the time elapsed since the last tick in minutes
        """
        return (time.time() - self.start) * 0.016666667

    def lockHz(self, Hz: int):
        """Sleep for whatever remains of a 1/Hz-second period measured from
        the last tick, then return True."""
        period = 1 / Hz
        remaining = period - (time.time() - self.start)
        if remaining > 0:
            time.sleep(remaining)
        return True
class Stack:
    """
    Dict-backed FIFO queue: pop() returns items in insertion order.
    (Named 'Stack' by the original author, but it behaves as a queue.)
    """

    def __init__(self):
        self.stack = dict()
        self.min = 0     # key of the next item to pop (oldest)
        self.actual = 0  # key the next added item will receive

    def add(self, obj):
        """Append *obj* at the tail of the queue."""
        self.stack[self.actual] = obj
        self.actual += 1

    def pop(self):
        """Remove and return the oldest item (raises KeyError when empty)."""
        oldest = self.stack.pop(self.min)
        self.min += 1
        return oldest

    def __len__(self):
        return len(self.stack)
class Reference:
    """Tracks one object to persist (a torch model or a picklable Python
    object), keeping at most ``limit`` saved versions on disk."""

    def __init__(self, obj,
                 name: str,
                 limit: int,
                 torchType: bool = False,
                 device = device("cpu")):
        # obj: the object to save/load; name: filename prefix;
        # limit: how many saved versions to keep; torchType: use
        # torch save/load instead of pickle; device: map location for
        # torch loads.
        self.torchType = torchType
        self.name = name
        self.ref = obj
        self.prevVersions = Stack()
        self.limit = limit
        self.device = device
        self._version = 0

    def save(self, path):
        """Dump a new version into *path*, then prune old versions."""
        if self.torchType:
            self.saveTorch(path)
        else:
            self.savePy(path)
        self.clean(path)

    def clean(self, path):
        """Delete the oldest saved file once the version limit is reached."""
        if len(self.prevVersions) >= self.limit:
            target = self.prevVersions.pop()
            #target = os.path.join(path, target)
            os.remove(target)

    @staticmethod
    def loaderAssist(path):
        """Interactively ask the user which file in *path* to load.

        Side effect: changes the process working directory to *path*.
        NOTE(review): ``isinstance(choice, int)`` is always True after
        ``int(choice)``, and a non-numeric entry raises ValueError instead
        of re-prompting -- confirm intended.
        """
        os.chdir(path)
        files = os.listdir()
        print("Files on direction:")
        for n, File in enumerate(files):
            print("{} : {}".format(n, File))
        while 1:
            choice = input("Enter the number for the file to load :")
            choice = int(choice)
            if choice > len(files) or not isinstance(choice, int) or choice < 0:
                print("Number not valid. Please try again.")
            else:
                break
        return os.path.join(path, files[choice])

    def load(self, path):
        """Ask the user for a saved file under *path* and load it."""
        print("Trying to load in object {}".format(self.name))
        target = self.loaderAssist(path)
        if self.torchType:
            self.loadTorch(target, self.device)
        else:
            self.loadObj(target)

    def loadTorch(self, path, device):
        """Load a torch state dict from *path* into the tracked model."""
        model = load(path, map_location=device)
        self.ref.load_state_dict(model, strict = True)
        print("Model successfully loaded from ", path)

    def loadObj(self, path):
        """Unpickle *path* and replace the tracked object with the result."""
        fileHandler = open(path, 'rb')
        self.ref = pickle.load(fileHandler)
        fileHandler.close()
        print("Object successfully loaded from ", path)

    def saveTorch(self, path):
        """Save the model's state dict as '<name>_v<N>_<time>.modelst'.

        NOTE(review): the bare ``except: None`` silently swallows every
        failure (no state_dict, disk full, ...) -- consider logging.
        """
        name = self._gen_name() + ".modelst"
        path = os.path.join(path, name)
        try:
            stateDict = self.ref.state_dict()
            save(stateDict, path)
            self.prevVersions.add(path)
        except:
            None

    def savePy(self, path):
        """Pickle the object as '<name>_v<N>_<time>.pyobj', skipping
        objects whose shallow size exceeds LIMIT_4G."""
        name = self._gen_name() + ".pyobj"
        path = os.path.join(path, name)
        if sys.getsizeof(self.ref) < LIMIT_4G:
            fileHandler = open(path, "wb")
            pickle.dump(self.ref, fileHandler)
            fileHandler.close()
            self.prevVersions.add(path)

    def _gen_name(self):
        # Bump the version counter and build a unique, timestamped basename.
        self._version += 1
        return self.name + "_v{}".format(self._version) + "_" + timeFormated()
class Saver():
    """
    Object that administrates objects to dump
    save files if possible.

    parameters
    ----------
    envName: str
    path: str
        Path relative to Home to dump the saved files
    limitTimes: int
        Maximum number of saved versions kept per object
    saveFreq: int
        Minutes between automatic saves triggered by check()
    """
    def __init__(self, envName: str,
                 path: str = "PG_results/",
                 limitTimes: int = 5,
                 saveFreq: int = 40):
        self.startPath, self.dir = createFolder(path, envName)
        self._objRefs_ = []
        self.names = set()
        self.limit = limitTimes
        self.time = Tocker()
        self.freq = saveFreq

    def start(self):
        """Start the save timer (property access resets the clock)."""
        self.time.tick

    def check(self):
        """Save everything and restart the timer once saveFreq minutes
        have elapsed (tocktock reports minutes)."""
        if self.time.tocktock >= self.freq:
            self.saveAll()
            self.time.tick

    def addObj(self, obj,
               objName: str,
               isTorch: bool = False,
               device = device("cpu")):
        """Register *obj* for saving under the unique name *objName*.

        :raises KeyError: if an object with that name was already added
        """
        if objName in self.names:
            raise KeyError
        self.names.add(objName)
        self._objRefs_ += [Reference(obj,
                                     objName,
                                     self.limit,
                                     isTorch,
                                     device)]

    def saveAll(self):
        """Dump every registered object into this run's output directory."""
        for ref in self._objRefs_:
            ref.save(self.dir)

    def load(self, path):
        """Interactively load every registered object from *path*."""
        for ref in self._objRefs_:
            ref.load(path)
tests/unit/models/test_contact.py | nethad/moco-wrapper | 0 | 12760609 | <filename>tests/unit/models/test_contact.py
import pytest
from .. import UnitTest
class TestContact(UnitTest):
def test_create(self):
firstname = "Peter"
lastname = "Muster"
gender = "F"
organization_id = 123
title ="Dr. med."
job_position = "Account Manager"
mobile_phone = "+49 177 123 45 67"
work_fax = "+49 30 123 45 67"
work_phone = "+49 30 123 45 67"
work_email = "<EMAIL>"
work_address = "Lieferant AG\nBeispielstrasse 123\n12345 Berlin"
home_email = "<EMAIL>"
home_address = "<NAME>\nZu Hause"
birthday = "1959-05-22"
info = "Information for this company"
tags = ["Christmas Card", "Project Lead"]
response = self.moco.Contact.create(firstname, lastname, gender, customer_id=organization_id, title=title, job_position=job_position, mobile_phone=mobile_phone, work_fax=work_fax, work_phone=work_phone, work_email=work_email, work_address=work_address, home_email=home_email, home_address=home_address, birthday=birthday, info=info, tags=tags)
data = response["data"]
assert data["firstname"] == firstname
assert data["lastname"] == lastname
assert data["gender"] == gender
assert data["customer_id"] == organization_id
assert data["title"] == title
assert data["job_position"] == job_position
assert data["mobile_phone"] == mobile_phone
assert data["work_fax"] == work_fax
assert data["work_phone"] == work_phone
assert data["work_email"] == work_email
assert data["work_address"] == work_address
assert data["home_email"] == home_email
assert data["home_address"] == home_address
assert data["birthday"] == birthday
assert data["info"] == info
assert data["tags"] == tags
assert response["method"] == "POST"
def test_update(self):
contact_id = 123
firstname = "Peter"
lastname = "Muster"
gender = "F"
organization_id = 123
title ="Dr. med."
job_position = "Account Manager"
mobile_phone = "+49 177 123 45 67"
work_fax = "+49 30 123 45 67"
work_phone = "+49 30 123 45 67"
work_email = "<EMAIL>"
work_address = "Lieferant AG\nBeispielstrasse 123\n12345 Berlin"
home_email = "<EMAIL>"
home_address = "<NAME>\nZu Hause"
birthday = "1959-05-22"
info = "Information for this company"
tags = ["Christmas Card", "Project Lead"]
response = self.moco.Contact.update(contact_id, firstname=firstname, lastname=lastname, job_position=job_position, gender=gender, customer_id=organization_id, title=title, mobile_phone=mobile_phone, work_fax=work_fax, work_phone=work_phone, work_email=work_email, work_address=work_address, home_email=home_email, home_address=home_address, birthday=birthday, info=info, tags=tags)
data = response["data"]
assert data["firstname"] == firstname
assert data["lastname"] == lastname
assert data["gender"] == gender
assert data["customer_id"] == organization_id
assert data["title"] == title
assert data["job_position"] == job_position
assert data["mobile_phone"] == mobile_phone
assert data["work_fax"] == work_fax
assert data["work_phone"] == work_phone
assert data["work_email"] == work_email
assert data["work_address"] == work_address
assert data["home_email"] == home_email
assert data["home_address"] == home_address
assert data["birthday"] == birthday
assert data["info"] == info
assert data["tags"] == tags
assert response["method"] == "PUT"
def test_get(self):
contact_id = 1234
response = self.moco.Contact.get(contact_id)
assert response["method"] == "GET"
def test_getlist(self):
tags = ["eins", "zwei", "drei", "polizei"]
response = self.moco.Contact.getlist(tags=tags)
params = response["params"]
assert params["tags"] == tags
assert response["method"] == "GET"
def test_getlist_sort_default(self):
sort_by = "testfield to sort by"
response = self.moco.Contact.getlist(sort_by=sort_by)
assert response["params"]["sort_by"] == "{} asc".format(sort_by)
def test_getlist_sort_overwrite(self):
sort_by = "testfield to sort by"
sort_order = "desc"
response = self.moco.Contact.getlist(sort_by=sort_by, sort_order=sort_order)
assert response["params"]["sort_by"] == "{} {}".format(sort_by, sort_order)
def test_getlist_page_default(self):
page_default = 1
response = self.moco.Contact.getlist()
assert response["params"]["page"] == page_default
def test_getlist_page_overwrite(self):
page_overwrite = 22
response = self.moco.Contact.getlist(page=page_overwrite)
assert response["params"]["page"] == page_overwrite
| 3.046875 | 3 |
gpx_reader.py | efournie/BikeometerStats | 1 | 12760610 | <reponame>efournie/BikeometerStats
from glob import glob
import gpxpy
import numpy as np
import os
class GpxReader():
def __init__(self, dir, verbose=False):
self.dir = dir
self.verbose = verbose
all_files = glob(os.path.join(dir, '*'))
self.gpx_files = []
for f in all_files:
if f.lower().endswith('.gpx'):
self.gpx_files.append(f)
if self.verbose:
print(f)
self.parse_all_files(self.gpx_files)
def parse_all_files(self, files):
self.dists = []
durations = []
self.avg_speeds = []
self.start_timestamps = []
self.max_speeds = []
for gpxfile in files:
with open(gpxfile, 'r') as f:
gpx = gpxpy.parse(f)
d_km = gpx.length_2d() / 1000
t_sec = gpx.get_duration()
self.dists.append(d_km)
durations.append(t_sec)
self.avg_speeds.append(3600 * d_km / t_sec)
start_time = gpx.get_time_bounds()[0]
start_time = start_time.replace(tzinfo=None)
self.start_timestamps.append(start_time)
moving_data = gpx.get_moving_data()
self.max_speeds.append(moving_data.max_speed * 3.6)
if self.verbose:
print(f'Start time: {self.start_timestamps[-1]}, distance: {d_km} km, duration: {t_sec} sec, average speed: {self.avg_speeds[-1]}, max speed: {self.max_speeds[-1]}')
def get_distances_km(self):
return np.array(self.dists)
def get_avg_speed_kmh(self):
return np.array(self.avg_speeds)
def get_max_speed_kmh(self):
return max(self.max_speeds)
def get_start_timestamps(self):
return self.start_timestamps
| 2.734375 | 3 |
pylxca/pylxca_cmd/lxca_pyshell.py | pratikms/pylxca | 11 | 12760611 | '''
@since: 5 Feb 2016
@author: <NAME> <<EMAIL>>, <NAME> <<EMAIL>>
@license: Lenovo License
@copyright: Copyright 2016, Lenovo
@organization: Lenovo
@summary: This module provides scriptable interfaces and scriptable python shell.
'''
import code
import logging
import sys
from pylxca import __version__
from pylxca.pylxca_cmd import lxca_ishell
from pylxca.pylxca_cmd.lxca_cmd import fanmuxes
# shell is a global variable
SHELL_OBJ = None
LOGGER = logging.getLogger(__name__)
def pyshell(shell=lxca_ishell.InteractiveShell()):
'''
@summary: this method provides scriptable python shell
'''
global SHELL_OBJ
SHELL_OBJ = shell
SHELL_OBJ.set_ostream_to_null()
def set_interactive():
'''
@summary: This method set the shell in interactive mode
'''
ns = {"connect": connect,
"disconnect": disconnect,
"chassis": chassis,
"cmms": cmms,
"fans": fans,
"fanmuxes": fanmuxes,
"switches": switches,
"powersupplies": powersupplies,
"nodes": nodes,
"scalablesystem": scalablesystem,
"discover": discover,
"manage": manage,
"unmanage": unmanage,
#"jobs": jobs,
"users": users,
"lxcalog": lxcalog,
"ffdc": ffdc,
"updatecomp": updatecomp,
"updatepolicy": updatepolicy,
"updaterepo": updaterepo,
"configpatterns": configpatterns,
"configprofiles": configprofiles,
"configtargets": configtargets,
"tasks": tasks,
"manifests": manifests,
"osimages": osimages,
"resourcegroups": resourcegroups,
"storedcredentials": storedcredentials,
"managementserver": managementserver,
"license": license,
"help": help}
ns.update()
global __version__
code.interact('Interactive Python Shell for Lenovo XClarity Administrator v' + __version__ +
'\nType "dir()" or "help(lxca command object)" for more information.', local=ns)
def connect(*args, **kwargs):
'''
@summary:
Use this function to connect to Lenovo XClarity Administrator
run this function as
con_variable = connect( key1 = 'val1', key2 = 'val2', ...)
Where KeyList is as follows
keylist = ['url', 'user', 'pw', 'noverify']
@param
The parameters for this command are as follows
con Connection Object to Lenovo XClarity Administrator
url url to Lenovo XClarity Administrator Example. https://a.b.c.d
user User Id to Authenticate Lenovo XClarity Administrator
pw Password to Authenticate Lenovo XClarity Administrator
noverify flag to indicate to not verify server certificate
@example
con1 = connect( url = "https://10.243.12.142",user = "USERID", pw = "Password", noverify = "True")
'''
global SHELL_OBJ
command_name = sys._getframe().f_code.co_name
keylist = ['url', 'user', 'pw', 'noverify']
if len(args) == 0 and len(kwargs) == 0:
return
for i in range(len(args)):
kwargs[keylist[i]] = args[i]
con = SHELL_OBJ.handle_input_args(command_name, args=args, kwargs=kwargs)
return con
def disconnect(*args, **kwargs):
'''
@summary:
Use this function to disconnect from Lenovo XClarity Administrator
run this function as
disconnect()
it also reset internal current connection to original connection this is used in api version
to retain origianal connection if we are disconnecting other than current connection
i.e
con1 = connect(...)
con2 = connect(...)
con3 = connect(...)
con4 = connect(...) current internal connection is con4
disconnect(con2) will keep current connection to con4
disconnect(con4) or disconnect() will set current connection to None
@param
The parameters for this command are as follows
con Connection Object to Lenovo XClarity Administrator
@example
disconnect()
'''
global SHELL_OBJ
command_name = sys._getframe().f_code.co_name
param_dict = {}
con = None
long_short_key_map = {}
keylist = ['con']
optional_keylist = ['con']
mutually_exclusive_keys = []
mandatory_options_list = {}
con = _validate_param(keylist, long_short_key_map, mandatory_options_list, optional_keylist,
mutually_exclusive_keys,
param_dict, *args, **kwargs)
out_obj = SHELL_OBJ.handle_input_dict(command_name, con, param_dict, False)
return out_obj
def cmms(*args, **kwargs):
'''
@summary:
Use this function to get CMMs information
run this function as
data_dictionary = cmms( key1 = 'val1', key2 = 'val2', ...)
Where KeyList is as follows
keylist = ['con', 'uuid', 'chassis']
@param
The parameters for this command are as follows
con Connection Object to Lenovo XClarity Administrator
uuid cmm uuid
chassis chassis uuid
@example
cmm_list = cmms( con = con1, uuid = 'fc3058cadf8b11d48c9b9b1b1b1b1b57')
'''
global SHELL_OBJ
command_name = sys._getframe().f_code.co_name
param_dict = {}
con = None
long_short_key_map = {'uuid': 'u', 'chassis': 'c'}
keylist = ['con', 'uuid', 'chassis']
optional_keylist = ['con', 'uuid', 'chassis']
mutually_exclusive_keys = ['uuid', 'chassis']
mandatory_options_list = {}
con = _validate_param(keylist, long_short_key_map, mandatory_options_list, optional_keylist,
mutually_exclusive_keys,
param_dict, *args, **kwargs)
out_obj = SHELL_OBJ.handle_input_dict(command_name, con, param_dict)
return out_obj
def chassis(*args, **kwargs):
'''
@summary:
Use this function to get Chassis information
run this function as
data_dictionary = chassis( key1 = 'val1', key2 = 'val2', ...)
Where KeyList is as follows
keylist = ['con', 'uuid', 'status']
@param
The parameters for this command are as follows
con Connection Object to Lenovo XClarity Administrator
uuid chassis uuid
status chassis manage status (managed/unmanaged)
@example
chassis_list = chassis( con = con1, uuid = 'fc3058cadf8b11d48c9b9b1b1b1b1b57')
'''
global SHELL_OBJ
command_name = sys._getframe().f_code.co_name
param_dict = {}
con = None
long_short_key_map = {'uuid': 'u', 'status': 's'}
keylist = ['con', 'uuid', 'status']
optional_keylist = ['con', 'uuid', 'status']
mutually_exclusive_keys = []
mandatory_options_list = {}
con = _validate_param(keylist, long_short_key_map, mandatory_options_list, optional_keylist,
mutually_exclusive_keys,
param_dict, *args, **kwargs)
out_obj = SHELL_OBJ.handle_input_dict(command_name, con, param_dict)
return out_obj
def fans(*args, **kwargs):
'''
@summary:
Use this function to get fans information
run this function as
data_dictionary = fans( key1 = 'val1', key2 = 'val2', ...)
Where KeyList is as follows
keylist = ['con', 'uuid', 'chassis']
@param
The parameters for this command are as follows
con Connection Object to Lenovo XClarity Administrator
uuid uuid of fan
chassis chassis uuid
@example
fans_list = fans( con = con1, uuid = 'fc3058cadf8b11d48c9b9b1b1b1b1b57')
'''
global SHELL_OBJ
command_name = sys._getframe().f_code.co_name
param_dict = {}
con = None
long_short_key_map = {'uuid': 'u', 'chassis': 'c'}
keylist = ['con', 'uuid', 'chassis']
optional_keylist = ['con', 'uuid', 'chassis']
mutually_exclusive_keys = ['uuid', 'chassis']
mandatory_options_list = {}
con = _validate_param(keylist, long_short_key_map, mandatory_options_list, optional_keylist,
mutually_exclusive_keys,
param_dict, *args, **kwargs)
out_obj = SHELL_OBJ.handle_input_dict(command_name, con, param_dict)
return out_obj
def fanmuxes(*args, **kwargs):
'''
@summary:
Use this function to get fanmuxes information
run this function as
data_dictionary = fanmuxes( key1 = 'val1', key2 = 'val2', ...)
Where KeyList is as follows
keylist = ['con','uuid','chassis']
@param
The parameters for this command are as follows
con Connection Object to Lenovo XClarity Administrator
uuid uuid of fanmux
chassis chassis uuid
@example
'''
global SHELL_OBJ
command_name = sys._getframe().f_code.co_name
param_dict = {}
con = None
long_short_key_map = {'uuid': 'u', 'chassis': 'c'}
keylist = ['con', 'uuid', 'chassis']
optional_keylist = ['con', 'uuid', 'chassis']
mutually_exclusive_keys = ['uuid', 'chassis']
mandatory_options_list = {}
con = _validate_param(keylist, long_short_key_map, mandatory_options_list, optional_keylist,
mutually_exclusive_keys,
param_dict, *args, **kwargs)
out_obj = SHELL_OBJ.handle_input_dict(command_name, con, param_dict)
return out_obj
def nodes(*args, **kwargs):
'''
@summary:
Use this function to get nodes information
run this function as
data_dictionary = nodes( key1 = 'val1', key2 = 'val2', ...)
Where KeyList is as follows
keylist = ['con','uuid','chassis','status', 'modify', 'metrics']
@param
The parameters for this command are as follows
con Connection Object to Lenovo XClarity Administrator
uuid uuid of node
chassis chassis uuid
status nodes manage status (managed/unmanaged)
modify JSON object of modifyable node properties
metrics flag to fetch metrics of all nodes or metrics of a node belonging to the provided uuid
@example
nodes(uuid="FAA6E3D494E511E6A0739B91ED670CE8",modify='{"location":{"location": "new location", "rack": "rack 5","lowestRackUnit": 3}}')
'''
global SHELL_OBJ
command_name = sys._getframe().f_code.co_name
param_dict = {}
con = None
long_short_key_map = {'uuid': 'u', 'chassis': 'c', 'status': 's', 'modify': 'm', 'metrics': 'x'}
keylist = ['con', 'uuid', 'chassis', 'status', 'modify', 'metrics']
optional_keylist = ['con', 'uuid', 'chassis', 'status', 'modify', 'metrics']
mutually_exclusive_keys = ['uuid', 'chassis']
mandatory_options_list = {}
con = _validate_param(keylist, long_short_key_map, mandatory_options_list, optional_keylist,
mutually_exclusive_keys,
param_dict, *args, **kwargs)
out_obj = SHELL_OBJ.handle_input_dict(command_name, con, param_dict)
return out_obj
def switches(*args, **kwargs):
'''
@summary:
Use this function to get switches information
run this function as
data_dictionary = switches( key1 = 'val1', key2 = 'val2', ...)
Where KeyList is as follows
keylist = ['con','uuid','chassis','ports','action']
@param
The parameters for this command are as follows
con Connection Object to Lenovo XClarity Administrator
uuid uuid of switch
chassis chassis uuid
ports empty ports string list all ports for uuid, comma separated ports
action enable/disable ports
@example
'''
global SHELL_OBJ
command_name = sys._getframe().f_code.co_name
param_dict = {}
con = None
# other parameter don't have short option
long_short_key_map = {'uuid': 'u', 'chassis': 'c'}
keylist = ['con', 'uuid', 'chassis', 'ports', 'action']
optional_keylist = ['con', 'uuid', 'chassis', 'ports', 'action']
mutually_exclusive_keys = ['uuid', 'chassis']
mandatory_options_list = {}
con = _validate_param(keylist, long_short_key_map, mandatory_options_list, optional_keylist,
mutually_exclusive_keys,
param_dict, *args, **kwargs)
out_obj = SHELL_OBJ.handle_input_dict(command_name, con, param_dict)
return out_obj
def powersupplies(*args, **kwargs):
'''
@summary:
Use this function to get powersupplies information
run this function as
data_dictionary = powersupplies( key1 = 'val1', key2 = 'val2', ...)
Where KeyList is as follows
keylist = ['con','uuid','chassis']
@param
The parameters for this command are as follows
con Connection Object to Lenovo XClarity Administrator
uuid uuid of power supply
chassis chassis uuid
@example
'''
global SHELL_OBJ
command_name = sys._getframe().f_code.co_name
param_dict = {}
con = None
long_short_key_map = {'uuid': 'u', 'chassis': 'c'}
keylist = ['con', 'uuid', 'chassis']
optional_keylist = ['con', 'uuid', 'chassis']
mutually_exclusive_keys = ['uuid', 'chassis']
mandatory_options_list = {}
con = _validate_param(keylist, long_short_key_map, mandatory_options_list, optional_keylist,
mutually_exclusive_keys,
param_dict, *args, **kwargs)
out_obj = SHELL_OBJ.handle_input_dict(command_name, con, param_dict)
return out_obj
def scalablesystem(*args, **kwargs):
'''
@summary:
Use this function to get scalablesystem information
run this function as
data_dictionary = scalablesystem( key1 = 'val1', key2 = 'val2', ...)
Where KeyList is as follows
keylist = ['con','id','type']
@param
The parameters for this command are as follows
con Connection Object to Lenovo XClarity Administrator
id scalable complex id
type type (flex/rackserver)
@example
'''
global SHELL_OBJ
command_name = sys._getframe().f_code.co_name
param_dict = {}
con = None
long_short_key_map = {'id': 'i', 'type': 't'}
keylist = ['con', 'id', 'type']
optional_keylist = ['con', 'id', 'type']
mutually_exclusive_keys = []
mandatory_options_list = {}
con = _validate_param(keylist, long_short_key_map, mandatory_options_list, optional_keylist,
mutually_exclusive_keys,
param_dict, *args, **kwargs)
out_obj = SHELL_OBJ.handle_input_dict(command_name, con, param_dict)
return out_obj
def discover(*args, **kwargs):
'''
@summary:
Use this function to discover endpoint from Lenovo XClarity Administrator
run this function as
data_dictionary = discover( key1 = 'val1', key2 = 'val2', ...)
Where KeyList is as follows
keylist = ['con','ip','job']
@param
The parameters for this command are as follows
con Connection Object to Lenovo XClarity Administrator
ip One or more IP addresses for each endpoint to be discovered.
job Job ID of discover request
@example
For Getting Maangement job status
job_data = discover(con=con1,job=jobid)
'''
global SHELL_OBJ
command_name = sys._getframe().f_code.co_name
param_dict = {}
con = None
long_short_key_map = {'ip': 'i', 'job': 'j'}
keylist = ['con', 'ip', 'job']
optional_keylist = ['con', 'ip', 'job']
mutually_exclusive_keys = ['ip', 'job']
mandatory_options_list = {}
con = _validate_param(keylist, long_short_key_map, mandatory_options_list, optional_keylist,
mutually_exclusive_keys,
param_dict, *args, **kwargs)
out_obj = SHELL_OBJ.handle_input_dict(command_name, con, param_dict)
return out_obj
def manage(*args, **kwargs):
'''
@summary:
Use this function to manage endpoint from Lenovo XClarity Administrator
run this function as
data_dictionary = manage( key1 = 'val1', key2 = 'val2', ...)
Where KeyList is as follows
keylist = ['con','subcmd','ip','user','pw','rpw','job','force', 'storedcredential_id']
@param
The parameters for this command are as follows
con Connection Object to Lenovo XClarity Administrator
subcmd
ip One or more IP addresses for each endpoint to be managed.
user user ID to access the endpoint
pw The current password to access the endpoint.
rpw The recovery password to be used for the endpoint.
force force manage
job Job ID of existing manage request
storedcredential_id Store Crendential Id to be used for device manage operation
if this is provided user is not required
Note : mp, type and epuuid parameters are dedpriciated and only kept for backword compatibility.
@example
jobid = manage(con=con1, subcmd='device', ip="10.243.6.68",user="USERID",pw="<PASSWORD>",rpw="<PASSWORD>")
jobid = manage(con=con1, subcmd='device', ip="10.243.6.68",storedcredintail_id="12")
or with named variable it can be represented as
jobid = manage(con= con1, subcmd='device', ip="10.243.6.68",user="USERID","PASSW0RD","PASS<PASSWORD>",True)
jobid = manage(con1, subcmd='device', i="10.243.4.16", u='USERID', p='Purley44LEN', r='Purley55LEN', f='True')
Using storedcredential id for Rackswitch
jobid = manage(con=con1, subcmd='device', i='10.240.157.111', s='402', f='True')
Using storedcredential id for Rackswitch Server
jobid = manage(con=con1, subcmd='device',i="10.243.4.16", r='Purley55LEN', s='404', f='True')
For Getting Maangement job status
jobid = manage(con=con1, subcmd='job_status', job="12")
'''
global SHELL_OBJ
command_name = sys._getframe().f_code.co_name
param_dict = {}
con = None
long_short_key_map = {'ip': 'i', 'user': 'u', 'pw': 'p',
'rpw': 'r', 'job': 'j', 'force': 'f', 'storedcredential_id': 's'}
keylist = ['con', 'subcmd', 'ip', 'user', 'pw',
'rpw', 'job', 'force', 'storedcredential_id']
optional_keylist = ['con', 'ip', 'user', 'pw',
'rpw', 'job', 'force', 'storedcredential_id']
mutually_exclusive_keys = ['ip', 'job']
mandatory_options_list = {'job': []}
con = _validate_param(keylist, long_short_key_map, mandatory_options_list, optional_keylist,
mutually_exclusive_keys,
param_dict, *args, **kwargs)
out_obj = SHELL_OBJ.handle_input_dict(command_name, con, param_dict)
return out_obj
def unmanage(*args, **kwargs):
'''
@summary:
Use this function to unmanage endpoint from Lenovo XClarity Administrator
run this function as
data_dictionary = unmanage( key1 = 'val1', key2 = 'val2', ...)
Where KeyList is as follows
keylist = ['con','subcmd','ep','force','job']
@param
The parameters for this command are as follows
subcmd device \ job_status
ep one or more endpoints to be unmanaged.
This is comma separated list of multiple endpoints, each endpoint should
contain endpoint information separated by semicolon.
endpoint's IP Address(multiple addresses should be separated by #), UUID of the endpoint and
Type of endpoint to be unmanaged ,This can be one of the following values:
Chassis
ThinkServer
Storage
Rackswitch
Rack-Tower
Edge
force Indicates whether to force the unmanagement of an endpoint (True/False)
job Job ID of unmanage request
@example
endpoint = '10.240.195.39;D31C76F0302503B50010D21DE03A0523;Rack-Tower'
unmanage(con_lxca, subcmd='device', ep=endpoint)
'''
global SHELL_OBJ
command_name = sys._getframe().f_code.co_name
param_dict = {}
con = None
long_short_key_map = {'ep': 'e', 'job': 'j', 'force': 'f'}
keylist = ['con', 'subcmd', 'ep', 'force', 'job']
optional_keylist = ['con', 'ep', 'force', 'job']
mutually_exclusive_keys = ['ep', 'job']
mandatory_options_list = {}
con = _validate_param(keylist, long_short_key_map, mandatory_options_list, optional_keylist,
mutually_exclusive_keys,
param_dict, *args, **kwargs)
out_obj = SHELL_OBJ.handle_input_dict(command_name, con, param_dict)
return out_obj
def configpatterns(*args, **kwargs):
'''
@summary:
Use this function to Retrieve information and deploy all server and category patterns
that have been defined in the Lenovo XClarity Administrator
run this function as
data_dictionary = configpatterns( key1 = 'val1', key2 = 'val2', ...)
Where KeyList is as follows
keylist = ['con','subcmd','id', 'includeSettings', 'endpoint','restart','type', pattern_update_dict, name, status]
@param
The parameters for this command are as follows
subcmd list, apply, import , status
id The unique ID that was assigned when the server pattern was created
name name of pattern , this is used for apply
endpoint List of one or more UUIDs for the target servers,If a target is an empty bay,
specify the location ID; otherwise, specify the server UUID
restart When to activate the configurations. This can be one of the following values:
defer - Activate IMM settings but do not restart the server.
immediate - Activate all settings and restart the server immediately.
pending - Manually activate the server profile and restart the server.
type Type of the server, It can be one of the following
flex - Flex System Placeholder chassis empty bays
node
rack
tower
pattern_update_dict dictionary of category_pattern to import.
status check config status for given uuid in endpoint
True
@example
list all configpatterns
rep = configpatterns(con, subcmd = 'list')
list particular configpattern
rep = configpatterns(con, subcmd = 'list', id="53")
list particular configpattern with includeSettings
rep = configpatterns(con, subcmd = 'list', id="53", includeSettings="True")
import SystemInfo category pattern
py_dict = {
"template_type": "SystemInfo",
"template": {
"contact": "contact",
"description": "Pattern created by testAPI",
"location": "location",
"name": "Learned-System_Info-19",
"systemName": {
"autogen": "Disable",
"hyphenChecked": False
},
"type": "SystemInfo",
"uri": "/config/template/61",
"userDefined": True
}
}
import json
my_json = json.dumps(py_dict)
rep = configpatterns(con_lxca, 'import', pattern_update_dict = my_json )
'''
global SHELL_OBJ
command_name = sys._getframe().f_code.co_name
param_dict = {}
con = None
# some of them don't have short options
long_short_key_map = {'id': 'i', 'endpoint': 'e', 'restart': 'r',
'type': 't', 'name': 'n', 'pattern_update_dict': 'p'}
keylist = ['con', 'subcmd', 'id', 'includeSettings', 'endpoint',
'restart', 'type', 'pattern_update_dict', 'name']
optional_keylist = ['con', 'id', 'includeSettings', 'endpoint',
'restart', 'type', 'pattern_update_dict', 'name']
mutually_exclusive_keys = ['id', 'pattern_update_dict']
mandatory_options_list = {'id': [], 'pattern_update_dict': []}
# 'includeSettings': ['id']}
con = _validate_param(keylist, long_short_key_map, mandatory_options_list, optional_keylist,
mutually_exclusive_keys,
param_dict, *args, **kwargs)
out_obj = SHELL_OBJ.handle_input_dict(command_name, con, param_dict)
return out_obj
def configprofiles(*args, **kwargs):
'''
@summary:
Use this function to Retrieve information server configuration profiles
that have been defined in the Lenovo XClarity Administrator
run this function as
data_dictionary = configprofiles( key1 = 'val1', key2 = 'val2', ...)
Where KeyList is as follows
keylist = ['con', 'subcmd', 'id', 'name', 'endpoint', 'restart', 'powerdown', 'resetimm', 'resetswitch', 'force']
@param
The parameters for this command are as follows
subcmd list, rename, activate, unassign, delete
id The unique ID that was assigned when the server profile was created
name profile name
endpoint endpoint UUID of the server or location id for flex system
restart restart server to activate profile ( immediate / defer )
options for unassign
powerdown powerdown server
resetimm reset IMM
resetswitch Identifies whether to reset the switch internal port settings to default values
force force unassign operation
@example
rep = configprofiles(con_lxca, 'list')
rep = configprofiles(con_lxca, 'list','3')
'''
global SHELL_OBJ
command_name = sys._getframe().f_code.co_name
param_dict = {}
con = None
# some of keys don't have short option
long_short_key_map = {'id': 'i', 'name': 'n', 'endpoint': 'e', 'restart': 'r',
'powerdown': 'p', 'force': 'f'}
keylist = ['con', 'subcmd', 'id', 'name', 'endpoint',
'restart', 'powerdown', 'resetimm', 'resetswitch', 'force']
optional_keylist = ['con', 'id', 'name', 'endpoint',
'restart', 'powerdown', 'resetimm', 'resetswitch', 'force']
mutually_exclusive_keys = []
mandatory_options_list = {'id': [], 'endpoint': ['restart']}
con = _validate_param(keylist, long_short_key_map, mandatory_options_list, optional_keylist,
mutually_exclusive_keys,
param_dict, *args, **kwargs)
out_obj = SHELL_OBJ.handle_input_dict(command_name, con, param_dict)
#remove dummy field added for view
if 'dummy' in out_obj:
out_obj.pop('dummy')
return out_obj
return out_obj
def configtargets(*args, **kwargs):
'''
@summary:
Use this function to get config pattern targets from Lenovo XClarity Administrator
run this function as
data_dictionary = configtargets( key1 = 'val1', key2 = 'val2', ...)
Where KeyList is as follows
keylist = ['con','id']
@param
The parameters for this command are as follows
id config target id
@example
'''
global SHELL_OBJ
command_name = sys._getframe().f_code.co_name
param_dict = {}
con = None
long_short_key_map = {'id': 'i'}
keylist = ['con', 'id']
optional_keylist = ['con']
mutually_exclusive_keys = []
mandatory_options_list = {}
con = _validate_param(keylist, long_short_key_map, mandatory_options_list, optional_keylist,
mutually_exclusive_keys,
param_dict, *args, **kwargs)
out_obj = SHELL_OBJ.handle_input_dict(command_name, con, param_dict)
return out_obj
def updatepolicy(*args, **kwargs):
'''
@summary:
Use this function to read Firmwar update Policy from Lenovo XClarity Administrator
run this function as
data_dictionary = updatepolicy( key1 = 'val1', key2 = 'val2', ...)
Where KeyList is as follows
keylist = ['con', 'subcmd', 'info','job','uuid',policy','Type']
@param
The parameters for this command are as follows
subcmd list,query, assign , status
info Specifies the type of information to return. This can be one of the following values:
FIRMWARE- Returns information about firmware that is applicable to each managed endpoint
RESULTS- Returns persisted compare result for servers to which a compliance policy is assigned
NAMELIST - Returns the available compliance policies
jobid Job ID of assign compliance policy operation
uuid UUID of the device to which you want to assign the compliance policy
policy Policyname, Name of the compliance-policy to be assigned to device
Type Device type. This can be one of the following values.
CMM - Chassis Management Module
IOSwitch - Flex switch
RACKSWITCH - RackSwitch switch
STORAGE - Lenovo Storage system
SERVER - Compute node or rack server
@example
'''
global SHELL_OBJ
command_name = sys._getframe().f_code.co_name
param_dict = {}
con = None
long_short_key_map = {'info': 'i', 'job': 'j',
'uuid': 'u', 'policy': 'p', 'type': 't'}
keylist = ['con', 'subcmd', 'info', 'job', 'uuid', 'policy', 'type']
optional_keylist = ['con', 'info', 'job', 'uuid', 'policy', 'type']
mutually_exclusive_keys = []
mandatory_options_list = {}
con = _validate_param(keylist, long_short_key_map, mandatory_options_list, optional_keylist,
mutually_exclusive_keys,
param_dict, *args, **kwargs)
out_obj = SHELL_OBJ.handle_input_dict(command_name, con, param_dict)
return out_obj
def updaterepo(*args, **kwargs):
    '''
    @summary:
        Query or modify the firmware-updates repository on Lenovo XClarity
        Administrator.
        Invoke as:  data_dictionary = updaterepo(key1='val1', key2='val2', ...)
        Supported keys: ['con', 'subcmd', 'key', 'mt', 'scope', 'fixids', 'type']
    @param
        subcmd  action to perform, one of:
                    query   - get repository info selected by the key parameter
                    read    - reload the repository files; clears the cached
                              update information and re-reads the update files
                    refresh - fetch information about the latest available
                              firmware updates from the Lenovo Support website
                              into the firmware-updates repository
                    acquire - download the given firmware updates from the
                              Lenovo Support website into the repository
                    delete  - remove the given firmware updates from the
                              repository
                    export  - not supported
        key     type of information returned by query, one of:
                    supportedMts      - list of supported machine types
                    size              - repository size
                    lastRefreshed     - timestamp of the last repository refresh
                    importDir         - import directory for the repository
                    publicKeys        - supported signed keys
                    updates           - information about all firmware updates
                    updatesByMt       - updates for the given machine type
                    updatesByMtByComp - update component names for the given
                                        machine type
        mt      comma-separated machine types
        scope   scope of the operation
        fixids  comma-separated fix ids
        type    file type for PUT operation
    @example
        rep = updaterepo(con, "query", k="size")
        rep = updaterepo(con, subcmd="read")
        rep = updaterepo(con_lxca, subcmd="acquire", mt="7903")
    '''
    global SHELL_OBJ
    # The shell dispatches on the name of this very function.
    command_name = sys._getframe().f_code.co_name
    short_names = {'key': 'k', 'mt': 'm', 'scope': 's', 'fixids': 'f', 'type': 't'}
    all_keys = ['con', 'subcmd', 'key', 'mt', 'scope', 'fixids', 'type']
    optional_keys = ['con', 'key', 'mt', 'scope', 'fixids', 'type']
    parsed_args = {}
    conn = _validate_param(all_keys, short_names, {}, optional_keys, [],
                           parsed_args, *args, **kwargs)
    return SHELL_OBJ.handle_input_dict(command_name, conn, parsed_args)
def updatecomp(*args, **kwargs):
    '''
    @summary:
        Update firmware of endpoints managed by Lenovo XClarity Administrator.
        Invoke as:  data_dictionary = updatecomp(key1='val1', key2='val2', ...)
        Supported keys:
            ['con', 'subcmd', 'query', 'mode', 'action', 'cmm', 'switch',
             'server', 'storage', 'dev_list']
    @param
        subcmd   info | apply
        query    data to return, one of:
                     components - devices and components that can be updated
                     status     - status and progress of firmware updates
                                  (default)
        mode     when to activate the update, one of:
                     immediate   - Immediate Activation mode on the selected
                                   endpoints
                     delayed     - Delayed Activation mode on the selected
                                   endpoints
                     prioritized - updates on the baseboard management
                                   controller are activated immediately
        action   action to take, one of:
                     apply       - apply the associated firmware to the
                                   submitted components
                     power       - perform a power action on the selected
                                   endpoint
                     cancelApply - cancel the firmware-update request for the
                                   selected components
        cmm      CMM information
        switch   switch information
        server   server information
        storage  storage information
        dev_list JSON document describing all updatable components

        For action apply / applyBundle / cancelApply the device information is
        a comma-separated string:  UUID,Fixid,Component
            UUID      - UUID of the device
            Fixid     - firmware-update ID of the target package; the assigned
                        policy is used when omitted
            Component - component name
        For action power the device information is:  UUID,powerState
            Server : powerOn, powerOff, powerCycleSoft,
                     powerCycleSoftGraceful, powerOffHardGraceful
            Switch : powerOn, powerOff, powerCycleSoft
            CMM    : reset
            Storage: powerOff, powerCycleSoft
    @example
        Applying a firmware update to a server:
            endpoint = "UUID,fixid,component name"
            rep = updatecomp(con, 'apply', action='apply', mode='immediate',
                             server=endpoint)
        Applying a bundled update (ThinkSystem SR635/SR655):
            rep = updatecomp(con, 'apply', action='applyBundle',
                             server=endpoint)
        Applying updates from a dev_list JSON document:
            dev_json = json.dumps(dev_list)
            rep = updatecomp(con, 'apply', action='apply', mode='immediate',
                             dev_list=dev_json)
    '''
    global SHELL_OBJ
    command_name = sys._getframe().f_code.co_name
    short_names = {'query': 'q', 'mode': 'm', 'action': 'a', 'cmm': 'c',
                   'switch': 'w', 'server': 's', 'storage': 't', 'dev_list': 'l'}
    all_keys = ['con', 'subcmd', 'query', 'mode', 'action', 'cmm',
                'switch', 'server', 'storage', 'dev_list']
    optional_keys = ['con', 'query', 'mode', 'action',
                     'cmm', 'switch', 'server', 'storage', 'dev_list']
    parsed_args = {}
    conn = _validate_param(all_keys, short_names, {}, optional_keys, [],
                           parsed_args, *args, **kwargs)
    return SHELL_OBJ.handle_input_dict(command_name, conn, parsed_args)
def users(*args, **kwargs):
    '''
    @summary:
        Retrieve user account data from Lenovo XClarity Administrator.
        Invoke as:  data_dictionary = users(key1='val1', key2='val2', ...)
        Supported keys: ['con', 'id']
    @param
        id  unique ID of the user to be retrieved
    @example
    '''
    global SHELL_OBJ
    command_name = sys._getframe().f_code.co_name
    short_names = {'id': 'i'}
    all_keys = ['con', 'id']
    parsed_args = {}
    conn = _validate_param(all_keys, short_names, {}, list(all_keys), [],
                           parsed_args, *args, **kwargs)
    return SHELL_OBJ.handle_input_dict(command_name, conn, parsed_args)
def ffdc(*args, **kwargs):
    '''
    @summary:
        Collect and export service data for a specific endpoint managed by
        Lenovo XClarity Administrator.
        Invoke as:  data_dictionary = ffdc(key1='val1', key2='val2', ...)
        Supported keys: ['con', 'uuid']
    @param
        uuid  UUID of the target endpoint; this is a mandatory parameter
    @example
        ffdc(con=lxca_con, uuid='UUID of endpoint')
    '''
    global SHELL_OBJ
    command_name = sys._getframe().f_code.co_name
    short_names = {'uuid': 'u'}
    all_keys = ['con', 'uuid']
    parsed_args = {}
    conn = _validate_param(all_keys, short_names, {}, list(all_keys), [],
                           parsed_args, *args, **kwargs)
    return SHELL_OBJ.handle_input_dict(command_name, conn, parsed_args)
def log(*args, **kwargs):
    '''
    @summary:
        Get or set Lenovo XClarity Administrator LOG information.
        Invoke as:  data_dictionary = log(key1='val1', key2='val2', ...)
        Supported keys: ['con', 'lvl']
    @param
        lvl  log level to be set, one of:
                 DEBUG    - detailed information, typically of interest only
                            when diagnosing problems
                 INFO     - confirmation that things are working as expected
                 WARNING  - something unexpected happened, or is indicative of
                            a problem in the near future
                 ERROR    - the software has not been able to perform some
                            function
                 CRITICAL - serious error; the program itself may be unable to
                            continue running
    @example
    '''
    global SHELL_OBJ
    command_name = sys._getframe().f_code.co_name
    short_names = {'lvl': 'l'}
    all_keys = ['con', 'lvl']
    parsed_args = {}
    conn = _validate_param(all_keys, short_names, {}, list(all_keys), [],
                           parsed_args, *args, **kwargs)
    return SHELL_OBJ.handle_input_dict(command_name, conn, parsed_args)
def lxcalog(*args, **kwargs):
    '''
    @summary:
        Retrieve Lenovo XClarity Administrator event log information.
        Invoke as:  data_dictionary = lxcalog(key1='val1', key2='val2', ...)
        Supported keys: ['con', 'filter']
    @param
        filter  filter for the events
    @example
    '''
    global SHELL_OBJ
    command_name = sys._getframe().f_code.co_name
    short_names = {'filter': 'f'}
    all_keys = ['con', 'filter']
    parsed_args = {}
    conn = _validate_param(all_keys, short_names, {}, list(all_keys), [],
                           parsed_args, *args, **kwargs)
    return SHELL_OBJ.handle_input_dict(command_name, conn, parsed_args)
def jobs(*args, **kwargs):
    '''
    @summary:
        Retrieve, cancel or delete jobs on Lenovo XClarity Administrator.
        Invoke as:  data_dictionary = jobs(key1='val1', key2='val2', ...)
        Supported keys: ['con', 'id', 'uuid', 'state', 'cancel', 'delete']
        'id', 'cancel' and 'delete' are mutually exclusive.
    @param
        id      job id
        uuid    uuid of the endpoint for which jobs should be retrieved
        state   retrieve only jobs in the given state, one of:
                    Pending, Running, Complete, Cancelled,
                    Running_With_Errors, Cancelled_With_Errors,
                    Stopped_With_Error, Interrupted
        cancel  cancel the job with the given id
        delete  delete the job with the given id
    @example
    '''
    global SHELL_OBJ
    command_name = sys._getframe().f_code.co_name
    short_names = {'id': 'i', 'uuid': 'u', 'state': 's',
                   'cancel': 'c', 'delete': 'd'}
    all_keys = ['con', 'id', 'uuid', 'state', 'cancel', 'delete']
    parsed_args = {}
    conn = _validate_param(all_keys, short_names, {}, list(all_keys),
                           ['id', 'cancel', 'delete'],
                           parsed_args, *args, **kwargs)
    return SHELL_OBJ.handle_input_dict(command_name, conn, parsed_args)
def manifests(*args, **kwargs):
    '''
    @summary:
        Use this function to send a solution manifest to and retrieve manifests
        from Lenovo XClarity Administrator
        run this function as
        data_dictionary = manifests( conn_handle, input_args_dictionary{key,value} )
        Where KeyList is as follows
        keylist = ['id', 'file']
    @param
        The parameters for this command are as follows
        id= solution id
        file= path to manifest file
    @example
    '''
    global SHELL_OBJ
    # The shell dispatches on the name of this very function.
    command_name = sys._getframe().f_code.co_name
    param_dict = {}
    con = None
    long_short_key_map = {'id': 'i', 'file': 'f'}
    keylist = ['con', 'id', 'file']
    # NOTE(review): 'id' is absent from optional_keylist, so _validate_param
    # raises when it is not supplied (positionally or by keyword) — confirm
    # this is intentional, since the docstring does not call it mandatory.
    optional_keylist = ['con', 'file']
    mutually_exclusive_keys = []
    mandatory_options_list = {}
    con = _validate_param(keylist, long_short_key_map, mandatory_options_list, optional_keylist,
                          mutually_exclusive_keys,
                          param_dict, *args, **kwargs)
    out_obj = SHELL_OBJ.handle_input_dict(command_name, con, param_dict)
    # return out_obj
    # NOTE(review): the handler result is discarded and True is returned
    # unconditionally, so callers never see the manifest data. The commented
    # line above suggests returning out_obj was once intended — confirm.
    return True
def tasks(*args, **kwargs):
    '''
    @summary:
        Use this function to get tasks information
        run this function as
            = tasks( con, data_dictionary)
        Where data_dictionary contain input arguments as follows
        keylist = ['jobUID', 'children', 'action', 'updateList', 'template']
    @param
        The parameters for this command are as follows
        con         Connection Object to Lenovo XClarity Administrator
        jobUID      uuid of job
        children    result will include children if True
        action      cancel/update/create
        updateList  required for update action, string containing list of updates
        template    task template (semantics defined by the shell handler —
                    TODO confirm)
    @example
        update_list = [{"jobUID":"9","percentage":50}]
        str_u = str(update_list)
        rep = tasks(con_lxca, a='update', u=str_u)
    '''
    global SHELL_OBJ
    command_name = sys._getframe().f_code.co_name
    # The original initialized param_dict and con twice; once is enough.
    param_dict = {}
    con = None
    long_short_key_map = {'jobUID': 'j', 'children': 'c',
                          'action': 'a', 'updateList': 'u', 'template': 't'}
    keylist = ['con', 'jobUID', 'children', 'action', 'updateList', 'template']
    optional_keylist = ['con', 'jobUID', 'children', 'action', 'updateList', 'template']
    mutually_exclusive_keys = []
    mandatory_options_list = {}
    con = _validate_param(keylist, long_short_key_map, mandatory_options_list, optional_keylist,
                          mutually_exclusive_keys,
                          param_dict, *args, **kwargs)
    out_obj = SHELL_OBJ.handle_input_dict(command_name, con, param_dict)
    return out_obj
def resourcegroups(*args, **kwargs):
    '''
    @summary:
        Create, modify, delete or read a resource group on Lenovo XClarity
        Administrator.
        Invoke as:
            data_dictionary = resourcegroups(con_handle, subcmd, uuid=..., ...)
        Supported keys:
            ['con', 'subcmd', 'uuid', 'name', 'description', 'type',
             'solutionVPD', 'members', 'criteria']
    @param
        uuid         UUID of an already created group
        name         name of the resource group (requires 'type')
        description  description of the resource group
        type         type of resource group: {"static", "dynamic", "solution"}
        solutionVPD  {"id": <UUID string>, "machineType": <string>,
                      "model": <string>, "serialNumber": <string>,
                      "manufacturer": <string>}
        members      ["uri", "uri", ...]
        criteria     criteria list for dynamic groups
    @example
        List all resource groups:
            rep = resourcegroups(con_lxca, 'list')
        List criteria properties for dynamic groups:
            rep = resourcegroups(con_lxca, 'criteriaproperties')
        Create a dynamic group:
            criteria_json = json.dumps(criteria)
            print(criteria_json)
            rep = resourcegroups(con_lxca, 'create', n="TEST_DYNAMIC",
                                 d="TRIAL_GROUP", t='dynamic', c=criteria_json)
        Update a dynamic group:
            rep = resourcegroups(con_lxca, 'update', u="5C5AB42D94C6A719BEF2A375",
                                 n="R1_GROUP", d="TRIAL_GROUP modified",
                                 t='dynamic', c=criteria_json)
        Delete a resource group:
            rep = resourcegroups(con_lxca, 'delete', u="5C5BC6EA90F54D074FC7BC0D")
        Create a solution group (API only, for uhm):
            rep = resourcegroups(con_lxca, 'create', n="TEST_solution",
                                 d="Test_GROUP", t='solution', s=solutionVPD,
                                 m=members, c=criteria)
    '''
    global SHELL_OBJ
    command_name = sys._getframe().f_code.co_name
    short_names = {'uuid': 'u', 'name': 'n', 'description': 'd', 'type': 't',
                   'solutionVPD': 's', 'members': 'm', 'criteria': 'c'}
    all_keys = ['con', 'subcmd', 'uuid', 'name', 'description',
                'type', 'solutionVPD', 'members', 'criteria']
    optional_keys = ['con', 'uuid', 'name', 'description',
                     'type', 'solutionVPD', 'members', 'criteria']
    # 'name' may only be given together with 'type'.
    required_with = {'uuid': [], 'name': ['type']}
    parsed_args = {}
    conn = _validate_param(all_keys, short_names, required_with, optional_keys,
                           [], parsed_args, *args, **kwargs)
    LOGGER.debug("resourcegroups %s" % str(parsed_args))
    # Solution-type groups are dispatched with an extra False argument to
    # handle_input_dict; all other invocations use its default.
    if 'type' in parsed_args and 'solution' in parsed_args['type']:
        return SHELL_OBJ.handle_input_dict(command_name, conn, parsed_args, False)
    return SHELL_OBJ.handle_input_dict(command_name, conn, parsed_args)
def _validate_param(keylist, long_short_key_map, mandatory_options_list, optional_keylist,
mutually_exclusive_keys, param_dict, *args, **kwargs):
'''
this function will create param_dict and con from args and kwargs, param_dict will have only long options for keys,
it will convert short option to long option key and finally validate parameters
:param arglist: list of arguments derived from args
:param keylist: keylist of name of fields
:param mandatory_options_list:
:param optional_keylist:
:param mutually_exclusive_keys:
:param param_dict: append to param_dict
:return: connection object
'''
arglist = list(args)
arglist = arglist[::-1]
con = None
for key in keylist:
short_key = long_short_key_map.get(key)
if (key in list(kwargs.keys())):
param_dict[key] = kwargs[key]
elif key in param_dict:
continue
elif short_key and (short_key in list(kwargs.keys())):
param_dict[key] = kwargs[short_key]
elif len(arglist) >= 1:
value = arglist.pop()
if value != None:
param_dict[key] = value
elif key not in optional_keylist:
LOGGER.error(" Invalid Input args %s is not in optional list %s" % (
key, str(mandatory_options_list)))
raise ValueError("Invalid Input Arguments")
if key == 'con':
if key in param_dict:
con = param_dict.pop(key)
# if not con:
# raise AttributeError("Invalid command invocation: Connection Object missing.")
me_key_found = False
for me_key in list(param_dict.keys()):
# Checking mandatory option_list presence
if me_key in list(mandatory_options_list.keys()):
if not set(mandatory_options_list[me_key]).issubset(set(param_dict.keys())):
LOGGER.error(" Invalid command invocation %s of mandatory list %s is not in arguments parovided" % (
me_key, str(mandatory_options_list)))
raise AttributeError("Invalid command invocation")
# Checking mutually exclusive key presense
if me_key in mutually_exclusive_keys:
if me_key_found:
LOGGER.error(" Invalid command invocation %s of mutual exclusive list %s " % (
me_key, str(mutually_exclusive_keys)))
raise AttributeError("Invalid command invocation")
me_key_found = True
if not set(keylist + list(long_short_key_map.values())).issuperset(set(kwargs.keys())):
LOGGER.error(" Invalid Input args: %s unsupported argument passed"
% list(set(kwargs.keys()).difference(set(keylist + long_short_key_map.values()))))
raise ValueError("Invalid Input Arguments")
return con
def osimages(*args, **kwargs):
    '''
    @summary:
        Use this function to retrieve information about, delete, and import OS images,
        OS-image profiles, device driver, and boot-options files.
        data_dictionary = osimages(input_args, key=values )
        Where KeyList is as follows
        keylist = [con, subcmd, o]
    @param
        subcmd
            list               Retrieve information about all osimages
            globalsettings     Retrieve or modify global operating-system deployment
                               settings. Global settings serve as defaults settings
                               when operating systems are deployed.
            hostsettings       Retrieve information about the network and storage
                               settings for all servers, and create or modify the
                               network and storage settings for one or more servers
            hostplatforms      Retrieve information about the host platforms and
                               deploy operating-system images to the host platforms
                               as a job
            import             Import OS images and scripts from remote server to
                               LXCA
            remotefileservers  Retrieve information about all remote file-server
                               profiles or to create or modify a remote file-server
                               profile
            delete             Delete osimages from LXCA
    @example
        list all osimages info
            osimages(con, subcmd='list')
        delete osimages with ids
            osimages(con, subcmd='delete', id='i1,i2')
            rep = osimages(con_lxca, 'delete', i='20190131054310_trail.py')
        List all globalsettings
            osimages(con, subcmd='globalsettings')
        Set Linux default password using globalsettings
            json_string = json.dumps(change_linux_password)
            print(json_string)
            rep = osimages(con_lxca, subcmd='globalsettings', osimages_dict=json_string)
        Remote file server list
            rep = osimages(con_lxca, subcmd='remotefileservers')
        Create remote file server entry for ftp server
            rep = osimages(con_lxca, subcmd='remotefileservers',
                osimages_dict='{"username":"guest", "password":"<PASSWORD>",
                "protocol":"FTP", "port": 21, "address":"10.243.2.207", "displayName": "new_ftp_207" }')
        Update remote file server
            rep = osimages(con_lxca, subcmd='remotefileservers',
                osimages_dict='{"putid": "1", "protocol":"FTP", "port": 21,
                "address":"10.243.2.207", "displayName": "new_ftp_207" }')
        Delete remote file server
            rep = osimages(con_lxca, subcmd='remotefileservers', osimages_dict='{"deleteid": "1"}')
        Import local files of imagetype (UNATTEND, CUSTOM_CONFIG, SCRIPT, OS)
            rep = osimages(con_lxca, subcmd='import', imagetype='UNATTEND')
            print(rep)
            file_dict = { "jobId":rep["jobId"], "imageName":"SLES", "os":"sles", "description":"SLES_config_file", "file": "/home/naval/sles_unattended.xml" }
            rep = osimages(con_lxca, subcmd='import', imagetype='UNATTEND', osimages_dict=json.dumps(file_dict))
        import BUNDLE and BUNDLESIG done with single jobid and imagename should be same as basename of files
            rep = osimages(con_lxca, subcmd='import', imagetype='BUNDLE')
            file_dict = {"jobId":rep["jobId"], "imageName":"bundle_win2016_20180926153236.zip", "file": "/home/naval/osimage_test/bundle_win2016_20180926153236.zip"}
            rep1 = osimages(con_lxca, subcmd='import', imagetype='BUNDLE', osimages_dict=json.dumps(file_dict))
            file_dict = { "jobId":rep["jobId"], "imageName":"bundle_win2016_20180926153236.zip.asc", "file": "/home/naval/osimage_test/bundle_win2016_20180926153236.zip.asc"}
            rep2 = osimages(con_lxca, subcmd='import', imagetype='BUNDLESIG', osimages_dict=json.dumps(file_dict))
        get all hostSettings
            rep = osimages(con_lxca, 'hostsettings')
        create hostsettings entry
            host_settings_json = json.dumps(host_settings_dict)
            rep = osimages(con_lxca, 'hostsettings', action='create', osimages_dict=host_settings_json)
        update hostSettings entry
            host_settings_json = json.dumps(host_settings_dict)
            rep = osimages(con_lxca, 'hostsettings', action='update', osimages_dict=host_settings_json)
    '''
    global SHELL_OBJ
    con = None
    # osimages accepts free-form keyword arguments (imagetype, osimages_dict,
    # id, ...), so every kwarg is passed through to the handler unvalidated.
    # kwargs itself is emptied to keep _validate_param from rejecting the
    # extra names as unsupported arguments.
    param_dict = kwargs
    kwargs = {}
    command_name = sys._getframe().f_code.co_name
    long_short_key_map = {}
    keylist = ['con', 'subcmd']
    optional_keylist = ['con']
    mutually_exclusive_keys = []
    mandatory_options_list = {}
    con = _validate_param(keylist, long_short_key_map, mandatory_options_list, optional_keylist,
                          mutually_exclusive_keys,
                          param_dict, *args, **kwargs)
    out_obj = SHELL_OBJ.handle_input_dict(command_name, con, param_dict)
    return out_obj
def managementserver(*args, **kwargs):
    '''
    @summary:
        Work with the management-server updates repository of Lenovo XClarity
        Administrator.
        Invoke as:
            data_dictionary = managementserver(key1='val1', key2='val2', ...)
        Supported keys:
            ['con', 'subcmd', 'key', 'fixids', 'type', 'files', 'jobid']
    @param
        subcmd  action to perform, one of:
                    query        - information about all updates in the
                                   management-server updates repository
                    query_fixids - information, or the readme or change-history
                                   file, for a specific update in the repository
                    apply        - install a management-server update
                    acquire      - download the given updates from the Lenovo
                                   XClarity Support website into the repository
                    refresh      - fetch information about the latest available
                                   updates from the Lenovo XClarity Support
                                   website into the repository
                    delete       - delete the given fixids (removeMetadata is
                                   not supported)
                    import       - import files to the management server
        key     type of update information returned, one of:
                    all            - all information (default)
                    currentVersion - current version of Lenovo XClarity
                                     Administrator
                    history        - history of management-server updates
                    importDir      - directory for the repository
                    size           - repository size (in bytes)
                    updates        - information about all update packages
                    updatedDate    - date when the last update was performed
        fixids  comma-separated fix ids
        type    with fixids, one of:
                    changeHistory - change-history file for the given update
                    readme        - readme file for the given update
        jobid   job id for import
        files   comma-separated files (with full path) to import
    @example
        To import files:
            rep = managementserver(con_lxca, subcmd='import',
                files='/home/naval/updates/updates/lnvgy_sw_lxca_thinksystemrepo1-1.3.2_anyos_noarch.txt')
            rep = managementserver(con_lxca, subcmd='import', j=rep['jobid'],
                files='/home/naval/updates/updates/lnvgy_sw_lxca_thinksystemrepo1-1.3.2_anyos_noarch.txt')
    '''
    global SHELL_OBJ
    command_name = sys._getframe().f_code.co_name
    # 'subcmd' and 'files' have no short option form.
    short_names = {'key': 'k', 'fixids': 'f', 'type': 't', 'jobid': 'j'}
    all_keys = ['con', 'subcmd', 'key', 'fixids', 'type', 'files', 'jobid']
    optional_keys = ['con', 'key', 'fixids', 'type', 'files', 'jobid']
    parsed_args = {}
    conn = _validate_param(all_keys, short_names, {}, optional_keys, [],
                           parsed_args, *args, **kwargs)
    out_obj = SHELL_OBJ.handle_input_dict(command_name, conn, parsed_args)
    # Strip the dummy field that is only added for view rendering.
    if 'dummy' in out_obj:
        out_obj.pop('dummy')
    return out_obj
def rules(*args, **kwargs):
    '''
    @summary:
        Get and set compliance rules on Lenovo XClarity Administrator.
        Invoke as:  data_dictionary = rules(key1='val1', key2='val2', ...)
        Supported keys: ['con', 'id', 'rule']
        'id' and 'rule' are mutually exclusive.
    @param
        id    rule id
        rule  rule content (semantics defined by the shell handler)
    @example
    '''
    global SHELL_OBJ
    command_name = sys._getframe().f_code.co_name
    short_names = {'id': 'i', 'rule': 'r'}
    all_keys = ['con', 'id', 'rule']
    parsed_args = {}
    conn = _validate_param(all_keys, short_names, {}, list(all_keys),
                           ['id', 'rule'], parsed_args, *args, **kwargs)
    # This command always passes the extra False argument to handle_input_dict.
    return SHELL_OBJ.handle_input_dict(command_name, conn, parsed_args, False)
def compositeResults(*args, **kwargs):
    '''
    @summary:
        Get compliance composite results from Lenovo XClarity Administrator.
        Invoke as:
            data_dictionary = compositeResults(key1='val1', key2='val2', ...)
        Supported keys:
            ['con', 'id', 'query_solutionGroups', 'solutionGroups',
             'targetResources', 'all_rules']
        All keys except 'con' are mutually exclusive.
    @param
        id                    composite-result id
        query_solutionGroups  query solution groups
        solutionGroups        solution groups
        targetResources       target resources
        all_rules             all rules
    @example
    '''
    global SHELL_OBJ
    command_name = sys._getframe().f_code.co_name
    short_names = {'id': 'i', 'query_solutionGroups': 'q',
                   'solutionGroups': 's', 'targetResources': 't',
                   'all_rules': 'a'}
    exclusive = ['id', 'query_solutionGroups',
                 'solutionGroups', 'targetResources', 'all_rules']
    all_keys = ['con'] + exclusive
    parsed_args = {}
    conn = _validate_param(all_keys, short_names, {}, list(all_keys),
                           exclusive, parsed_args, *args, **kwargs)
    # This command always passes the extra False argument to handle_input_dict.
    return SHELL_OBJ.handle_input_dict(command_name, conn, parsed_args, False)
def storedcredentials(*args, **kwargs):
    '''
    @summary:
        Manage stored credentials on Lenovo XClarity Administrator.
        Invoke as:
            data_dictionary = storedcredentials(key1='val1', key2='val2', ...)
        Supported keys:
            ['con', 'id', 'user_name', 'description', 'password', 'delete_id']
        'id' and 'delete_id' are mutually exclusive.
    @param
        id           stored credential id of the stored credential
        user_name    user name
        password     password
        description  description of the user credential
        delete_id    id to be deleted
    @example
        rep = storedcredentials(con1)
        rep = storedcredentials(con1, id='955')
        rep = storedcredentials(con1, u='admin1', p='admin1',
                                d='description of stored credentials')
        rep = storedcredentials(con1, delete_id='954')
        rep = storedcredentials(con1, i='955', u='admin1', p='admin1',
                                d='description of stored credentials for admin')
    '''
    global SHELL_OBJ
    command_name = sys._getframe().f_code.co_name
    # 'delete_id' has no short option form.
    short_names = {'id': 'i', 'user_name': 'u',
                   'description': 'd', 'password': 'p'}
    all_keys = ['con', 'id', 'user_name', 'description', 'password', 'delete_id']
    parsed_args = {}
    conn = _validate_param(all_keys, short_names, {}, list(all_keys),
                           ['id', 'delete_id'], parsed_args, *args, **kwargs)
    return SHELL_OBJ.handle_input_dict(command_name, conn, parsed_args)
def license(*args, **kwargs):
    '''
    @summary:
        Retrieve information about warnings regarding non-compliance of the
        installed Lenovo XClarity Administrator license.
        Invoke as:  license(con)
    @param
        con  Connection Object to Lenovo XClarity Administrator
    @example
        license(con)
    '''
    # NOTE: this function shadows the interactive built-in `license`; the name
    # is kept unchanged for backward compatibility with existing callers.
    global SHELL_OBJ
    command_name = sys._getframe().f_code.co_name
    parsed_args = {}
    conn = _validate_param(['con'], {}, {}, ['con'], [],
                           parsed_args, *args, **kwargs)
    # This command always passes the extra False argument to handle_input_dict.
    return SHELL_OBJ.handle_input_dict(command_name, conn, parsed_args, False)
# ---------------------------------------------------------------------------
# recipes/recipe_modules/recipe_autoroller/test_api.py
# ---------------------------------------------------------------------------
# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import json
from recipe_engine import recipe_test_api
class RecipeAutorollerTestApi(recipe_test_api.RecipeTestApi):
  """Helpers that fabricate step/test data for recipe autoroller tests."""

  # Reviewer defaults applied when a caller does not supply its own.
  _TBR_EMAILS = ('<EMAIL>', '<EMAIL>')
  _EXTRA_REVIEWERS = ('<EMAIL>', '<EMAIL>',
                      '<EMAIL>')

  def repo_spec(self, tbr_emails=_TBR_EMAILS, extra_reviewers=_EXTRA_REVIEWERS,
                disable_reason='', trivial_commit=True, trivial_dryrun=False,
                nontrivial_dryrun=True, include_autoroll_options=True):
    """Returns a fake recipes.cfg spec (as a dict) for a rolled repo.

    When include_autoroll_options is False the 'autoroll_recipe_options'
    section is omitted entirely.
    """
    spec = {'api_version': 2, 'deps': {'recipe_engine': {}}}
    if include_autoroll_options:
      autoroll_options = {
          'trivial': {
              'tbr_emails': list(tbr_emails),
              'automatic_commit': trivial_commit,
              'dry_run': trivial_dryrun,
          },
          'nontrivial': {
              'extra_reviewer_emails': list(extra_reviewers),
              'automatic_commit_dry_run': nontrivial_dryrun,
          },
          'disable_reason': disable_reason,
      }
      spec['autoroll_recipe_options'] = autoroll_options
    return spec

  def roll_data(self, project, spec=None, success=True, trivial=True,
                empty=False):
    """Returns mock roll and recipes.cfg data for |project|."""
    if spec is None:
      spec = self.repo_spec()
    if empty:
      # An empty roll can never be a successful one.
      success = False
    ret = self.empty_test_data() + self.recipe_cfg(project, spec)
    if spec.get('autoroll_recipe_options', {}).get('disable_reason'):
      # Rolling is disabled for this repo; no 'roll' step data is produced.
      return ret
    commit = {
        'author_email': '<EMAIL>',
        'message_lines': [
            'some commit message',
            'R=<EMAIL>,<EMAIL>,invalid1,invalid<EMAIL>',
            'BUG=123,456',
        ],
        'revision': '123abc',
    }
    picked_roll_details = {
        'commit_infos': {'recipe_engine': [commit]},
        'spec': spec,
    }
    roll_result = {
        'success': success,
        'trivial': trivial if success else None,
        'picked_roll_details': picked_roll_details if success else None,
        'rejected_candidates_count': 0,
    }
    if empty:
      roll_result['roll_details'] = []
    else:
      roll_result['roll_details'] = [picked_roll_details]
      if not success:
        roll_result['rejected_candidates_count'] = 1
    ret += self.step_data('%s.roll' % project, self.m.json.output(roll_result))
    return ret

  def repo_data(self, project, trivial, status, timestamp):
    """Returns mock repo-state and 'git cl status' step data for |project|."""
    state = {
        'issue': '123456789',
        'issue_url': 'https://codereview.chromium.org/123456789',
        'trivial': trivial,
        'last_roll_ts_utc': timestamp,
    }
    repo_state_data = self.override_step_data(
        '%s.gsutil repo_state' % project,
        self.m.raw_io.stream_output(json.dumps(state), stream='stdout'),
        self.m.raw_io.stream_output('', stream='stderr'))
    status_data = self.step_data('%s.git cl status' % project,
                                 self.m.raw_io.stream_output(status))
    return repo_state_data + status_data

  def recipe_cfg(self, project, spec=None):
    """Returns mock recipes.cfg data (only) for |project|.

    Used by tests which abort between the 'read recipes.cfg' step and the
    'roll' step (e.g. which read repo state and decide to quit early). For
    "normal" test runs, use roll_data(), which includes this step data
    automatically.
    """
    if spec is None:
      spec = self.repo_spec()
    return self.override_step_data(
        '%s.read recipes.cfg' % project, self.m.json.output(spec))
| 2 | 2 |
examples/bomb_planted.py | paulnbrd/csgogsi | 0 | 12760613 | <gh_stars>0
import csgogsi

# Listen on the library defaults (host: 127.0.0.1, port: 3000).
server = csgogsi.Server()
csgogsi.disable_log()


@server.add_observer("current_round", "bomb")  # fired when the bomb state changes
def on_bomb_state_changed():
    # BUG FIX: message previously read "Bomb sate changed".
    print("Bomb state changed ! Now {}".format(server.payload.current_round.bomb))


# Do not fire observers for the initial payload received on startup.
server.disable_on_start_event_triggering = True
server.run()
print("This will never be printed because the csgogsi server is blocking the thread (blocking argument is True by "
      "default)")
| 2.375 | 2 |
phonenumbers/data/region_GA.py | ayushgoel/FixGoogleContacts | 2 | 12760614 | """Auto-generated file, do not edit by hand. GA metadata"""
from ..phonemetadata import NumberFormat, PhoneNumberDesc, PhoneMetadata
# Phone number metadata for Gabon (region 'GA', country calling code +241).
# NOTE: auto-generated (see module docstring); regenerate rather than hand-edit.
PHONE_METADATA_GA = PhoneMetadata(id='GA', country_code=241, international_prefix='00',
    general_desc=PhoneNumberDesc(national_number_pattern='0\\d{7}', possible_number_pattern='\\d{8}'),
    fixed_line=PhoneNumberDesc(national_number_pattern='01\\d{6}', possible_number_pattern='\\d{8}', example_number='01441234'),
    mobile=PhoneNumberDesc(national_number_pattern='0[2-7]\\d{6}', possible_number_pattern='\\d{8}', example_number='06031234'),
    toll_free=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
    premium_rate=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
    shared_cost=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
    personal_number=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
    voip=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
    pager=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
    uan=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
    emergency=PhoneNumberDesc(national_number_pattern='1730|18|13\\d{2}', possible_number_pattern='\\d{2,4}', example_number='1730'),
    voicemail=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
    short_code=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
    standard_rate=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
    no_international_dialling=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
    number_format=[NumberFormat(pattern='(0\\d)(\\d{2})(\\d{2})(\\d{2})', format='\\1 \\2 \\3 \\4')],
    leading_zero_possible=True)
| 1.679688 | 2 |
app/api/__init__.py | TestAuto2018/TestAuto | 249 | 12760615 | <filename>app/api/__init__.py
# -*- coding: utf-8 -*-
__author__ = "苦叶子"
# (The string below documents the maintainer's WeChat account and email.)
"""
公众号: 开源优测
Email: <EMAIL>
"""
from flask import Blueprint
from flask_restful import Api
# Single blueprint mounted at /api; every REST resource below registers on it.
api_bp = Blueprint('api', __name__)
api = Api(api_bp)
# Imports are interleaved with registrations so each resource class is added
# right after it is imported (one resource per endpoint family).
from .auth import Auth
api.add_resource(Auth, "/auth/")
from .product import Product
api.add_resource(Product, "/product/")
from .project import Project
api.add_resource(Project, "/project/")
from .suite import Suite
api.add_resource(Suite, "/suite/")
from .object import Object
api.add_resource(Object, "/object/")
from .case import Case
api.add_resource(Case, "/case/")
from .step import Step
api.add_resource(Step, "/step/")
from .var import Var
api.add_resource(Var, "/var/")
from .keyword import Keyword
api.add_resource(Keyword, "/keyword/")
from .help import Help
api.add_resource(Help, "/help/")
from .task import Task
api.add_resource(Task, "/task/")
from .trigger import Triggers
api.add_resource(Triggers, "/trigger/")
from .stats import Stats
api.add_resource(Stats, "/stats/")
from .user import Users
api.add_resource(Users, "/user/")
from .role import Roles
api.add_resource(Roles, "/role/")
from .user_keyword import UserKeywordSuite, UserKeyword
api.add_resource(UserKeywordSuite, "/user_keyword_suite/")
api.add_resource(UserKeyword, "/user_keyword/")
anomaly_simulation/anomalies/__init__.py | pilijevski/microservices_anomalies | 1 | 12760616 | <filename>anomaly_simulation/anomalies/__init__.py
from .packet_loss import PacketLossAnomaly
from .delay import DelayAnomaly
from .pause import FreezeContainerAnomaly
# Registry mapping an anomaly's short name (as used by callers/config) to the
# class that implements it.
anomaly_dict = {"loss": PacketLossAnomaly, "delay": DelayAnomaly, "pause": FreezeContainerAnomaly}
| 1.601563 | 2 |
Script/Commands/Messages/Creators/refresh_dbl.py | AIDRI/Clash-Of-Clans-Discord-Bot | 0 | 12760617 | <reponame>AIDRI/Clash-Of-Clans-Discord-Bot
from Script.import_emojis import Emojis
from Script.Clients.top_gg import Dbl_client
from Script.Clients.discord import Clash_info
async def refresh_dbl(ctx):
    """Push the bot's current guild count to top.gg and confirm in chat.

    Args:
        ctx: Discord command context the confirmation message is sent to.
    """
    # Keep the top.gg server count in sync with the number of guilds the bot
    # is currently a member of.
    await Dbl_client.update_stats(len(Clash_info.guilds))
    # Acknowledge with the "Yes" emoji and a link to the bot's top.gg page.
    await ctx.send(str(Emojis["Yes"]) + " (https://top.gg/bot/704688212832026724)")
| 1.992188 | 2 |
main.py | LinuxKid21/Discord-Grammar-Nazi | 1 | 12760618 | import discord
from discord.ext import commands
import asyncio
# configuration files
import configparser
import sys, traceback
# grammar check
import language_check
grammar_en = language_check.LanguageTool('en-US')  # shared US-English grammar checker
# https://www.geeksforgeeks.org/get-synonymsantonyms-nltk-wordnet-python/
import nltk
from nltk.corpus import wordnet
import re
from PIL import ImageFont, ImageDraw, Image
# load configuration to get around hard coded tokens
config = configparser.ConfigParser()
with open('config.ini') as config_file:
    config.read_file(config_file)
# startup stuff for debugging
print('using discordpy version', discord.__version__)
client = discord.Client()
@client.event
async def on_ready():
    """Print identifying info to stdout once the bot has connected."""
    banner = 'Logged in {0} - {1}\nVersion {2}'.format(
        client.user.name, client.user.id, discord.__version__)
    print(banner)
def handle_grammar(message):
    """Check a Discord message for grammar/spelling issues via LanguageTool.

    Args:
        message: discord message object; only ``message.content`` is read.

    Returns:
        A ``(sentences, img)`` tuple: ``sentences`` is a list of reply
        strings (unhandled rule matches are appended as a raw ``[match]``
        list) and ``img`` is the path of a rendered "missing punctuation"
        image, or '' when no image was produced.
    """
    matches = grammar_en.check(message.content)
    sentences = []
    for match in matches:
        bad_word = message.content[match.offset:match.offset + match.errorlength]
        if match.ruleId == 'UPPERCASE_SENTENCE_START':
            sentences.append('Wow. I can\'t believe you didn\'t start your sentence with a capital letter.')
        elif match.ruleId == 'MORFOLOGIK_RULE_EN_US':  # spelling errors
            # startswith() also handles an empty bad_word safely (the old
            # bad_word[0] indexing raised IndexError on zero-length matches).
            if bad_word.startswith('@'):
                continue  # don't over react when they @mention someone!
            sentences.append('Ugh. The word \'' + bad_word + '\' is not spelled correctly.')
        elif match.ruleId == 'IT_VBZ':  # e.g. "this are a problem"
            sentences.append('Subject verb disagreement! Did you mean to use \'' + match.replacements[0] + '\' instead? (hint: you did)')
        elif match.ruleId in ('IT_IS', 'EN_CONTRACTION_SPELLING'):  # its -> it's, thats -> that's
            sentences.append('Missing an apostrophe here: \'' + bad_word + '\'. Fix it.')
        elif match.ruleId == 'PROGRESSIVE_VERBS':
            # BUG FIX: the two halves of this message previously ran together
            # without a space ("...form.I know...").
            sentences.append('This usage is not usually used in the progressive form. ' +
                             'I know you don\'t know what that means so here it is: http://lmgtfy.com/?q=progressive+form')
        elif match.ruleId == 'PERS_PRONOUN_AGREEMENT_SENT_START':  # e.g. "I are"
            sentences.append('really? That verb does not agree with \'' + bad_word + '\'. You clearly meant \'' + match.replacements[0] + '\'')
        else:
            # NOTE(review): unhandled rules append the raw [match] list;
            # on_message then passes that list to channel.send() -- confirm
            # this is intended rather than a stringified fallback.
            sentences.append([match])
    punctuation = ['.', '!', '?']
    # Split while keeping the delimiters, e.g. "Hi. Ok" -> ['Hi', '.', ' Ok'].
    # Raw string avoids the invalid '\!' escape warning of the old pattern.
    split = re.split(r'(\.|\!|\?)', message.content)
    if split[-1] == '':
        # Content ended in punctuation; drop the trailing empty piece.
        split = split[:-1]
    img = ''
    if split[-1] not in punctuation:
        # Render the unpunctuated tail (last 15 chars) onto the note image,
        # with an arrow marking where the punctuation should have gone.
        smaller_text = split[-1][-15:]
        font = ImageFont.truetype('DejaVuSans.ttf', 75)
        im = Image.open('NoteBack.png')
        d = ImageDraw.Draw(im)
        d.text((200, 560), smaller_text, fill=(0, 0, 0), font=font)
        pos = d.textsize(smaller_text, font)
        arrow = Image.open('arrow.png')
        im.paste(arrow, (200 + pos[0], 560 + pos[1]), arrow)
        im.save('tmp.jpg')
        img = 'tmp.jpg'
        sentences.append('!?!?!?!?!? NO PUNCTUATION? Look at where you missed it!')
    return sentences, img
def handle_thesaurus(message):
    """Look up WordNet synonyms for the word following "Thesaurus " in a message.

    Args:
        message: discord message whose content contains "Thesaurus <word>".

    Returns:
        A reply string listing the word's synonyms, or a taunt when the word
        is unknown to WordNet.
    """
    # Grab the text after "Thesaurus " and keep only the first word of it.
    remainder = message.content.split("Thesaurus ", 1)[1]
    end = remainder.find(" ")
    word = remainder if end == -1 else remainder[0:end]
    if word == "":
        word = "nothing"
    # Collect every lemma name across all synsets, excluding the word itself.
    synonyms = []
    for syn in wordnet.synsets(word):
        for lemma in syn.lemmas():
            if lemma.name() != word:
                synonyms.append(lemma.name())
    if len(synonyms) == 0:
        # BUG FIX: this branch previously raised NameError (it returned
        # 'retstring' while the variable was named 'ret_string').
        return word + " isn't a word i know, which probably means you're wrong. Goof."
    # Join once instead of re-concatenating the reply character by character.
    return "Here, dummy:\n\n'" + word + "' synonyms: " + ', '.join(synonyms)
@client.event
async def on_message(message):
    # Ignore messages from bots (including ourselves) to avoid feedback loops.
    if(message.author.bot):
        return
    # "Thesaurus <word>" triggers a synonym lookup; everything else is
    # grammar-checked.
    if "Thesaurus " in message.content:
        my_string = handle_thesaurus(message)
        await message.channel.send(my_string)
    else:
        sentences, img_loc = handle_grammar(message)
        for sentence in sentences:
            await message.channel.send(sentence)
        # Attach the "missing punctuation" image when one was rendered.
        if(img_loc != ''):
            await message.channel.send(img_loc, file=discord.File(img_loc))
# now actually connect the bot
# (blocking call; the token comes from config.ini rather than being hard-coded)
client.run(config.get(section='Configuration', option='connection_token'),
           bot=True, reconnect=True)
| 2.609375 | 3 |
Python/Skrypty/Python - Szkolenie_11-2015/przyklady_rec_python/PythonExample/descriptors2.py | Elzei/show-off | 0 | 12760619 | <reponame>Elzei/show-off<gh_stars>0
class MyClass(object):
    # Demonstrates the full property protocol (Python 2 syntax): the getter,
    # setter and deleter below all wrap the name-mangled attribute self.__x.
    @property
    def x(self):
        return self.__x
    @x.getter
    def x(self):
        # This @x.getter redefinition replaces the plain @property getter
        # above, so this traced version is the one actually used.
        print "Pobranie atrybutu x"
        return self.__x
    @x.setter
    def x(self, value):
        print "Ustawienie x na", value
        self.__x = value
    @x.deleter
    def x(self):
        print "Usuniecie x"
        del self.__x
a = MyClass()
a.x = 999   # triggers the setter (prints the assigned value)
w = a.x     # triggers the traced getter
del a.x     # triggers the deleter
| 2.859375 | 3 |
students/k33402/Sholomov_Dan/lab34/lab3/order/admin.py | heidamn/ITMO_ICT_WebDevelopment_2020-2021 | 0 | 12760620 | from django.contrib import admin
from .models import Order, OrderedItem
@admin.register(Order)
class ItemAdmin(admin.ModelAdmin):
    # Default admin interface for Order; registration happens via the decorator.
    pass
@admin.register(OrderedItem)
class ItemAdmin(admin.ModelAdmin):
    # NOTE(review): this class reuses the name ItemAdmin, shadowing the Order
    # admin class above at module level. Registration still works (it occurs
    # at definition time), but the duplicate name should probably be renamed
    # to OrderedItemAdmin.
    pass
| 1.546875 | 2 |
myapp/setup.py | clozinski/grok.install | 1 | 12760621 | from setuptools import setup, find_packages
setup(
name='myapp',
version='0.1',
author='<NAME>',
author_email='<EMAIL>',
url='https://github.com/clozinski/grok.install',
description="""\
Simple Part of the Install package.
""",
packages=find_packages('.'),
package_dir={'': '.'},
include_package_data=True,
zip_safe=False,
license='ZPL',
install_requires=['setuptools',
'grok',
'grokui.admin',
'grokcore.startup',
'grokcore.message',
],
entry_points="""
[console_scripts]
interactive_debug_prompt = grokcore.startup.startup:interactive_debug_prompt
[paste.app_factory]
main = grokcore.startup:application_factory""",
)
| 1.421875 | 1 |
example/scripts/002_settings.py | kaalam/kpipe | 0 | 12760622 | <reponame>kaalam/kpipe
class Settings():
    """Pipeline step that only carries configuration constants.

    The pipeline framework expects every step to expose .inputs(),
    .output() and .build(); here they are all deliberate no-ops so this
    class can act as a plain bag of settings shared by later steps.
    """
    # Search term consumed by the rest of the example pipeline.
    query = 'felines'
    def inputs(self):
        """This step consumes nothing."""
        return None
    def output(self):
        """This step produces nothing."""
        return None
    def build(self):
        """Nothing to build for a settings-only step."""
        pass
| 2.15625 | 2 |
mrgeo-services/mrgeo-services-core/src/main/scripts/wps-shell-example.py | ngageoint/mrgeo | 198 | 12760623 | #!/usr/bin/python
import sys
import time
# read all the input values into a dictionary for easy access.
# (arguments are expected as key=value pairs on the command line)
args = {}
for v in sys.argv[1:]:
    s = v.split("=")
    args[s[0]] = s[1]
# put an artificial pause to simulate an expensive operation
# (reports progress 10..90 in 10% steps, one step per second)
for i in range(1, 10):
    print "progress:" + str(i * 10)
    time.sleep(1)
# write out the results.
print "summary:" + str(args)
print "output:" + args['output']
| 3.15625 | 3 |
shfl/federated_aggregator/norm_clip_aggregators.py | SSSuperTIan/Sherpa.ai-Federated-Learning-Framework | 1 | 12760624 | <reponame>SSSuperTIan/Sherpa.ai-Federated-Learning-Framework
import numpy as np
from numpy import linalg as LA
from shfl.federated_aggregator import FedAvgAggregator
from multipledispatch import dispatch
from multipledispatch.variadic import Variadic
class NormClipAggregator(FedAvgAggregator):
    """
    Implementation of Average Federated Aggregator with Clipped Norm.
    It clips the norm of the client's updates and averages them.
    # Arguments:
        clip: value used to clip each client's update
    """
    def __init__(self, clip):
        # Maximum L2 norm allowed for a single client's update.
        self._clip = clip
    def _serialize(self, data):
        """
        It turns a list of multidimensional arrays into a list of one-dimensional arrays
        # Arguments:
            data: list of multidimensional arrays
        """
        data = [np.array(j) for j in data]
        # Record every array's shape so _deserialize can restore the layout.
        self._data_shape_list = [j.shape for j in data]
        serialized_data = [j.ravel() for j in data]
        serialized_data = np.hstack(serialized_data)
        return serialized_data
    def _deserialize(self, data):
        """
        It turns a list of one-dimensional arrays into a list of multidimensional arrays.
        The multidimensional shape is stored when it is serialized
        # Arguments:
            data: list of one-dimensional arrays
        """
        firstInd = 0
        deserialized_data = []
        for shp in self._data_shape_list:
            # Flat elements consumed by this entry: product of dimensions for
            # N-D arrays, 1 for scalars, length for 1-D arrays.
            if len(shp) > 1:
                shift = np.prod(shp)
            elif len(shp) == 0:
                shift = 1
            else:
                shift = shp[0]
            tmp_array = data[firstInd:firstInd+shift]
            tmp_array = tmp_array.reshape(shp)
            deserialized_data.append(tmp_array)
            firstInd += shift
        return deserialized_data
    @dispatch(Variadic[np.ndarray, np.ScalarType])
    def _aggregate(self, *params):
        """Aggregation of arrays"""
        clients_params = np.array(params)
        for i, v in enumerate(clients_params):
            # Scale down any update whose L2 norm exceeds the clip threshold.
            # NOTE(review): a zero-norm update makes clip/norm divide by zero
            # (numpy warning); confirm that case cannot occur upstream.
            norm = LA.norm(v)
            clients_params[i] = np.multiply(v, min(1, self._clip/norm))
        return np.mean(clients_params, axis=0)
    @dispatch(Variadic[list])
    def _aggregate(self, *params):
        """Aggregation of (nested) lists of arrays"""
        # Flatten each client's structure, clip and average in flat space,
        # then restore the original nesting.
        serialized_params = np.array([self._serialize(client) for client in params])
        serialized_aggregation = self._aggregate(*serialized_params)
        aggregated_weights = self._deserialize(serialized_aggregation)
        return aggregated_weights
class CDPAggregator(NormClipAggregator):
    """
    Implementation of Average Federated Aggregator with Differential Privacy also known \
    as Central Differential Privacy.
    It clips the norm of the client's updates, averages them and adds gaussian noise \
    calibrated to noise_mult*clip/number_of_clients.
    # Arguments:
        clip: value used to clip each client's update.
        noise_mult: quantity of noise to add. To ensure proper Differential Privacy, \
        it must be calibrated according to some composition theorem.
    """
    def __init__(self, clip, noise_mult):
        super().__init__(clip=clip)
        # Noise multiplier applied on top of the clipped average.
        self._noise_mult = noise_mult
    @dispatch(Variadic[np.ndarray, np.ScalarType])
    def _aggregate(self, *params):
        """Aggregation of arrays with gaussian noise calibrated to noise_mult*clip/number_of_clients"""
        clients_params = np.array(params)
        # Clipped mean from the parent class, plus additive gaussian noise
        # whose scale shrinks with the number of participating clients.
        mean = super()._aggregate(*params)
        noise = np.random.normal(loc=0.0, scale=self._noise_mult*self._clip/len(clients_params), size=mean.shape)
        return mean + noise
    @dispatch(Variadic[list])
    def _aggregate(self, *params):
        # Lists are handled by the parent, which serializes and re-dispatches
        # to the array overload above.
        return super()._aggregate(*params)
class WeakDPAggregator(CDPAggregator):
    """
    Implementation of Average Federated Aggregator with Weak Differential Privacy.
    It clips the norm of the client's updates, averages them and adds gaussian noise \
    calibrated to 0.025*clip/number_of_clients.
    The noise multiplier 0.025 is not big enough to ensure proper Differential Privacy.
    # Arguments:
        clip: value used to clip each client's update
    """
    def __init__(self, clip, noise_mult=0.025):
        super().__init__(clip=clip, noise_mult = noise_mult)
    @dispatch(Variadic[np.ndarray, np.ScalarType])
    def _aggregate(self, *params):
        # Delegates to CDPAggregator's clipped-mean-plus-noise implementation.
        return super()._aggregate(*params)
    @dispatch(Variadic[list])
    def _aggregate(self, *params):
        return super()._aggregate(*params)
| 2.5 | 2 |
DjangoGraphs/migrations/0004_type_unit.py | vabene1111/DjangoGraphs | 7 | 12760625 | # Generated by Django 2.2.3 on 2019-07-04 07:19
from django.db import migrations, models
class Migration(migrations.Migration):
    # Adds a 'unit' CharField (max 4 chars) to the Type model.
    dependencies = [
        ('DjangoGraphs', '0003_auto_20190704_0628'),
    ]
    operations = [
        migrations.AddField(
            model_name='type',
            name='unit',
            # NOTE(review): blank='' (an empty string) is falsy, so this
            # behaves like blank=False — confirm blank=True wasn't intended.
            field=models.CharField(blank='', default='', max_length=4),
        ),
    ]
| 1.53125 | 2 |
examples/docs_snippets/docs_snippets/concepts/assets/source_asset.py | kstennettlull/dagster | 0 | 12760626 | <gh_stars>0
# pylint: disable=redefined-outer-name
# start_marker
from dagster import AssetGroup, AssetKey, SourceAsset, asset
# A source asset represents data produced outside this asset group.
my_source_asset = SourceAsset(key=AssetKey("my_source_asset"))
@asset
def my_derived_asset(my_source_asset):
    # Derived asset: concatenates [4] onto the (list-valued) source asset.
    return my_source_asset + [4]
asset_group = AssetGroup(assets=[my_derived_asset], source_assets=[my_source_asset])
| 1.929688 | 2 |
tests/retention/test_should_perform_retention_predicate.py | Morgenz/bbq | 41 | 12760627 | import unittest
from mock import patch
from src.backup.datastore.Backup import Backup
from src.retention.should_perform_retention_predicate import \
ShouldPerformRetentionPredicate
from src.commons.error_reporting import ErrorReporting
class TestShouldPerformRetentionPredicate(unittest.TestCase):
    """Exercises ShouldPerformRetentionPredicate.test() over backup lists."""
    def setUp(self):
        # Stub out environment/version lookup and all Google API client
        # plumbing so the predicate runs without credentials or network.
        patch(
            'src.commons.config.environment.Environment.version_id',
            return_value='dummy_version'
        ).start()
        patch('googleapiclient.discovery.build').start()
        patch('oauth2client.client.GoogleCredentials.get_application_default') \
            .start()
    def tearDown(self):
        patch.stopall()
    def test_should_return_true_for_valid_backup_list(self):
        # given
        backups = self.__create_valid_backups()
        # when
        result = ShouldPerformRetentionPredicate.test(backups)
        # then
        self.assertEqual(True, result)
    def test_should_return_false_for_empty_list(self):
        # given
        empty_list = []
        # when
        result = ShouldPerformRetentionPredicate.test(empty_list)
        # then
        self.assertEqual(False, result)
    @patch.object(ErrorReporting, '_create_http')
    @patch.object(ErrorReporting, 'report')
    def test_should_return_false_and_trigger_error_reporting_if_there_are_multiple_backups_referencing_same_table_in_bq(
            self, report, _):
        # given
        backups = \
            self.__create_backups_with_part_of_referencing_same_table_in_bq()
        # when
        result = ShouldPerformRetentionPredicate.test(backups)
        # then
        self.assertEqual(False, result)
        report.assert_called_once()
    @staticmethod
    def __create_valid_backups():
        # Four backups, each referencing a distinct BigQuery table.
        backup_1 = Backup(table_id='table_id_1', dataset_id='dataset_id_1')
        backup_2 = Backup(table_id='table_id_2', dataset_id='dataset_id_1')
        backup_3 = Backup(table_id='table_id_3', dataset_id='dataset_id_1')
        backup_4 = Backup(table_id='table_id_4', dataset_id='dataset_id_1')
        return [backup_1, backup_2, backup_3, backup_4]
    @staticmethod
    def __create_backups_with_part_of_referencing_same_table_in_bq():
        # backup_3 and backup_4 both reference table_id_3 (the invalid case).
        backup_1 = Backup(table_id='table_id_1', dataset_id='dataset_id_1')
        backup_2 = Backup(table_id='table_id_2', dataset_id='dataset_id_1')
        backup_3 = Backup(table_id='table_id_3', dataset_id='dataset_id_1')
        backup_4 = Backup(table_id='table_id_3', dataset_id='dataset_id_1')
        return [backup_4, backup_2, backup_3, backup_1]
| 2.40625 | 2 |
insert-delete-getrandom-o1.py | ArCan314/leetcode | 0 | 12760628 | from typing import Dict, List, Set
from random import choice
class RandomizedSet:
    """Set supporting insert, remove and uniform random pick, all in O(1).

    Invariant: ``vals`` holds the elements in arbitrary order and
    ``indices`` maps each element to its position in ``vals``.
    """

    def __init__(self):
        self.vals: List[int] = []          # elements, arbitrary order
        self.indices: Dict[int, int] = {}  # value -> index into self.vals

    def insert(self, val: int) -> bool:
        """Add ``val``; return False if it was already present."""
        if val in self.indices:  # direct membership test; .keys() is redundant
            return False
        self.indices[val] = len(self.vals)
        self.vals.append(val)
        return True

    def remove(self, val: int) -> bool:
        """Remove ``val``; return False if it was absent.

        Uses the swap-with-last trick so the list pop stays O(1).
        """
        if val not in self.indices:
            return False
        ind = self.indices[val]
        last = self.vals[-1]
        # Move the last element into the vacated slot, then drop the tail.
        self.vals[ind] = last
        self.indices[last] = ind
        self.vals.pop()
        del self.indices[val]
        return True

    def getRandom(self) -> int:
        """Return a uniformly random element (the set must be non-empty)."""
        return choice(self.vals)
# Your RandomizedSet object will be instantiated and called as such:
# obj = RandomizedSet()
# param_1 = obj.insert(val)
# param_2 = obj.remove(val)
# param_3 = obj.getRandom()
| 3.734375 | 4 |
pywinrt/bleak_winrt/windows/devices/bluetooth/genericattributeprofile/__init__.py | langarmjonny/bleak-winrt | 0 | 12760629 | <filename>pywinrt/bleak_winrt/windows/devices/bluetooth/genericattributeprofile/__init__.py
# WARNING: Please don't edit this file. It was generated by Python/WinRT v1.0.0-alpha.5
import enum
import bleak_winrt
_ns_module = bleak_winrt._import_ns_module("Windows.Devices.Bluetooth.GenericAttributeProfile")
# Best-effort imports of namespaces whose types appear in this module's API;
# a missing optional namespace must not prevent this module from loading.
try:
    import bleak_winrt.windows.devices.bluetooth
except Exception:
    pass
try:
    import bleak_winrt.windows.devices.enumeration
except Exception:
    pass
try:
    import bleak_winrt.windows.foundation
except Exception:
    pass
try:
    import bleak_winrt.windows.foundation.collections
except Exception:
    pass
try:
    import bleak_winrt.windows.storage.streams
except Exception:
    pass
# Enumerations generated from the Windows.Devices.Bluetooth.GenericAttributeProfile
# WinRT metadata; member values must stay in sync with the WinRT ABI, so edit
# the generator (Python/WinRT), not this file.
class GattCharacteristicProperties(enum.IntFlag):
    NONE = 0
    BROADCAST = 0x1
    READ = 0x2
    WRITE_WITHOUT_RESPONSE = 0x4
    WRITE = 0x8
    NOTIFY = 0x10
    INDICATE = 0x20
    AUTHENTICATED_SIGNED_WRITES = 0x40
    EXTENDED_PROPERTIES = 0x80
    RELIABLE_WRITES = 0x100
    WRITABLE_AUXILIARIES = 0x200
class GattClientCharacteristicConfigurationDescriptorValue(enum.IntEnum):
    NONE = 0
    NOTIFY = 1
    INDICATE = 2
class GattCommunicationStatus(enum.IntEnum):
    SUCCESS = 0
    UNREACHABLE = 1
    PROTOCOL_ERROR = 2
    ACCESS_DENIED = 3
class GattOpenStatus(enum.IntEnum):
    UNSPECIFIED = 0
    SUCCESS = 1
    ALREADY_OPENED = 2
    NOT_FOUND = 3
    SHARING_VIOLATION = 4
    ACCESS_DENIED = 5
class GattProtectionLevel(enum.IntEnum):
    PLAIN = 0
    AUTHENTICATION_REQUIRED = 1
    ENCRYPTION_REQUIRED = 2
    ENCRYPTION_AND_AUTHENTICATION_REQUIRED = 3
class GattRequestState(enum.IntEnum):
    PENDING = 0
    COMPLETED = 1
    CANCELED = 2
class GattServiceProviderAdvertisementStatus(enum.IntEnum):
    CREATED = 0
    STOPPED = 1
    STARTED = 2
    ABORTED = 3
    STARTED_WITHOUT_ALL_ADVERTISEMENT_DATA = 4
class GattSessionStatus(enum.IntEnum):
    CLOSED = 0
    ACTIVE = 1
class GattSharingMode(enum.IntEnum):
    UNSPECIFIED = 0
    EXCLUSIVE = 1
    SHARED_READ_ONLY = 2
    SHARED_READ_AND_WRITE = 3
class GattWriteOption(enum.IntEnum):
    WRITE_WITH_RESPONSE = 0
    WRITE_WITHOUT_RESPONSE = 1
# Re-export the generated WinRT runtime classes at module level so callers can
# import them directly from this package (generated code — do not hand-edit).
GattCharacteristic = _ns_module.GattCharacteristic
GattCharacteristicUuids = _ns_module.GattCharacteristicUuids
GattCharacteristicsResult = _ns_module.GattCharacteristicsResult
GattClientNotificationResult = _ns_module.GattClientNotificationResult
GattDescriptor = _ns_module.GattDescriptor
GattDescriptorUuids = _ns_module.GattDescriptorUuids
GattDescriptorsResult = _ns_module.GattDescriptorsResult
GattDeviceService = _ns_module.GattDeviceService
GattDeviceServicesResult = _ns_module.GattDeviceServicesResult
GattLocalCharacteristic = _ns_module.GattLocalCharacteristic
GattLocalCharacteristicParameters = _ns_module.GattLocalCharacteristicParameters
GattLocalCharacteristicResult = _ns_module.GattLocalCharacteristicResult
GattLocalDescriptor = _ns_module.GattLocalDescriptor
GattLocalDescriptorParameters = _ns_module.GattLocalDescriptorParameters
GattLocalDescriptorResult = _ns_module.GattLocalDescriptorResult
GattLocalService = _ns_module.GattLocalService
GattPresentationFormat = _ns_module.GattPresentationFormat
GattPresentationFormatTypes = _ns_module.GattPresentationFormatTypes
GattProtocolError = _ns_module.GattProtocolError
GattReadClientCharacteristicConfigurationDescriptorResult = _ns_module.GattReadClientCharacteristicConfigurationDescriptorResult
GattReadRequest = _ns_module.GattReadRequest
GattReadRequestedEventArgs = _ns_module.GattReadRequestedEventArgs
GattReadResult = _ns_module.GattReadResult
GattReliableWriteTransaction = _ns_module.GattReliableWriteTransaction
GattRequestStateChangedEventArgs = _ns_module.GattRequestStateChangedEventArgs
GattServiceProvider = _ns_module.GattServiceProvider
GattServiceProviderAdvertisementStatusChangedEventArgs = _ns_module.GattServiceProviderAdvertisementStatusChangedEventArgs
GattServiceProviderAdvertisingParameters = _ns_module.GattServiceProviderAdvertisingParameters
GattServiceProviderResult = _ns_module.GattServiceProviderResult
GattServiceUuids = _ns_module.GattServiceUuids
GattSession = _ns_module.GattSession
GattSessionStatusChangedEventArgs = _ns_module.GattSessionStatusChangedEventArgs
GattSubscribedClient = _ns_module.GattSubscribedClient
GattValueChangedEventArgs = _ns_module.GattValueChangedEventArgs
GattWriteRequest = _ns_module.GattWriteRequest
GattWriteRequestedEventArgs = _ns_module.GattWriteRequestedEventArgs
GattWriteResult = _ns_module.GattWriteResult
| 1.6875 | 2 |
jinrui/__init__.py | haobin12358/jinrui-test | 0 | 12760630 | <filename>jinrui/__init__.py
# -*- coding: utf-8 -*-
from flask import Flask
from flask import Blueprint
from flask_cors import CORS
from .api.APdfUpload import APdfUpload
from .api.AHello import AHello
from .api.AOcr import AOcr
from .api.APpt import APpt
from .api.APaper import APaper
from .api.AAnswer import AAnswer
from .api.AFile import AFile
from .extensions.request_handler import error_handler, request_first_handler
from .config.secret import DefaltSettig
from .extensions.register_ext import register_ext
from jinrui.extensions.base_jsonencoder import JSONEncoder
from jinrui.extensions.base_request import Request
def register(app):
    """Create the /api blueprint, wire all MethodView routes, attach to app."""
    jr = Blueprint(__name__, 'jr', url_prefix='/api')
    # One MethodView per endpoint family; the trailing path segment selects
    # the concrete action handled inside each view.
    jr.add_url_rule('/hello/<string:hello>', view_func=AHello.as_view('hello'))
    jr.add_url_rule('/ocr/<string:ocr>', view_func=AOcr.as_view('ocr'))
    jr.add_url_rule('/ppt/<string:ppt>', view_func=APpt.as_view('ppt'))
    jr.add_url_rule('/pdf/<string:pdf>', view_func=APdfUpload.as_view('pdf'))
    jr.add_url_rule('/paper/<string:paper>', view_func=APaper.as_view('paper'))
    jr.add_url_rule('/answer/<string:answer>', view_func=AAnswer.as_view('answer'))
    jr.add_url_rule('/file/<string:file>', view_func=AFile.as_view('file'))
    app.register_blueprint(jr)
def after_request(resp):
    """Attach permissive CORS headers to every outgoing response."""
    cors_headers = {
        'Access-Control-Allow-Origin': '*',
        'Access-Control-Allow-Methods': 'GET,POST',
        'Access-Control-Allow-Headers': 'x-requested-with,content-type',
    }
    for header_name, header_value in cors_headers.items():
        resp.headers[header_name] = header_value
    return resp
def create_app():
    """Application factory: build and fully configure the Flask app."""
    app = Flask(__name__)
    app.json_encoder = JSONEncoder    # project-specific JSON serialization
    app.request_class = Request       # project-specific request wrapper
    app.config.from_object(DefaltSettig)
    app.after_request(after_request)  # CORS headers on every response
    register(app)                     # mount all /api resources
    CORS(app, supports_credentials=True)
    request_first_handler(app)        # before-request hooks
    register_ext(app)                 # database and other extensions
    error_handler(app)                # uniform error responses
    return app
| 1.867188 | 2 |
server/core/alpaca.py | webclinic017/trading-bot-1 | 1 | 12760631 | <filename>server/core/alpaca.py
import logging
import alpaca_trade_api as tradeapi
logger = logging.getLogger(__name__)
class TradeApiRest:
"""Base wrapper for TradeView REST requests."""
    def __init__(self):
        # Open the Alpaca REST connection; credentials and endpoint are read
        # by the SDK from the environment (no arguments required).
        try:
            # Env variables set to initialise connection:
            # * APCA_API_KEY_ID
            # * APCA_API_SECRET_KEY
            # * APCA_API_BASE_URL
            self.api = tradeapi.REST()
        except Exception as e:
            # NOTE(review): the failure is only logged and swallowed, leaving
            # self.api unset; any later method call will then raise
            # AttributeError — consider re-raising instead.
            logger.error(f"Tradeview api connection failed: {e}")
            return
    def account_info(self):
        """
        Retrieves account information.
        Endpoint: GET /account
        Returns:
        - id(uuid): Account id
        - account_number(str): Account number
        - status(str): Account status
        - currency(str): 'USD'
        - cash(str): cash balance
        - portfolio_value(str): Deprecated. Equal to 'equity' field
        - pattern_day_trader(bool): Is account flagged as pattern day trader
        - trade_suspended_by_user(bool): User setting
        - trading_blocked(bool): If true, the account is not allowed to
            place orders.
        - transfers_blocked(bool): If true, the account is not allowed to
            request money transfers.
        - account_blocked(bool): If true, the account activity by user is
            prohibited.
        - created_at(timestamp): Timestamp this account was created at.
        - shorting_enabled(bool): Flag to denote whether or not the account
            is permitted to short
        - long_market_value(str): Real-time MtM value of all long positions
            held in the account
        - short_market_value(str): Real-time MtM value of all short
            positions held in the account
        - equity(str): Cash + long_market_value + short_market_value
        - last_equity(str): Equity as of previous trading day at 16:00:00 ET
        - multiplier(str): Buying power multiplier that represents account
            margin classification
        - buying_power(str): Current available buying power; If multiplier =
            4, this is your daytrade buying power which is calculated as
            (last_equity - (last) maintenance_margin) * 4; If multiplier = 2,
            buying_power = max(equity – initial_margin,0) * 2; If multiplier =
            1, buying_power = cash
        - initial_margin(str): Reg T initial margin requirement
            (continuously updated value)
        - maintenance_margin(str): Maintenance margin requirement
            (continuously updated value)
        - sma(str): Value of special memorandum account (will be used at a
            later date to provide additional buying_power)
        - daytrade_count(str): The current number of daytrades that have
            been made in the last 5 trading days
        - last_maintenance_margin(str): Your maintenance margin requirement
            on the previous trading day
        - daytrading_buying_power(str): Your buying power for day trades
            (continuously updated value
        - regt_buying_power(str): Your buying power under Regulation T (your
            excess equity - equity minus margin value - times your margin
            multiplier)
        """
        # Thin pass-through to the Alpaca SDK.
        return self.api.get_account()
    def get_portfolio_history(
        self,
        date_start=None,
        date_end=None,
        period=None,
        timeframe=None,
        extended_hours=None,
    ):
        """
        Returns timeseries data about equity and profit/loss (P/L).
        Endpoint: GET /account/portfolio/history
        Params:
        - date_start(str): The date the data is returned from, in
            “YYYY-MM-DD” format
        - date_end(str): The date the data is returned up to, in
            “YYYY-MM-DD” format. Defaults to the current market date
        - period(str): The duration of the data in <number><unit>
            format where <unit> can be D (day), W (week), M (month),
            A (year). Default 1M
        - timeframe(str): The resolution of time window. 1Min, 5Min, 15Min,
            1H, or 1D
        - extended_hours(bool): If true, include extended hours in the
            result. This is effective only for timeframe less than 1D.
        """
        # All parameters are forwarded positionally to the SDK call.
        return self.api.get_portfolio_history(
            date_start, date_end, period, timeframe, extended_hours
        )
def list_assets(self, status=None, asset_class=None):
    """
    Fetch the master list of assets available for trade and data
    consumption from Alpaca.
    Endpoint: GET /assets
    Params:
    - status(str): e.g. "active"; all statuses are included when omitted
    - asset_class(str): defaults to us_equity
    """
    filters = (status, asset_class)
    return self.api.list_assets(*filters)
def get_asset(self, symbol):
    """
    Look up the asset record for the given symbol.
    Endpoint: GET /assets/{symbol}
    """
    return self.api.get_asset(symbol)
def list_positions(self):
    """
    Retrieve every open position held by the account.
    Endpoint: GET /positions
    """
    return self.api.list_positions()
def list_position_by_symbol(self, symbol):
    """
    Retrieve the account's open position for one symbol.
    Endpoint: GET /positions/{symbol}
    """
    return self.api.get_position(symbol)
def get_orders(
    self,
    status=None,
    limit=None,
    after=None,
    until=None,
    direction=None,
    nested=None,
):
    """
    List orders matching the given filters.
    Endpoint: GET /orders
    Params:
    - status(str): open, closed or all; defaults to open
    - limit(int): max number of orders returned; default 50, max 500
    - after(timestamp): only orders submitted after this time (exclusive)
    - until(timestamp): only orders submitted until this time (exclusive)
    - direction(str): sort order by submission time, asc or desc;
      defaults to desc
    - nested(bool): if true, roll multi-leg orders up under the legs
      field of their primary order
    """
    filters = (status, limit, after, until, direction, nested)
    return self.api.list_orders(*filters)
def get_order_by_client_order_id(self, client_order_id):
    """
    Get order details for an order whose client_order_id was manually
    specified by the client.
    Endpoint: GET /orders (looked up by client_order_id)
    """
    return self.api.get_order_by_client_order_id(client_order_id)
def get_order_by_id(self, order_id):
    """
    Get order details using the id auto-generated by Alpaca.
    Endpoint: GET /orders/{order_id}
    """
    return self.api.get_order(order_id)
def cancel_order_by_id(self, order_id):
    """
    Attempt to cancel the open order with the given id.
    (The previous docstring described liquidating a position; this
    method only cancels an order.)
    Endpoint: DELETE /orders/{order_id}
    """
    return self.api.cancel_order(order_id)
def cancel_all_orders(self):
    """
    Attempt to cancel all of the account's open orders.
    (The previous docstring described closing positions; this method
    only cancels orders.)
    Endpoint: DELETE /orders
    """
    return self.api.cancel_all_orders()
def submit_order(
    self,
    symbol,
    qty,
    side,
    type,  # NOTE(review): shadows the builtin `type`; kept for backward compatibility
    time_in_force,
    limit_price=None,
    stop_price=None,
    client_order_id=None,
    order_class=None,
    take_profit=None,
    stop_loss=None,
    trail_price=None,
    trail_percent=None,
):
    """
    Submit an order.
    Endpoint: POST /orders
    Params:
    :param symbol(str): symbol or asset ID to identify the asset to trade
    :param qty(float): number of shares to trade
    :param side(str): buy or sell
    :param type(str): market, limit, stop, stop_limit, or trailing_stop
    :param time_in_force(str):
        - day: a day order is eligible for execution only on the day it is
        live
        - gtc: good til cancelled
        - opg: use with a market/limit order type to submit 'market on open'
        (MOO) and 'limit on open' (LOO) orders
        - cls: use with a market/limit order type to submit 'market on
        close' (MOC) and 'limit on close' (LOC) orders
        - ioc: immediate or cancel requires all or part of the order to be
        executed immediately. Any unfilled portion of the order is canceled
        (v2 API only)
        - fok: fill or kill is only executed if the entire order quantity
        can be filled, otherwise the order is canceled (v2 API only)
    :param limit_price(float): required if type is 'limit' or 'stop_limit'
    :param stop_price(float): required if type is 'stop' or 'stop_limit'
    :param trail_price(float): this or 'trail_percent' is required if type is
        'trailing_stop'
    :param trail_percent(float): this or 'trail_price' is required if type
        is 'trailing_stop'
    :param client_order_id(str): unique identifier for the order.
        Automatically generated if not sent.
    :param order_class(str): simple, bracket, oco or oto
    :param take_profit(dict):
        - limit_price(float): required for bracket orders
    :param stop_loss(dict):
        - stop_limit(float): required for bracket orders
        - limit_price(float): the stop-loss order becomes a stop-limit order
        if specified

    NOTE(review): the original docstring also documented an
    ``extended_hours`` parameter, but this wrapper's signature does not
    accept or forward it -- confirm whether it should be added.
    """
    # Arguments are forwarded positionally; their order must match the
    # underlying SDK's submit_order signature.
    return self.api.submit_order(
        symbol,
        qty,
        side,
        type,
        time_in_force,
        limit_price,
        stop_price,
        client_order_id,
        order_class,
        take_profit,
        stop_loss,
        trail_price,
        trail_percent,
    )
def is_tradable(self, symbol):
    """Return True when Alpaca reports the given asset as tradable."""
    key = symbol if isinstance(symbol, str) else str(symbol)
    return self.api.get_asset(key).tradable
def get_clock(self):
    """
    Fetch market opening/closing details.
    :response timestamp(str): current timestamp
    :response is_open(bool): whether the market is currently open
    :response next_open(str): next market open timestamp
    :response next_close(str): next market close timestamp
    """
    return self.api.get_clock()
def is_market_open(self):
    """Return True when the market is currently open for trading."""
    # Reach into the entity's raw payload (vars(x) is x.__dict__).
    clock = self.api.get_clock()
    return vars(clock)["_raw"]["is_open"]
def cancel_orders(self, id):
    """
    Attempt to cancel the order with the given id.
    NOTE(review): duplicates cancel_order_by_id, and the ``id`` parameter
    shadows the builtin ``id``. Both kept unchanged for backward
    compatibility with existing callers.
    """
    return self.api.cancel_order(id)
def get_bars(
    self,
    symbols,
    timeframe,
    limit=None,
    start=None,
    end=None,
    after=None,
    until=None,
):
    """
    Retrieve a list of bars for each requested symbol.
    Endpoint: GET /bars/{timeframe}
    :param symbols(str): one or more (max 200) symbol names split by commas
    :param timeframe(str): one of minute, 1Min, 5Min, 15Min, day or 1D.
        minute is an alias of 1Min; similarly, day is of 1D.
    :param limit(int): maximum number of bars returned per symbol;
        default 100
    :param start(str): filter bars equal to or after this time, ISO format
    :param end(str): filter bars equal to or before this time, ISO format
    :param after(str): filter bars strictly after this time, ISO format
    :param until(str): filter bars strictly before this time, ISO format
        (the previous docstring called this parameter ``before``)
    """
    return self.api.get_barset(symbols, timeframe, limit, start, end, after, until)
def get_aggs(self, symbol, timespan, multiplier, _from, to):
    """
    Fetch aggregate bars for a symbol over a date range.
    Endpoint:
    GET /aggs/ticker/{symbol}/range/{multiplier}/{timespan}/{from}/{to}
    :param symbol(str): symbol or asset ID
    :param timespan(str): size of the time window: minute, hour, day,
        week, month, quarter, year
    :param multiplier(int): size of the timespan multiplier (distance
        between samples)
    :param _from: range start; accepts isoformat string, timestamp int
        or datetime object
    :param to: range end, same accepted types as _from
    """
    window = (symbol, timespan, multiplier, _from, to)
    return self.api.get_aggs(*window)
def get_last_trade(self, symbol):
    """
    Fetch details of the most recent trade for the given symbol.
    Endpoint: GET /last/stocks/{symbol}
    :param symbol(str): symbol or asset ID
    """
    return self.api.get_last_trade(symbol)
def get_last_quote(self, symbol):
    """
    Fetch the most recent quote for the given symbol.
    Endpoint: GET /last_quote/stocks/{symbol}
    :param symbol(str): symbol or asset ID
    """
    return self.api.get_last_quote(symbol)
def is_account_blocked(self):
    """Return True if the account is blocked from trading."""
    # _account_info() is a sibling helper defined elsewhere in this class.
    account = self._account_info()
    return account.trading_blocked
def daily_balance(self):
    """Return the change in account equity since the previous trading day."""
    snapshot = self._account_info()
    return float(snapshot.equity) - float(snapshot.last_equity)
def open_orders(self):
    """Return every order that is still open."""
    return self.api.list_orders(status="open")
| 2.390625 | 2 |
server/openapi_server/models/__init__.py | hubmapconsortium/ontology-api | 2 | 12760632 | # coding: utf-8
# flake8: noqa
from __future__ import absolute_import
# import models into model package
from openapi_server.models.codes_codes_obj import CodesCodesObj
from openapi_server.models.concept_detail import ConceptDetail
from openapi_server.models.concept_term import ConceptTerm
from openapi_server.models.full_capacity_term import FullCapacityTerm
from openapi_server.models.qqst import QQST
from openapi_server.models.sab_code_term import SabCodeTerm
from openapi_server.models.sab_definition import SabDefinition
from openapi_server.models.sab_relationship_concept_prefterm import SabRelationshipConceptPrefterm
from openapi_server.models.semantic_stn import SemanticStn
from openapi_server.models.sty_tui_stn import StyTuiStn
from openapi_server.models.term_resp_obj import TermRespObj
from openapi_server.models.termtype_code import TermtypeCode
| 1.203125 | 1 |
ch12-ann/classification_example.py | GaoX2015/intro_ds | 314 | 12760633 | # -*- coding: UTF-8 -*-
"""
此脚本用于展示如何利用神经网络解决分类问题
"""
import os
from mlp import ANN
import numpy as np
import matplotlib.pyplot as plt
from sklearn.datasets import make_blobs, make_circles, make_moons
from sklearn.linear_model import LogisticRegression
from sklearn.preprocessing import StandardScaler, OneHotEncoder
def generateData(n):
    """
    Generate four synthetic 2-D binary-classification datasets of n points:
    Gaussian blobs, concentric circles, interleaved moons and an XOR-style
    block pattern. Each dataset is a (features, labels) pair.
    """
    np.random.seed(12046)
    blobs = make_blobs(n_samples=n, centers = [[-2, -2], [2, 2]])
    circles = make_circles(n_samples=n, factor=.4, noise=.05)
    moons = make_moons(n_samples=n, noise=.05)
    blocks = np.random.rand(n, 2) - 0.5
    # Label is 1 when the point lies in the 2nd or 4th quadrant (XOR pattern).
    y = (blocks[:, 0] * blocks[:, 1] < 0) + 0
    blocks = (blocks, y)
    # Neural networks are unstable w.r.t. linear transforms of the inputs,
    # so standardize every dataset.
    scaler = StandardScaler()
    blobs = (scaler.fit_transform(blobs[0]), blobs[1])
    circles = (scaler.fit_transform(circles[0]), circles[1])
    moons = (scaler.fit_transform(moons[0]), moons[1])
    blocks = (scaler.fit_transform(blocks[0]), blocks[1])
    return blobs, circles, moons, blocks
def drawData(ax, data):
    """
    Scatter-plot a labelled dataset on *ax*: circles for label 1,
    black triangles for label 0. Returns the axis.
    """
    X, y = data
    label1 = X[y>0]
    ax.scatter(label1[:, 0], label1[:, 1], marker="o")
    label0 = X[y==0]
    ax.scatter(label0[:, 0], label0[:, 1], marker="^", color="k")
    return ax
def drawModel(ax, model):
    """
    Visualize the model's decision boundary on *ax* by shading the region
    where the predicted probability of label 1 is below 0.5.
    """
    x1 = np.linspace(ax.get_xlim()[0], ax.get_xlim()[1], 100)
    x2 = np.linspace(ax.get_ylim()[0], ax.get_ylim()[1], 100)
    X1, X2 = np.meshgrid(x1, x2)
    # Probability of the positive class on a 100x100 grid over the axis limits.
    Y = model.predict_proba(np.c_[X1.ravel(), X2.ravel()])[:, 1]
    Y = Y.reshape(X1.shape)
    ax.contourf(X1, X2, Y, levels=[0, 0.5], colors=["gray"], alpha=0.4)
    return ax
def trainLogit(data):
    """Fit a logistic regression model to the (X, y) pair and return it."""
    X, y = data
    model = LogisticRegression()
    model.fit(X, y)
    return model
def trainANN(data, logPath):
    """
    Train the multilayer perceptron defined in mlp.ANN on the (X, y) pair.
    Labels are one-hot encoded first; training logs go to logPath.
    """
    X, y = data
    enc = OneHotEncoder()
    y = enc.fit_transform(y.reshape(-1, 1)).toarray()
    # Hidden layers of 4 and 4 units, 2 output units (one per class).
    model = ANN([4, 4, 2], logPath)
    model.fit(X, y)
    return model
def visualize(data):
    """
    Draw each dataset twice -- once with the ANN's decision boundary
    (figure 1) and once with logistic regression's (figure 2).
    """
    # Create one figure per model type.
    fig = plt.figure(figsize=(10, 10), dpi=80)
    fig1 = plt.figure(figsize=(10, 10), dpi=80)
    # One subplot per dataset in each figure.
    for i in range(len(data)):
        ax = fig.add_subplot(2, 2, i+1)
        ax1 = fig1.add_subplot(2, 2, i+1)
        drawData(ax, data[i])
        # The log path separator differs between Windows and Linux.
        if os.name == "nt":
            drawModel(ax, trainANN(data[i], "logs\\data_%s" % (i+1)))
        else:
            drawModel(ax, trainANN(data[i], "logs/data_%s" % (i+1)))
        drawData(ax1, data[i])
        drawModel(ax1, trainLogit(data[i]))
        ax.get_xaxis().set_visible(False)
        ax.get_yaxis().set_visible(False)
        ax1.get_xaxis().set_visible(False)
        ax1.get_yaxis().set_visible(False)
    plt.show()
if __name__ == "__main__":
    # Generate the four datasets and compare ANN vs logistic regression.
    data = generateData(200)
    visualize(data)
| 3 | 3 |
tests/bugs/core_5750_test.py | FirebirdSQL/firebird-qa | 1 | 12760634 | <gh_stars>1-10
#coding:utf-8
#
# id: bugs.core_5750
# title: Date-time parsing is very weak
# decription:
# Checked on: 4.0.0.1479: OK, 1.263s.
#
# 27.10.2020.
# Parser changed after following fixes:
# - CORE-6427 - Whitespace as date separator causes conversion error.
# - CORE-6429 - Timezone offset in timestamp/time literal and CAST should follow SQL standard syntax only.
# See: https://github.com/FirebirdSQL/firebird/commit/ff37d445ce844f991242b1e2c1f96b80a5d1636d
# Adjusted expected stdout/stderr after discuss with Adriano.
# Checked on 4.0.0.2238
#
# tracker_id: CORE-5750
# min_versions: ['4.0']
# versions: 4.0
# qmid: None
import pytest
from firebird.qa import db_factory, isql_act, Action
# version: 4.0
# resources: None
substitutions_1 = []
init_script_1 = """"""
db_1 = db_factory(sql_dialect=3, init=init_script_1)
test_script_1 = """
set heading off;
--set echo on;
-- All these must fail:
select timestamp '2018-01-01 10 20 30' from rdb$database;
select timestamp '2018-01-01 10,20,30 40' from rdb$database;
select timestamp '31.05.2017 1:2:3.4567' from rdb$database;
select timestamp '31/05/2017 2:3:4.5678' from rdb$database;
------------------------------------------------------------
-- Date components separator may be a single one (first and second occurence): dash, slash or dot.
select date '2018-01-31' from rdb$database;
select date '2018/01/31' from rdb$database;
select date '2018.01.31' from rdb$database;
select date '2018' from rdb$database;
select date '2018,01,31' from rdb$database;
select date '2018/01.31' from rdb$database;
select date '2018 01 31' from rdb$database;
-- Time components (hours to minutes to seconds) separator may be colon.
select time '10:29:39' from rdb$database;
select time '10/29/39' from rdb$database;
select time '10.29.39' from rdb$database;
select time '10-29-39' from rdb$database;
select time '10 29 39' from rdb$database;
-- Seconds to fractions separator may be dot.
select time '7:3:1.' from rdb$database;
select time '7:3:2.1238' from rdb$database;
select time '7:3:3,1238' from rdb$database;
-- There could *NOT* be any separator (other than spaces) between date and time.
select timestamp '2018-01-01T01:02:03.4567' from rdb$database;
select timestamp '31.05.2017 11:22:33.4455' from rdb$database;
select timestamp '31.05.2017
22:33:44.5577' from rdb$database;
"""
act_1 = isql_act('db_1', test_script_1, substitutions=substitutions_1)
expected_stdout_1 = """
2017-05-31 01:02:03.4567
2018-01-31
2018-01-31
2018-01-31
2018-01-31
10:29:39.0000
07:03:01.0000
07:03:02.1238
2017-05-31 11:22:33.4455
"""
expected_stderr_1 = """
Statement failed, SQLSTATE = 22009
Invalid time zone region: 20 30
Statement failed, SQLSTATE = 22009
Invalid time zone region: ,20,30 40
Statement failed, SQLSTATE = 22018
conversion error from string "31/05/2017 2:3:4.5678"
Statement failed, SQLSTATE = 22018
conversion error from string "2018"
Statement failed, SQLSTATE = 22018
conversion error from string "2018,01,31"
Statement failed, SQLSTATE = 22018
conversion error from string "2018/01.31"
Statement failed, SQLSTATE = 22009
Invalid time zone region: /29/39
Statement failed, SQLSTATE = 22009
Invalid time zone region: .39
Statement failed, SQLSTATE = 22009
Invalid time zone offset: -29-39 - must use format +/-hours:minutes and be between -14:00 and +14:00
Statement failed, SQLSTATE = 22009
Invalid time zone region: 29 39
Statement failed, SQLSTATE = 22009
Invalid time zone region: ,1238
Statement failed, SQLSTATE = 22009
Invalid time zone region: T01:02:03.4567
Statement failed, SQLSTATE = 22009
Invalid time zone region:
22:33:44.5577
"""
@pytest.mark.version('>=4.0')
def test_1(act_1: Action):
    """Run the ISQL script and compare cleaned stderr/stdout with the expected text."""
    act_1.expected_stdout = expected_stdout_1
    act_1.expected_stderr = expected_stderr_1
    act_1.execute()
    # Check stderr first: most of the script's statements are expected to fail.
    assert act_1.clean_stderr == act_1.clean_expected_stderr
    assert act_1.clean_stdout == act_1.clean_expected_stdout
Material/ex python 101 .114/exercicios python 2/ex33.py | Luferrazlek/Exercicio-em-python | 0 | 12760635 | <filename>Material/ex python 101 .114/exercicios python 2/ex33.py
# Read three integers and report the smallest and the largest.
anum1 = int(input('Digite 1° valor: '))
bnum2 = int(input('Digite 2° valor: '))
cnum3 = int(input('Digite 3° valor: '))
# Smallest of the three values.
menor = min(anum1, bnum2, cnum3)
# Largest of the three values.
maior = max(anum1, bnum2, cnum3)
print('O menor valor digitado foi {}'.format(menor))
print('O maior valor digitado foi {}'.format(maior))
| 4.21875 | 4 |
analog-in/python/analog-in.py | intel-iot-devkit/iot-devkit-samples | 92 | 12760636 | #!/usr/bin/env python
# (C) Copyright 2018, Intel Corporation
# SPDX-License-Identifier: MIT
import mraa
import time
# TODO Ensure you are running this sample on a Linux system with;
# * python-mraa bindings installed
# * Permissions to write to the aio system, this often requires root
# The following should read a ADC/AIO value from your mraa supported board.
# 2 is used as the default with this sample but this may not a the AIO on your board.
# NOTE: Not all boards *have* an AIO/ADC please see your boards documention for details.
# AIO pin 2 is used as an example; it may not exist on every board.
aio = mraa.Aio(2)
# Poll the ADC forever, printing the raw reading each iteration.
while True:
    print(aio.read())
| 2.421875 | 2 |
pomp/example_problems/flappy.py | Aand1/pyOptimalMotionPlanning | 0 | 12760637 | from OpenGL.GL import *
from geometric import *
from ..spaces.objective import *
from ..spaces.statespace import *
from ..spaces.configurationspace import *
from ..spaces.edgechecker import *
from ..spaces.metric import *
from ..planners.problem import PlanningProblem
class FlappyControlSpace(ControlSpace):
    """Control space for the Flappy planning problem.

    State is (x, y, vy); a control u is (duration t, thrust in {0, 1}).
    """
    def __init__(self,flappy):
        self.flappy = flappy
    def configurationSpace(self):
        return self.flappy.configurationSpace()
    def controlSet(self,x):
        # Controls are (time, thrust) pairs; the duration component is
        # sampled with a bias (TimeBiasSet) rather than uniformly.
        return MultiSet(TimeBiasSet(self.flappy.time_range,self.flappy.controlSet()),self.flappy.controlSet())
        #return MultiSet(BoxSet([0],[self.flappy.time_range]),self.flappy.controlSet())
    def nextState(self,x,u):
        # Applying the full control duration gives the successor state.
        return self.eval(x,u,1.0)
    def eval(self,x,u,amount):
        # Integrate the dynamics over fraction `amount` of the control duration.
        x_i,y_i,vy_i = x
        t,thrust = u
        tc = t*amount
        #instantaneous version
        #net_acceler = self.flappy.gravity
        #vy_i += thrust*self.flappy.thrust
        #yilun's version: thrust acts as a constant acceleration while applied
        net_acceler = self.flappy.gravity + thrust*self.flappy.thrust
        # Constant horizontal velocity; vertical motion under constant acceleration.
        return [x_i+self.flappy.v_x*tc,
                y_i+vy_i*tc+0.5*net_acceler*(tc**2),
                vy_i+net_acceler*tc]
    def interpolator(self,x,u):
        return LambdaInterpolator(lambda s:self.eval(x,u,s),self.configurationSpace(),10)
class Flappy:
    """Problem definition for a Flappy-Bird-like planning domain.

    The bird moves right at constant speed v_x; the only control is a
    binary thrust fighting gravity. Obstacles are axis-aligned boxes
    (x, y, width, height).
    """
    def __init__(self):
        self.x_range = 1000
        self.y_range = 600
        self.min_altitude = 300
        self.max_velocity = 40
        # State is (x, y, vy).
        self.start_state = [50, 250, 0]
        self.goal_state = [950, 200, 0]
        self.goal_radius = 50
        self.time_range = 10
        #u = lambda:round(random.random())
        # NOTE(review): this empty list is immediately overwritten below.
        self.obstacles = []
        self.obstacles = [(175, 450, 50, 100), (175, 0, 50, 100), (175, 150, 50, 200),
                          (375,200, 50, 300),(375, 0, 50, 100),
                          (575, 500, 50, 100), (575, 0, 50, 125), (575, 200, 50, 200),
                          (775, 200, 50, 400)]
        self.v_x = 5
        self.gravity = -1
        self.thrust = 4
    def controlSet(self):
        # Thrust is binary: off ([0]) or on ([1]).
        return FiniteSet([[0],[1]])
    def controlSpace(self):
        return FlappyControlSpace(self)
    def workspace(self):
        # 2-D workspace with the box obstacles, used for visualization.
        wspace = Geometric2DCSpace()
        wspace.box.bmin = [0,0]
        wspace.box.bmax = [self.x_range,self.y_range]
        for o in self.obstacles:
            wspace.addObstacle(Box(o[0],o[1],o[0]+o[2],o[1]+o[3]))
        return wspace
    def configurationSpace(self):
        # Workspace plus a bounded vertical-velocity dimension.
        wspace = Geometric2DCSpace()
        wspace.box.bmin = [0,0]
        wspace.box.bmax = [self.x_range,self.y_range]
        for o in self.obstacles:
            wspace.addObstacle(Box(o[0],o[1],o[0]+o[2],o[1]+o[3]))
        res =  MultiConfigurationSpace(wspace,BoxConfigurationSpace([-self.max_velocity],[self.max_velocity]))
        return res
    def startState(self):
        return self.start_state
    def goalSet(self):
        # Axis-aligned box of radius r around the goal, any vertical velocity.
        r = self.goal_radius
        return BoxSubset(self.configurationSpace(),
                         [self.goal_state[0]-r,self.goal_state[1]-r,-self.max_velocity],
                         [self.goal_state[0]+r,self.goal_state[1]+r,self.max_velocity])
class FlappyObjectiveFunction(ObjectiveFunction):
    """Given a function pointwise(x,u), produces the incremental cost
    by incrementing over the interpolator's length.
    """
    def __init__(self,flappy,timestep=0.2):
        self.flappy = flappy
        self.space = flappy.controlSpace()
        self.timestep = timestep
    def incremental(self,x,u):
        # Approximate the path length of the control's trajectory by
        # summing Euclidean distances between samples every `timestep`.
        e = self.space.interpolator(x,u)
        tmax = u[0]
        t = 0
        c = 0
        while t < tmax:
            t = min(tmax,t+self.timestep)
            xnext = e.eval(t / tmax)
            c += vectorops.distance(x,xnext)
            x = xnext
        # Zero-duration controls never enter the loop, so tmax == 0 is safe.
        return c
def flappyTest():
    """Build the Flappy planning problem with a path-length objective."""
    p = Flappy()
    objective = FlappyObjectiveFunction(p)
    return PlanningProblem(p.controlSpace(),p.startState(),p.goalSet(),
                           objective=objective,
                           visualizer=p.workspace(),
                           euclidean = True)
| 2.3125 | 2 |
tests/digraph/test_isomorphic.py | enavarro51/retworkx | 0 | 12760638 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
import unittest
import retworkx
class TestIsomorphic(unittest.TestCase):
    """Tests for retworkx directed-graph (sub)graph isomorphism and VF2 mapping."""

    def test_empty_isomorphic(self):
        dag_a = retworkx.PyDAG()
        dag_b = retworkx.PyDAG()
        for id_order in [False, True]:
            with self.subTest(id_order=id_order):
                self.assertTrue(retworkx.is_isomorphic(dag_a, dag_b, id_order=id_order))

    def test_empty_isomorphic_compare_nodes(self):
        dag_a = retworkx.PyDAG()
        dag_b = retworkx.PyDAG()
        for id_order in [False, True]:
            with self.subTest(id_order=id_order):
                self.assertTrue(
                    retworkx.is_isomorphic(dag_a, dag_b, lambda x, y: x == y, id_order=id_order)
                )

    def test_isomorphic_identical(self):
        dag_a = retworkx.PyDAG()
        dag_b = retworkx.PyDAG()
        node_a = dag_a.add_node("a_1")
        dag_a.add_child(node_a, "a_2", "a_1")
        dag_a.add_child(node_a, "a_3", "a_2")
        node_b = dag_b.add_node("a_1")
        dag_b.add_child(node_b, "a_2", "a_1")
        dag_b.add_child(node_b, "a_3", "a_2")
        for id_order in [False, True]:
            with self.subTest(id_order=id_order):
                self.assertTrue(retworkx.is_isomorphic(dag_a, dag_b, id_order=id_order))

    def test_isomorphic_mismatch_node_data(self):
        dag_a = retworkx.PyDAG()
        dag_b = retworkx.PyDAG()
        node_a = dag_a.add_node("a_1")
        dag_a.add_child(node_a, "a_2", "a_1")
        dag_a.add_child(node_a, "a_3", "a_2")
        node_b = dag_b.add_node("b_1")
        dag_b.add_child(node_b, "b_2", "b_1")
        dag_b.add_child(node_b, "b_3", "b_2")
        # Without a node matcher, differing payloads are still isomorphic.
        for id_order in [False, True]:
            with self.subTest(id_order=id_order):
                self.assertTrue(retworkx.is_isomorphic(dag_a, dag_b, id_order=id_order))

    def test_isomorphic_compare_nodes_mismatch_node_data(self):
        dag_a = retworkx.PyDAG()
        dag_b = retworkx.PyDAG()
        node_a = dag_a.add_node("a_1")
        dag_a.add_child(node_a, "a_2", "a_1")
        dag_a.add_child(node_a, "a_3", "a_2")
        node_b = dag_b.add_node("b_1")
        dag_b.add_child(node_b, "b_2", "b_1")
        dag_b.add_child(node_b, "b_3", "b_2")
        for id_order in [False, True]:
            with self.subTest(id_order=id_order):
                self.assertFalse(
                    retworkx.is_isomorphic(dag_a, dag_b, lambda x, y: x == y, id_order=id_order)
                )

    def test_is_isomorphic_nodes_compare_raises(self):
        dag_a = retworkx.PyDAG()
        dag_b = retworkx.PyDAG()
        node_a = dag_a.add_node("a_1")
        dag_a.add_child(node_a, "a_2", "a_1")
        dag_a.add_child(node_a, "a_3", "a_2")
        node_b = dag_b.add_node("b_1")
        dag_b.add_child(node_b, "b_2", "b_1")
        dag_b.add_child(node_b, "b_3", "b_2")

        def compare_nodes(a, b):
            raise TypeError("Failure")

        # Fixed: args were previously packed into a single tuple, so the
        # TypeError came from the arity mismatch, not from compare_nodes.
        self.assertRaises(TypeError, retworkx.is_isomorphic, dag_a, dag_b, compare_nodes)

    def test_isomorphic_compare_nodes_identical(self):
        dag_a = retworkx.PyDAG()
        dag_b = retworkx.PyDAG()
        node_a = dag_a.add_node("a_1")
        dag_a.add_child(node_a, "a_2", "a_1")
        dag_a.add_child(node_a, "a_3", "a_2")
        node_b = dag_b.add_node("a_1")
        dag_b.add_child(node_b, "a_2", "a_1")
        dag_b.add_child(node_b, "a_3", "a_2")
        for id_order in [False, True]:
            with self.subTest(id_order=id_order):
                self.assertTrue(
                    retworkx.is_isomorphic(dag_a, dag_b, lambda x, y: x == y, id_order=id_order)
                )

    def test_isomorphic_compare_edges_identical(self):
        dag_a = retworkx.PyDAG()
        dag_b = retworkx.PyDAG()
        node_a = dag_a.add_node("a_1")
        dag_a.add_child(node_a, "a_2", "a_1")
        dag_a.add_child(node_a, "a_3", "a_2")
        node_b = dag_b.add_node("a_1")
        dag_b.add_child(node_b, "a_2", "a_1")
        dag_b.add_child(node_b, "a_3", "a_2")
        for id_order in [False, True]:
            with self.subTest(id_order=id_order):
                self.assertTrue(
                    retworkx.is_isomorphic(
                        dag_a,
                        dag_b,
                        edge_matcher=lambda x, y: x == y,
                        id_order=id_order,
                    )
                )

    def test_isomorphic_compare_nodes_with_removals(self):
        # Mimics a DAGCircuit: build two equivalent wire DAGs, one of which
        # had an intermediate op node removed and its wires reconnected.
        dag_a = retworkx.PyDAG()
        dag_b = retworkx.PyDAG()
        qr_0_in = dag_a.add_node("qr[0]")
        qr_1_in = dag_a.add_node("qr[1]")
        cr_0_in = dag_a.add_node("cr[0]")
        qr_0_out = dag_a.add_node("qr[0]")
        qr_1_out = dag_a.add_node("qr[1]")
        cr_0_out = dag_a.add_node("qr[0]")
        cu1 = dag_a.add_child(qr_0_in, "cu1", "qr[0]")
        dag_a.add_edge(qr_1_in, cu1, "qr[1]")
        measure_0 = dag_a.add_child(cr_0_in, "measure", "cr[0]")
        dag_a.add_edge(cu1, measure_0, "qr[0]")
        measure_1 = dag_a.add_child(cu1, "measure", "qr[1]")
        dag_a.add_edge(measure_0, measure_1, "cr[0]")
        dag_a.add_edge(measure_1, qr_1_out, "qr[1]")
        dag_a.add_edge(measure_1, cr_0_out, "cr[0]")
        dag_a.add_edge(measure_0, qr_0_out, "qr[0]")
        dag_a.remove_node(cu1)
        dag_a.add_edge(qr_0_in, measure_0, "qr[0]")
        dag_a.add_edge(qr_1_in, measure_1, "qr[1]")
        qr_0_in = dag_b.add_node("qr[0]")
        qr_1_in = dag_b.add_node("qr[1]")
        cr_0_in = dag_b.add_node("cr[0]")
        qr_0_out = dag_b.add_node("qr[0]")
        qr_1_out = dag_b.add_node("qr[1]")
        cr_0_out = dag_b.add_node("qr[0]")
        measure_0 = dag_b.add_child(cr_0_in, "measure", "cr[0]")
        dag_b.add_edge(qr_0_in, measure_0, "qr[0]")
        measure_1 = dag_b.add_child(qr_1_in, "measure", "qr[1]")
        dag_b.add_edge(measure_1, qr_1_out, "qr[1]")
        dag_b.add_edge(measure_1, cr_0_out, "cr[0]")
        dag_b.add_edge(measure_0, measure_1, "cr[0]")
        dag_b.add_edge(measure_0, qr_0_out, "qr[0]")
        for id_order in [False, True]:
            with self.subTest(id_order=id_order):
                self.assertTrue(
                    retworkx.is_isomorphic(dag_a, dag_b, lambda x, y: x == y, id_order=id_order)
                )

    def test_isomorphic_compare_nodes_with_removals_deepcopy(self):
        # Same scenario as above, but the comparison runs on deep copies.
        dag_a = retworkx.PyDAG()
        dag_b = retworkx.PyDAG()
        qr_0_in = dag_a.add_node("qr[0]")
        qr_1_in = dag_a.add_node("qr[1]")
        cr_0_in = dag_a.add_node("cr[0]")
        qr_0_out = dag_a.add_node("qr[0]")
        qr_1_out = dag_a.add_node("qr[1]")
        cr_0_out = dag_a.add_node("qr[0]")
        cu1 = dag_a.add_child(qr_0_in, "cu1", "qr[0]")
        dag_a.add_edge(qr_1_in, cu1, "qr[1]")
        measure_0 = dag_a.add_child(cr_0_in, "measure", "cr[0]")
        dag_a.add_edge(cu1, measure_0, "qr[0]")
        measure_1 = dag_a.add_child(cu1, "measure", "qr[1]")
        dag_a.add_edge(measure_0, measure_1, "cr[0]")
        dag_a.add_edge(measure_1, qr_1_out, "qr[1]")
        dag_a.add_edge(measure_1, cr_0_out, "cr[0]")
        dag_a.add_edge(measure_0, qr_0_out, "qr[0]")
        dag_a.remove_node(cu1)
        dag_a.add_edge(qr_0_in, measure_0, "qr[0]")
        dag_a.add_edge(qr_1_in, measure_1, "qr[1]")
        qr_0_in = dag_b.add_node("qr[0]")
        qr_1_in = dag_b.add_node("qr[1]")
        cr_0_in = dag_b.add_node("cr[0]")
        qr_0_out = dag_b.add_node("qr[0]")
        qr_1_out = dag_b.add_node("qr[1]")
        cr_0_out = dag_b.add_node("qr[0]")
        measure_0 = dag_b.add_child(cr_0_in, "measure", "cr[0]")
        dag_b.add_edge(qr_0_in, measure_0, "qr[0]")
        measure_1 = dag_b.add_child(qr_1_in, "measure", "qr[1]")
        dag_b.add_edge(measure_1, qr_1_out, "qr[1]")
        dag_b.add_edge(measure_1, cr_0_out, "cr[0]")
        dag_b.add_edge(measure_0, measure_1, "cr[0]")
        dag_b.add_edge(measure_0, qr_0_out, "qr[0]")
        for id_order in [False, True]:
            with self.subTest(id_order=id_order):
                self.assertTrue(
                    retworkx.is_isomorphic(
                        copy.deepcopy(dag_a),
                        copy.deepcopy(dag_b),
                        lambda x, y: x == y,
                        id_order=id_order,
                    )
                )

    def test_digraph_isomorphic_parallel_edges_with_edge_matcher(self):
        graph = retworkx.PyDiGraph()
        graph.extend_from_weighted_edge_list([(0, 1, "a"), (0, 1, "b"), (1, 2, "c")])
        self.assertTrue(retworkx.is_isomorphic(graph, graph, edge_matcher=lambda x, y: x == y))

    def test_digraph_isomorphic_self_loop(self):
        graph = retworkx.PyDiGraph()
        graph.add_nodes_from([0])
        graph.add_edges_from([(0, 0, "a")])
        self.assertTrue(retworkx.is_isomorphic(graph, graph))

    def test_digraph_non_isomorphic_edge_mismatch_self_loop(self):
        graph = retworkx.PyDiGraph()
        graph.add_nodes_from([0])
        graph.add_edges_from([(0, 0, "a")])
        second_graph = retworkx.PyDiGraph()
        second_graph.add_nodes_from([0])
        second_graph.add_edges_from([(0, 0, "b")])
        self.assertFalse(
            retworkx.is_isomorphic(graph, second_graph, edge_matcher=lambda x, y: x == y)
        )

    def test_digraph_non_isomorphic_rule_out_incoming(self):
        graph = retworkx.PyDiGraph()
        graph.add_nodes_from([0, 1, 2, 3])
        graph.add_edges_from_no_data([(0, 1), (0, 2), (2, 1)])
        second_graph = retworkx.PyDiGraph()
        second_graph.add_nodes_from([0, 1, 2, 3])
        second_graph.add_edges_from_no_data([(0, 1), (0, 2), (3, 1)])
        self.assertFalse(retworkx.is_isomorphic(graph, second_graph, id_order=True))

    def test_digraph_non_isomorphic_rule_ins_outgoing(self):
        graph = retworkx.PyDiGraph()
        graph.add_nodes_from([0, 1, 2, 3])
        graph.add_edges_from_no_data([(1, 0), (2, 0), (1, 2)])
        second_graph = retworkx.PyDiGraph()
        second_graph.add_nodes_from([0, 1, 2, 3])
        second_graph.add_edges_from_no_data([(1, 0), (2, 0), (1, 3)])
        self.assertFalse(retworkx.is_isomorphic(graph, second_graph, id_order=True))

    def test_digraph_non_isomorphic_rule_ins_incoming(self):
        graph = retworkx.PyDiGraph()
        graph.add_nodes_from([0, 1, 2, 3])
        graph.add_edges_from_no_data([(1, 0), (2, 0), (2, 1)])
        second_graph = retworkx.PyDiGraph()
        second_graph.add_nodes_from([0, 1, 2, 3])
        second_graph.add_edges_from_no_data([(1, 0), (2, 0), (3, 1)])
        self.assertFalse(retworkx.is_isomorphic(graph, second_graph, id_order=True))

    def test_isomorphic_parallel_edges(self):
        first = retworkx.PyDiGraph()
        first.extend_from_edge_list([(0, 1), (0, 1), (1, 2), (2, 3)])
        second = retworkx.PyDiGraph()
        second.extend_from_edge_list([(0, 1), (1, 2), (1, 2), (2, 3)])
        self.assertFalse(retworkx.is_isomorphic(first, second))

    def test_digraph_isomorphic_insufficient_call_limit(self):
        graph = retworkx.generators.directed_path_graph(5)
        self.assertFalse(retworkx.is_isomorphic(graph, graph, call_limit=2))

    def test_digraph_vf2_mapping_identical(self):
        graph = retworkx.generators.directed_grid_graph(2, 2)
        second_graph = retworkx.generators.directed_grid_graph(2, 2)
        mapping = retworkx.digraph_vf2_mapping(graph, second_graph)
        self.assertEqual(next(mapping), {0: 0, 1: 1, 2: 2, 3: 3})

    def test_digraph_vf2_mapping_identical_removals(self):
        graph = retworkx.generators.directed_path_graph(2)
        second_graph = retworkx.generators.directed_path_graph(4)
        second_graph.remove_nodes_from([1, 2])
        second_graph.add_edge(0, 3, None)
        mapping = retworkx.digraph_vf2_mapping(graph, second_graph)
        self.assertEqual({0: 0, 1: 3}, next(mapping))

    def test_digraph_vf2_mapping_identical_removals_first(self):
        second_graph = retworkx.generators.directed_path_graph(2)
        graph = retworkx.generators.directed_path_graph(4)
        graph.remove_nodes_from([1, 2])
        graph.add_edge(0, 3, None)
        mapping = retworkx.digraph_vf2_mapping(graph, second_graph)
        self.assertEqual({0: 0, 3: 1}, next(mapping))

    def test_digraph_vf2_mapping_identical_vf2pp(self):
        graph = retworkx.generators.directed_grid_graph(2, 2)
        second_graph = retworkx.generators.directed_grid_graph(2, 2)
        mapping = retworkx.digraph_vf2_mapping(graph, second_graph, id_order=False)
        self.assertEqual(next(mapping), {0: 0, 1: 1, 2: 2, 3: 3})

    def test_digraph_vf2_mapping_identical_removals_vf2pp(self):
        graph = retworkx.generators.directed_path_graph(2)
        second_graph = retworkx.generators.directed_path_graph(4)
        second_graph.remove_nodes_from([1, 2])
        second_graph.add_edge(0, 3, None)
        mapping = retworkx.digraph_vf2_mapping(graph, second_graph, id_order=False)
        self.assertEqual({0: 0, 1: 3}, next(mapping))

    def test_digraph_vf2_mapping_identical_removals_first_vf2pp(self):
        second_graph = retworkx.generators.directed_path_graph(2)
        graph = retworkx.generators.directed_path_graph(4)
        graph.remove_nodes_from([1, 2])
        graph.add_edge(0, 3, None)
        mapping = retworkx.digraph_vf2_mapping(graph, second_graph, id_order=False)
        self.assertEqual({0: 0, 3: 1}, next(mapping))

    def test_digraph_vf2_number_of_valid_mappings(self):
        # The directed 3-mesh is vertex-transitive, so 3! = 6 self-mappings.
        graph = retworkx.generators.directed_mesh_graph(3)
        mapping = retworkx.digraph_vf2_mapping(graph, graph, id_order=True)
        total = 0
        for _ in mapping:
            total += 1
        self.assertEqual(total, 6)

    def test_empty_digraph_vf2_mapping(self):
        g_a = retworkx.PyDiGraph()
        g_b = retworkx.PyDiGraph()
        for id_order in [False, True]:
            with self.subTest(id_order=id_order):
                mapping = retworkx.digraph_vf2_mapping(g_a, g_b, id_order=id_order, subgraph=False)
                self.assertEqual({}, next(mapping))
| 2.234375 | 2 |
Data fetching scripts/questiontext_thisdumps.py | KahiniWadhawan/MinasTirith | 0 | 12760639 | '''Generates words that are dependent on the word "This" for all question text in train and test'''
from __future__ import division
import operator
import nltk
import string
from csv import DictReader, DictWriter
import wikipedia
import re
import time
import cPickle as pickle
from pattern.en import tag
from collections import Counter, defaultdict
import corenlp
from corenlp import StanfordCoreNLP
# Used to avoid spliting on period that are part of sentence
sentence_finder = re.compile(r"""
# Split sentences on whitespace between them.
(?: # Group for two positive lookbehinds.
(?<=[.!?]) # Either an end of sentence punct,
| (?<=[.!?]['"]) # or end of sentence punct and quote.
) # End group of two positive lookbehinds.
(?<! Mr\. ) # Don't end sentence on "Mr."
(?<! Mrs\. ) # Don't end sentence on "Mrs."
(?<! Jr\. ) # Don't end sentence on "Jr."
(?<! Dr\. ) # Don't end sentence on "Dr."
(?<! Prof\. ) # Don't end sentence on "Prof."
(?<! Sr\. ) # Don't end sentence on "Sr."
(?<! U\.S\. ) # Don't end sentence on "U.S."
(?<! U\.K\. ) # Don't end sentence on "U.K."
(?<! U\.S\.A\. ) # Don't end sentence on "U.S.A."
\s+ # Split on whitespace between sentences.
""",
re.IGNORECASE | re.VERBOSE)
# Single shared CoreNLP pipeline instance, reused for every parse call.
sp = StanfordCoreNLP()
# Maps (question id, sentence position) -> words that depend on "This"/"this".
this_dictionary = defaultdict(list)
# Output path for the pickled dictionary.
this_dump = "this_qtext_dump_v0.txt"
# Pass 1: collect "This"-dependent words for every training question.
train = DictReader(open("train.csv",'r'))
count = 0
for ii in train:
    count += 1
    print "train", count  # progress indicator (Python 2 print statement)
    questionId = ii['Question ID']
    sentencePos = ii['Sentence Position']
    questionText = ii['Question Text']
    # Parses and finds the words dependent on "This"
    for line in sentence_finder.split(questionText):
        sparse = corenlp.corenlp.StanfordCoreNLP.raw_parse(sp, line)
        # Keep the governor word of every dependency whose dependent is "This"/"this".
        this_dependencies = [y for x, y, z in sparse['sentences'][0]['dependencies'] if (z == u'This' or z == u'this' )]
        this_dictionary[(questionId, sentencePos)] += this_dependencies
    # De-duplicate.  NOTE(review): this rebinds the defaultdict value to a
    # set, so a later `+=` with a list for the same key would raise --
    # assumes each (id, position) pair appears only once per file; verify.
    this_dictionary[(questionId, sentencePos)] = set(this_dictionary[(questionId, sentencePos)])
count = 0
test = DictReader(open("test.csv",'r'))
# Same as above, for test file
for ii in test:
    count +=1
    print "test", count  # progress indicator (Python 2 print statement)
    questionId = ii['Question ID']
    sentencePos = ii['Sentence Position']
    questionText = ii['Question Text']
    # Parses and finds the words dependent on "This"
    for line in sentence_finder.split(questionText):
        sparse = corenlp.corenlp.StanfordCoreNLP.raw_parse(sp, line)
        this_dependencies = [y for x, y, z in sparse['sentences'][0]['dependencies'] if (z == u'This' or z == u'this' )]
        # NOTE(review): if a (questionId, sentencePos) key already became a
        # set in the train pass, this `+=` with a list would raise TypeError
        # -- assumes train/test keys never overlap; verify.
        this_dictionary[(questionId, sentencePos)] += this_dependencies
    this_dictionary[(questionId, sentencePos)] = set(this_dictionary[(questionId, sentencePos)])
# Dumps here: persist the collected dependencies with pickle (cPickle).
with open(this_dump, 'w') as fp:
    pickle.dump(this_dictionary, fp)
    fp.close()  # redundant: the with-block already closes fp
| 2.859375 | 3 |
AccGenBot/__init__.py | AL-Noman21/AccGenBot | 8 | 12760640 | <gh_stars>1-10
"""
AccGenBot
Copyright (C) 2021 TgxBotz
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
See < https://github.com/TgxBotz/AccGenBot/blob/master/LICENSE >
for the license.
"""
import os
import logging
from telethon import TelegramClient
from Configs import Config
# Root logging config: INFO level with a Telethon-style line format.
logging.basicConfig(format='[%(levelname) 5s/%(asctime)s] %(name)s: %(message)s',
                    level=logging.INFO)
# Telethon client; 'AccGen' is the session name, credentials from Configs.Config.
bot = TelegramClient('AccGen', api_id=Config.API_ID, api_hash=Config.API_HASH)
# Log in as a bot account; AccGen is the started (connected) client.
AccGen = bot.start(bot_token=Config.TOKEN)
| 2.171875 | 2 |
src/flask_uploads/extensions.py | jugmac00/flask-uploads | 0 | 12760641 | <reponame>jugmac00/flask-uploads<gh_stars>0
"""Extension presets and extension configuration."""
import os
from typing import Iterable
from typing import cast
# Preset extension groups, usable as the `extensions` configuration of an
# upload set.  Each is a tuple of lowercase extensions without leading dots.

# This contains archive and compression formats (.gz, .bz2, .zip, .tar,
# .tgz, .txz, and .7z).
ARCHIVES = tuple('gz bz2 zip tar tgz txz 7z'.split())

# This contains audio file types (.wav, .mp3, .aac, .ogg, .oga, and .flac).
AUDIO = tuple('wav mp3 aac ogg oga flac'.split())

# This is for structured data files (.csv, .ini, .json, .plist, .xml, .yaml,
# and .yml).
DATA = tuple('csv ini json plist xml yaml yml'.split())

# This contains various office document formats (.rtf, .odf, .ods, .gnumeric,
# .abw, .doc, .docx, .xls, .xlsx and .pdf). Note that the macro-enabled
# versions of Microsoft Office 2007 files are not included.
DOCUMENTS = tuple('rtf odf ods gnumeric abw doc docx xls xlsx pdf'.split())

# This contains shared libraries and executable files (.so, .exe and .dll).
# Most of the time, you will not want to allow this - it's better suited for
# use with `AllExcept`.
EXECUTABLES = tuple('so exe dll'.split())

# This contains basic image types that are viewable from most browsers (.jpg,
# .jpe, .jpeg, .png, .gif, .svg, .bmp and .webp).
IMAGES = tuple('jpg jpe jpeg png gif svg bmp webp'.split())

# This contains various types of scripts (.js, .php, .pl, .py .rb, and .sh).
# If your Web server has PHP installed and set to auto-run, you might want to
# add ``php`` to the DENY setting.
SCRIPTS = tuple('js php pl py rb sh'.split())

# This contains nonexecutable source files - those which need to be
# compiled or assembled to binaries to be used. They are generally safe to
# accept, as without an existing RCE vulnerability, they cannot be compiled,
# assembled, linked, or executed. Supports C, C++, Ada, Rust, Go (Golang),
# FORTRAN, D, Java, C Sharp, F Sharp (compiled only), COBOL, Haskell, and
# assembly.
SOURCE = tuple(
    (
        'c cpp c++ h hpp h++ cxx hxx hdl '  # C/C++
        'ada '  # Ada
        'rs '  # Rust
        'go '  # Go
        'f for f90 f95 f03 '  # FORTRAN
        'd dd di '  # D
        'java '  # Java
        'hs '  # Haskell
        'cs '  # C Sharp
        'fs '  # F Sharp compiled source (NOT .fsx, which is interactive-ready)
        'cbl cob '  # COBOL
        'asm s '  # Assembly
    ).split()
)

# This just contains plain text files (.txt).
TEXT = ('txt',)

# The default allowed extensions - `TEXT`, `DOCUMENTS`, `DATA`, and `IMAGES`.
DEFAULTS = TEXT + DOCUMENTS + IMAGES + DATA
class All:
    """A universal container: membership tests always succeed.

    Use the predefined module-level ``ALL`` instance to allow every
    extension to be uploaded.
    """

    def __contains__(self, _item: str) -> bool:
        # Unconditionally claim membership for any value.
        return True
#: This "contains" all items. You can use it to allow all extensions to be
#: uploaded.
# The cast to Iterable[str] only satisfies type checkers expecting an
# iterable of extensions; `All` itself implements just `__contains__`.
x = All()
ALL = cast(Iterable[str], x)
class AllExcept:
    """Allow every file type except those listed in ``items``.

    For example, ``AllExcept(('exe', 'iso'))`` passed to the ``UploadSet``
    constructor as ``extensions`` bans .exe and .iso files while allowing
    everything else.  Any container works, e.g.
    ``AllExcept(SCRIPTS + EXECUTABLES)``.
    """

    def __init__(self, items: Iterable[str]) -> None:
        self.items = items

    def __contains__(self, item: str) -> bool:
        blocked = item in self.items
        return not blocked
def extension(filename: str) -> str:
    """Return *filename*'s extension without the leading dot ('' if none)."""
    _, ext = os.path.splitext(filename)
    # os.path.splitext keeps the '.' separator; strip exactly one.
    return ext[1:] if ext.startswith('.') else ext
def lowercase_ext(filename: str) -> str:
    """Lowercase only the extension of *filename*, leaving the stem intact.

    Helper used by UploadSet.save so processed files compare against the
    configured extensions in a consistent case.

    .. versionchanged:: 0.1.4
        Filenames without extensions are no longer lowercased, only the
        extension is returned in lowercase, if an extension exists.

    :param filename: The filename to ensure has a lowercase extension.
    """
    # For consistency with os.path.splitext, a filename with no dot is
    # treated as having no extension and is returned unchanged.
    if '.' not in filename:
        return filename
    stem, ext = os.path.splitext(filename)
    return stem + ext.lower()
| 2.03125 | 2 |
docs/animation/game.py | pauleveritt/arcade_tutorial | 7 | 12760642 | import arcade
# Window configuration shared by on_draw and main.
GAME_TITLE = 'Drawing Example'
WINDOW_HEIGHT = 600
WINDOW_WIDTH = 600
def on_draw(delta_time):
    """Scheduled draw callback: clear the frame and draw the title text.

    ``delta_time`` is supplied by arcade.schedule but is unused here.
    """
    arcade.start_render()
    arcade.draw_text(GAME_TITLE, 200, 300, arcade.color.BLACK, 12)
def main():
    """Open the window and schedule on_draw to redraw twice per second."""
    arcade.open_window(WINDOW_WIDTH, WINDOW_HEIGHT, GAME_TITLE)
    arcade.set_background_color(arcade.color.WHEAT)
    arcade.schedule(on_draw, 1 / 2)  # call on_draw every half second
    arcade.finish_render()
    arcade.run()


if __name__ == '__main__':
    main()
| 2.34375 | 2 |
microbench/disjoint-choice/yacc/yacc_generator.py | yshinya6/Cello | 0 | 12760643 | <gh_stars>0
import sys
import random
alphabets = []
# (first, last) character ranges: lowercase, uppercase, digits -> 62 symbols.
codes = (('a','z'),('A','Z'),('0','9'))
# create alphabet list
for r in codes:
    chars = map(chr, range(ord(r[0]),ord(r[1])+1))
    alphabets.extend(chars)
print alphabets  # Python 2 print statement
# Read the fixed prologue/epilogue emitted around every generated grammar.
# Bug fix: the files were opened and closed manually, leaking the handle if
# read() raised; `with` guarantees they are closed.
with open("header") as header:
    hCode = header.read()
with open("footer") as footer:
    fCode = footer.read()
# Emit one yacc grammar per choice width: R0.y ... R61.y.  Grammar Rn's
# nonterminal N is a disjoint choice with n+1 alternatives, one per symbol.
for n in range(0,62):
    ntermList = []
    startProduction = ["S: N | S N;\n"]
    grammarList = ["N:"]
    cnt = 0
    for alt in alphabets:
        # Each alternative Nx matches its symbol twice, e.g. Na:'a' 'a'.
        ntermList.append("N{a}:\'{a}\' \'{a}\'\n".format(a=alt))
        grammarList.append("N{a}".format(a=alt))
        cnt = cnt + 1
        if cnt > n:
            # Stop once n+1 alternatives have been added.
            break
        else:
            grammarList.append("|")
    grammarList.append(";\n")
    f = open("R{n}.y".format(n=n), 'w')
    f.write(hCode)
    f.write("".join(startProduction))
    f.write("".join(grammarList))
    f.write("".join(ntermList))
    f.write(fCode)
    f.close()
    # print "".join(grammarList)
| 3.015625 | 3 |
solns/python/5.py | retiman/project-euler | 7 | 12760644 | from functools import reduce
from math import gcd
def lcm(a, b):
    """Least common multiple of two integers."""
    # Divide before multiplying to keep intermediates small.
    return a // gcd(a, b) * b
# Project Euler #5: smallest number evenly divisible by all of 1..20.
# Fold lcm over the range; the seed 20 is absorbed since 20 is in the range.
result = reduce(lcm, range(1, 21), 20)
print(result)
assert result == 232792560
| 3.40625 | 3 |
# 009.py -- for-loop practice: iterate over a list and over a string.

리스트 = [i for i in range(10)]  # the list [0, 1, ..., 9]

for i in 리스트 :
    print(i)

# Bug fix in the expected-output block below: range(10) starts at 0,
# so the loop prints 0 through 9 (the original notes omitted the 0).
'''
0
1
2
3
4
5
6
7
8
9
'''

# Iterating a string yields one character at a time.
for i in 'kimminsu' :
    print(i)

'''
k
i
m
m
i
n
s
u
'''
| 3.234375 | 3 |
examples/05-payments-history.py | elcolumbio/mollie-api-python | 0 | 12760646 | <filename>examples/05-payments-history.py
# Example: How to retrieve your payments history.
#
import os
from mollie.api.client import Client
from mollie.api.error import Error
def main():
    """Fetch the first page of payments and render them as an HTML fragment.

    Returns a str: the payment list on success, or an error description
    when the Mollie API call fails.
    """
    try:
        #
        # Initialize the Mollie API library with your API key.
        #
        # See: https://www.mollie.com/dashboard/settings/profiles
        #
        api_key = os.environ.get('MOLLIE_API_KEY', 'test_test')
        mollie_client = Client()
        mollie_client.set_api_key(api_key)
        #
        # Get the first page of payments for this API key ordered by newest.
        #
        payments = mollie_client.payments.list()
        body = ''
        if not len(payments):
            body += '<p>You have no payments. You can create one from the examples.</p>'
            return body
        body += '<p>Showing the first page of payments for this API key</p>'
        # One line per payment: "<currency> <value>, status: '<status>'".
        for payment in payments:
            body += "{curr} {value}, status: '{status}'<br>".format(
                curr=payment.amount['currency'], value=payment.amount['value'], status=payment.status)
        return body
    except Error as err:
        return 'API call failed: {error}'.format(error=err)


if __name__ == '__main__':
    print(main())
| 2.96875 | 3 |
libtc/__init__.py | JohnDoee/libtc | 8 | 12760647 | <reponame>JohnDoee/libtc<filename>libtc/__init__.py<gh_stars>1-10
from .bencode import bdecode, bencode
from .clients import *
from .exceptions import FailedToExecuteException, LibTorrentClientException
from .management import move_torrent
from .torrent import *
from .utils import TorrentProblems
# Package version string.
__version__ = "1.1.1"

# Public API of the package; the names are re-exported from the submodule
# imports above.  NOTE(review): "parse_libtc_url" is listed here but is not
# imported explicitly -- presumably it comes from a star import; verify.
__all__ = [
    "DelugeClient",
    "RTorrentClient",
    "TransmissionClient",
    "QBittorrentClient",
    "LilTorrentClient",
    "FakeClient",
    "TORRENT_CLIENT_MAPPING",
    "TorrentData",
    "TorrentState",
    "TorrentFile",
    "bencode",
    "bdecode",
    "LibTorrentClientException",
    "FailedToExecuteException",
    "move_torrent",
    "parse_libtc_url",
    "TorrentProblems",
]
| 1.625 | 2 |
communication/logging/Logger.py | vladimir2240/orders_searcher | 0 | 12760648 | <reponame>vladimir2240/orders_searcher<filename>communication/logging/Logger.py
import logging
# Shared log line layout: "timestamp - [LEVEL] - logger name: message".
# (The original had a pointless f-string prefix; %(...)s placeholders are
# consumed by logging.Formatter, not by f-string interpolation.)
_log_format = "%(asctime)s - [%(levelname)s] - %(name)s: %(message)s"


def get_stream_handler():
    """Build a StreamHandler emitting INFO+ records in ``_log_format``."""
    stream_handler = logging.StreamHandler()
    stream_handler.setLevel(logging.INFO)
    stream_handler.setFormatter(logging.Formatter(_log_format))
    return stream_handler


def get_logger(name):
    """Return the logger called *name*, configured once at INFO level.

    Bug fix: the original attached a new stream handler on every call, so
    requesting the same logger twice duplicated every log line.  The
    ``logger.handlers`` guard attaches the handler only once.
    """
    logger = logging.getLogger(name)
    logger.setLevel(logging.INFO)
    if not logger.handlers:
        logger.addHandler(get_stream_handler())
    return logger
day04/2.py | lvrcek/advent-of-code-2020 | 2 | 12760649 | <gh_stars>1-10
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Advent of Code 2020
Day 4, Part 2
"""
class Passport:
    """Mutable record of passport fields; -1 marks a missing field.

    NOTE(review): the constructor parameter is named ``hfc`` but fills the
    'hcl' (hair colour) field -- looks like a typo, kept for compatibility.
    """

    def __init__(self, byr=-1, iyr=-1, eyr=-1, hgt=-1, hfc=-1, ecl=-1, pid=-1, cid=-1):
        keys = ('byr', 'iyr', 'eyr', 'hgt', 'hcl', 'ecl', 'pid', 'cid')
        values = (byr, iyr, eyr, hgt, hfc, ecl, pid, cid)
        self.fields = dict(zip(keys, values))
def parse(line):
    """Parse one whitespace-separated line of "key:value" tokens into a dict."""
    pairs = (token.split(':') for token in line.split())
    return {key: value for key, value in pairs}
def is_invalid(key, value):
    """Return True when *value* fails the validation rule for field *key*.

    Rules follow Advent of Code 2020 day 4 part 2.  Unknown keys (including
    'cid') are never invalid.
    """
    if key == 'byr' and (int(value) < 1920 or int(value) > 2002):
        return True
    if key == 'iyr' and (int(value) < 2010 or int(value) > 2020):
        return True
    if key == 'eyr' and (int(value) < 2020 or int(value) > 2030):
        return True
    if key == 'hgt':
        # Height must be "<number>cm" in 150-193 or "<number>in" in 59-76.
        if value[-2:] == 'cm':
            if int(value[:-2]) < 150 or int(value[:-2]) > 193:
                return True
        elif value[-2:] == 'in':
            if int(value[:-2]) < 59 or int(value[:-2]) > 76:
                return True
        else:
            return True
    if key == 'hcl':
        # Hair colour: '#' followed by exactly six lowercase hex digits.
        if value[0] != '#':
            return True
        value = value[1:]
        if len(value) != 6:
            return True
        for c in value:
            if not (c.isdigit() or c in 'abcdef'):
                return True
    if key == 'ecl':
        if value not in ('amb', 'blu', 'brn', 'grn', 'gry', 'hzl', 'oth'):
            return True
    if key == 'pid':
        # Bug fix: pid must be a nine-digit number; the original only
        # checked the length and accepted e.g. "12345678a".
        if len(value) != 9 or not value.isdigit():
            return True
    return False
def _passport_is_valid(passport):
    """True iff every field except 'cid' is present (not -1) and passes is_invalid."""
    for key, value in passport.fields.items():
        if key != 'cid' and (value == -1 or is_invalid(key, value)):
            return False
    return True


def main():
    """Count valid passports in 'in.txt' (blank lines separate records)."""
    with open('in.txt') as f:
        lines = f.readlines()
    valid = 0
    passport = Passport()
    has_data = False
    for line in lines:
        line = line.strip()
        if not line:
            # Blank line ends the current record: score it and start fresh.
            if has_data and _passport_is_valid(passport):
                valid += 1
            passport = Passport()
            has_data = False
            continue
        for key, value in parse(line).items():
            passport.fields[key] = value
        has_data = True
    # Bug fix: the original only scored a passport when it saw a following
    # blank line, so a final record without a trailing blank line was
    # silently dropped from the count.
    if has_data and _passport_is_valid(passport):
        valid += 1
    print(valid)


if __name__ == '__main__':
    main()
| 3.265625 | 3 |
AULA9.py | ViniciusMottag/Aulas_LP2 | 0 | 12760650 | <reponame>ViniciusMottag/Aulas_LP2
class Pessoa:
    """Base person record: name, e-mail address and mobile number."""

    def __init__(self, nome, email, celular):
        self.nome = nome
        self.email = email
        self.celular = celular

    # --- getters ---
    def get_nome(self):
        # Name prefixed with a polite salutation ("Dear").
        return f"Caro(a) {self.nome}"

    def get_email(self):
        return(self.email)

    def get_celular(self):
        return(self.celular)

    # --- setters ---
    def set_nome(self, nome):
        self.nome = nome

    def set_email(self, email):
        self.email = email

    def set_celular(self, celular):
        self.celular = celular


class Aluno(Pessoa):
    """Student: a Pessoa plus course acronym, tuition fee, RA id and course list."""

    def __init__(self, nome, email, celular, sigla, m, r, d=None):
        super().__init__(nome, email, celular)
        self.sigla = sigla
        self.mensalidade = m
        self.ra = r
        # Bug fix: the original used a mutable default argument (d=[]), so
        # every Aluno created without `d` shared the SAME list and
        # set_disciplina on one student mutated all of them.  Each instance
        # now gets its own fresh list when `d` is omitted.
        self.disciplina = [] if d is None else d

    def __repr__(self):
        return 'Aluno: %s | Curso: %s' %(self.nome,self.sigla)

    ### getters (return the attribute values)
    def get_sigla(self):
        # Acronym letters comma-separated, e.g. "ADS" -> "A,D,S".
        return(f"{self.sigla[0]},{self.sigla[1]},{self.sigla[2]}")

    def get_mensalidade(self):
        return 'R$ %.2f' %self.mensalidade

    def get_ra(self):
        return self.ra

    def get_disciplina(self):
        return self.disciplina

    #### setters (assign the attribute values)
    def set_disciplina(self, m):
        # Appends one course to the list rather than replacing it.
        self.disciplina.append(m)

    def set_mensalidade(self, mensalidade):
        self.mensalidade = mensalidade

    def set_sigla(self, sigla):
        self.sigla = sigla

    def set_ra(self, ra):
        self.ra = ra
class Professor(Pessoa):
    """Teacher: a Pessoa paid by the hour (valor_hora)."""

    def __init__(self, nome, email, celular, v):
        super().__init__(nome, email, celular)
        # Hourly rate, always stored as float.
        self.valor_hora = float(v)

    def __repr__(self):
        return 'Prof: %s ' %(self.nome)

    def get_nome(self):
        # Overrides Pessoa.get_nome with a "Mestre" (master) salutation.
        return f"Mestre {self.nome}"

    def get_valor_hora(self):
        return self.valor_hora

    def set_valor_hora(self, valor):
        self.valor_hora = float(valor)
# Demo: exercise the classes above.
aluno = Aluno("tuiu","<EMAIL>",11961015070,"ADS",690.00,1800648)
print(aluno)
print(aluno.get_nome())
aluno.set_nome("<NAME>")
print(aluno.get_nome())
# This student is created with an explicit course list.
aluno1 = Aluno("tuiu","<EMAIL>",11961015070,"ADS",690.00,1800648,["logica","matematica"])
print(aluno1.get_disciplina())
aluno1.set_disciplina("aleluia")
print(aluno1.get_disciplina())
prof = Professor("ilana","<EMAIL>",156,150)
print (prof)
print(prof.get_nome())
| 3.671875 | 4 |