| code (string, 2-1.05M) | repo_name (string, 5-104) | path (string, 4-251) | language (stringclasses, 1 value) | license (stringclasses, 15 values) | size (int32, 2-1.05M) |
|---|---|---|---|---|---|
from datetime import datetime
from parsedatetime import parsedatetime, Constants
from scrapy import signals
from scrapy.xlib.pydispatch import dispatcher
from scrapy.exceptions import NotConfigured, IgnoreRequest
from scrapy.utils.misc import load_object
class HistoryMiddleware(object):
DATE_FORMAT = '%Y%m%d'
def __init__(self, crawler):
self.stats = crawler.stats
settings = crawler.settings
history = settings.get('HISTORY', None)
if not history:
raise NotConfigured()
# EPOCH:
# == False: don't retrieve historical data
# == True : retrieve most recent version
# == datetime(): retrieve next version after datetime()
self.epoch = self.parse_epoch(settings.get('EPOCH', False))
self.retrieve_if = load_object(history.get(
'RETRIEVE_IF', 'history.logic.RetrieveNever'))(settings)
self.store_if = load_object(history.get(
'STORE_IF', 'history.logic.StoreAlways'))(settings)
self.storage = load_object(history.get(
'BACKEND', 'history.storage.S3CacheStorage'))(settings)
self.ignore_missing = settings.getbool('HTTPCACHE_IGNORE_MISSING')
dispatcher.connect(self.spider_opened, signal=signals.spider_opened)
dispatcher.connect(self.spider_closed, signal=signals.spider_closed)
@classmethod
def from_crawler(cls, crawler):
return cls(crawler)
def spider_opened(self, spider):
self.storage.open_spider(spider)
self.store_if.spider_opened(spider)
self.retrieve_if.spider_opened(spider)
def spider_closed(self, spider):
self.storage.close_spider(spider)
self.store_if.spider_closed(spider)
self.retrieve_if.spider_closed(spider)
def process_request(self, request, spider):
"""
A request is approaching the Downloader.
Decide if we would like to intercept the request and supply a
response ourselves.
"""
if self.epoch and self.retrieve_if(spider, request):
request.meta['epoch'] = self.epoch
response = self.storage.retrieve_response(spider, request)
if response:
response.flags.append('historic')
return response
elif self.ignore_missing:
raise IgnoreRequest("Ignored; request not in history: %s" % request)
def process_response(self, request, response, spider):
"""
        A response is leaving the Downloader. It was either retrieved
from the web or from another middleware.
Decide if we would like to store it in the history.
"""
if self.store_if(spider, request, response):
self.storage.store_response(spider, request, response)
self.stats.set_value('history/cached', True, spider=spider)
return response
def parse_epoch(self, epoch):
"""
bool => bool
datetime => datetime
str => datetime
"""
if isinstance(epoch, bool) or isinstance(epoch, datetime):
return epoch
elif epoch == 'True':
return True
elif epoch == 'False':
return False
try:
return datetime.strptime(epoch, self.DATE_FORMAT)
except ValueError:
pass
parser = parsedatetime.Calendar(Constants())
        time_tuple = parser.parse(epoch)  # 'yesterday' => (time.struct_time, int)
        if not time_tuple[1]:
            raise NotConfigured('Could not parse epoch: %s' % epoch)
        time_struct = time_tuple[0]  # => time.struct_time(tm_year=2012, tm_mon=4, tm_mday=7, tm_hour=22, tm_min=8, tm_sec=6, tm_wday=5, tm_yday=98, tm_isdst=-1)
return datetime(*time_struct[:6]) #=> datetime.datetime(2012, 4, 7, 22, 8, 6)
| playandbuild/scrapy-history-middleware | history/middleware.py | Python | mit | 3,854 |
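A minimal sketch of wiring this middleware into a Scrapy project's settings.py, based on the keys the constructor reads above; the middleware priority and the RetrieveAlways policy name are assumptions, not confirmed parts of the package.

```python
# Hypothetical settings.py snippet; key names mirror what HistoryMiddleware
# reads, but the priority (901) and RetrieveAlways are assumptions.
DOWNLOADER_MIDDLEWARES = {
    'history.middleware.HistoryMiddleware': 901,
}
HISTORY = {
    'RETRIEVE_IF': 'history.logic.RetrieveAlways',  # assumed policy class
    'STORE_IF': 'history.logic.StoreAlways',        # default shown above
    'BACKEND': 'history.storage.S3CacheStorage',    # default shown above
}
EPOCH = 'yesterday'  # parse_epoch() accepts True/False, YYYYMMDD, or natural language
```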
import unittest
from contented.app import Application
class AppTests(unittest.TestCase):
def test_load_app(self):
app = Application({})
self.assertTrue(hasattr(app, "settings"))
self.assertTrue(hasattr(app, "content_map"))
self.assertTrue(hasattr(app, "request_processors"))
| elbeanio/contented | test/app.py | Python | mit | 312 |
# -*- coding: utf-8 -*-
import pathlib
from typing import Union
import lxml.etree
def save_as_xml(
element_tree: Union[lxml.etree._Element, lxml.etree._ElementTree],
filepath: Union[str, pathlib.Path],
pretty_print: bool = True) -> None:
"""save ElementTree in the file as XML
Args:
element_tree (lxml.etree._ElementTree): the ElementTree to be save.
filepath (str, pathlib.Path): The path of the File to be output as XML.
pretty_print (bool) optional:
The Argument of lxml.etree.tostring.
Defaults to True.
"""
if not isinstance(filepath, pathlib.Path):
filepath = pathlib.Path(filepath)
with filepath.open(mode='w', encoding='utf-8', newline='') as file:
file.write(lxml.etree.tostring(
element_tree,
encoding='utf-8',
pretty_print=pretty_print,
xml_declaration=True).decode('utf-8'))
| 085astatine/togetter | togetter/xml_tools.py | Python | mit | 972 |
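A quick usage sketch for save_as_xml; the element names and output path are illustrative.

```python
# Build a tiny tree and save it with the helper defined above.
import lxml.etree
root = lxml.etree.Element('root')
lxml.etree.SubElement(root, 'item').text = 'hello'
save_as_xml(root, 'out.xml')  # writes pretty-printed XML with a declaration
```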
# coding=UTF-8
import mysql.connector
import xlrd
import xlsxwriter
import os
from mysql.connector import errorcode
from datetime import datetime
# Name of the symbolicated Excel file
EXCEL_NAME = '20170223_4.0.1_feedback_result_py'
DB_NAME = 'zl_crash'
config = {
'user': 'root',
'password': '123456',
'host': '127.0.0.1',
'database': 'zl_crash',
}
class Report(object):
'''
    Report class encapsulating one row of data from the Excel workbook
    '''
    def __init__(self, report_id, exception_type, device_id, exception_symbols, os_version):
        self.report_id = report_id
        self.exception_type = exception_type
        self.device_id = device_id
        self.exception_symbols = exception_symbols
        self.os_version = os_version
def main():
begin_time = datetime.now()
    # Table name
    table_name = 'report_' + begin_time.strftime("%Y_%m_%d_%H_%M_%S")
    # Create the table
    create_table_in_db(table_name)
    # Insert the data
    insert_symbolication_result_into_db(table_name)
    # Group the data and export it
    generate_grouped_exception(table_name)
    end_time = datetime.now()
    print('Elapsed time: ' + str(end_time - begin_time))
def create_table_in_db(table_name):
'''
Create a table in database, and named as `table_name`
:param table_name: table_name
'''
SQLS = {}
SQLS['drop_report'] = (
"DROP TABLE IF EXISTS `" + table_name + "`")
SQLS['report'] = (
"CREATE TABLE `" + table_name + "` ( "
"`report_id` int(11) NOT NULL AUTO_INCREMENT, "
"`exception_type` varchar(255) DEFAULT NULL, "
"`device_id` varchar(255) DEFAULT NULL, "
"`exception_symbols` longtext, "
"`os_version` varchar(255) DEFAULT NULL, "
"PRIMARY KEY (`report_id`)"
") ENGINE=InnoDB DEFAULT CHARSET=utf8")
try:
conn = mysql.connector.connect(**config)
        cursor = conn.cursor()
for name, sql in SQLS.items():
try:
print("Executing sql {}.".format(name))
cursor.execute(sql)
except mysql.connector.Error as err:
if err.errno == errorcode.ER_TABLE_EXISTS_ERROR:
print('Table already exists.')
else:
print(err.msg)
except mysql.connector.Error as err:
if err.errno == errorcode.ER_ACCESS_DENIED_ERROR:
print("Something is wrong with your user name or password")
elif err.errno == errorcode.ER_BAD_DB_ERROR:
print("Database does not exist")
else:
print(err.msg)
finally:
cursor.close()
conn.close()
def insert_symbolication_result_into_db(table_name):
'''
Insert the symbolicated result into database
:param table_name: table_name in database
'''
try:
conn = mysql.connector.connect(**config)
# print('connected to db')
cursor = conn.cursor()
insert_report = (
"INSERT INTO " + table_name + " "
"(exception_type, device_id, exception_symbols, os_version) "
"VALUES (%s, %s, %s, %s)")
work_book = xlrd.open_workbook(EXCEL_NAME + '.xlsx')
sheet = work_book.sheets()[0]
nrows = sheet.nrows
ncols = sheet.ncols
row_index = 1
for row_index in range(1, nrows):
data_row = sheet.row_values(row_index)
# assert col < ncols
device_id = data_row[0]
os_version = data_row[1]
exception_type = data_row[2]
exception_symbols = data_row[3]
if exception_symbols == '':
continue
data_report = (exception_type, device_id, exception_symbols, os_version)
# insert report data
cursor.execute(insert_report, data_report)
conn.commit()
except mysql.connector.Error as err:
if err.errno == errorcode.ER_ACCESS_DENIED_ERROR:
print("Something is wrong with your user name or password")
elif err.errno == errorcode.ER_BAD_DB_ERROR:
print("Database does not exist")
else:
print(err.msg)
finally:
cursor.close()
conn.close()
def generate_grouped_exception(table_name):
'''
    Group all exceptions in the database by exception type and export the grouped data.
:param table_name: table_name in zl_crash database
'''
EXCEPTION_TYPE_COUNT = {}
EXCEPTION_MAPPING = {}
try:
conn = mysql.connector.connect(**config)
cursor = conn.cursor()
group_exception_type = (
"SELECT exception_type, COUNT(*) as nums "
"FROM " + table_name + " GROUP BY exception_type")
query_specific_exception = (
"SELECT * FROM " + table_name + " "
"WHERE exception_type = %s")
cursor.execute(group_exception_type)
for (exception_type, nums) in cursor:
EXCEPTION_TYPE_COUNT[exception_type] = nums
# print("exception_type:" + exception_type + ", nums:" + str(nums))
for exception_type in EXCEPTION_TYPE_COUNT.keys():
cursor.execute(query_specific_exception, (exception_type,))
exception_list = []
for (report_id, exception_type, device_id, exception_symbols, os_version) in cursor:
report = Report(report_id, exception_type, device_id, exception_symbols, os_version)
exception_list.append(report)
EXCEPTION_MAPPING[exception_type] = exception_list
write_grouped_exception_to_file(EXCEPTION_TYPE_COUNT, EXCEPTION_MAPPING)
except mysql.connector.Error as err:
if err.errno == errorcode.ER_ACCESS_DENIED_ERROR:
print("Something is wrong with your user name or password")
elif err.errno == errorcode.ER_BAD_DB_ERROR:
print("Database does not exist")
else:
print(err.msg)
finally:
cursor.close()
conn.close()
def write_grouped_exception_to_file(count, mapping):
'''
    Export the grouped exceptions to an Excel file
    :param count: dict mapping exception_type to its count
    :param mapping: dict mapping exception_type to its list of Report objects
'''
output_file_name = EXCEL_NAME + '_grouped.xlsx'
os.system('rm -rf ' + output_file_name)
workbook = xlsxwriter.Workbook(output_file_name)
worksheet = workbook.add_worksheet()
    # Set column widths
worksheet.set_column('A:A', 25)
worksheet.set_column('B:B', 10)
worksheet.set_column('C:C', 25)
worksheet.set_column('D:D', 40)
worksheet.set_column('E:E', 500)
    # Bold header format
bold = workbook.add_format({'font_size': 14,
'align': 'center',
'bold': True})
    # Header row
worksheet.write('A1', 'exception_type', bold)
worksheet.write('B1', 'count', bold)
worksheet.write('C1', 'os_version', bold)
worksheet.write('D1', 'device_id', bold)
worksheet.write('E1', 'symbols', bold)
    # Row/column indices used while writing to the worksheet
row_index = 1
col_index = 0
colors = ('#A8BAAA', '#FFF6CF', '#DCCDAE', '#B49D7E',
'#816854', '#334D5C', '#45B29D', '#EFC94C')
count_index = 0
pattern = 0.5
    for (exc_type, num) in count.items():
bg_color = colors[count_index % len(colors)]
col_format = workbook.add_format({'pattern': pattern,
'bg_color': bg_color})
num_col_format = workbook.add_format({'pattern': pattern,
'bg_color': bg_color,
'bold': True,
'align': 'center'})
count_index += 1
        report_list = mapping[exc_type]
        for i in range(num):
            report_item = report_list[i]
if i == 0:
worksheet.write(row_index, col_index, report_item.exception_type, col_format)
col_index += 1
worksheet.write(row_index, col_index, num, num_col_format)
col_index += 1
else:
worksheet.write(row_index, col_index, '', col_format)
col_index += 1
worksheet.write(row_index, col_index, '', col_format)
col_index += 1
worksheet.write(row_index, col_index, report_item.os_version, col_format)
col_index += 1
worksheet.write(row_index, col_index, report_item.device_id, col_format)
col_index += 1
worksheet.write(row_index, col_index, report_item.exception_symbols, col_format)
            # Move to the next row and reset the column
row_index += 1
col_index = 0
    # Close the workbook
workbook.close()
print("Exporting grouped data to " + output_file_name)
if __name__ == '__main__':
main()
| renguochao/PySymTool | py_group.py | Python | mit | 8,872 |
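insert_symbolication_result_into_db() reads the first sheet from row 1 onward and expects the columns device_id, os_version, exception_type, exception_symbols in that order; a sketch of generating a matching input workbook (header text and sample values are assumptions):

```python
# Hypothetical generator for a minimal input workbook in the expected layout.
import xlsxwriter
wb = xlsxwriter.Workbook('20170223_4.0.1_feedback_result_py.xlsx')  # matches EXCEL_NAME above
ws = wb.add_worksheet()
ws.write_row(0, 0, ['device_id', 'os_version', 'exception_type', 'exception_symbols'])
ws.write_row(1, 0, ['ABC-123', '10.2', 'EXC_BAD_ACCESS', '0 CoreFoundation ...'])
wb.close()
```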
from ._excel import ExcelXlsTableWriter, ExcelXlsxTableWriter
from ._pandas import PandasDataFramePickleWriter
from ._sqlite import SqliteTableWriter
| thombashi/pytablewriter | pytablewriter/writer/binary/__init__.py | Python | mit | 150 |
import cookielib
import urllib2
import urllib
import json
import time
#Default settings so urllib2 keeps cookies across requests; add this before testing
cj = cookielib.CookieJar()
opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj))
urllib2.install_opener(opener)
# Let this single cookie jar override the default urlopen opener
#Cookies Info
def INITCOOKIES():
cj = cookielib.CookieJar()
opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj))
urllib2.install_opener(opener)
def COOKIE_TO_FILE(filename):
cookie = cookielib.MozillaCookieJar(filename)
handler = urllib2.HTTPCookieProcessor(cookie)
opener = urllib2.build_opener(handler)
response = opener.open("http://www.baidu.com")
cookie.save(ignore_discard=True, ignore_expires=True)
def FILE_TO_COOKIE(filename):
cookie = cookielib.MozillaCookieJar()
    #Initialize the cookies environment
cookie.load(filename, ignore_discard=True, ignore_expires=True)
req = urllib2.Request("http://www.baidu.com")
opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cookie))
    #opener works like urlopen and provides the same functionality
response = opener.open(req)
print response.read()
#Helper Functions
def get_current_time():
return time.strftime("%F %T" , time.localtime() )
def title_exporter(dictionary):
my_dict_titles = []
try:
my_dict_titles.append(dictionary.keys())
for key in dictionary:
if isinstance(dictionary[key], dict):
my_dict_titles.append(title_exporter(dictionary[key]))
if isinstance(dictionary[key], list):
if len(dictionary[key]):
if isinstance(dictionary[key][0],dict):
my_dict_titles.append(title_exporter(dictionary[key][0]))
except:
if isinstance(dictionary, list):
my_dict_titles.append(title_exporter(dictionary[0]))
#print my_dict_titles
return my_dict_titles
#Major Functions
def POST(url, data, header_type = "application/json", encodejson = True):
if encodejson:
data = json.dumps(data)
req = urllib2.Request(url, data)
else:
data = urllib.urlencode(data)
req = urllib2.Request(url, data)
req.add_header('Content-Type', header_type)
source_code = ErrorOut(req)
try:
print json.loads(source_code)['message']
except:
print "Faulty Data Structure!"
#print source_code
return source_code
def GET(url,data = "", header_type = "application/json",encodejson = False):
if encodejson:
data = json.dumps(data)
else:
data = urllib.urlencode(data)
geturl = url + "?" + data
req = urllib2.Request(geturl)
req.add_header('Content-Type', header_type)
source_code = ErrorOut(req)
try:
print json.loads(source_code)['message']
except:
print "Faulty Data Structure!"
#print source_code
return source_code
def DELETE(url,data = "", header_type = "application/json",encodejson = True):
    #Functionality not yet tested
if encodejson:
data = json.dumps(data)
else:
data = urllib.urlencode(data)
req = urllib2.Request(url, data)
req.add_header('Content-Type', header_type)
req.get_method = lambda: 'DELETE'
source_code = ErrorOut(req)
try:
print json.loads(source_code)['message']
except:
print "Faulty Data Structure!"
return source_code
def PUT(url,data = "", header_type = "application/json",encodejson = True):
if encodejson:
data = json.dumps(data)
else:
data = urllib.urlencode(data)
req = urllib2.Request(url, data)
req.add_header('Content-Type', header_type)
req.get_method = lambda: 'PUT'
    source_code = ErrorOut(req)
try:
print json.loads(source_code)['message']
except:
print "Faulty Data Structure!"
return source_code
def ErrorOut(req):
    #Used later in the tests
try:
resp = urllib2.urlopen(req)
#print "Passed Basic Access!"
return resp.read()
except urllib2.URLError, e:
if hasattr(e,"code"):
print e.code
if hasattr(e,"reason"):
print e.reason
else:
print "OK"
return None
class News:
def __init__(self):
self.url_manager = []
self.iQuickerUrl = "http://testwww.iquicker.com.cn/iquicker_web/login"
self.newsurl = "http://testwww.iquicker.com.cn/iquicker_web/newstype/datas"
self.all_news = "http://testwww.iquicker.com.cn/iquicker_web/news/datas"
self.news_id = "http://testwww.iquicker.com.cn/iquicker_web/news/data/num"
self.news_root = "http://testwww.iquicker.com.cn/iquicker_web/news/data/"
self.id_info = ""
self.data_manager = []
self.my_dict = {"username":"15611765076","password":"MTIzNDU2Nzg=","rememberMe":True,"org":"ff808081557080a6015575e3d9300330"}
self.all_news_data = {"pageNo" : 1, "pageSize" : 20, "sortInfo" : "DESC_isUp_isUpTime_publishTime"}
def Login_to_system(self):
print "in Login System..."
POST(self.iQuickerUrl, self.my_dict)
def get_news_type(self):
print "in News Type..."
GET(self.newsurl)
def get_news_data(self):
print "in news Data..."
Dict = GET(self.all_news, self.all_news_data)
Dict = json.loads(Dict)
#print Dict
Dict = Dict['data']['list'][0]
self.id_info = Dict['id']
def get_news_id(self):
print "in news id..."
GET(self.news_root + str(self.id_info))
'''
My_news = News()
My_news.Login_to_system()
My_news.get_news_data()
My_news.get_news_id()
My_news.get_news_type()
'''
'''
iQuickerUrl = "http://testwww.iquicker.com.cn/iquicker_web/login"
my_dict = {"username":"15611765076","password":"MTIzNDU2Nzg=","rememberMe":True,"org":"ff808081557080a6015575e3d9300330"}
POST(iQuickerUrl, my_dict)
newsurl = "http://testwww.iquicker.com.cn/iquicker_web/newstype/datas"
GET(newsurl)
all_news = "http://testwww.iquicker.com.cn/iquicker_web/news/datas"
all_news_data = {"pageNo" : 1, "pageSize" : 20, "sortInfo" : "DESC_isUp_isUpTime_publishTime"}
news_data_dict = GET(all_news, all_news_data)
news_id = "http://testwww.iquicker.com.cn/iquicker_web/news/data/num"
GET(news_id)
news_data_dict = json.loads(news_data_dict)
#print news_data_dict
news_data_dict = news_data_dict['data']['list'][0]
news_data_dict = news_data_dict['id']
news1 = "http://testwww.iquicker.com.cn/iquicker_web/news/data/"+str(news_data_dict)
GET(news1)
'''
class Tasks:
def __init__(self):
self.iQuickerUrl = "http://testwww.iquicker.com.cn/iquicker_web/login"
self.personal_info = "http://testwww.iquicker.com.cn/iquicker_web/login/user"
self.get_user_data = "http://testwww.iquicker.com.cn/iquicker_web/mobile/ad_books"
self.task_info = "http://testwww.iquicker.com.cn/iquicker_web/task/tasks/"
self.discuss_on_tasks = "http://testwww.iquicker.com.cn/iquicker_web/discuss/data"
self.discuss_list = "http://testwww.iquicker.com.cn/iquicker_web/discusslist/data/"
self.function_name = {"login" : 1, "Get Personal Data" : 2, "Get Name List" : 3, "Get Unfinished" : 4, "Get Finished" : 5,
"Label Finished" : 6, "Label UnFinished" : 7, "Post Tasks" : 8, "Modify Task" : 9, "Delete Task" : 10,
"Comment on Task" : 11, "Discuss List" : 12}
self.basic_struct = ['status', 'message', 'data', 'success']
self.error_count = []
self.url_list = []
self.port_type_warning = []
self.times = 0
self.id_book = []
self.name_book = []
self.Task_id = []
self.Finished_Task_id = []
self.my_dict = {"username":"15611765076","password":"MTIzNDU2Nzg=","rememberMe":True,"org":"ff808081557080a6015575e3d9300330"}
def login(self):
print "in Login System..."
template = [[u'status', u'message', u'data', u'success'], [[u'orgs', u'initialised']]]
self.determine_error(POST(self.iQuickerUrl, self.my_dict), "login",template, self.iQuickerUrl)
self.times += 1
def get_personal_data(self):
print "Fetching personal info..."
template = [[u'status', u'success', u'orgName', u'orgLogoWhite', u'orgLogoColour',
u'orgInnerEmailStatus', u'theme', u'orgCode', u'message', u'data'],
[[u'hometown', u'idcard', u'bankCard', u'telephone', u'statusReason',
u'sex', u'pinyinPrefix', u'id', u'innerEmail', u'img', u'innerEmailContact',
u'joindate', u'department', u'shortname', u'type', u'email', u'status', u'fax',
u'isTrialAccount', u'pinyin', u'qualifications', u'birthday', u'address', u'org',
u'createTime', u'itcode', u'name', u'mobile', u'prefixId', u'sn', u'signature',
u'position', u'joinTime', u'enname'],
[[u'org', u'subDept', u'id', u'deptManager', u'parDept', u'flag2', u'shortname',
u'status', u'usable', u'flag', u'zfield9', u'zfield8', u'zfield5', u'zfield4',
u'zfield7', u'zfield6', u'zfield1', u'zfield3', u'zfield2', u'name', u'zfield10',
u'prefixId', u'sn', u'root']]]]
self.determine_error(GET(self.personal_info), "Get Personal Data",template, self.personal_info)
self.times += 1
def get_name_list(self):
print "Fetching Namelist..."
user_data = GET(self.get_user_data)
template = [[[u'tel', u'uuid', u'mobile', u'piny', u'position', u'deptname', u'id', u'name']]]
self.determine_error(user_data, "Get Name List", template, self.get_user_data)
user_data = json.loads(user_data)
for i in range(len(user_data)):
self.id_book.append(user_data[i]['uuid'])
self.name_book.append(user_data[i]['name'])
self.times += 1
def get_unfinished(self):
print "Fetching Unfinished task id..."
self.Task_id = []
Data_tasks = {"isOver" : False, "page" : 1, "pageSize" : 20, "sortType" : 1, "type" : 0}
Task_info = GET(self.task_info,Data_tasks)
template = [[u'status', u'message', u'data', u'success'],
[[u'sort', u'last', u'size', u'number', u'content', u'totalPages', u'first',
u'totalElements', u'numberOfElements'],
[[u'endDate', u'overUserId', u'id', u'publishScopeName', u'subject', u'write',
u'overStatus', u'createDate', u'detail', u'priority', u'participants', u'publishScope',
u'shared', u'principals', u'overDate', u'readRight', u'createUser', u'org',
u'labelObjectList', u'createName', u'shareUserIds', u'writeRight', u'attList'],
[[u'id', u'name']], [[u'id', u'name']]]]]
self.determine_error(Task_info, "Get Unfinished", template, self.task_info)
Task_info = json.loads(Task_info)
Task_info = Task_info['data']['content']
for i in range(len(Task_info)):
self.Task_id.append(Task_info[i]['id'])
self.times += 1
def get_finished(self):
print "Fetching finished task id..."
self.Finished_Task_id = []
Data_tasks = {"isOver" : True, "page" : 1, "pageSize" : 20, "sortType" : 1, "type" : 0}
Task_info = GET(self.task_info,Data_tasks)
template = [[u'status', u'message', u'data', u'success'],
[[u'sort', u'last', u'size', u'number', u'content', u'totalPages', u'first',
u'totalElements', u'numberOfElements'],
[[u'endDate', u'overUserId', u'id', u'publishScopeName', u'subject', u'write',
u'overStatus', u'createDate', u'detail', u'priority', u'participants', u'publishScope',
u'shared', u'principals', u'overDate', u'readRight', u'createUser', u'org',
u'labelObjectList', u'createName', u'shareUserIds', u'writeRight', u'attList'],
[[u'id', u'name']], [[u'id', u'name']]]]]
result = self.determine_error(Task_info, "Get Finished", template, self.task_info)
title_exporter(json.loads(result))
Task_info = json.loads(Task_info)
Task_info = Task_info['data']['content']
for i in range(len(Task_info)):
self.Finished_Task_id.append(Task_info[i]['id'])
self.times += 1
def Label_finished(self):
print "Label Unfinished Task->Finished"
self.get_unfinished()
Label_url = self.task_info + str(self.Task_id[-1]) + "/completion"
template = [[u'status', u'message', u'success']]
self.determine_error(PUT(Label_url), "Label Finished", template, Label_url)
self.times += 1
def Label_unfinished(self):
print "Label Finished Task->Unfinished"
self.get_finished()
#print self.Finished_Task_id
Label_url = self.task_info + str(self.Finished_Task_id[-1]) + "/incompletion"
template = [[u'status', u'message', u'success']]
self.determine_error(PUT(Label_url), "Label UnFinished", template, Label_url)
self.times += 1
def post_task(self):
print "Posting new task now..."
Post_kernel = {"subject" : "RobotSend", "principals" : [{"id": self.id_book[0] , "name": self.name_book[0]}], "participants":[{"id": self.id_book[0] , "name": self.name_book[0]}], "endDate" : "2017-06-24T16:00:00.000Z", "priority" : 3, "detail" : "This is a Test Message" , "shared" : False , "publishScope" : ["company"], "publishScopeName" : ["/u5168/u516C/u53F8"]}
template = [[u'status', u'message', u'data', u'success'],
[[u'endDate', u'overUserId', u'id', u'publishScopeName',
u'subject', u'write', u'overStatus', u'createDate',
u'detail', u'priority', u'participants', u'publishScope',
u'shared', u'principals', u'overDate', u'readRight', u'createUser',
u'org', u'labelObjectList', u'createName', u'shareUserIds', u'writeRight',
u'attList'], [[u'id', u'name']], [[u'id', u'name']]]]
self.determine_error(POST(self.task_info,Post_kernel), "Post Tasks", template, self.task_info)
self.times += 1
def modify_task(self):
print "Modifying new task now..."
self.get_unfinished()
Post_kernel = {"id": self.Task_id[-1], "subject" : "RobotSendModify", "principals" : [{"id": self.id_book[0] , "name": self.name_book[0]}], "participants":[{"id": self.id_book[0] , "name": self.name_book[0]}], "endDate" : "2017-06-24T16:00:00.000Z", "priority" : 3, "detail" : "This is a Modified Message" , "shared" : False ,"attList": None, "publishScope" : ["company"], "publishScopeName" : ["/u5168/u516C/u53F8"]}
template = [[u'status', u'message', u'data', u'success'],
[[u'endDate', u'overUserId', u'id', u'publishScopeName',
u'subject', u'write', u'overStatus', u'createDate', u'detail',
u'priority', u'participants', u'publishScope', u'shared',
u'principals', u'overDate', u'readRight', u'createUser',
u'org', u'labelObjectList', u'createName', u'shareUserIds',
u'writeRight', u'attList'], [[u'id', u'name']], [[u'id', u'name']]]]
self.determine_error(POST(self.task_info,Post_kernel), "Modify Task", template, self.task_info)
self.times += 1
def delete_task(self):
print "deleting task now..."
self.get_unfinished()
Delete_url = self.task_info + str(self.Task_id[-1])
template = [[u'status', u'message', u'success']]
self.determine_error(DELETE(Delete_url), "Delete Task", template, Delete_url)
self.times += 1
    def comment_on_tasks(self):
print "Posting discuss on the Tasks..."
self.get_unfinished()
kernel = {"discussType":"task","masterId":self.Task_id[-1],"discussedId":"","discussedUserId":"","discussedUserName":"","content":"Looks nice!","isFile":"N","publishTime":get_current_time(),"relay":0,"attList":[],"userIdNames":""}
template = [[u'status', u'message', u'data', u'success'],
[[u'storePeopleId', u'userId', u'relayTimes', u'id', u'publishScopeName',
u'write', u'createDate', u'goodPeopleId', u'content', u'publishScope',
u'type', u'discussList', u'companyName', u'deleted', u'readRight', u'createUser',
u'org', u'userName', u'shareUserIds', u'publishTime', u'writeRight', u'attList']]]
self.determine_error(POST(self.discuss_on_tasks, kernel), "Comment on Task", template, self.discuss_on_tasks)
self.times += 1
    def get_discuss_list(self):
print "Get all Discuss data for a task..."
self.get_unfinished()
url = self.discuss_list + self.Task_id[-1]
template = [[u'status', u'message', u'data', u'success'],
[[u'discussList', u'createDate', u'shareUserIds', u'write',
u'masterId', u'readRight', u'writeRight', u'createUser',
u'org', u'id', u'attList'],
[[u'userName', u'content', u'discussedUserId', u'userIdNames',
u'relay', u'createDate', u'isFile', u'shareUserIds',
u'publishScope', u'userId', u'publishTime', u'discussedId',
u'write', u'masterId', u'readRight', u'writeRight', u'createUser',
u'org', u'discussedUserName', u'id', u'attList']]]]
self.determine_error(GET(url), "Discuss List", template, url)
self.times += 1
def determine_error(self,data, name, template=[], url=""):
result = True
try:
result = json.loads(data)['success']
except:
print "There is no success options"
self.port_type_warning.append(self.function_name[name])
if (data == None or not result or template != title_exporter(json.loads(data))):
self.error_count.append(self.function_name[name])
self.url_list.append(url)
return data
def show_off_all_data(self):
print "................................................"
print "Function runs: " + str(self.times) + " times"
print "Error Counts: " + str(len(self.error_count)) + " times"
print "Failure in: " + str(self.error_count)
print "Not supported port: " + str(self.port_type_warning)
print "Failure Url:"
for url in self.url_list:
print url
print "Please check the dictionary for more information"
My_task = Tasks()
My_task.login()
My_task.get_personal_data()
My_task.get_name_list()
My_task.post_task()
My_task.comment_on_tasks()
My_task.modify_task()
My_task.Label_finished()
My_task.Label_unfinished()
My_task.delete_task()
My_task.get_discuss_list()
My_task.show_off_all_data()
'''
#Login the system
iQuickerUrl = "http://testwww.iquicker.com.cn/iquicker_web/login"
my_dict = {"username":"15611765076","password":"MTIzNDU2Nzg=","rememberMe":True,"org":"ff808081557080a6015575e3d9300330"}
POST(iQuickerUrl, my_dict)
#personal_info
get_personal = "http://testwww.iquicker.com.cn/iquicker_web/login/user"
GET(get_personal)
#Get_user_data
Get_user_data = "http://testwww.iquicker.com.cn/iquicker_web/mobile/ad_books"
user_data = GET(Get_user_data)
user_data = json.loads(user_data)
id_book = []
name_book = []
for i in range(len(user_data)):
id_book.append(user_data[i]['uuid'])
name_book.append(user_data[i]['name'])
#Get Unfinished Task List
Tasks_url = "http://testwww.iquicker.com.cn/iquicker_web/task/tasks"
Data_tasks = {"isOver" : False, "page" : 1, "pageSize" : 20, "sortType" : 1, "type" : 0}
Task_info = GET(Tasks_url,Data_tasks)
Task_id = []
Task_info = json.loads(Task_info)
Task_info = Task_info['data']['content']
for i in range(len(Task_info)):
Task_id.append(Task_info[i]['id'])
#Get Finished Task List
Tasks_url = "http://testwww.iquicker.com.cn/iquicker_web/task/tasks"
Data_tasks = {"isOver" : True, "page" : 1, "pageSize" : 20, "sortType" : 1, "type" : 0}
Task_info = GET(Tasks_url,Data_tasks)
Finished_Task_id = []
Task_info = json.loads(Task_info)
Task_info = Task_info['data']['content']
for i in range(len(Task_info)):
Finished_Task_id.append(Task_info[i]['id'])
#Start Posting Tasks!
Post_task_url = "http://testwww.iquicker.com.cn/iquicker_web/task/tasks"
Post_kernel = {"subject" : "RobotSend", "principals" : [{"id": id_book[0] , "name": name_book[0]}], "participants":[{"id": id_book[0] , "name": name_book[0]}], "endDate" : "2017-06-24T16:00:00.000Z", "priority" : 3, "detail" : "This is a Test Message" , "shared" : False , "publishScope" : ["company"], "publishScopeName" : ["/u5168/u516C/u53F8"]}
POST(Post_task_url,Post_kernel)
#Start Modifying Tasks!
Post_task_url = "http://testwww.iquicker.com.cn/iquicker_web/task/tasks"
Post_kernel = {"id": Task_id[-1], "subject" : "RobotSendModify", "principals" : [{"id": id_book[0] , "name": name_book[0]}], "participants":[{"id": id_book[0] , "name": name_book[0]}], "endDate" : "2017-06-24T16:00:00.000Z", "priority" : 3, "detail" : "This is a Modified Message" , "shared" : False ,"attList": None, "publishScope" : ["company"], "publishScopeName" : ["/u5168/u516C/u53F8"]}
POST(Post_task_url,Post_kernel)
#Label_finished
Label_url = "http://testwww.iquicker.com.cn/iquicker_web/task/tasks/" + str(Task_id[-1]) + "/completion"
PUT(Label_url)
#Label_Unfinished
UnLabel_url = "http://testwww.iquicker.com.cn/iquicker_web/task/tasks/" + str(Finished_Task_id[-1]) + "/incompletion"
PUT(UnLabel_url)
#Start Delete the Task!
Delete_url = "http://testwww.iquicker.com.cn/iquicker_web/task/tasks/" + str(Task_id[-1])
#go_To_TASK
DELETE(Delete_url)
'''
#for item in cj:
# print 'Name = '+item.name
# print 'Value = '+item.value
| lanking520/Digital_China | backup1.py | Python | mit | 22,145 |
# -*- coding: utf-8 -*-
from .httpclient import HTTPClient
from .models import Video, Show
__all__ = ['Funimation']
class Funimation(object):
def __init__(self):
super(Funimation, self).__init__()
self.http = HTTPClient('http://www.funimation.com/',
[('User-Agent', 'Sony-PS3')])
# defaults to the free account user
        # hmm... the API doesn't appear to validate the user's subscription
# level so if this was changed you might be able to watch
# the paid videos ;)
# FunimationSubscriptionUser = paid account
# FunimationUser = free account
self.user_type = 'FunimationSubscriptionUser'
def get_shows(self, limit=3000, offset=0, sort=None, first_letter=None,
filter=None):
query = self._build_query(locals())
return self._request('feeds/ps/shows', query)
def get_videos(self, show_id, limit=3000, offset=0):
query = self._build_query(locals())
request = self._request('feeds/ps/videos', query)
for req in request:
# Replace get params with the mobile one
# This lets any IP (not only server IP) access content
req.video_url = req.video_url.split('?')[0]+'?9b303b6c62204a9dcb5ce5f5c607'
video_split = req.video_url.split(',')
split_len = len(video_split)
req.video_url = video_split[0]+video_split[split_len-2]+video_split[split_len-1]
return request
def get_featured(self, limit=3000, offset=0):
query = self._build_query(locals())
return self._request('feeds/ps/featured', query)
def search(self, search):
query = self._build_query(locals())
return self._request('feeds/ps/search', query)
def get_latest(self, limit=3000, offset=0):
if self.user_type == 'FunimationSubscriptionUser':
sort = 'SortOptionLatestSubscription'
else:
sort = 'SortOptionLatestFree'
return self.get_shows(limit, offset, sort)
def get_simulcast(self, limit=3000, offset=0):
return self.get_shows(limit, offset, filter='FilterOptionSimulcast')
def get_genres(self):
# we have to loop over all the shows to be sure to get all the genres.
# use a 'set' so duplicates are ignored.
genres = set()
for show in self.get_shows():
if show.get('genres'):
[genres.add(g) for g in show.get('genres').split(',')]
return sorted(genres)
def get_shows_by_genre(self, genre):
shows = []
for show in self.get_shows():
if show.get('genres') and genre in show.get('genres').split(','):
shows.append(show)
return shows
def _request(self, uri, query):
res = self.http.get(uri, query)
if 'videos' in res:
return [Video(**v) for v in res['videos']]
elif isinstance(res, list) and 'series_name' in res[0]:
return [Show(**s) for s in res]
else:
# search results
new_res = {}
            # the result is a list when there are no episodes in the results...
if isinstance(res['episodes'], list):
new_res['episodes'] = []
else:
new_res['episodes'] = [Video(**v) for v in
res['episodes']['videos']]
new_res['shows'] = [Show(**s) for s in res['shows']]
return new_res
def _build_query(self, params):
if params is None:
params = {}
else:
params['first-letter'] = params.pop('first_letter', None)
params.pop('self', None)
params.setdefault('ut', self.user_type)
return params
| ABusers/A-Certain-Magical-API | funimation/api.py | Python | mit | 3,784 |
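A usage sketch, assuming the endpoints above still respond and that Show objects expose dict-style get() as the genre helpers rely on:

```python
fun = Funimation()
print(fun.get_genres())             # sorted, de-duplicated genre names
for show in fun.get_shows_by_genre('Action'):   # 'Action' is an assumed genre
    print(show.get('series_name'))  # key name taken from _request() above
```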
#!/usr/bin/python -tt
from behave import *
import os
import subprocess
import glob
import re
import shutil
DNF_FLAGS = ['-y', '--disablerepo=*', '--nogpgcheck']
RPM_INSTALL_FLAGS = ['-Uvh']
RPM_ERASE_FLAGS = ['-e']
def _left_decorator(item):
""" Removed packages """
return u'-' + item
def _right_decorator(item):
""" Installed packages """
return u'+' + item
def find_pkg(pkg):
""" Find the package file in the repository """
candidates = glob.glob('/repo/' + pkg + '*.rpm')
if len(candidates) == 0:
print("No candidates for: '{0}'".format(pkg))
assert len(candidates) == 1
return candidates[0]
def decorate_rpm_packages(pkgs):
""" Converts package names like TestA, TestB into absolute paths """
return [find_pkg(p) for p in pkgs]
def get_rpm_package_list():
""" Gets all installed packages in the system """
pkgstr = subprocess.check_output(['rpm', '-qa', '--queryformat', '%{NAME}\n'])
return pkgstr.splitlines()
def get_rpm_package_version_list():
""" Gets all installed packages in the system with version"""
pkgverstr = subprocess.check_output(['rpm', '-qa', '--queryformat', '%{NAME}-%{VERSION}-%{RELEASE}\n'])
return pkgverstr.splitlines()
def get_dnf_package_version_list():
""" Gets all installed packages in the system with version to check that dnf has same data about installed packages"""
pkgverstr = subprocess.check_output(['dnf', 'repoquery', '--installed', '-Cq', '--queryformat', '%{name}.%{version}.%{release}\n'])
pkgverstr = pkgverstr.splitlines()
return pkgverstr
def diff_package_lists(a, b):
""" Computes both left/right diff between lists `a` and `b` """
sa, sb = set(a), set(b)
return (map(_left_decorator, list(sa - sb)),
map(_right_decorator, list(sb - sa)))
def package_version_lists(pkg, list_ver):
""" Select package versions """
found_pkgs = [x for x in list_ver if x.startswith(pkg)]
assert len(found_pkgs) == 1
return str(found_pkgs[0])
def package_absence(pkg, list_ver):
""" Select package versions """
found_pkgs = [x for x in list_ver if re.search('^' + pkg, x)]
assert len(found_pkgs) == 0
return None
def execute_dnf_command(cmd, reponame):
""" Execute DNF command with default flags and the specified `reponame` enabled """
flags = DNF_FLAGS + ['--enablerepo={0}'.format(reponame)]
return subprocess.check_call(['dnf'] + flags + cmd, stdout=subprocess.PIPE)
def execute_rpm_command(pkg, action):
""" Execute given action over specified pkg(s) """
if not isinstance(pkg, list):
pkg = [pkg]
if action == "remove":
rpm_command = RPM_ERASE_FLAGS
elif action == "install":
rpm_command = RPM_INSTALL_FLAGS
pkg = decorate_rpm_packages(pkg)
return subprocess.check_call(['rpm'] + rpm_command + pkg, stdout=subprocess.PIPE)
def piecewise_compare(a, b):
""" Check if the two sequences are identical regardless of ordering """
return sorted(a) == sorted(b)
def split(pkgs):
return [p.strip() for p in pkgs.split(',')]
@given('I use the repository "{repo}"')
def given_repo_condition(context, repo):
""" :type context: behave.runner.Context """
assert repo
context.repo = repo
assert os.path.exists('/var/www/html/repo/' + repo)
for root, dirs, files in os.walk('/repo'):
for f in files:
os.unlink(os.path.join(root, f))
for d in dirs:
shutil.rmtree(os.path.join(root, d))
subprocess.check_call(['cp -rs /var/www/html/repo/' + repo + '/* /repo/'], shell=True)
with open('/etc/yum.repos.d/' + repo + '.repo', 'w') as f:
f.write('[' + repo + ']\nname=' + repo + '\nbaseurl=http://127.0.0.1/repo/' + repo + '\nenabled=1\ngpgcheck=0')
@when('I "{action}" a package "{pkgs}" with "{manager}"')
def when_action_package(context, action, pkgs, manager):
assert pkgs
context.pre_rpm_packages = get_rpm_package_list()
assert context.pre_rpm_packages
context.pre_rpm_packages_version = get_rpm_package_version_list()
assert context.pre_rpm_packages_version
context.pre_dnf_packages_version = get_dnf_package_version_list()
assert context.pre_dnf_packages_version
if manager == 'rpm':
if action in ["install", "remove"]:
execute_rpm_command(split(pkgs), action)
else:
raise AssertionError('The action {} is not allowed parameter with rpm manager'.format(action))
elif manager == 'dnf':
if action == 'upgrade':
if pkgs == 'all':
execute_dnf_command([action], context.repo)
else:
execute_dnf_command([action] + split(pkgs), context.repo)
elif action == 'autoremove':
subprocess.check_call(['dnf', '-y', action], stdout=subprocess.PIPE)
elif action in ["install", "remove", "downgrade", "upgrade-to"]:
execute_dnf_command([action] + split(pkgs), context.repo)
else:
raise AssertionError('The action {} is not allowed parameter with dnf manager'.format(action))
else:
raise AssertionError('The manager {} is not allowed parameter'.format(manager))
@when('I execute command "{command}" with "{result}"')
def when_action_command(context, command, result):
assert command
context.pre_rpm_packages = get_rpm_package_list()
assert context.pre_rpm_packages
context.pre_rpm_packages_version = get_rpm_package_version_list()
assert context.pre_rpm_packages_version
context.pre_dnf_packages_version = get_dnf_package_version_list()
assert context.pre_dnf_packages_version
    cmd_output = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE)
    cmd_output.communicate()  # wait for completion; returncode is None until the process exits
    context.cmd_rc = cmd_output.returncode
if result == "success":
assert context.cmd_rc == 0
elif result == "fail":
assert context.cmd_rc != 0
else:
raise AssertionError('The option {} is not allowed option for expected result of command. '
'Allowed options are "success" and "fail"'.format(result))
@then('package "{pkgs}" should be "{state}"')
def then_package_state(context, pkgs, state):
assert pkgs
pkgs_rpm = get_rpm_package_list()
pkgs_rpm_ver = get_rpm_package_version_list()
pkgs_dnf_ver = get_dnf_package_version_list()
assert pkgs_rpm
assert context.pre_rpm_packages
removed, installed = diff_package_lists(context.pre_rpm_packages, pkgs_rpm)
assert removed is not None and installed is not None
for n in split(pkgs):
if state == 'installed':
assert ('+' + n) in installed
installed.remove('+' + n)
post_rpm_present = package_version_lists(n, pkgs_rpm_ver)
assert post_rpm_present
post_dnf_present = package_version_lists(n, pkgs_dnf_ver)
assert post_dnf_present
elif state == 'removed':
assert ('-' + n) in removed
removed.remove('-' + n)
post_rpm_absence = package_absence(n, pkgs_rpm_ver)
assert not post_rpm_absence
post_dnf_absence = package_absence(n, pkgs_dnf_ver)
assert not post_dnf_absence
elif state == 'absent':
assert ('+' + n) not in installed
assert ('-' + n) not in removed
post_rpm_absence = package_absence(n, pkgs_rpm_ver)
assert not post_rpm_absence
post_dnf_absence = package_absence(n, pkgs_dnf_ver)
assert not post_dnf_absence
elif state == 'upgraded':
pre_rpm_ver = package_version_lists(n, context.pre_rpm_packages_version)
post_rpm_ver = package_version_lists(n, pkgs_rpm_ver)
assert post_rpm_ver
assert pre_rpm_ver
assert post_rpm_ver > pre_rpm_ver
elif state == 'unupgraded':
pre_rpm_ver = package_version_lists(n, context.pre_rpm_packages_version)
post_rpm_ver = package_version_lists(n, pkgs_rpm_ver)
assert post_rpm_ver
assert pre_rpm_ver
assert post_rpm_ver == pre_rpm_ver
elif state == 'downgraded':
pre_rpm_ver = package_version_lists(n, context.pre_rpm_packages_version)
post_rpm_ver = package_version_lists(n, pkgs_rpm_ver)
assert post_rpm_ver
assert pre_rpm_ver
assert post_rpm_ver < pre_rpm_ver
elif state == 'present':
assert ('+' + n) not in installed
assert ('-' + n) not in removed
post_rpm_present = package_version_lists(n, pkgs_rpm_ver)
assert post_rpm_present
post_dnf_present = package_version_lists(n, pkgs_dnf_ver)
assert post_dnf_present
elif state == 'upgraded-to':
assert n in package_version_lists(n, pkgs_rpm_ver)
else:
raise AssertionError('The state {} is not allowed option for Then statement'.format(state))
""" This checks that installations/removals are always fully specified,
        so that we always cover the requirements/expectations entirely """
if state in ["installed", "removed"]:
assert not installed and not removed
@then('exit code of command should be equal to "{exit_code}"')
def then_exit_code(context, exit_code):
exit_code = int(exit_code)
assert context.cmd_rc == exit_code
| j-mracek/dnf_docker_test | features/steps/test_behave.py | Python | mit | 9,384 |
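A small illustration of the diff helper these steps build on; the -/+ decorators mark removed and installed packages respectively:

```python
# diff_package_lists() returns (removed, installed) as decorated names.
removed, installed = diff_package_lists(['a', 'b'], ['b', 'c'])
print(list(removed), list(installed))  # -> ['-a'] ['+c']
```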
from __future__ import unicode_literals
# Django
from django.conf import settings
from django.db import models
# 3rd Party
from grapevine import generics
from grapevine.models.base import GrapevineModel
from model_utils.managers import PassThroughManager
# Local Apps
from .querysets import WelcomeEmailQuerySet
class WelcomeEmail(generics.EmailSendable, GrapevineModel):
user = models.ForeignKey(settings.AUTH_USER_MODEL, related_name="welcome_emails")
try:
# In Django 1.7, this is finally addressed!
objects = WelcomeEmailQuerySet.as_manager()
except AttributeError:
# This handles Django <= 1.6
objects = PassThroughManager.for_queryset_class(WelcomeEmailQuerySet)()
class Meta:
verbose_name = "Welcome Email"
verbose_name_plural = "Welcome Emails"
def as_str(self):
return "Welcome Email to {0}".format(self.user)
def get_template_name(self):
return "emails/welcome.html"
def get_raw_subject(self):
return "Welcome to Acme Inc, {{ sendable.user }}!"
def get_recipients(self):
return {"to": [self.user.email], "bcc": ["top@secret.com"]}
def confirm_individual_sendability(self):
"""Only send Welcome Emails to users with email addresses"""
if not self.user.email:
self.cancelled_at_send_time = True
self.save()
return bool(self.user.email)
| craiglabenz/django-grapevine | tests/core/models.py | Python | mit | 1,420 |
# coding: utf-8
# Author: Vova Zaytsev <zaytsev@usc.edu>
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "nlcd.settings")
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
| LucidAi/nlcd | nlcd/wsgi.py | Python | mit | 221 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
# This file is part of fish-bundles.
# https://github.com/fish-bundles/fb
# Licensed under the MIT license:
# http://www.opensource.org/licenses/MIT-license
# Copyright (c) 2014 Bernardo Heynemann heynemann@gmail.com
from fish_bundles.version import __version__
| fish-bundles/fb | fish_bundles/__init__.py | Python | mit | 307 |
# import the libraries that you need
import requests
import csv
# make a GET request to the OneSearch X-Service API
response = requests.get('http://onesearch.cuny.edu/PrimoWebServices'
'/xservice/search/brief?'
'&institution=KB'
'&query=any,contains,obama'
'&query=facet_rtype,exact,books'
'&loc=adaptor,primo_central_multiple_fe'
'&loc=local,scope:(KB,AL,CUNY_BEPRESS)'
'&json=true')
# take the JSON from the response
# and store it in a variable called alldata
alldata = response.json()
# drill down into a smaller subset of the json
# and print this smaller bit of json
somedata = alldata['SEGMENTS']['JAGROOT']['RESULT']['FACETLIST']['FACET']\
[1]['FACET_VALUES']
print(somedata)
# open a file called mycsv.csv, then loop through the data
# and write to that file
with open('mycsv.csv', 'w', newline='') as f:  # text mode with newline='' for csv under Python 3
writer = csv.writer(f)
for x in somedata:
writer.writerow([x['@KEY'], x['@VALUE']])
| MarkEEaton/api-workshop | 4-json-to-csv.py | Python | mit | 1,093 |
"""
This is pure Python implementation of comb sort algorithm.
Comb sort is a relatively simple sorting algorithm originally designed by Wlodzimierz
Dobosiewicz in 1980. It was rediscovered by Stephen Lacey and Richard Box in 1991.
Comb sort improves on bubble sort algorithm.
In bubble sort, distance (or gap) between two compared elements is always one.
Comb sort's improvement is that the gap can be much larger than 1, which prevents
the slowdown caused by small values near the end of the list.
More info on: https://en.wikipedia.org/wiki/Comb_sort
For doctests run following command:
python -m doctest -v comb_sort.py
or
python3 -m doctest -v comb_sort.py
For manual testing run:
python comb_sort.py
"""
def comb_sort(data: list) -> list:
"""Pure implementation of comb sort algorithm in Python
:param data: mutable collection with comparable items
:return: the same collection in ascending order
Examples:
>>> comb_sort([0, 5, 3, 2, 2])
[0, 2, 2, 3, 5]
>>> comb_sort([])
[]
>>> comb_sort([99, 45, -7, 8, 2, 0, -15, 3])
[-15, -7, 0, 2, 3, 8, 45, 99]
"""
shrink_factor = 1.3
gap = len(data)
completed = False
while not completed:
# Update the gap value for a next comb
gap = int(gap / shrink_factor)
if gap <= 1:
completed = True
index = 0
while index + gap < len(data):
if data[index] > data[index + gap]:
# Swap values
data[index], data[index + gap] = data[index + gap], data[index]
completed = False
index += 1
return data
if __name__ == "__main__":
import doctest
doctest.testmod()
user_input = input("Enter numbers separated by a comma:\n").strip()
unsorted = [int(item) for item in user_input.split(",")]
print(comb_sort(unsorted))
| TheAlgorithms/Python | sorts/comb_sort.py | Python | mit | 1,851 |
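A worked illustration of the shrinking gap: for a 10-element list the gap before each pass is 7, 5, 3, 2, 1.

```python
# Reproduce the gap sequence comb_sort() uses when len(data) == 10.
gap = 10
while gap > 1:
    gap = int(gap / 1.3)
    print(gap, end=' ')  # prints: 7 5 3 2 1
```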
# -*- coding: utf-8 -*-
from django.forms import widgets
from django.template.loader import render_to_string
from django.utils.translation import gettext_lazy as _
from .conf import settings
class PlacesWidget(widgets.MultiWidget):
template_name = 'places/widgets/places.html'
def __init__(self, attrs=None):
_widgets = (
widgets.TextInput(
attrs={'data-geo': 'formatted_address', 'data-id': 'map_place'}
),
widgets.TextInput(
attrs={
'data-geo': 'lat',
'data-id': 'map_latitude',
'placeholder': _('Latitude'),
}
),
widgets.TextInput(
attrs={
'data-geo': 'lng',
'data-id': 'map_longitude',
'placeholder': _('Longitude'),
}
),
)
super(PlacesWidget, self).__init__(_widgets, attrs)
def decompress(self, value):
if isinstance(value, str):
return value.rsplit(',')
if value:
return [value.place, value.latitude, value.longitude]
        return [None, None, None]  # one initial value per sub-widget (place, lat, lng)
def get_context(self, name, value, attrs):
context = super(PlacesWidget, self).get_context(name, value, attrs)
context['map_widget_height'] = settings.MAP_WIDGET_HEIGHT
context['map_options'] = settings.MAP_OPTIONS
context['marker_options'] = settings.MARKER_OPTIONS
return context
class Media:
js = (
'//maps.googleapis.com/maps/api/js?key={}&libraries=places'.format(
settings.MAPS_API_KEY
),
'places/places.js',
)
css = {'all': ('places/places.css',)}
| oscarmcm/django-places | places/widgets.py | Python | mit | 1,789 |
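An illustration of decompress(), which must hand one value per sub-widget; running it needs a configured Django settings module, and the comma-separated input form is taken from the str branch above:

```python
w = PlacesWidget()
print(w.decompress('Baker Street,51.5237,-0.1586'))
# -> ['Baker Street', '51.5237', '-0.1586']  (place, latitude, longitude)
```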
from django.shortcuts import render, HttpResponseRedirect, redirect
from django.contrib.auth.decorators import login_required
from django.views.generic.edit import CreateView
from django.contrib.auth.models import User
from django.contrib.auth.forms import UserCreationForm
from django.forms.models import inlineformset_factory
from django.core.exceptions import PermissionDenied
from django.core.urlresolvers import reverse_lazy
from .models import Profile
from .forms import ProfileForm
class RegistrationView(CreateView):
model = User
form_class = UserCreationForm
template_name = 'profiles/user_create.html'
success_url = reverse_lazy('profiles:redirect')
@login_required
def account_redirect(request):
return redirect('profiles:edit', pk=request.user.pk)
@login_required
def edit_user(request, pk):
user = User.objects.get(pk=pk)
user_form = ProfileForm(instance=user)
# In the line below list the names of your Profile model fields. These are the ones I used.
ProfileInlineFormset = inlineformset_factory(User, Profile, fields=('preferred_name', 'birthdate',
'interests', 'state'))
formset = ProfileInlineFormset(instance=user)
if request.user.is_authenticated() and request.user.id == user.id:
if request.method == "POST":
user_form = ProfileForm(request.POST, request.FILES, instance=user)
formset = ProfileInlineFormset(request.POST, request.FILES, instance=user)
if user_form.is_valid():
created_user = user_form.save(commit=False)
formset = ProfileInlineFormset(request.POST, request.FILES, instance=created_user)
if formset.is_valid():
created_user.save()
formset.save()
return HttpResponseRedirect('/documentaries/')
return render(request, "profiles/profile_update.html", {
"noodle": pk,
"noodle_form": user_form,
"formset": formset,
})
else:
raise PermissionDenied
| MrCrawdaddy/humans | profiles/views.py | Python | mit | 2,056 |
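These views reverse 'profiles:redirect' and 'profiles:edit', so they presume a namespaced URLconf; a minimal sketch consistent with those names (the URL patterns themselves are assumptions):

```python
# Hypothetical profiles/urls.py; include it with namespace='profiles'.
from django.conf.urls import url
from . import views

urlpatterns = [
    url(r'^register/$', views.RegistrationView.as_view(), name='register'),
    url(r'^redirect/$', views.account_redirect, name='redirect'),
    url(r'^(?P<pk>\d+)/edit/$', views.edit_user, name='edit'),
]
```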
#!/usr/bin/env python
import sct_utils as sct
import os
#path = '/Users/benjamindeleener/data/data_testing/C2-T2/'
contrast = 't2'
#path_output_seg = '/Users/benjamindeleener/data/spinal_cord_segmentation_data/training/labels/'
#path_output_im = '/Users/benjamindeleener/data/spinal_cord_segmentation_data/training/data/'
path = '/Users/benjamindeleener/data/data_testing/test/'
path_output_seg = '/Users/benjamindeleener/data/spinal_cord_segmentation_data/test/labels/'
path_output_im = '/Users/benjamindeleener/data/spinal_cord_segmentation_data/test/data/'
size = '80'
def generate_data_list(folder_dataset, verbose=1):
"""
Construction of the data list from the data set
    This function returns a list of directories (in folder_dataset) in which the contrast is present.
    :return: data_subjects, subjects_dir
"""
data_subjects, subjects_dir = [], []
# each directory in folder_dataset should be a directory of a subject
for subject_dir in os.listdir(folder_dataset):
if not subject_dir.startswith('.') and os.path.isdir(folder_dataset + subject_dir):
data_subjects.append(folder_dataset + subject_dir + '/')
subjects_dir.append(subject_dir)
if not data_subjects:
sct.printv('ERROR: No subject data were found in ' + folder_dataset + '. '
'Please organize your data correctly or provide a correct dataset.',
verbose=verbose, type='error')
return data_subjects, subjects_dir
data_subjects, subjects_name = generate_data_list(path)
current_folder = os.getcwd()
for subject_folder in data_subjects:
print subject_folder
os.chdir(subject_folder+contrast)
sct.run('sct_seg_utility.py -i ' + contrast + '.nii.gz'
' -seg ' + contrast + '_manual_segmentation.nii.gz'
' -ofolder-im ' + path_output_im +
' -ofolder-seg ' + path_output_seg +
' -size ' + size + ' -v 2', verbose=2)
os.chdir(current_folder)
| 3324fr/spinalcordtoolbox | dev/generate_ml_data.py | Python | mit | 2,063 |
"""This example samples from a simple bivariate normal distribution."""
import jass.mcmc as mcmc
import jass.samplers as samplers
import numpy as np
import scipy.stats as stats
import triangle
import matplotlib.pyplot as pl
# Define the log-likelihood function to be a bivariate normal
normal_rv = stats.multivariate_normal(cov=np.identity(2))
# Initialise the chain at the mean
initial = [0.0, 0.0]
sampler = samplers.ComponentWiseSlice()
samples = mcmc.run(sampler, normal_rv.logpdf, initial, 5000)
# Plot the samples
triangle.corner(samples)
pl.show()
| ebnn/jass | examples/normal.py | Python | mit | 563 |
from difflib import get_close_matches
import os
from pathlib import Path
def _safe(fn, fallback):
try:
return fn()
except OSError:
return fallback
def _get_all_bins():
return [exe.name
for path in os.environ.get('PATH', '').split(':')
for exe in _safe(lambda: list(Path(path).iterdir()), [])
if not _safe(exe.is_dir, True)]
def match(command, settings):
return 'not found' in command.stderr and \
bool(get_close_matches(command.script.split(' ')[0],
_get_all_bins()))
def get_new_command(command, settings):
old_command = command.script.split(' ')[0]
new_command = get_close_matches(old_command,
_get_all_bins())[0]
return ' '.join([new_command] + command.script.split(' ')[1:])
| dionyziz/thefuck | thefuck/rules/no_command.py | Python | mit | 848 |
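The rule hinges on difflib.get_close_matches; a small illustration with assumed binary names standing in for what _get_all_bins() scans from PATH:

```python
from difflib import get_close_matches
print(get_close_matches('pyhton', ['python', 'python3', 'pip']))
# -> ['python', 'python3']  ('pip' falls below the default 0.6 cutoff)
```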
# -*- coding: utf-8 -*-
"""
test
~~~~
Flask-CORS is a simple extension to Flask allowing you to support cross
origin resource sharing (CORS) using a simple decorator.
:copyright: (c) 2014 by Cory Dolphin.
:license: MIT, see LICENSE for more details.
"""
from tests.base_test import FlaskCorsTestCase, AppConfigTest
from tests.test_origins import OriginsTestCase
from tests.test_options import OptionsTestCase
from flask import Flask, jsonify
try:
# this is how you would normally import
from flask.ext.cors import *
except:
# support local usage without installed package
from flask_cors import *
class AppExtensionRegexp(AppConfigTest, OriginsTestCase):
def setUp(self):
self.app = Flask(__name__)
CORS(self.app, resources={
r'/': {},
r'/test_list': {'origins': ["http://foo.com", "http://bar.com"]},
r'/test_string': {'origins': 'http://foo.com'},
r'/test_set': {
'origins': set(["http://foo.com", "http://bar.com"])
},
r'/test_subdomain_regex': {
'origins': r"http?://\w*\.?example\.com:?\d*/?.*"
},
r'/test_regex_list': {
'origins': [r".*.example.com", r".*.otherexample.com"]
},
r'/test_regex_mixed_list': {
'origins': ["http://example.com", r".*.otherexample.com"]
}
})
@self.app.route('/')
def wildcard():
return 'Welcome!'
@self.app.route('/test_list')
def test_list():
return 'Welcome!'
@self.app.route('/test_string')
def test_string():
return 'Welcome!'
@self.app.route('/test_set')
def test_set():
return 'Welcome!'
class AppExtensionList(FlaskCorsTestCase):
def setUp(self):
self.app = Flask(__name__)
CORS(self.app, resources=[r'/test_exposed', r'/test_other_exposed'],
origins=['http://foo.com, http://bar.com'])
@self.app.route('/test_unexposed')
def unexposed():
return 'Not exposed over CORS!'
@self.app.route('/test_exposed')
def exposed1():
return 'Welcome!'
@self.app.route('/test_other_exposed')
def exposed2():
return 'Welcome!'
def test_exposed(self):
for resp in self.iter_responses('/test_exposed'):
self.assertEqual(resp.status_code, 200)
self.assertEqual(resp.headers.get(ACL_ORIGIN),
'http://foo.com, http://bar.com')
def test_other_exposed(self):
for resp in self.iter_responses('/test_other_exposed'):
self.assertEqual(resp.status_code, 200)
self.assertEqual(resp.headers.get(ACL_ORIGIN),
'http://foo.com, http://bar.com')
def test_unexposed(self):
for resp in self.iter_responses('/test_unexposed'):
self.assertEqual(resp.status_code, 200)
self.assertFalse(ACL_ORIGIN in resp.headers)
class AppExtensionString(FlaskCorsTestCase):
def setUp(self):
self.app = Flask(__name__)
CORS(self.app, resources=r'/api/*',
headers='Content-Type',
expose_headers='X-Total-Count')
@self.app.route('/api/v1/foo')
def exposed1():
return jsonify(success=True)
@self.app.route('/api/v1/bar')
def exposed2():
return jsonify(success=True)
@self.app.route('/api/v1/special')
@cross_origin(origins='http://foo.com')
def overridden():
return jsonify(special=True)
@self.app.route('/')
def index():
return 'Welcome'
def test_exposed(self):
for path in '/api/v1/foo', '/api/v1/bar':
for resp in self.iter_responses(path):
self.assertEqual(resp.status_code, 200)
self.assertEqual(resp.headers.get(ACL_ORIGIN), '*')
self.assertEqual(resp.headers.get(ACL_EXPOSE_HEADERS),
'X-Total-Count')
def test_unexposed(self):
for resp in self.iter_responses('/'):
self.assertEqual(resp.status_code, 200)
self.assertFalse(ACL_ORIGIN in resp.headers)
self.assertFalse(ACL_EXPOSE_HEADERS in resp.headers)
def test_override(self):
for resp in self.iter_responses('/api/v1/special'):
self.assertEqual(resp.status_code, 200)
self.assertEqual(resp.headers.get(ACL_ORIGIN), 'http://foo.com')
self.assertFalse(ACL_EXPOSE_HEADERS in resp.headers)
class AppExtensionError(FlaskCorsTestCase):
def test_value_error(self):
try:
app = Flask(__name__)
CORS(app, resources=5)
self.assertTrue(False, "Should've raised a value error")
except ValueError:
pass
class AppExtensionDefault(FlaskCorsTestCase):
def test_default(self):
'''
By default match all.
'''
self.app = Flask(__name__)
CORS(self.app)
@self.app.route('/')
def index():
return 'Welcome'
for resp in self.iter_responses('/'):
self.assertEqual(resp.status_code, 200)
self.assertTrue(ACL_ORIGIN in resp.headers)
class AppExtensionExampleApp(FlaskCorsTestCase):
def setUp(self):
self.app = Flask(__name__)
CORS(self.app, resources={
r'/api/*': {'origins': ['http://blah.com', 'http://foo.bar']}
})
@self.app.route('/')
def index():
return ''
@self.app.route('/api/foo')
def test_wildcard():
return ''
@self.app.route('/api/')
def test_exact_match():
return ''
def test_index(self):
'''
If regex does not match, do not set CORS
'''
for resp in self.iter_responses('/'):
self.assertFalse(ACL_ORIGIN in resp.headers)
def test_wildcard(self):
'''
Match anything matching the path /api/* with an origin
of 'http://blah.com' or 'http://foo.bar'
'''
for origin in ['http://foo.bar', 'http://blah.com']:
for resp in self.iter_responses('/api/foo', origin=origin):
self.assertTrue(ACL_ORIGIN in resp.headers)
self.assertEqual(origin, resp.headers.get(ACL_ORIGIN))
def test_exact_match(self):
'''
Match anything matching the path /api/* with an origin
of 'http://blah.com' or 'http://foo.bar'
'''
for origin in ['http://foo.bar', 'http://blah.com']:
for resp in self.iter_responses('/api/', origin=origin):
self.assertTrue(ACL_ORIGIN in resp.headers)
self.assertEqual(origin, resp.headers.get(ACL_ORIGIN))
class AppExtensionCompiledRegexp(FlaskCorsTestCase):
def test_compiled_regex(self):
        '''
        Ensure a precompiled regular expression is accepted as the
        resources parameter.
        '''
import re
self.app = Flask(__name__)
CORS(self.app, resources=re.compile('/api/.*'))
@self.app.route('/')
def index():
return 'Welcome'
@self.app.route('/api/v1')
def example():
return 'Welcome'
for resp in self.iter_responses('/'):
self.assertFalse(ACL_ORIGIN in resp.headers)
for resp in self.iter_responses('/api/v1'):
self.assertTrue(ACL_ORIGIN in resp.headers)
class AppExtensionBadRegexp(FlaskCorsTestCase):
def test_value_error(self):
'''
        Ensure we do not error if the user specifies a bad regular
        expression.
'''
self.app = Flask(__name__)
CORS(self.app, resources="[")
@self.app.route('/')
def index():
return 'Welcome'
for resp in self.iter_responses('/'):
self.assertEqual(resp.status_code, 200)
class AppExtensionOptionsTestCase(OptionsTestCase):
def __init__(self, *args, **kwargs):
super(AppExtensionOptionsTestCase, self).__init__(*args, **kwargs)
def setUp(self):
self.app = Flask(__name__)
CORS(self.app)
def test_defaults(self):
@self.app.route('/test_default')
def test_default():
return 'Welcome!'
super(AppExtensionOptionsTestCase, self).test_defaults()
def test_no_options_and_not_auto(self):
        # This test isn't applicable since the CORS app extension doesn't
        # need to add OPTIONS handling to view functions: it runs in
        # after_request and simply processes the autogenerated Flask
        # OPTIONS response.
pass
def test_options_and_not_auto(self):
self.app.config['CORS_AUTOMATIC_OPTIONS'] = False
@self.app.route('/test_options_and_not_auto', methods=['OPTIONS'])
def test_options_and_not_auto():
return 'Welcome!'
super(AppExtensionOptionsTestCase, self).test_options_and_not_auto()
class AppExtensionSortedResourcesTestCase(FlaskCorsTestCase):
def setUp(self):
        import re
        from flask_cors import _parse_resources
self.resources = _parse_resources({
'/foo': {'origins': 'http://foo.com'},
re.compile(r'/.*'): {
'origins': 'http://some-domain.com'
},
re.compile(r'/api/v1/.*'): {
'origins': 'http://specific-domain.com'
}
})
def test_sorted_order(self):
def _get_pattern(p):
try:
return p.pattern
except AttributeError:
return p
self.assertEqual(
[_get_pattern(reg) for reg, opt in self.resources],
[r'/api/v1/.*', '/foo', r'/.*']
)
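        # The expected order shows that _parse_resources sorts patterns
        # most-specific-first, so '/api/v1/.*' is matched ahead of the
        # catch-all '/.*'.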
if __name__ == "__main__":
unittest.main()
|
hoyjustin/ScopusAdapter
|
CorsTests/test_app_extension.py
|
Python
|
mit
| 9,998
|
import os, sys
import datetime
import iris
import iris.unit as unit
import iris.analysis.cartography
import numpy as np
from iris.coord_categorisation import add_categorised_coord
diag = 'avg.5216'
cube_name_explicit='stratiform_rainfall_rate'
cube_name_param='convective_rainfall_rate'
pp_file_path='/projects/cascade/pwille/moose_retrievals/'
experiment_ids = ['djznw', 'djzny', 'djznq', 'djzns', 'dkjxq', 'dklyu', 'dkmbq', 'dklwu', 'dklzq', 'dkbhu', 'djznu', 'dkhgu' ] # All 12
#experiment_ids = ['djzns', 'dklyu', 'dkmbq', 'dklwu', 'dklzq', 'dkbhu', 'djznu', 'dkhgu' ]
#experiment_ids = [ 'dklwu', 'dklzq', 'dklyu', 'dkmbq', 'dkbhu', 'djznu', 'dkhgu', 'djzns' ]
#experiment_ids = ['djznu', 'dkhgu' ] # High Res
#experiment_ids = ['djznw', 'djzny', 'djznq', 'dkjxq']
#experiment_ids = ['djznw', 'djzny', 'djznq', 'dkmbq', 'dklzq', 'dkjxq' ] # Params
# Load global LAM
dtmindt = datetime.datetime(2011,8,19,0,0,0)
dtmaxdt = datetime.datetime(2011,9,7,23,0,0)
dtmin = unit.date2num(dtmindt, 'hours since 1970-01-01 00:00:00', unit.CALENDAR_STANDARD)
dtmax = unit.date2num(dtmaxdt, 'hours since 1970-01-01 00:00:00', unit.CALENDAR_STANDARD)
time_constraint = iris.Constraint(time= lambda t: dtmin <= t.point <= dtmax)
# Min and max lats lons from smallest model domain (dkbhu) - see spreadsheet
latmin=-10
latmax=5
lonmin=64.115
lonmax=80
lat_constraint=iris.Constraint(grid_latitude= lambda la: latmin <= la.point <= latmax)
lon_constraint=iris.Constraint(grid_longitude= lambda lo: lonmin <= lo.point <= lonmax)
fg = '%sdjzn/djznw/%s.pp' % (pp_file_path, diag)
glob_load = iris.load_cube(fg, ('%s' % cube_name_param) & time_constraint)
## Get time points from global LAM to use as time constraint when loading other runs
time_list = glob_load.coord('time').points
glob_tc = iris.Constraint(time=time_list)
del glob_load
def unrotate_pole_update_cube(cube):
lat = cube.coord('grid_latitude').points
lon = cube.coord('grid_longitude').points
cs = cube.coord_system('CoordSystem')
if isinstance(cs, iris.coord_systems.RotatedGeogCS):
print ' %s - %s - Unrotate pole %s' % (diag, experiment_id, cs)
lons, lats = np.meshgrid(lon, lat)
lons,lats = iris.analysis.cartography.unrotate_pole(lons,lats, cs.grid_north_pole_longitude, cs.grid_north_pole_latitude)
lon=lons[0]
lat=lats[:,0]
for i, coord in enumerate (cube.coords()):
if coord.standard_name=='grid_latitude':
lat_dim_coord_cube = i
if coord.standard_name=='grid_longitude':
lon_dim_coord_cube = i
csur=cs.ellipsoid
cube.remove_coord('grid_latitude')
cube.remove_coord('grid_longitude')
cube.add_dim_coord(iris.coords.DimCoord(points=lat, standard_name='grid_latitude', units='degrees', coord_system=csur), lat_dim_coord_cube)
cube.add_dim_coord(iris.coords.DimCoord(points=lon, standard_name='grid_longitude', units='degrees', coord_system=csur), lon_dim_coord_cube)
return cube
for experiment_id in experiment_ids:
expmin1 = experiment_id[:-1]
fu = '%s%s/%s/%s.pp' % (pp_file_path, expmin1, experiment_id, diag)
flsm = '%s%s/%s/30.pp' % (pp_file_path, expmin1, experiment_id)
print experiment_id
sys.stdout.flush()
try:
#cube_names = ['%s' % cube_name_param, '%s' % cube_name_explicit]
cubeconv = iris.load_cube(fu,'%s' % cube_name_param & glob_tc)
cubeconv= unrotate_pole_update_cube(cubeconv)
cubestrat = iris.load_cube(fu,'%s' % cube_name_explicit & glob_tc)
cubestrat= unrotate_pole_update_cube(cubestrat)
print cubestrat
cube=cubeconv.extract(lat_constraint & lon_constraint) + cubestrat.extract(lat_constraint & lon_constraint)
cube.rename('total_precipitation_rate')
except iris.exceptions.ConstraintMismatchError:
cube = iris.load_cube(fu, ('%s' % cube_name_explicit) & glob_tc)
cube= unrotate_pole_update_cube(cube)
cube = cube.extract(lat_constraint & lon_constraint)
# Mean at each grid point by hour of day and save
add_categorised_coord(cube, 'hour', 'time',lambda coord, x: coord.units.num2date(x).hour)
diurnal_mean_cube = cube.aggregated_by('hour', iris.analysis.MEAN)
del cube
#try:
# iris.save(diurnal_mean_cube, '%s%s/%s/%s_rainfall_hourly_mean.pp' % (pp_file_path, expmin1, experiment_id, diag))
#except Exception, e:
# print e
# pass
# Load land/sea mask
lsm = iris.load_cube(flsm, ('land_binary_mask' ) )
lsm = unrotate_pole_update_cube(lsm)
lsm=lsm.extract(lat_constraint & lon_constraint)
print lsm
sys.stdout.flush()
# For Sea and Land, mask area and calculate mean of each hour for sea/land and SAVE as numpy array
#tdmc= diurnal_mean_cube.collapsed(['grid_latitude', 'grid_longitude'], iris.analysis.MEAN)
#total_diurnal_mean_cube=[tdmc.data.data, diurnal_mean_cube.coord('hour').points+0.5]
#print total_diurnal_mean_cube
#np.save('%s%s/%s/%s_total_rainfall_diurnal_np_domain_constrain_lat_%s-%s_lon-%s-%s' % (pp_file_path, expmin1, experiment_id, diag, latmin, latmax, lonmin, lonmax), total_diurnal_mean_cube)
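    # surface-type loop: lsm == 0 marks sea points; only sea is processed
    # here (extend the list to [0, 1] to also produce the land means)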
for s in ([0]):
nancube = np.where(lsm.data==s, diurnal_mean_cube.data, np.NaN)
maskedcube = np.ma.masked_array(nancube,np.isnan(nancube))
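        # flatten (hour, lat, lon) to (hour, lat*lon) and average across all
        # unmasked grid points to get the diurnal-cycle mean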
total_rainfall = np.mean(maskedcube.reshape(maskedcube.shape[0], (maskedcube.shape[1]*maskedcube.shape[2])), axis=1)
trnp =[total_rainfall.data, diurnal_mean_cube.coord('hour').points+0.5]
if s == 0:
# Areas of ocean
print total_rainfall
np.save('%s%s/%s/%s_sea_rainfall_diurnal_np_domain_constrain_lat_%s-%s_lon-%s-%s' % (pp_file_path, expmin1, experiment_id, diag, latmin, latmax, lonmin, lonmax), trnp)
#np.save('%s%s/%s/%s_sea_rainfall_diurnal_np_domain_constrain_lat_%s-%s_lon-%s-%s_MASKED_ARRAY' % (pp_file_path, expmin1, experiment_id, diag, latmin, latmax, lonmin, lonmax), maskedcube)
if s == 1:
# Areas of land
np.save('%s%s/%s/%s_land_rainfall_diurnal_np_domain_constrain_lat_%s-%s_lon-%s-%s' % (pp_file_path, expmin1, experiment_id, diag, latmin, latmax, lonmin, lonmax), trnp)
#np.save('%s%s/%s/%s_land_rainfall_diurnal_np_domain_constrain_lat_%s-%s_lon-%s-%s_MASKED_ARRAY' % (pp_file_path, expmin1, experiment_id, diag, latmin, latmax, lonmin, lonmax), maskedcube)
del lsm
#tdmc= diurnal_mean_cube.collapsed(['grid_latitude', 'grid_longitude'], iris.analysis.MEAN)
#total_diurnal_mean_cube=tdmc
#np.save('%s%s/%s/%s_total_rainfall_diurnal_np_domain_constrain_lat_%s-%s_lon-%s-%s' % (pp_file_path, expmin1, experiment_id, diag, latmin, latmax, lonmin, lonmax), tdmc.data.data)
#np.save('%s%s/%s/%s_total_rainfall_diurnal_np_domain_constrain_lat_%s-%s_lon-%s-%s_MASKED_ARRAY' % (pp_file_path, expmin1, experiment_id, diag, latmin, latmax, lonmin, lonmax), ma)
|
peterwilletts24/Monsoon-Python-Scripts
|
rain/land_sea_diurnal/rain_mask_save_lat_lon_west_southern_indian_ocean.py
|
Python
|
mit
| 7,070
|
# Tic Tac Toe
import random
def drawBoard(board):
# This function prints out the board that it was passed.
# "board" is a list of 10 strings representing the board (ignore index 0)
print(' | |')
print(' ' + board[7] + ' | ' + board[8] + ' | ' + board[9])
print(' | |')
print('-----------')
print(' | |')
print(' ' + board[4] + ' | ' + board[5] + ' | ' + board[6])
print(' | |')
print('-----------')
print(' | |')
print(' ' + board[1] + ' | ' + board[2] + ' | ' + board[3])
print(' | |')
def inputPlayerLetter():
    # Lets the player type which letter they want to be.
# Returns a list with the player's letter as the first item, and the computer's letter as the second.
letter = ''
while not (letter == 'X' or letter == 'O'):
print('Do you want to be X or O?')
letter = input().upper()
    # the first element in the list is the player's letter, the second is the computer's letter.
if letter == 'X':
return ['X', 'O']
else:
return ['O', 'X']
def whoGoesFirst():
# Randomly choose the player who goes first.
if random.randint(0, 1) == 0:
return 'computer'
else:
return 'player'
def playAgain():
# This function returns True if the player wants to play again, otherwise it returns False.
print('Do you want to play again? (yes or no)')
return input().lower().startswith('y')
def makeMove(board, letter, move):
board[move] = letter
def isWinner(bo, le):
# Given a board and a player's letter, this function returns True if that player has won.
# We use bo instead of board and le instead of letter so we don't have to type as much.
return ((bo[7] == le and bo[8] == le and bo[9] == le) or # across the top
(bo[4] == le and bo[5] == le and bo[6] == le) or # across the middle
(bo[1] == le and bo[2] == le and bo[3] == le) or # across the bottom
(bo[7] == le and bo[4] == le and bo[1] == le) or # down the left side
(bo[8] == le and bo[5] == le and bo[2] == le) or # down the middle
(bo[9] == le and bo[6] == le and bo[3] == le) or # down the right side
(bo[7] == le and bo[5] == le and bo[3] == le) or # diagonal
(bo[9] == le and bo[5] == le and bo[1] == le)) # diagonal
def getBoardCopy(board):
    # Make a duplicate of the board list and return the duplicate.
dupeBoard = []
for i in board:
dupeBoard.append(i)
return dupeBoard
def isSpaceFree(board, move):
# Return true if the passed move is free on the passed board.
return board[move] == ' '
def getPlayerMove(board):
# Let the player type in his move.
move = ' '
while move not in '1 2 3 4 5 6 7 8 9'.split() or not isSpaceFree(board, int(move)):
print('What is your next move? (1-9)')
move = input()
return int(move)
def chooseRandomMoveFromList(board, movesList):
# Returns a valid move from the passed list on the passed board.
# Returns None if there is no valid move.
possibleMoves = []
for i in movesList:
if isSpaceFree(board, i):
possibleMoves.append(i)
if len(possibleMoves) != 0:
return random.choice(possibleMoves)
else:
return None
def getComputerMove(board, computerLetter):
# Given a board and the computer's letter, determine where to move and return that move.
if computerLetter == 'X':
playerLetter = 'O'
else:
playerLetter = 'X'
# Here is our algorithm for our Tic Tac Toe AI:
# First, check if we can win in the next move
for i in range(1, 10):
copy = getBoardCopy(board)
if isSpaceFree(copy, i):
makeMove(copy, computerLetter, i)
if isWinner(copy, computerLetter):
return i
# Check if the player could win on his next move, and block them.
for i in range(1, 10):
copy = getBoardCopy(board)
if isSpaceFree(copy, i):
makeMove(copy, playerLetter, i)
if isWinner(copy, playerLetter):
return i
# Try to take one of the corners, if they are free.
move = chooseRandomMoveFromList(board, [1, 3, 7, 9])
    if move is not None:
return move
# Try to take the center, if it is free.
if isSpaceFree(board, 5):
return 5
# Move on one of the sides.
return chooseRandomMoveFromList(board, [2, 4, 6, 8])
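# A minimal sketch of the move priority above (hypothetical board, left as
# comments so nothing runs before the game loop below): with X on squares 7
# and 8, the blocking rule fires before the corner preference:
# demoBoard = [' '] * 10
# demoBoard[7], demoBoard[8] = 'X', 'X'
# getComputerMove(demoBoard, 'O')  # => 9, blocking X across the top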
def isBoardFull(board):
# Return True if every space on the board has been taken. Otherwise return False.
for i in range(1, 10):
if isSpaceFree(board, i):
return False
return True
print('Welcome to Tic Tac Toe!')
while True:
# Reset the board
theBoard = [' '] * 10
playerLetter, computerLetter = inputPlayerLetter()
turn = whoGoesFirst()
print('The ' + turn + ' will go first.')
gameIsPlaying = True
while gameIsPlaying:
if turn == 'player':
# Player's turn.
drawBoard(theBoard)
move = getPlayerMove(theBoard)
makeMove(theBoard, playerLetter, move)
if isWinner(theBoard, playerLetter):
drawBoard(theBoard)
print('Hooray! You have won the game!')
gameIsPlaying = False
else:
if isBoardFull(theBoard):
drawBoard(theBoard)
print('The game is a tie!')
break
else:
turn = 'computer'
else:
# Computer's turn.
move = getComputerMove(theBoard, computerLetter)
makeMove(theBoard, computerLetter, move)
if isWinner(theBoard, computerLetter):
drawBoard(theBoard)
print('The computer has beaten you! You lose.')
gameIsPlaying = False
else:
if isBoardFull(theBoard):
drawBoard(theBoard)
print('The game is a tie!')
break
else:
turn = 'player'
if not playAgain():
break
|
pathway27/games-prac
|
python3/tictactoe.py
|
Python
|
mit
| 6,138
|
#!/usr/bin/python
"""
@file async_multicast.py
@author Woong Gyu La a.k.a Chris. <juhgiyo@gmail.com>
<http://github.com/juhgiyo/pyserver>
@date March 10, 2016
@brief AsyncMulticast Interface
@version 0.1
@section LICENSE
The MIT License (MIT)
Copyright (c) 2016 Woong Gyu La <juhgiyo@gmail.com>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
@section DESCRIPTION
AsyncMulticast Class.
"""
import asyncio
import socket
import traceback
import threading
from .callback_interface import *
from .async_controller import AsyncController
# noinspection PyDeprecation
import copy
IP_MTU_DISCOVER = 10
IP_PMTUDISC_DONT = 0 # Never send DF frames.
IP_PMTUDISC_WANT = 1 # Use per route hints.
IP_PMTUDISC_DO = 2 # Always DF.
IP_PMTUDISC_PROBE = 3 # Ignore dst pmtu.
'''
Interfaces
variables
- callback_obj
functions
- def send(multicast_addr,port,data)
- def close() # close the socket
- def join(multicast_addr) # start receiving datagram from given multicast group
- def leave(multicast_addr) # stop receiving datagram from given multicast group
- def getgrouplist() # get group list
infos
- multicast address range: 224.0.0.0 - 239.255.255.255
- linux : route add -net 224.0.0.0 netmask 240.0.0.0 dev eth0
to enable multicast
'''
class AsyncMulticast(asyncio.DatagramProtocol):
# enable_loopback : 1 enable loopback / 0 disable loopback
# ttl: 0 - restricted to the same host
# 1 - restricted to the same subnet
# 32 - restricted to the same site
# 64 - restricted to the same region
# 128 - restricted to the same continent
# 255 - unrestricted in scope
def __init__(self, port, callback_obj, ttl=1, enable_loopback=False, bind_addr=''):
# self.lock = threading.RLock()
self.MAX_MTU = 1500
self.callback_obj = None
self.port = port
self.multicastSet = set([])
self.lock = threading.RLock()
self.ttl = ttl
self.enable_loopback = enable_loopback
if callback_obj is not None and isinstance(callback_obj, IUdpCallback):
self.callback_obj = callback_obj
else:
raise Exception('callback_obj is None or not an instance of IUdpCallback class')
try:
self.sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
self.sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
try:
self.sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1)
except AttributeError:
pass # Some systems don't support SO_REUSEPORT
# for both SENDER and RECEIVER to restrict the region
self.sock.setsockopt(socket.IPPROTO_IP, socket.IP_MULTICAST_TTL, self.ttl)
# for SENDER to choose whether to use loop back
if self.enable_loopback:
self.sock.setsockopt(socket.IPPROTO_IP, socket.IP_MULTICAST_LOOP, 1)
else:
self.sock.setsockopt(socket.IPPROTO_IP, socket.IP_MULTICAST_LOOP, 0)
self.bind_addr = bind_addr
if self.bind_addr is None or self.bind_addr == '':
self.bind_addr = socket.gethostbyname(socket.gethostname())
# for both SENDER and RECEIVER to bind to specific network adapter
self.sock.setsockopt(socket.SOL_IP, socket.IP_MULTICAST_IF, socket.inet_aton(self.bind_addr))
# for RECEIVE to receive from multiple multicast groups
self.sock.bind(('', port))
except Exception as e:
print(e)
traceback.print_exc()
self.transport = None
AsyncController.instance().add(self)
if self.callback_obj is not None:
self.callback_obj.on_started(self)
self.loop = asyncio.get_event_loop()
coro = self.loop.create_datagram_endpoint(lambda: self, sock=self.sock)
AsyncController.instance().pause()
(self.transport, _) = self.loop.run_until_complete(coro)
AsyncController.instance().resume()
# Even though UDP is connectionless this is called when it binds to a port
def connection_made(self, transport):
self.transport = transport
    # This is called every time a datagram arrives; for datagram transports
    # asyncio invokes datagram_received, not data_received
    def datagram_received(self, data, addr):
try:
if data and self.callback_obj is not None:
self.callback_obj.on_received(self, addr, data)
except Exception as e:
print(e)
traceback.print_exc()
def connection_lost(self, exc):
self.close()
def close(self):
self.handle_close()
def error_received(self, exc):
self.handle_close()
def handle_close(self):
try:
delete_set = self.getgrouplist()
for multicast_addr in delete_set:
self.sock.setsockopt(socket.SOL_IP, socket.IP_DROP_MEMBERSHIP,
socket.inet_aton(multicast_addr) + socket.inet_aton('0.0.0.0'))
if self.callback_obj is not None:
self.callback_obj.on_leave(self, multicast_addr)
with self.lock:
self.multicastSet = set([])
except Exception as e:
print(e)
print('asyncUdp close called')
self.transport.close()
AsyncController.instance().discard(self)
try:
if self.callback_obj is not None:
self.callback_obj.on_stopped(self)
except Exception as e:
print(e)
traceback.print_exc()
# noinspection PyMethodOverriding
def send(self, hostname, port, data):
if len(data) <= self.MAX_MTU:
self.transport.sendto(data, (hostname, port))
else:
raise ValueError("The data size is too large")
# for RECEIVER to receive datagram from the multicast group
def join(self, multicast_addr):
with self.lock:
if multicast_addr not in self.multicastSet:
self.sock.setsockopt(socket.SOL_IP, socket.IP_ADD_MEMBERSHIP,
socket.inet_aton(multicast_addr) + socket.inet_aton(self.bind_addr))
self.multicastSet.add(multicast_addr)
if self.callback_obj is not None:
self.callback_obj.on_join(self, multicast_addr)
# for RECEIVER to stop receiving datagram from the multicast group
def leave(self, multicast_addr):
with self.lock:
try:
if multicast_addr in self.multicastSet:
self.sock.setsockopt(socket.SOL_IP, socket.IP_DROP_MEMBERSHIP,
socket.inet_aton(multicast_addr) + socket.inet_aton('0.0.0.0'))
self.multicastSet.discard(multicast_addr)
if self.callback_obj is not None:
self.callback_obj.on_leave(self, multicast_addr)
except Exception as e:
print(e)
def getgrouplist(self):
with self.lock:
return copy.copy(self.multicastSet)
    def gethostbyname(self, arg):
        # socket objects have no gethostbyname/gethostname; use the module
        return socket.gethostbyname(arg)

    def gethostname(self):
        return socket.gethostname()
# Echo udp server test
# def readHandle(sock,addr, data):
# sock.send(addr[0],addr[1],data)
# server=AsyncUDP(5005,readHandle)
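# A minimal join/send sketch (hypothetical callback class; any IUdpCallback
# implementation would do):
# mcast = AsyncMulticast(5005, MyUdpCallback(), ttl=1)
# mcast.join('239.1.1.1') # start receiving from this group
# mcast.send('239.1.1.1', 5005, b'hello') # payload must be <= MAX_MTU bytes
# mcast.leave('239.1.1.1')
# mcast.close()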
|
juhgiyo/pyserver3
|
pyserver/network/async_multicast.py
|
Python
|
mit
| 8,329
|
"""
Author: Lloyd Moore <lloyd@workharderplayharder.com>
Usage:
print perm("01", 2)
> ["00", "01", "10", "11"]
print perm("abcd", 2)
> [ 'aa', 'ab', 'ac', 'ad',
'ba', 'bb', 'bc', 'bd',
'ca', 'cb', 'cc', 'cd',
'da', 'db', 'dc', 'dd' ]
"""
def perm(chars, m, wrd="", wrds=[]):
if len(wrd) == m: return wrd
for i in range(0, len(chars)):
w = perm(chars, m, wrd+chars[i])
if type(w) == type(""): wrds.append(w)
return wrds
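if __name__ == "__main__":
    # quick sanity check; with the fresh accumulator above, consecutive
    # top-level calls no longer leak results into each other
    print(perm("01", 2))  # -> ['00', '01', '10', '11']
    print(perm("ab", 2))  # -> ['aa', 'ab', 'ba', 'bb']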
|
ActiveState/code
|
recipes/Python/577031_Simple_Permutations/recipe-577031.py
|
Python
|
mit
| 464
|
"""gui systems to manage actions
"""
import os
from sftoolbox.content import ActionContent, PanelContent
from sftoolboxqt import qtgui, qtcore
from sftoolboxqt.tree import PanelsModel, PanelsTreeWidget
class ActionsTreeWidget(qtgui.QTreeWidget):
"""tree widget holding actions
"""
def startDrag(self, dropAction):
# create mime data object
mime = qtcore.QMimeData()
mime.setData('application/x-item', '???')
# start drag
drag = qtgui.QDrag(self)
drag.setMimeData(mime)
        # drag.start(qtcore.Qt.CopyAction)
drag.exec_(dropAction, qtcore.Qt.MoveAction)
class PanelsWidget(qtgui.QWidget):
"""browser for panels
"""
def __init__(self, project=None, parent=None):
"""construct the browser
"""
super(PanelsWidget, self).__init__(parent=parent)
self.setWindowTitle('Panels Browser')
self._project = project
self._tree_model = PanelsModel(project)
self._tree = self._create_panels_tree_widget(self._tree_model)
layout = qtgui.QVBoxLayout()
layout.addWidget(self._tree)
self.setLayout(layout)
def _create_panels_tree_widget(self, model):
"""return tree widget that will contain the actions
"""
tree = PanelsTreeWidget()
tree.setModel(model)
tree.setSortingEnabled(True)
tree.setDragEnabled(True)
tree.setAcceptDrops(True)
return tree
@property
def project(self):
return self._project
@project.setter
def project(self, value):
self._project = value
self._tree_model.project = value
class ActionsWidget(qtgui.QWidget):
"""browser system for browsing trough the actions
"""
def _create_actions_tree_widget(self):
"""return tree widget that will contain the actions
"""
tree = ActionsTreeWidget()
tree.setHeaderLabels(['Action', 'IDName', 'Tags'])
tree.setSortingEnabled(True)
tree.setDragEnabled(True)
return tree
def __init__(self, project=None, parent=None):
"""construct the browser
"""
super(ActionsWidget, self).__init__(parent=parent)
self.setWindowTitle('Actions Browser')
self._project = project
self._tree_widget = self._create_actions_tree_widget()
layout = qtgui.QVBoxLayout()
layout.addWidget(self._tree_widget)
self.setLayout(layout)
self._refresh_content()
@property
def project(self):
return self._project
@project.setter
def project(self, value):
self._project = value
self._refresh_content()
def _handle_item_double_clicked(self, item):
"""handle doubleclicking item
"""
item.action.run()
def _refresh_content(self):
"""refresh the content
"""
self._tree_widget.clear()
self._tree_widget.itemDoubleClicked.connect(
self._handle_item_double_clicked)
if not self.project:
return
for action in self.project.actions:
item = qtgui.QTreeWidgetItem()
icon_filepath = action.absolute_icon_filepath
if icon_filepath and os.path.exists(icon_filepath):
item.setIcon(0, qtgui.QIcon(icon_filepath))
item.setText(0, action.human_label)
item.setText(1, action.idname)
item.setText(2, ', '.join(map(str, action.tags)))
item.action = action
self._tree_widget.addTopLevelItem(item)
class EditorWidget(qtgui.QWidget):
""""""
def __init__(self, project=None, parent=None):
"""construct the browser
"""
super(EditorWidget, self).__init__(parent=parent)
self.setWindowTitle('Editor')
self._actions_widget = ActionsWidget(project)
self._panels_widget = PanelsWidget(project)
layout = qtgui.QHBoxLayout()
splitter = qtgui.QSplitter(qtcore.Qt.Horizontal)
splitter.addWidget(self._panels_widget)
splitter.addWidget(self._actions_widget)
layout.addWidget(splitter)
self.setLayout(layout)
@property
def project(self):
return self._project
@project.setter
def project(self, value):
self._project = value
self._actions_widget.project = value
self._panels_widget.project = value
def sizeHint(self):
return qtcore.QSize(900, 800)
|
svenfraeys/sftoolbox
|
sftoolboxqt/editor.py
|
Python
|
mit
| 4,569
|
import logging
from pyvisdk.exceptions import InvalidArgumentError
########################################
# Automatically generated, do not edit.
########################################
log = logging.getLogger(__name__)
def VmfsDatastoreCreateSpec(vim, *args, **kwargs):
'''This data object type is used when creating a new VMFS datastore, to create a
specification for the VMFS datastore.'''
obj = vim.client.factory.create('ns0:VmfsDatastoreCreateSpec')
# do some validation checking...
if (len(args) + len(kwargs)) < 3:
        raise IndexError('Expected at least 3 arguments got: %d' % len(args))
required = [ 'partition', 'vmfs', 'diskUuid' ]
optional = [ 'extent', 'dynamicProperty', 'dynamicType' ]
for name, arg in zip(required+optional, args):
setattr(obj, name, arg)
for name, value in kwargs.items():
if name in required + optional:
setattr(obj, name, value)
else:
raise InvalidArgumentError("Invalid argument: %s. Expected one of %s" % (name, ", ".join(required + optional)))
return obj
|
xuru/pyvisdk
|
pyvisdk/do/vmfs_datastore_create_spec.py
|
Python
|
mit
| 1,109
|
import av
import logging
import random
import sys
import time
import warnings
from livestreamer import Livestreamer
from experiments.test_detecting_in_lol_or_not import get_classifier, process_image
from ocr import ocr_image
logging.getLogger("libav.http").setLevel(logging.ERROR)
# Hide warnings from SKLearn from flooding screen
warnings.filterwarnings("ignore", category=DeprecationWarning)
if __name__ == "__main__":
if len(sys.argv) > 1:
streamer = sys.argv[1]
else:
print "Randomly selecting a streamer..."
streamer = random.choice((
"tsm_doublelift",
"grossie_gore",
"wingsofdeath"
))
classifier = get_classifier()
is_in_lol = False
print "Waiting for streamer %s to join a game..." % streamer
while True:
session = Livestreamer()
streams = session.streams('http://www.twitch.tv/%s' % streamer)
if streams:
stream = streams['source']
container = av.open(stream.url)
video_stream = next(s for s in container.streams if s.type == b'video')
image = None
for packet in container.demux(video_stream):
for frame in packet.decode():
image = frame.to_image()
features = process_image(image)
# save our old state before checking new state, only show message when state changes
old_is_in_lol = is_in_lol
is_in_lol = classifier.predict(features)
if not old_is_in_lol and is_in_lol:
timestr = time.strftime("%Y%m%d-%I:%M %p")
print "@@@@@@@@@@ Joined game: %s" % timestr
elif old_is_in_lol and not is_in_lol:
timestr = time.strftime("%Y%m%d-%I:%M %p")
print "@@@@@@@@@@ Left game: %s" % timestr
if is_in_lol:
print "OCR from image, trying to read character name:", ocr_image(image)
# As soon as we get a full image, we're done
if image:
break
time.sleep(1)
else:
print "Player not streaming, sleeping for 15 minutes"
time.sleep(15 * 60)
|
ckcollab/twitch-experiments
|
src/stream_frame_identifier.py
|
Python
|
mit
| 2,329
|
#!/usr/bin/python
import sys
import remote_core as core
import radio_lights
def main(argv):
config = core.load_config()
lights_config_names = {"1":"door_light", "2":"desk_light", "3": "shelf_light"}
if len(argv) == 1 and len(argv[0]) == 2:
if argv[0] == "an":
argv = ["1n", "2n", "3n"]
elif argv[0] == "af":
argv = ["1f", "2f", "3f"]
for item in argv:
if item[-1:] == 'n':
radio_lights.turn_on_single(config["lights"][lights_config_names[item[:1]]])
elif item[-1:] == 'f':
radio_lights.turn_off_single(config["lights"][lights_config_names[item[:1]]])
core.write_config(config)
if __name__ == "__main__":
main(sys.argv[1:])
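# Example invocations (argument grammar inferred from the mapping above:
# digits 1-3 pick a light, a trailing 'n'/'f' means on/off, 'an'/'af' do all):
# ./lights_controller.py 1n 2f -> door_light on, desk_light off
# ./lights_controller.py an -> all three lights on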
|
sradevski/homeAutomate
|
scripts/lights_controller.py
|
Python
|
mit
| 665
|
# Copyright (C) 2002-2007 Python Software Foundation
# Author: Ben Gertzfield
# Contact: email-sig@python.org
"""Base64 content transfer encoding per RFCs 2045-2047.
This module handles the content transfer encoding method defined in RFC 2045
to encode arbitrary 8-bit data using the three 8-bit bytes in four 7-bit
characters encoding known as Base64.
It is used in the MIME standards for email to attach images, audio, and text
using some 8-bit character sets to messages.
This module provides an interface to encode and decode both headers and bodies
with Base64 encoding.
RFC 2045 defines a method for including character set information in an
`encoded-word' in a header. This method is commonly used for 8-bit real names
in To:, From:, Cc:, etc. fields, as well as Subject: lines.
This module does not do the line wrapping or end-of-line character conversion
necessary for proper internationalized headers; it only does dumb encoding and
decoding. To deal with the various line wrapping issues, use the email.header
module.
"""
from __future__ import unicode_literals
from __future__ import division
from __future__ import absolute_import
from future.builtins import range
from future.builtins import bytes
__all__ = [
'body_decode',
'body_encode',
'decode',
'decodestring',
'header_encode',
'header_length',
]
from base64 import b64encode
from binascii import b2a_base64, a2b_base64
CRLF = '\r\n'
NL = '\n'
EMPTYSTRING = ''
# See also Charset.py
MISC_LEN = 7
# Helpers
def header_length(bytearray):
"""Return the length of s when it is encoded with base64."""
groups_of_3, leftover = divmod(len(bytearray), 3)
# 4 bytes out for each 3 bytes (or nonzero fraction thereof) in.
n = groups_of_3 * 4
if leftover:
n += 4
return n
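# Worked example: 10 input bytes -> divmod(10, 3) == (3, 1), so
# 3 * 4 + 4 = 16 base64 characters, matching len(b64encode(b'x' * 10)).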
def header_encode(header_bytes, charset='iso-8859-1'):
"""Encode a single header line with Base64 encoding in a given charset.
charset names the character set to use to encode the header. It defaults
to iso-8859-1. Base64 encoding is defined in RFC 2045.
"""
if not header_bytes:
return ""
if isinstance(header_bytes, str):
header_bytes = header_bytes.encode(charset)
encoded = b64encode(header_bytes).decode("ascii")
return '=?%s?b?%s?=' % (charset, encoded)
def body_encode(s, maxlinelen=76, eol=NL):
r"""Encode a string with base64.
Each line will be wrapped at, at most, maxlinelen characters (defaults to
76 characters).
Each line of encoded text will end with eol, which defaults to "\n". Set
this to "\r\n" if you will be using the result of this function directly
in an email.
"""
if not s:
return s
encvec = []
max_unencoded = maxlinelen * 3 // 4
for i in range(0, len(s), max_unencoded):
# BAW: should encode() inherit b2a_base64()'s dubious behavior in
# adding a newline to the encoded string?
enc = b2a_base64(s[i:i + max_unencoded]).decode("ascii")
if enc.endswith(NL) and eol != NL:
enc = enc[:-1] + eol
encvec.append(enc)
return EMPTYSTRING.join(encvec)
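# Worked example: with the default maxlinelen of 76, at most 57 raw bytes
# (76 * 3 // 4) are encoded per line, so body_encode(b'x' * 60) yields one
# 76-character line plus one 4-character line (newlines excluded).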
def decode(string):
"""Decode a raw base64 string, returning a bytes object.
This function does not parse a full MIME header value encoded with
base64 (like =?iso-8895-1?b?bmloISBuaWgh?=) -- please use the high
level email.header class for that functionality.
"""
if not string:
return bytes()
elif isinstance(string, str):
return a2b_base64(string.encode('raw-unicode-escape'))
else:
return a2b_base64(string)
# For convenience and backwards compatibility w/ standard base64 module
body_decode = decode
decodestring = decode
|
thonkify/thonkify
|
src/lib/future/backports/email/base64mime.py
|
Python
|
mit
| 3,724
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Post-download comic organizing module
======================================
See the numbered comments at the bottom for the organizing workflow.
"""
from __future__ import print_function, unicode_literals
import os, re
import shutil
import random, string
from hcomic.lib.filetool.files import WinDir, FileCollection
from hcomic.lib.filetool.winzip import zip_everything_in_a_folder
from hcomic.lib.logger import EZLogger
from hcomic.datamodel import ComicBook
logger = EZLogger("hcomic")
def non_img_filter(winfile):
"""筛选出所有扩展名不是.jpg和.png的, 或文件大小小于50KB的。这些图片都应该
被删除。
"""
if not ((winfile.ext in [".jpg", ".png"]) and (winfile.size_on_disk >= 50 * 1024)):
return True
else:
return False
#--- Directory Level Method ---
def remove_non_image(dir_path):
"""删除目录下的所有非漫画图片的文件。
"""
for abspath in FileCollection.from_path_by_criterion(dir_path, non_img_filter):
os.remove(abspath)
def serialize_fname(dir_path, sort_by):
"""将一个目录下的所有文件从0到9999按照一定顺序重命名。
"""
fc = FileCollection.from_path(dir_path)
fc.sort_by(sort_by)
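    # two-pass rename: first move everything to a random prefix so the final
    # zero-padded names cannot collide with names already in the folder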
prefix = "".join(random.sample(string.ascii_letters, 8))
counter = 0
for winfile in fc.iterfiles():
counter += 1
winfile.rename(new_fname=prefix + str(counter).zfill(4))
counter = 0
for winfile in fc.iterfiles():
counter += 1
winfile.rename(new_fname=str(counter).zfill(4))
def correct_pic_num_for_one_folder(dir_path):
"""根据图片的数量, 更正漫画的文件夹名。
"""
def extract_pic_num_part(text):
"""漫画中用于标识有多少张图片的字符串为"[xxxP]"
本函数的功能就是从文件名中抓取出这部分字符串, 若
没有标注图片数的字符串, 返回None.
"""
res = re.findall(r"\[\d+P\]", text)
if len(res) == 1:
pic_num_part = res[0]
return pic_num_part
else:
return None
windir = WinDir(dir_path)
basename = windir.basename
windir.get_detail()
pic_num_part = extract_pic_num_part(basename)
if pic_num_part:
basename = basename.replace(pic_num_part, "[%sP]" % windir.num_file_current)
else:
basename = basename + "[%sP]" % windir.num_file_current
dir_path_new = os.path.join(windir.dirname, basename)
os.rename(dir_path, dir_path_new)
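# e.g. a folder named "Foo[20P]" that actually holds 18 files is renamed to
# "Foo[18P]"; a folder without a "[...P]" marker gets one appended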
#--- Workspace Level Method ---
def remove_non_image_and_rename(workspace, sort_by):
"""删除工作目录下的无关Html文件, 对漫画文件夹内的文件进行重命名。
- 对于网页另存为的图片, 使用 ``ctime`` (create time)
- 对于已经排序好的图片, 使用 ``fname`` (file name)
"""
logger.show("Remove non image and rename image ...")
for abspath in FileCollection.yield_all_top_file_path(workspace):
if abspath.endswith(".html") or abspath.endswith(".htm"):
os.remove(abspath)
for dir_path in FileCollection.yield_all_top_dir_path(workspace):
remove_non_image(dir_path)
serialize_fname(dir_path, sort_by)
logger.show("Complete!", 1)
def correct_pic_num(workspace):
"""添加正确的图片数量。
"""
logger.show("Correct pic number ...")
for dir_path in FileCollection.yield_all_top_dir_path(workspace):
correct_pic_num_for_one_folder(dir_path)
logger.show("Complete!", 1)
def make_archive(workspace):
"""制作漫画压缩包。
"""
logger.show("Make .cbz archive ...")
for dir_path in FileCollection.yield_all_top_dir_path(workspace):
logger.show("Process %s ..." % dir_path, 1)
zip_everything_in_a_folder(dir_path, dir_path + ".cbz")
logger.show("Complete!", 1)
def comic_archive_filter(winfile):
"""漫画文件过滤器。
"""
if winfile.ext == ".cbz":
res = re.findall(r"\[\d+P\]", winfile.fname)
if len(res) == 1:
return True
return False
def organize(src, dst):
"""将漫画归档到某一个目录下。
"""
# check input
if not (os.path.exists(src) and os.path.isdir(src)):
raise Exception("'%s' doens't exist or not a directory" % src)
src = os.path.abspath(src)
if not (os.path.exists(dst) and os.path.isdir(dst)):
raise Exception("'%s' doens't exist or not a directory" % dst)
dst = os.path.abspath(dst)
# organize comic archive
for winfile in FileCollection.from_path_by_criterion(
src, comic_archive_filter).iterfiles():
try:
logger.show("Process '%s' ..." % winfile)
# get destination absolute path
comicbook = ComicBook.from_text(winfile.fname)
abspath = os.path.join(
dst, comicbook.organized_path + winfile.ext)
dir_path, _ = os.path.split(abspath)
# create dir if not exists
if not os.path.exists(dir_path):
os.makedirs(dir_path)
# move archive
if not os.path.exists(abspath):
os.rename(winfile.abspath, abspath)
logger.show("Success!", 1)
except Exception as e:
print(" Failed! Error: %s" % e)
if __name__ == "__main__":
from hcomic.config import DOWNLOAD_DIR, PROCESSED_DIR
    # The comic organizing workflow is as follows:
    # 1. Remove stray html files from the workspace and rename the files in each comic folder.
    # remove_non_image_and_rename(DOWNLOAD_DIR, sort_by="ctime")
    # 2. Delete unneeded cover images and scanlation-group promo images.
    # (delete cover images by hand)
    # 3. Rename the files inside each comic folder once more.
    # remove_non_image_and_rename(DOWNLOAD_DIR, sort_by="ctime")
    # 4. Write the correct image count into each folder name.
    # correct_pic_num(DOWNLOAD_DIR)
    # 5. Build the .cbz comic archives.
    # make_archive(DOWNLOAD_DIR)
    # 6. Organize them into place by the naming rules.
    # organize(PROCESSED_DIR, PROCESSED_DIR)
|
angoraking/hcomic-project
|
hcomic/manage.py
|
Python
|
mit
| 6,153
|
from __future__ import print_function
import time, json
# Run this as 'watch python misc/dump-stats.py' against a 'wormhole-server
# start --stats-file=stats.json'
with open("stats.json") as f:
data_s = f.read()
now = time.time()
data = json.loads(data_s)
if now < data["valid_until"]:
valid = "valid"
else:
valid = "EXPIRED"
age = now - data["created"]
print("age: %d (%s)" % (age, valid))
print(data_s)
|
warner/magic-wormhole
|
misc/dump-stats.py
|
Python
|
mit
| 421
|
from concurrent.futures import ThreadPoolExecutor
import grpc
import pytest
from crawler.services import Library
import crawler_pb2_grpc
@pytest.fixture(name='grpc_client', scope='session', autouse=True)
def setup_grpc_client():
server = grpc.server(ThreadPoolExecutor(max_workers=4))
crawler_pb2_grpc.add_LibraryServicer_to_server(Library(), server)
port = server.add_insecure_port('[::]:0')
server.start()
with grpc.insecure_channel(f'localhost:{port}') as channel:
yield crawler_pb2_grpc.LibraryStub(channel)
server.stop(0)
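# A minimal sketch of a test using this fixture (hypothetical RPC and request
# type; the real names live in crawler_pb2 / crawler_pb2_grpc):
#
# def test_crawl(grpc_client):
#     response = grpc_client.Crawl(crawler_pb2.CrawlRequest(url='http://example.com'))
#     assert response is not None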
|
xavierdutreilh/robots.midgar.fr
|
services/crawler/tests/conftest.py
|
Python
|
mit
| 564
|
#! /usr/bin/env python
import os
import sys
from IPython.terminal.embed import InteractiveShellEmbed
from mongoalchemy.session import Session
HERE = os.path.abspath(os.path.dirname(__file__))
ROOT = os.path.join(HERE, '..')
sys.path.append(ROOT)
from server.model.user import User # noqa
from server.model.notification import Notification # noqa
from server.settings import config # noqa
config.configure()
session = Session.connect(config.get("mongo_database_name"))
ipshell = InteractiveShellEmbed()
banner = "[*] Import the model you want to query: from server.model.{model_name} import {Model}" # noqa
ipshell(banner)
|
jf-parent/webbase
|
{{cookiecutter.project_name}}/scripts/query.py
|
Python
|
mit
| 634
|
def suma(a, b):
return a+b
def resta(a, b):
    return a-b
|
LeonRave/Tarea_Git
|
a.py
|
Python
|
mit
| 66
|
"""Mongodb implementations of repository sessions."""
# pylint: disable=no-init
# Numerous classes don't require __init__.
# pylint: disable=too-many-public-methods,too-few-public-methods
# Number of methods are defined in specification
# pylint: disable=protected-access
# Access to protected methods allowed in package mongo package scope
# pylint: disable=too-many-ancestors
# Inheritance defined in specification
from bson.objectid import ObjectId
from . import objects
from . import queries
from . import searches
from .. import MONGO_LISTENER
from .. import utilities
from ...abstract_osid.id.primitives import Id as ABCId
from ...abstract_osid.repository import sessions as abc_repository_sessions
from ...abstract_osid.repository.objects import AssetForm as ABCAssetForm
from ...abstract_osid.repository.objects import CompositionForm as ABCCompositionForm
from ...abstract_osid.repository.objects import RepositoryForm as ABCRepositoryForm
from ...abstract_osid.type.primitives import Type as ABCType
from ..id.objects import IdList
from ..list_utilities import move_id_ahead, move_id_behind, order_ids
from ..osid.sessions import OsidSession
from ..primitives import Id
from ..primitives import Type
from ..utilities import MongoClientValidated
from dlkit.abstract_osid.osid import errors
from dlkit.mongo.osid import sessions as osid_sessions
from dlkit.primordium.id.primitives import Id
DESCENDING = -1
ASCENDING = 1
CREATED = True
UPDATED = True
ENCLOSURE_RECORD_TYPE = Type(
identifier='enclosure',
namespace='osid-object',
authority='ODL.MIT.EDU')
COMPARATIVE = 0
PLENARY = 1
ACTIVE = 0
ANY_STATUS = 1
SEQUESTERED = 0
UNSEQUESTERED = 1
class AssetLookupSession(abc_repository_sessions.AssetLookupSession, osid_sessions.OsidSession):
"""This session defines methods for retrieving assets.
An ``Asset`` represents an element of content stored in a
Repository.
This lookup session defines several views:
* comparative view: elements may be silently omitted or re-ordered
* plenary view: provides a complete result set or is an error
condition
* isolated repository view: All asset methods in this session
operate, retrieve and pertain to assets defined explicitly in
the current repository. Using an isolated view is useful for
managing ``Assets`` with the ``AssetAdminSession.``
* federated repository view: All asset methods in this session
operate, retrieve and pertain to all assets defined in this
repository and any other assets implicitly available in this
repository through repository inheritence.
The methods ``use_federated_repository_view()`` and
``use_isolated_repository_view()`` behave as a radio group and one
should be selected before invoking any lookup methods.
    Assets may have additional records indicated by their respective
record types. The record may not be accessed through a cast of the
``Asset``.
"""
def __init__(self, catalog_id=None, proxy=None, runtime=None, **kwargs):
OsidSession.__init__(self)
self._catalog_class = objects.Repository
self._session_name = 'AssetLookupSession'
self._catalog_name = 'Repository'
OsidSession._init_object(
self,
catalog_id,
proxy,
runtime,
db_name='repository',
cat_name='Repository',
cat_class=objects.Repository)
self._kwargs = kwargs
def get_repository_id(self):
"""Gets the ``Repository`` ``Id`` associated with this session.
return: (osid.id.Id) - the ``Repository Id`` associated with
this session
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for osid.resource.ResourceLookupSession.get_bin_id
return self._catalog_id
repository_id = property(fget=get_repository_id)
def get_repository(self):
"""Gets the ``Repository`` associated with this session.
return: (osid.repository.Repository) - the ``Repository``
associated with this session
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for osid.resource.ResourceLookupSession.get_bin
return self._catalog
repository = property(fget=get_repository)
def can_lookup_assets(self):
"""Tests if this user can perform ``Asset`` lookups.
A return of true does not guarantee successful authorization. A
return of false indicates that it is known all methods in this
session will result in a ``PermissionDenied``. This is intended
as a hint to an application that may opt not to offer lookup
operations.
return: (boolean) - ``false`` if lookup methods are not
authorized, ``true`` otherwise
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceLookupSession.can_lookup_resources
# NOTE: It is expected that real authentication hints will be
# handled in a service adapter above the pay grade of this impl.
return True
def use_comparative_asset_view(self):
"""The returns from the lookup methods may omit or translate elements based on this session, such as
authorization, and not result in an error.
This view is used when greater interoperability is desired at
the expense of precision.
        *compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceLookupSession.use_comparative_resource_view
self._use_comparative_object_view()
def use_plenary_asset_view(self):
"""A complete view of the ``Asset`` returns is desired.
Methods will return what is requested or result in an error.
This view is used when greater precision is desired at the
expense of interoperability.
        *compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceLookupSession.use_plenary_resource_view
self._use_plenary_object_view()
def use_federated_repository_view(self):
"""Federates the view for methods in this session.
A federated view will include assets in repositories which are
children of this repository in the repository hierarchy.
        *compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceLookupSession.use_federated_bin_view
self._use_federated_catalog_view()
def use_isolated_repository_view(self):
"""Isolates the view for methods in this session.
An isolated view restricts lookups to this repository only.
        *compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceLookupSession.use_isolated_bin_view
self._use_isolated_catalog_view()
@utilities.arguments_not_none
def get_asset(self, asset_id):
"""Gets the ``Asset`` specified by its ``Id``.
In plenary mode, the exact ``Id`` is found or a ``NotFound``
results. Otherwise, the returned ``Asset`` may have a different
``Id`` than requested, such as the case where a duplicate ``Id``
was assigned to an ``Asset`` and retained for compatibility.
arg: asset_id (osid.id.Id): the ``Id`` of the ``Asset`` to
retrieve
return: (osid.repository.Asset) - the returned ``Asset``
raise: NotFound - no ``Asset`` found with the given ``Id``
raise: NullArgument - ``asset_id`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceLookupSession.get_resource
# NOTE: This implementation currently ignores plenary view
collection = MongoClientValidated('repository',
collection='Asset',
runtime=self._runtime)
result = collection.find_one(
dict({'_id': ObjectId(self._get_id(asset_id, 'repository').get_identifier())},
**self._view_filter()))
return objects.Asset(result, runtime=self._runtime)
@utilities.arguments_not_none
def get_assets_by_ids(self, asset_ids):
"""Gets an ``AssetList`` corresponding to the given ``IdList``.
In plenary mode, the returned list contains all of the assets
specified in the ``Id`` list, in the order of the list,
including duplicates, or an error results if an ``Id`` in the
supplied list is not found or inaccessible. Otherwise,
inaccessible ``Assets`` may be omitted from the list and may
present the elements in any order including returning a unique
set.
arg: asset_ids (osid.id.IdList): the list of ``Ids`` to
retrieve
return: (osid.repository.AssetList) - the returned ``Asset
list``
raise: NotFound - an ``Id`` was not found
raise: NullArgument - ``asset_ids`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceLookupSession.get_resources_by_ids
# NOTE: This implementation currently ignores plenary view
collection = MongoClientValidated('repository',
collection='Asset',
runtime=self._runtime)
object_id_list = []
for i in asset_ids:
object_id_list.append(ObjectId(self._get_id(i, 'repository').get_identifier()))
result = collection.find(
dict({'_id': {'$in': object_id_list}},
**self._view_filter()))
result = list(result)
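        # Mongo's $in does not preserve the requested order, so walk the
        # supplied ids and re-sort the results to match asset_ids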
sorted_result = []
for object_id in object_id_list:
for object_map in result:
if object_map['_id'] == object_id:
sorted_result.append(object_map)
break
return objects.AssetList(sorted_result, runtime=self._runtime)
@utilities.arguments_not_none
def get_assets_by_genus_type(self, asset_genus_type):
"""Gets an ``AssetList`` corresponding to the given asset genus ``Type`` which does not include assets of types
derived from the specified ``Type``.
In plenary mode, the returned list contains all known assets or
an error results. Otherwise, the returned list may contain only
those assets that are accessible through this session.
arg: asset_genus_type (osid.type.Type): an asset genus type
return: (osid.repository.AssetList) - the returned ``Asset
list``
raise: NullArgument - ``asset_genus_type`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceLookupSession.get_resources_by_genus_type
# NOTE: This implementation currently ignores plenary view
collection = MongoClientValidated('repository',
collection='Asset',
runtime=self._runtime)
result = collection.find(
dict({'genusTypeId': str(asset_genus_type)},
**self._view_filter())).sort('_id', DESCENDING)
return objects.AssetList(result, runtime=self._runtime)
@utilities.arguments_not_none
def get_assets_by_parent_genus_type(self, asset_genus_type):
"""Gets an ``AssetList`` corresponding to the given asset genus ``Type`` and include any additional assets with
genus types derived from the specified ``Type``.
In plenary mode, the returned list contains all known assets or
an error results. Otherwise, the returned list may contain only
those assets that are accessible through this session.
arg: asset_genus_type (osid.type.Type): an asset genus type
return: (osid.repository.AssetList) - the returned ``Asset
list``
raise: NullArgument - ``asset_genus_type`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceLookupSession.get_resources_by_parent_genus_type
return objects.AssetList([])
@utilities.arguments_not_none
def get_assets_by_record_type(self, asset_record_type):
"""Gets an ``AssetList`` containing the given asset record ``Type``.
In plenary mode, the returned list contains all known assets or
an error results. Otherwise, the returned list may contain only
those assets that are accessible through this session.
arg: asset_record_type (osid.type.Type): an asset record type
return: (osid.repository.AssetList) - the returned ``Asset
list``
raise: NullArgument - ``asset_record_type`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceLookupSession.get_resources_by_record_type
# STILL NEED TO IMPLEMENT!!!
return objects.AssetList([])
@utilities.arguments_not_none
def get_assets_by_provider(self, resource_id):
"""Gets an ``AssetList`` from the given provider.
In plenary mode, the returned list contains all known assets or
an error results. Otherwise, the returned list may contain only
those assets that are accessible through this session.
arg: resource_id (osid.id.Id): a resource ``Id``
return: (osid.repository.AssetList) - the returned ``Asset
list``
raise: NullArgument - ``resource_id`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
raise errors.Unimplemented()
def get_assets(self):
"""Gets all ``Assets``.
In plenary mode, the returned list contains all known assets or
an error results. Otherwise, the returned list may contain only
those assets that are accessible through this session.
return: (osid.repository.AssetList) - a list of ``Assets``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceLookupSession.get_resources
# NOTE: This implementation currently ignores plenary view
collection = MongoClientValidated('repository',
collection='Asset',
runtime=self._runtime)
result = collection.find(self._view_filter()).sort('_id', DESCENDING)
return objects.AssetList(result, runtime=self._runtime)
assets = property(fget=get_assets)
class AssetQuerySession(abc_repository_sessions.AssetQuerySession, osid_sessions.OsidSession):
"""This session provides methods for searching among ``Asset`` objects.
The search query is constructed using the ``AssetQuery``.
This session defines views that offer differing behaviors for
searching.
* federated repository view: searches include assets in
      repositories of which this repository is an ancestor in the
repository hierarchy
* isolated repository view: searches are restricted to assets in
this repository
Assets may have a query record indicated by their respective record
types. The query record is accessed via the ``AssetQuery``.
"""
def __init__(self, catalog_id=None, proxy=None, runtime=None, **kwargs):
OsidSession.__init__(self)
self._catalog_class = objects.Repository
self._session_name = 'AssetQuerySession'
self._catalog_name = 'Repository'
OsidSession._init_object(
self,
catalog_id,
proxy,
runtime,
db_name='repository',
cat_name='Repository',
cat_class=objects.Repository)
self._kwargs = kwargs
def get_repository_id(self):
"""Gets the ``Repository`` ``Id`` associated with this session.
return: (osid.id.Id) - the ``Repository Id`` associated with
this session
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for osid.resource.ResourceLookupSession.get_bin_id
return self._catalog_id
repository_id = property(fget=get_repository_id)
def get_repository(self):
"""Gets the ``Repository`` associated with this session.
return: (osid.repository.Repository) - the ``Repository``
associated with this session
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for osid.resource.ResourceLookupSession.get_bin
return self._catalog
repository = property(fget=get_repository)
def can_search_assets(self):
"""Tests if this user can perform ``Asset`` searches.
A return of true does not guarantee successful authorization. A
return of false indicates that it is known all methods in this
session will result in a ``PermissionDenied``. This is intended
as a hint to an application that may opt not to offer search
operations to unauthorized users.
return: (boolean) - ``false`` if search methods are not
authorized, ``true`` otherwise
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceQuerySession.can_search_resources
# NOTE: It is expected that real authentication hints will be
# handled in a service adapter above the pay grade of this impl.
return True
def use_federated_repository_view(self):
"""Federates the view for methods in this session.
A federated view will include assets in repositories which are
children of this repository in the repository hierarchy.
        *compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceLookupSession.use_federated_bin_view
self._use_federated_catalog_view()
def use_isolated_repository_view(self):
"""Isolates the view for methods in this session.
An isolated view restricts lookups to this repository only.
        *compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceLookupSession.use_isolated_bin_view
self._use_isolated_catalog_view()
def get_asset_query(self):
"""Gets an asset query.
return: (osid.repository.AssetQuery) - the asset query
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceQuerySession.get_resource_query_template
return queries.AssetQuery(runtime=self._runtime)
asset_query = property(fget=get_asset_query)
@utilities.arguments_not_none
def get_assets_by_query(self, asset_query):
"""Gets a list of ``Assets`` matching the given asset query.
arg: asset_query (osid.repository.AssetQuery): the asset
query
return: (osid.repository.AssetList) - the returned ``AssetList``
raise: NullArgument - ``asset_query`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
raise: Unsupported - the ``asset_query`` is not of this service
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceQuerySession.get_resources_by_query
and_list = list()
or_list = list()
for term in asset_query._query_terms:
and_list.append({term: asset_query._query_terms[term]})
for term in asset_query._keyword_terms:
or_list.append({term: asset_query._keyword_terms[term]})
if or_list:
and_list.append({'$or': or_list})
view_filter = self._view_filter()
if view_filter:
and_list.append(view_filter)
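        # Illustrative shape (hypothetical terms): one match term and two
        # keyword terms under an isolated view assemble into
        # {'$and': [{'displayName.text': 'foo'},
        #           {'$or': [{'description.text': 'bar'},
        #                    {'displayName.text': 'bar'}]},
        #           {'assignedRepositoryIds': {'$in': ['...']}}]}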
        query_terms = {'$and': and_list} if and_list else {}
collection = MongoClientValidated('repository',
collection='Asset',
runtime=self._runtime)
result = collection.find(query_terms).sort('_id', DESCENDING)
return objects.AssetList(result, runtime=self._runtime)
class AssetSearchSession(abc_repository_sessions.AssetSearchSession, AssetQuerySession):
"""This session provides methods for searching among ``Asset`` objects.
The search query is constructed using the ``AssetQuery``.
``get_assets_by_query()`` is the basic search method and returns a
list of ``Assets``. A more advanced search may be performed with
``getAssetsBySearch()``. It accepts an ``AssetSearch`` in addition
to the query for the purpose of specifying additional options
affecting the entire search, such as ordering.
``get_assets_by_search()`` returns an ``AssetSearchResults`` that
can be used to access the resulting ``AssetList`` or be used to
    perform a search within the result set through ``AssetSearch``.
This session defines views that offer differing behaviors for
searching.
* federated repository view: searches include assets in
    repositories of which this repository is an ancestor in the
repository hierarchy
* isolated repository view: searches are restricted to assets in
this repository
Assets may have a query record indicated by their respective record
types. The query record is accessed via the ``AssetQuery``.
"""
def get_asset_search(self):
"""Gets an asset search.
return: (osid.repository.AssetSearch) - the asset search
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceSearchSession.get_resource_search_template
return searches.AssetSearch(runtime=self._runtime)
asset_search = property(fget=get_asset_search)
def get_asset_search_order(self):
"""Gets an asset search order.
The ``AssetSearchOrder`` is supplied to an ``AssetSearch`` to
specify the ordering of results.
return: (osid.repository.AssetSearchOrder) - the asset search
order
*compliance: mandatory -- This method must be implemented.*
"""
raise errors.Unimplemented()
asset_search_order = property(fget=get_asset_search_order)
@utilities.arguments_not_none
def get_assets_by_search(self, asset_query, asset_search):
"""Gets the search results matching the given search query using the given search.
arg: asset_query (osid.repository.AssetQuery): the asset
query
arg: asset_search (osid.repository.AssetSearch): the asset
search
return: (osid.repository.AssetSearchResults) - the asset search
results
raise: NullArgument - ``asset_query`` or ``asset_search`` is
``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
raise: Unsupported - ``asset_query`` or ``asset_search`` is not
of this service
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceSearchSession.get_resources_by_search_template
# Copied from osid.resource.ResourceQuerySession.get_resources_by_query_template
and_list = list()
or_list = list()
for term in asset_query._query_terms:
and_list.append({term: asset_query._query_terms[term]})
for term in asset_query._keyword_terms:
or_list.append({term: asset_query._keyword_terms[term]})
if asset_search._id_list is not None:
identifiers = [ObjectId(i.identifier) for i in asset_search._id_list]
and_list.append({'_id': {'$in': identifiers}})
if or_list:
and_list.append({'$or': or_list})
view_filter = self._view_filter()
if view_filter:
and_list.append(view_filter)
        query_terms = {'$and': and_list} if and_list else {}
collection = MongoClientValidated('repository',
collection='Asset',
runtime=self._runtime)
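        # A pymongo Cursor supports slicing: cursor[start:end] is applied as
        # skip/limit on the server rather than fetching the full result set.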
if asset_search.start is not None and asset_search.end is not None:
result = collection.find(query_terms)[asset_search.start:asset_search.end]
else:
result = collection.find(query_terms)
return searches.AssetSearchResults(result, runtime=self._runtime)
@utilities.arguments_not_none
def get_asset_query_from_inspector(self, asset_query_inspector):
"""Gets an asset query from an inspector.
The inspector is available from a ``AssetSearchResults``.
arg: asset_query_inspector
(osid.repository.AssetQueryInspector): an asset query
inspector
return: (osid.repository.AssetQuery) - the asset query
raise: NullArgument - ``asset_query_inspector`` is ``null``
raise: Unsupported - ``asset_query_inspector`` is not of this
service
*compliance: mandatory -- This method must be implemented.*
"""
raise errors.Unimplemented()
class AssetAdminSession(abc_repository_sessions.AssetAdminSession, osid_sessions.OsidSession):
"""This session creates, updates, and deletes ``Assets``.
The data for create and update is provided by the consumer via the
form object. ``OsidForms`` are requested for each create or update
and may not be reused.
Create and update operations differ in their usage. To create an
``Asset,`` an ``AssetForm`` is requested using
``get_asset_form_for_create()`` specifying the desired record
``Types`` or none if no record ``Types`` are needed. The returned
    ``AssetForm`` will indicate that it is to be used with a create
    operation and can be used to examine metadata or validate data prior
    to creation. Once the ``AssetForm`` is submitted to a create
operation, it cannot be reused with another create operation unless
the first operation was unsuccessful. Each ``AssetForm`` corresponds
to an attempted transaction.
For updates, ``AssetForms`` are requested to the ``Asset`` ``Id``
that is to be updated using ``getAssetFormForUpdate()``. Similarly,
the ``AssetForm`` has metadata about the data that can be updated
and it can perform validation before submitting the update. The
``AssetForm`` can only be used once for a successful update and
cannot be reused.
The delete operations delete ``Assets``. To unmap an ``Asset`` from
the current ``Repository,`` the ``AssetRepositoryAssignmentSession``
should be used. These delete operations attempt to remove the
    ``Asset`` itself, thus removing it from all known ``Repository``
catalogs.
This session includes an ``Id`` aliasing mechanism to assign an
external ``Id`` to an internally assigned Id.
The view of the administrative methods defined in this session is
determined by the provider. For an instance of this session where no
repository has been specified, it may not be parallel to the
``AssetLookupSession``. For example, a default
``AssetLookupSession`` may view the entire repository hierarchy
while the default ``AssetAdminSession`` uses an isolated
    ``Repository`` to create new ``Assets`` or a specific repository to
operate on a predetermined set of ``Assets``. Another scenario is a
federated provider who does not wish to permit administrative
    operations for consumers unaware of the federation.
Example create:
if (!session.canCreateAssets()) {
return "asset creation not permitted";
}
Type types[1];
types[0] = assetPhotographType;
if (!session.canCreateAssetWithRecordTypes(types)) {
return "creating an asset with a photograph type is not supported";
}
AssetForm form = session.getAssetFormForCreate();
Metadata metadata = form.getDisplayNameMetadata();
if (metadata.isReadOnly()) {
return "cannot set display name";
}
form.setDisplayName("my photo");
    PhotographRecordForm photoForm = (PhotographRecordForm) form.getRecordForm(assetPhotographType);
    Metadata metadata = photoForm.getApertureMetadata();
if (metadata.isReadOnly()) {
return ("cannot set aperture");
}
photoForm.setAperture("5.6");
if (!form.isValid()) {
return form.getValidationMessage();
}
Asset newAsset = session.createAsset(form);
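
    A rough Python sketch of the same flow against this implementation
    (hedged: ``session`` is assumed to be an ``AssetAdminSession`` from
    this module, and a record-less create is shown):
      if not session.can_create_assets():
          return 'asset creation not permitted'
      form = session.get_asset_form_for_create([])
      form.set_display_name('my photo')
      if not form.is_valid():
          return form.get_validation_message()
      new_asset = session.create_asset(form)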
"""
def __init__(self, catalog_id=None, proxy=None, runtime=None, **kwargs):
OsidSession.__init__(self)
self._catalog_class = objects.Repository
self._session_name = 'AssetAdminSession'
self._catalog_name = 'Repository'
OsidSession._init_object(
self,
catalog_id,
proxy,
runtime,
db_name='repository',
cat_name='Repository',
cat_class=objects.Repository)
self._forms = dict()
self._kwargs = kwargs
def get_repository_id(self):
"""Gets the ``Repository`` ``Id`` associated with this session.
return: (osid.id.Id) - the ``Repository Id`` associated with
this session
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for osid.resource.ResourceLookupSession.get_bin_id
return self._catalog_id
repository_id = property(fget=get_repository_id)
def get_repository(self):
"""Gets the ``Repository`` associated with this session.
return: (osid.repository.Repository) - the ``Repository``
associated with this session
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for osid.resource.ResourceLookupSession.get_bin
return self._catalog
repository = property(fget=get_repository)
def can_create_assets(self):
"""Tests if this user can create ``Assets``.
A return of true does not guarantee successful authorization. A
return of false indicates that it is known creating an ``Asset``
will result in a ``PermissionDenied``. This is intended as a
hint to an application that may opt not to offer create
operations to an unauthorized user.
return: (boolean) - ``false`` if ``Asset`` creation is not
authorized, ``true`` otherwise
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceAdminSession.can_create_resources
# NOTE: It is expected that real authentication hints will be
# handled in a service adapter above the pay grade of this impl.
return True
@utilities.arguments_not_none
def can_create_asset_with_record_types(self, asset_record_types):
"""Tests if this user can create a single ``Asset`` using the desired record types.
While ``RepositoryManager.getAssetRecordTypes()`` can be used to
examine which records are supported, this method tests which
record(s) are required for creating a specific ``Asset``.
Providing an empty array tests if an ``Asset`` can be created
with no records.
arg: asset_record_types (osid.type.Type[]): array of asset
record types
return: (boolean) - ``true`` if ``Asset`` creation using the
specified record ``Types`` is supported, ``false``
otherwise
raise: NullArgument - ``asset_record_types`` is ``null``
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceAdminSession.can_create_resource_with_record_types
# NOTE: It is expected that real authentication hints will be
# handled in a service adapter above the pay grade of this impl.
return True
@utilities.arguments_not_none
def get_asset_form_for_create(self, asset_record_types):
"""Gets the asset form for creating new assets.
A new form should be requested for each create transaction.
arg: asset_record_types (osid.type.Type[]): array of asset
record types
return: (osid.repository.AssetForm) - the asset form
raise: NullArgument - ``asset_record_types`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
raise: Unsupported - unable to get form for requested record
types
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceAdminSession.get_resource_form_for_create_template
for arg in asset_record_types:
if not isinstance(arg, ABCType):
raise errors.InvalidArgument('one or more argument array elements is not a valid OSID Type')
if asset_record_types == []:
obj_form = objects.AssetForm(
repository_id=self._catalog_id,
runtime=self._runtime,
effective_agent_id=self.get_effective_agent_id())
else:
obj_form = objects.AssetForm(
repository_id=self._catalog_id,
record_types=asset_record_types,
runtime=self._runtime,
effective_agent_id=self.get_effective_agent_id())
self._forms[obj_form.get_id().get_identifier()] = not CREATED
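        # The form is tracked by its identifier; create_asset() flips this
        # flag to CREATED on success, which is how reuse of a spent form is
        # detected and rejected with IllegalState.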
return obj_form
@utilities.arguments_not_none
def create_asset(self, asset_form):
"""Creates a new ``Asset``.
arg: asset_form (osid.repository.AssetForm): the form for
this ``Asset``
return: (osid.repository.Asset) - the new ``Asset``
raise: IllegalState - ``asset_form`` already used in a create
transaction
raise: InvalidArgument - one or more of the form elements is
invalid
raise: NullArgument - ``asset_form`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
raise: Unsupported - ``asset_form`` did not originate from
``get_asset_form_for_create()``
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceAdminSession.create_resource_template
collection = MongoClientValidated('repository',
collection='Asset',
runtime=self._runtime)
if not isinstance(asset_form, ABCAssetForm):
raise errors.InvalidArgument('argument type is not an AssetForm')
if asset_form.is_for_update():
raise errors.InvalidArgument('the AssetForm is for update only, not create')
try:
if self._forms[asset_form.get_id().get_identifier()] == CREATED:
raise errors.IllegalState('asset_form already used in a create transaction')
except KeyError:
raise errors.Unsupported('asset_form did not originate from this session')
if not asset_form.is_valid():
raise errors.InvalidArgument('one or more of the form elements is invalid')
insert_result = collection.insert_one(asset_form._my_map)
self._forms[asset_form.get_id().get_identifier()] = CREATED
result = objects.Asset(
collection.find_one({'_id': insert_result.inserted_id}),
runtime=self._runtime)
return result
def can_update_assets(self):
"""Tests if this user can update ``Assets``.
A return of true does not guarantee successful authorization. A
return of false indicates that it is known updating an ``Asset``
will result in a ``PermissionDenied``. This is intended as a
hint to an application that may opt not to offer update
operations to an unauthorized user.
return: (boolean) - ``false`` if ``Asset`` modification is not
authorized, ``true`` otherwise
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceAdminSession.can_create_resources
# NOTE: It is expected that real authentication hints will be
# handled in a service adapter above the pay grade of this impl.
return True
@utilities.arguments_not_none
def get_asset_form_for_update(self, asset_id):
"""Gets the asset form for updating an existing asset.
A new asset form should be requested for each update
transaction.
arg: asset_id (osid.id.Id): the ``Id`` of the ``Asset``
return: (osid.repository.AssetForm) - the asset form
raise: NotFound - ``asset_id`` is not found
raise: NullArgument - ``asset_id`` is null
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceAdminSession.get_resource_form_for_update_template
collection = MongoClientValidated('repository',
collection='Asset',
runtime=self._runtime)
if not isinstance(asset_id, ABCId):
raise errors.InvalidArgument('the argument is not a valid OSID Id')
if asset_id.get_identifier_namespace() != 'repository.Asset':
if asset_id.get_authority() != self._authority:
raise errors.InvalidArgument()
else:
asset_id = self._get_asset_id_with_enclosure(asset_id)
result = collection.find_one({'_id': ObjectId(asset_id.get_identifier())})
obj_form = objects.AssetForm(result, runtime=self._runtime)
self._forms[obj_form.get_id().get_identifier()] = not UPDATED
return obj_form
def _get_asset_id_with_enclosure(self, enclosure_id):
"""Create an Asset with an enclosed foreign object.
return: (osid.id.Id) - the id of the new Asset
"""
mgr = self._get_provider_manager('REPOSITORY')
query_session = mgr.get_asset_query_session_for_repository(self._catalog_id)
query_form = query_session.get_asset_query()
query_form.match_enclosed_object_id(enclosure_id)
query_result = query_session.get_assets_by_query(query_form)
if query_result.available() > 0:
asset_id = query_result.next().get_id()
else:
create_form = self.get_asset_form_for_create([ENCLOSURE_RECORD_TYPE])
create_form.set_enclosed_object(enclosure_id)
asset_id = self.create_asset(create_form).get_id()
return asset_id
@utilities.arguments_not_none
def duplicate_asset(self, asset_id):
collection = MongoClientValidated('repository',
collection='Asset',
runtime=self._runtime)
mgr = self._get_provider_manager('REPOSITORY')
lookup_session = mgr.get_asset_lookup_session()
lookup_session.use_federated_repository_view()
try:
lookup_session.use_unsequestered_asset_view()
except AttributeError:
pass
asset_map = dict(lookup_session.get_asset(asset_id)._my_map)
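        # Drop the Mongo '_id' so insert_one() assigns a fresh identifier,
        # then rebind the catalog references to this session's repository.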
del asset_map['_id']
if 'repositoryId' in asset_map:
asset_map['repositoryId'] = str(self._catalog_id)
if 'assignedRepositoryIds' in asset_map:
asset_map['assignedRepositoryIds'] = [str(self._catalog_id)]
insert_result = collection.insert_one(asset_map)
result = objects.Asset(
collection.find_one({'_id': insert_result.inserted_id}),
runtime=self._runtime)
return result
@utilities.arguments_not_none
def update_asset(self, asset_form):
"""Updates an existing asset.
arg: asset_form (osid.repository.AssetForm): the form
containing the elements to be updated
        raise: IllegalState - ``asset_form`` already used in an update
transaction
raise: InvalidArgument - the form contains an invalid value
raise: NullArgument - ``asset_form`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
raise: Unsupported - ``asset_form`` did not originate from
``get_asset_form_for_update()``
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceAdminSession.update_resource_template
collection = MongoClientValidated('repository',
collection='Asset',
runtime=self._runtime)
if not isinstance(asset_form, ABCAssetForm):
raise errors.InvalidArgument('argument type is not an AssetForm')
if not asset_form.is_for_update():
            raise errors.InvalidArgument('the AssetForm is for create only, not update')
try:
if self._forms[asset_form.get_id().get_identifier()] == UPDATED:
raise errors.IllegalState('asset_form already used in an update transaction')
except KeyError:
raise errors.Unsupported('asset_form did not originate from this session')
if not asset_form.is_valid():
raise errors.InvalidArgument('one or more of the form elements is invalid')
collection.save(asset_form._my_map)
self._forms[asset_form.get_id().get_identifier()] = UPDATED
# Note: this is out of spec. The OSIDs don't require an object to be returned:
return objects.Asset(
asset_form._my_map,
runtime=self._runtime)
def can_delete_assets(self):
"""Tests if this user can delete ``Assets``.
A return of true does not guarantee successful authorization. A
return of false indicates that it is known deleting an ``Asset``
will result in a ``PermissionDenied``. This is intended as a
hint to an application that may opt not to offer delete
operations to an unauthorized user.
return: (boolean) - ``false`` if ``Asset`` deletion is not
authorized, ``true`` otherwise
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceAdminSession.can_create_resources
# NOTE: It is expected that real authentication hints will be
# handled in a service adapter above the pay grade of this impl.
return True
@utilities.arguments_not_none
def delete_asset(self, asset_id):
"""Deletes an ``Asset``.
arg: asset_id (osid.id.Id): the ``Id`` of the ``Asset`` to
remove
raise: NotFound - ``asset_id`` not found
raise: NullArgument - ``asset_id`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceAdminSession.delete_resource_template
collection = MongoClientValidated('repository',
collection='Asset',
runtime=self._runtime)
if not isinstance(asset_id, ABCId):
raise errors.InvalidArgument('the argument is not a valid OSID Id')
asset_map = collection.find_one(
dict({'_id': ObjectId(asset_id.get_identifier())},
**self._view_filter()))
objects.Asset(asset_map, runtime=self._runtime)._delete()
collection.delete_one({'_id': ObjectId(asset_id.get_identifier())})
def can_manage_asset_aliases(self):
"""Tests if this user can manage ``Id`` aliases for ``Assets``.
A return of true does not guarantee successful authorization. A
return of false indicates that it is known changing an alias
will result in a ``PermissionDenied``. This is intended as a
hint to an application that may opt not to offer alias
operations to an unauthorized user.
return: (boolean) - ``false`` if ``Asset`` aliasing is not
authorized, ``true`` otherwise
*compliance: mandatory -- This method must be implemented.*
"""
raise errors.Unimplemented()
@utilities.arguments_not_none
def alias_asset(self, asset_id, alias_id):
"""Adds an ``Id`` to an ``Asset`` for the purpose of creating compatibility.
The primary ``Id`` of the ``Asset`` is determined by the
provider. The new ``Id`` performs as an alias to the primary
``Id``. If the alias is a pointer to another asset, it is
reassigned to the given asset ``Id``.
arg: asset_id (osid.id.Id): the ``Id`` of an ``Asset``
arg: alias_id (osid.id.Id): the alias ``Id``
raise: AlreadyExists - ``alias_id`` is already assigned
raise: NotFound - ``asset_id`` not found
raise: NullArgument - ``asset_id`` or ``alias_id`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceAdminSession.alias_resources_template
self._alias_id(primary_id=asset_id, equivalent_id=alias_id)
def can_create_asset_content(self):
"""Tests if this user can create content for ``Assets``.
A return of true does not guarantee successful authorization. A
return of false indicates that it is known creating an
``AssetContent`` will result in a ``PermissionDenied``. This is
intended as a hint to an application that may opt not to offer
create operations to an unauthorized user.
return: (boolean) - ``false`` if ``Asset`` content creation is
not authorized, ``true`` otherwise
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceAdminSession.can_create_resources
# NOTE: It is expected that real authentication hints will be
# handled in a service adapter above the pay grade of this impl.
return True
@utilities.arguments_not_none
def can_create_asset_content_with_record_types(self, asset_content_record_types):
"""Tests if this user can create an ``AssetContent`` using the desired record types.
While ``RepositoryManager.getAssetContentRecordTypes()`` can be
used to test which records are supported, this method tests
which records are required for creating a specific
``AssetContent``. Providing an empty array tests if an
``AssetContent`` can be created with no records.
arg: asset_content_record_types (osid.type.Type[]): array of
asset content record types
return: (boolean) - ``true`` if ``AssetContent`` creation using
the specified ``Types`` is supported, ``false``
otherwise
raise: NullArgument - ``asset_content_record_types`` is
``null``
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceAdminSession.can_create_resource_with_record_types
# NOTE: It is expected that real authentication hints will be
# handled in a service adapter above the pay grade of this impl.
return True
@utilities.arguments_not_none
def get_asset_content_form_for_create(self, asset_id, asset_content_record_types):
"""Gets an asset content form for creating new assets.
arg: asset_id (osid.id.Id): the ``Id`` of an ``Asset``
arg: asset_content_record_types (osid.type.Type[]): array of
asset content record types
return: (osid.repository.AssetContentForm) - the asset content
form
raise: NotFound - ``asset_id`` is not found
raise: NullArgument - ``asset_id`` or
``asset_content_record_types`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
raise: Unsupported - unable to get form for requested record
types
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.learning.ActivityAdminSession.get_activity_form_for_create_template
if not isinstance(asset_id, ABCId):
raise errors.InvalidArgument('argument is not a valid OSID Id')
for arg in asset_content_record_types:
if not isinstance(arg, ABCType):
raise errors.InvalidArgument('one or more argument array elements is not a valid OSID Type')
if asset_content_record_types == []:
            # NOTE: repository_id and catalog_id are both self._catalog_id
            # below, which looks redundant.
obj_form = objects.AssetContentForm(
repository_id=self._catalog_id,
asset_id=asset_id,
catalog_id=self._catalog_id,
runtime=self._runtime)
else:
obj_form = objects.AssetContentForm(
repository_id=self._catalog_id,
record_types=asset_content_record_types,
asset_id=asset_id,
catalog_id=self._catalog_id,
runtime=self._runtime)
obj_form._for_update = False
self._forms[obj_form.get_id().get_identifier()] = not CREATED
return obj_form
@utilities.arguments_not_none
def create_asset_content(self, asset_content_form):
"""Creates new ``AssetContent`` for a given asset.
arg: asset_content_form (osid.repository.AssetContentForm):
the form for this ``AssetContent``
return: (osid.repository.AssetContent) - the new
``AssetContent``
raise: IllegalState - ``asset_content_form`` already used in a
create transaction
raise: InvalidArgument - one or more of the form elements is
invalid
raise: NullArgument - ``asset_content_form`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
raise: Unsupported - ``asset_content_form`` did not originate
from ``get_asset_content_form_for_create()``
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.repository.AssetAdminSession.create_asset_content_template
from ...abstract_osid.repository.objects import AssetContentForm as ABCAssetContentForm
collection = MongoClientValidated('repository',
collection='Asset',
runtime=self._runtime)
if not isinstance(asset_content_form, ABCAssetContentForm):
raise errors.InvalidArgument('argument type is not an AssetContentForm')
if asset_content_form.is_for_update():
raise errors.InvalidArgument('the AssetContentForm is for update only, not create')
try:
if self._forms[asset_content_form.get_id().get_identifier()] == CREATED:
raise errors.IllegalState('asset_content_form already used in a create transaction')
except KeyError:
raise errors.Unsupported('asset_content_form did not originate from this session')
if not asset_content_form.is_valid():
raise errors.InvalidArgument('one or more of the form elements is invalid')
asset_content_form._my_map['_id'] = ObjectId()
asset_id = Id(asset_content_form._my_map['assetId']).get_identifier()
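        # Fetch the parent asset, constrained to those assigned to this
        # catalog; asset contents are embedded in the asset document's
        # 'assetContents' array rather than stored in their own collection.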
asset = collection.find_one(
{'$and': [{'_id': ObjectId(asset_id)},
{'assigned' + self._catalog_name + 'Ids': {'$in': [str(self._catalog_id)]}}]})
asset['assetContents'].append(asset_content_form._my_map)
        collection.save(asset)
self._forms[asset_content_form.get_id().get_identifier()] = CREATED
from .objects import AssetContent
return AssetContent(asset_content_form._my_map, runtime=self._runtime)
def can_update_asset_contents(self):
"""Tests if this user can update ``AssetContent``.
A return of true does not guarantee successful authorization. A
return of false indicates that it is known updating an
``AssetContent`` will result in a ``PermissionDenied``. This is
intended as a hint to an application that may opt not to offer
update operations to an unauthorized user.
return: (boolean) - ``false`` if ``AssetContent`` modification
is not authorized, ``true`` otherwise
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceAdminSession.can_create_resources
# NOTE: It is expected that real authentication hints will be
# handled in a service adapter above the pay grade of this impl.
return True
@utilities.arguments_not_none
def get_asset_content_form_for_update(self, asset_content_id):
"""Gets the asset content form for updating an existing asset content.
A new asset content form should be requested for each update
transaction.
arg: asset_content_id (osid.id.Id): the ``Id`` of the
``AssetContent``
return: (osid.repository.AssetContentForm) - the asset content
form
raise: NotFound - ``asset_content_id`` is not found
raise: NullArgument - ``asset_content_id`` is ``null``
raise: OperationFailed - unable to complete request
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.repository.AssetAdminSession.get_asset_content_form_for_update_template
from ...abstract_osid.id.primitives import Id as ABCId
from .objects import AssetContentForm
collection = MongoClientValidated('repository',
collection='Asset',
runtime=self._runtime)
if not isinstance(asset_content_id, ABCId):
raise errors.InvalidArgument('the argument is not a valid OSID Id')
        document = collection.find_one({'assetContents._id': ObjectId(asset_content_id.get_identifier())})
        for sub_doc in document['assetContents']:  # There may be a MongoDB shortcut for this
            if sub_doc['_id'] == ObjectId(asset_content_id.get_identifier()):
                result = sub_doc
                break
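        # A possible shortcut (assumption, not used here): a positional
        # projection such as
        # collection.find_one({'assetContents._id': oid}, {'assetContents.$': 1})
        # would return only the matching subdocument.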
obj_form = AssetContentForm(result, runtime=self._runtime)
obj_form._for_update = True
self._forms[obj_form.get_id().get_identifier()] = not UPDATED
return obj_form
@utilities.arguments_not_none
def update_asset_content(self, asset_content_form):
"""Updates an existing asset content.
arg: asset_content_form (osid.repository.AssetContentForm):
the form containing the elements to be updated
raise: IllegalState - ``asset_content_form`` already used in an
update transaction
raise: InvalidArgument - the form contains an invalid value
        raise: NullArgument - ``asset_content_form`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
raise: Unsupported - ``asset_content_form`` did not originate
from ``get_asset_content_form_for_update()``
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.repository.AssetAdminSession.update_asset_content_template
from ...abstract_osid.repository.objects import AssetContentForm as ABCAssetContentForm
collection = MongoClientValidated('repository',
collection='Asset',
runtime=self._runtime)
if not isinstance(asset_content_form, ABCAssetContentForm):
raise errors.InvalidArgument('argument type is not an AssetContentForm')
if not asset_content_form.is_for_update():
            raise errors.InvalidArgument('the AssetContentForm is for create only, not update')
try:
if self._forms[asset_content_form.get_id().get_identifier()] == UPDATED:
raise errors.IllegalState('asset_content_form already used in an update transaction')
except KeyError:
raise errors.Unsupported('asset_content_form did not originate from this session')
if not asset_content_form.is_valid():
raise errors.InvalidArgument('one or more of the form elements is invalid')
asset_id = Id(asset_content_form._my_map['assetId']).get_identifier()
asset = collection.find_one(
{'$and': [{'_id': ObjectId(asset_id)},
{'assigned' + self._catalog_name + 'Ids': {'$in': [str(self._catalog_id)]}}]})
index = 0
found = False
for i in asset['assetContents']:
if i['_id'] == ObjectId(asset_content_form._my_map['_id']):
asset['assetContents'].pop(index)
asset['assetContents'].insert(index, asset_content_form._my_map)
found = True
break
index += 1
if not found:
raise errors.NotFound()
        try:
            collection.save(asset)
        except Exception:  # TODO: narrow once MongoDB save failure modes are confirmed
            raise errors.OperationFailed()
self._forms[asset_content_form.get_id().get_identifier()] = UPDATED
# Note: this is out of spec. The OSIDs don't require an object to be returned:
from .objects import AssetContent
return AssetContent(asset_content_form._my_map, runtime=self._runtime)
def can_delete_asset_contents(self):
"""Tests if this user can delete ``AssetsContents``.
A return of true does not guarantee successful authorization. A
return of false indicates that it is known deleting an
``AssetContent`` will result in a ``PermissionDenied``. This is
intended as a hint to an application that may opt not to offer
delete operations to an unauthorized user.
return: (boolean) - ``false`` if ``AssetContent`` deletion is
not authorized, ``true`` otherwise
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceAdminSession.can_create_resources
# NOTE: It is expected that real authentication hints will be
# handled in a service adapter above the pay grade of this impl.
return True
@utilities.arguments_not_none
def delete_asset_content(self, asset_content_id):
"""Deletes content from an ``Asset``.
arg: asset_content_id (osid.id.Id): the ``Id`` of the
``AssetContent``
raise: NotFound - ``asset_content_id`` is not found
raise: NullArgument - ``asset_content_id`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.repository.AssetAdminSession.delete_asset_content_template
from ...abstract_osid.id.primitives import Id as ABCId
from .objects import AssetContent
collection = MongoClientValidated('repository',
collection='Asset',
runtime=self._runtime)
if not isinstance(asset_content_id, ABCId):
raise errors.InvalidArgument('the argument is not a valid OSID Id')
        asset = collection.find_one({'assetContents._id': ObjectId(asset_content_id.get_identifier())})
        index = 0
        found = False
        for i in asset['assetContents']:
            if i['_id'] == ObjectId(asset_content_id.get_identifier()):
                asset_content_map = asset['assetContents'].pop(index)
                found = True
                break
            index += 1
        if not found:
            raise errors.NotFound()
AssetContent(asset_content_map, runtime=self._runtime)._delete()
collection.save(asset)
class AssetNotificationSession(abc_repository_sessions.AssetNotificationSession, osid_sessions.OsidSession):
"""This session defines methods to receive notifications on adds/changes to ``Asset`` objects in this
``Repository``.
    This also includes existing assets that may appear or disappear due
    to changes in the ``Repository`` hierarchy. This session is intended
    for consumers needing to synchronize their state with this service
without the use of polling. Notifications are cancelled when this
session is closed.
The two views defined in this session correspond to the views in the
``AssetLookupSession``.
"""
def __init__(self, catalog_id=None, proxy=None, runtime=None, **kwargs):
OsidSession.__init__(self)
self._catalog_class = objects.Repository
self._session_name = 'AssetNotificationSession'
self._catalog_name = 'Repository'
OsidSession._init_object(
self,
catalog_id,
proxy,
runtime,
db_name='repository',
cat_name='Repository',
cat_class=objects.Repository)
if not MONGO_LISTENER.is_alive():
MONGO_LISTENER.initialize(runtime)
MONGO_LISTENER.start()
self._receiver = kwargs['receiver']
db_prefix = ''
try:
db_prefix_param_id = Id('parameter:mongoDBNamePrefix@mongo')
db_prefix = runtime.get_configuration().get_value_by_parameter(db_prefix_param_id).get_string_value()
except (AttributeError, KeyError, errors.NotFound):
pass
        self._ns = '{0}repository.Asset'.format(db_prefix)
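        # The namespace mirrors the oplog 'ns' field ('<db>.<collection>'),
        # i.e. '<prefix>repository.Asset'; the assumption is that
        # MONGO_LISTENER keys its receivers by oplog namespace.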
if self._ns not in MONGO_LISTENER.receivers:
MONGO_LISTENER.receivers[self._ns] = dict()
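        # The 'i'/'u'/'d' keys mirror the MongoDB oplog op codes (insert,
        # update, delete). Each is False (off), True (notify on all), or --
        # for 'u' and 'd' -- a list of identifiers to filter on (see the
        # register_for_* methods below).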
MONGO_LISTENER.receivers[self._ns][self._receiver] = {
'authority': self._authority,
'obj_name_plural': 'assets',
'i': False,
'u': False,
'd': False,
'reliable': False,
}
def __del__(self):
"""Make sure the receiver is removed from the listener"""
del MONGO_LISTENER.receivers[self._ns][self._receiver]
super(AssetNotificationSession, self).__del__()
def get_repository_id(self):
"""Gets the ``Repository`` ``Id`` associated with this session.
return: (osid.id.Id) - the ``Repository Id`` associated with
this session
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for osid.resource.ResourceLookupSession.get_bin_id
return self._catalog_id
repository_id = property(fget=get_repository_id)
def get_repository(self):
"""Gets the ``Repository`` associated with this session.
return: (osid.repository.Repository) - the ``Repository``
associated with this session
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for osid.resource.ResourceLookupSession.get_bin
return self._catalog
repository = property(fget=get_repository)
def can_register_for_asset_notifications(self):
"""Tests if this user can register for ``Asset`` notifications.
A return of true does not guarantee successful authorization. A
return of false indicates that it is known all methods in this
session will result in a ``PermissionDenied``. This is intended
as a hint to an application that may opt not to offer
notification operations.
return: (boolean) - ``false`` if notification methods are not
authorized, ``true`` otherwise
*compliance: mandatory -- This method must be implemented.*
"""
raise errors.Unimplemented()
def use_federated_repository_view(self):
"""Federates the view for methods in this session.
A federated view will include assets in repositories which are
children of this repository in the repository hierarchy.
        *compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceLookupSession.use_federated_bin_view
self._use_federated_catalog_view()
def use_isolated_repository_view(self):
"""Isolates the view for methods in this session.
An isolated view restricts notifications to this repository
only.
        *compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceLookupSession.use_isolated_bin_view
self._use_isolated_catalog_view()
def register_for_new_assets(self):
"""Register for notifications of new assets.
``AssetReceiver.newAssets()`` is invoked when a new ``Asset``
appears in this repository.
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceNotificationSession.register_for_new_resources
MONGO_LISTENER.receivers[self._ns][self._receiver]['i'] = True
@utilities.arguments_not_none
def register_for_new_assets_by_genus_type(self, asset_genus_type):
"""Registers for notification of new assets of the given asset genus type.
        ``AssetReceiver.newAssets()`` is invoked when a new asset
        appears in this repository.
arg: asset_genus_type (osid.type.Type): the genus type of the
``Asset`` to monitor
raise: NullArgument - ``asset_genus_type`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceNotificationSession.register_for_new_resources
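        # NOTE: the genus type is currently ignored; this registers for all
        # new-asset notifications.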
MONGO_LISTENER.receivers[self._ns][self._receiver]['i'] = True
def register_for_changed_assets(self):
"""Registers for notification of updated assets.
``AssetReceiver.changedAssets()`` is invoked when an asset in
this repository is changed.
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceNotificationSession.register_for_changed_resources
MONGO_LISTENER.receivers[self._ns][self._receiver]['u'] = True
@utilities.arguments_not_none
def register_for_changed_assets_by_genus_type(self, asset_genus_type):
"""Registers for notification of updated assets of the given asset genus type.
``AssetReceiver.changedAssets()`` is invoked when an asset in
this repository is changed.
arg: asset_genus_type (osid.type.Type): the genus type of the
``Asset`` to monitor
raise: NullArgument - ``asset_genus_type`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceNotificationSession.register_for_changed_resource
        if MONGO_LISTENER.receivers[self._ns][self._receiver]['u'] is False:
MONGO_LISTENER.receivers[self._ns][self._receiver]['u'] = []
if isinstance(MONGO_LISTENER.receivers[self._ns][self._receiver]['u'], list):
MONGO_LISTENER.receivers[self._ns][self._receiver]['u'].append(asset_genus_type.get_identifier())
@utilities.arguments_not_none
def register_for_changed_asset(self, asset_id):
"""Registers for notification of an updated asset.
``AssetReceiver.changedAssets()`` is invoked when the specified
asset in this repository is changed.
arg: asset_id (osid.id.Id): the ``Id`` of the ``Asset`` to
monitor
raise: NullArgument - ``asset_id`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceNotificationSession.register_for_changed_resource
        if MONGO_LISTENER.receivers[self._ns][self._receiver]['u'] is False:
MONGO_LISTENER.receivers[self._ns][self._receiver]['u'] = []
if isinstance(MONGO_LISTENER.receivers[self._ns][self._receiver]['u'], list):
MONGO_LISTENER.receivers[self._ns][self._receiver]['u'].append(asset_id.get_identifier())
def register_for_deleted_assets(self):
"""Registers for notification of deleted assets.
``AssetReceiver.deletedAssets()`` is invoked when an asset is
deleted or removed from this repository.
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceNotificationSession.register_for_deleted_resources
MONGO_LISTENER.receivers[self._ns][self._receiver]['d'] = True
@utilities.arguments_not_none
def register_for_deleted_assets_by_genus_type(self, asset_genus_type):
"""Registers for notification of deleted assets of the given asset genus type.
``AssetReceiver.deletedAssets()`` is invoked when an asset is
deleted or removed from this repository.
arg: asset_genus_type (osid.type.Type): the genus type of the
``Asset`` to monitor
raise: NullArgument - ``asset_genus_type`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceNotificationSession.register_for_deleted_resource
        if MONGO_LISTENER.receivers[self._ns][self._receiver]['d'] is False:
            MONGO_LISTENER.receivers[self._ns][self._receiver]['d'] = []
        if isinstance(MONGO_LISTENER.receivers[self._ns][self._receiver]['d'], list):
            MONGO_LISTENER.receivers[self._ns][self._receiver]['d'].append(asset_genus_type.get_identifier())
@utilities.arguments_not_none
def register_for_deleted_asset(self, asset_id):
"""Registers for notification of a deleted asset.
``AssetReceiver.deletedAssets()`` is invoked when the specified
asset is deleted or removed from this repository.
arg: asset_id (osid.id.Id): the ``Id`` of the ``Asset`` to
monitor
raise: NullArgument - ``asset_id`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceNotificationSession.register_for_deleted_resource
        if MONGO_LISTENER.receivers[self._ns][self._receiver]['d'] is False:
            MONGO_LISTENER.receivers[self._ns][self._receiver]['d'] = []
        if isinstance(MONGO_LISTENER.receivers[self._ns][self._receiver]['d'], list):
            MONGO_LISTENER.receivers[self._ns][self._receiver]['d'].append(asset_id.get_identifier())
def reliable_asset_notifications(self):
"""Reliable notifications are desired.
In reliable mode, notifications are to be acknowledged using
        ``acknowledge_asset_notification()``.
        *compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceNotificationSession.reliable_resource_notifications
MONGO_LISTENER.receivers[self._ns][self._receiver]['reliable'] = True
def unreliable_asset_notifications(self):
"""Unreliable notifications are desired.
In unreliable mode, notifications do not need to be
acknowledged.
        *compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceNotificationSession.unreliable_resource_notifications
MONGO_LISTENER.receivers[self._ns][self._receiver]['reliable'] = False
@utilities.arguments_not_none
def acknowledge_asset_notification(self, notification_id):
"""Acknowledge an asset notification.
arg: notification_id (osid.id.Id): the ``Id`` of the
notification
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
raise errors.Unimplemented()
class AssetRepositorySession(abc_repository_sessions.AssetRepositorySession, osid_sessions.OsidSession):
"""This session provides methods to retrieve ``Assets`` to ``Repository`` mappings.
An ``Asset`` may appear in multiple ``Repository`` objects. Each
Repository may have its own authorizations governing who is allowed
to look at it.
This lookup session defines two views:
* comparative view: elements may be silently omitted or re-ordered
* plenary view: provides a complete result set or is an error
condition
"""
_session_name = 'AssetRepositorySession'
def __init__(self, proxy=None, runtime=None, **kwargs):
OsidSession._init_catalog(self, proxy, runtime)
self._catalog_view = COMPARATIVE
self._kwargs = kwargs
def can_lookup_asset_repository_mappings(self):
"""Tests if this user can perform lookups of asset/repository mappings.
A return of true does not guarantee successful authorization. A
return of false indicates that it is known lookup methods in
this session will result in a ``PermissionDenied``. This is
intended as a hint to an application that may opt not to offer
lookup operations to unauthorized users.
return: (boolean) - ``false`` if looking up mappings is not
authorized, ``true`` otherwise
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceBinSession.can_lookup_resource_bin_mappings
# NOTE: It is expected that real authentication hints will be
# handled in a service adapter above the pay grade of this impl.
return True
def use_comparative_repository_view(self):
"""The returns from the lookup methods may omit or translate elements based on this session, such as
authorization, and not result in an error.
This view is used when greater interoperability is desired at
the expense of precision.
        *compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.BinLookupSession.use_comparative_bin_view
self._catalog_view = COMPARATIVE
def use_plenary_repository_view(self):
"""A complete view of the ``Asset`` and ``Repository`` returns is desired.
Methods will return what is requested or result in an error.
This view is used when greater precision is desired at the
expense of interoperability.
        *compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.BinLookupSession.use_plenary_bin_view
self._catalog_view = PLENARY
@utilities.arguments_not_none
def get_asset_ids_by_repository(self, repository_id):
"""Gets the list of ``Asset`` ``Ids`` associated with a ``Repository``.
arg: repository_id (osid.id.Id): ``Id`` of the ``Repository``
return: (osid.id.IdList) - list of related asset ``Ids``
raise: NotFound - ``repository_id`` is not found
raise: NullArgument - ``repository_id`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceBinSession.get_resource_ids_by_bin
id_list = []
for asset in self.get_assets_by_repository(repository_id):
id_list.append(asset.get_id())
return IdList(id_list)
@utilities.arguments_not_none
def get_assets_by_repository(self, repository_id):
"""Gets the list of ``Assets`` associated with a ``Repository``.
arg: repository_id (osid.id.Id): ``Id`` of the ``Repository``
return: (osid.repository.AssetList) - list of related assets
raise: NotFound - ``repository_id`` is not found
raise: NullArgument - ``repository_id`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceBinSession.get_resources_by_bin
mgr = self._get_provider_manager('REPOSITORY')
lookup_session = mgr.get_asset_lookup_session_for_repository(repository_id)
lookup_session.use_isolated_repository_view()
return lookup_session.get_assets()
@utilities.arguments_not_none
def get_asset_ids_by_repositories(self, repository_ids):
"""Gets the list of ``Asset Ids`` corresponding to a list of ``Repository`` objects.
arg: repository_ids (osid.id.IdList): list of repository
``Ids``
return: (osid.id.IdList) - list of asset ``Ids``
raise: NullArgument - ``repository_ids`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceBinSession.get_resource_ids_by_bins
id_list = []
for asset in self.get_assets_by_repositories(repository_ids):
id_list.append(asset.get_id())
return IdList(id_list)
@utilities.arguments_not_none
def get_assets_by_repositories(self, repository_ids):
"""Gets the list of ``Assets`` corresponding to a list of ``Repository`` objects.
arg: repository_ids (osid.id.IdList): list of repository
``Ids``
return: (osid.repository.AssetList) - list of assets
raise: NullArgument - ``repository_ids`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceBinSession.get_resources_by_bins
asset_list = []
for repository_id in repository_ids:
asset_list += list(
self.get_assets_by_repository(repository_id))
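        # Note: an asset mapped to more than one of the given repositories
        # appears once per mapping; no de-duplication is performed here.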
return objects.AssetList(asset_list)
@utilities.arguments_not_none
def get_repository_ids_by_asset(self, asset_id):
"""Gets the list of ``Repository`` ``Ids`` mapped to an ``Asset``.
arg: asset_id (osid.id.Id): ``Id`` of an ``Asset``
return: (osid.id.IdList) - list of repository ``Ids``
raise: NotFound - ``asset_id`` is not found
raise: NullArgument - ``asset_id`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceBinSession.get_bin_ids_by_resource
mgr = self._get_provider_manager('REPOSITORY', local=True)
lookup_session = mgr.get_asset_lookup_session()
lookup_session.use_federated_repository_view()
asset = lookup_session.get_asset(asset_id)
id_list = []
for idstr in asset._my_map['assignedRepositoryIds']:
id_list.append(Id(idstr))
return IdList(id_list)
@utilities.arguments_not_none
def get_repositories_by_asset(self, asset_id):
"""Gets the list of ``Repository`` objects mapped to an ``Asset``.
arg: asset_id (osid.id.Id): ``Id`` of an ``Asset``
return: (osid.repository.RepositoryList) - list of repositories
raise: NotFound - ``asset_id`` is not found
raise: NullArgument - ``asset_id`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceBinSession.get_bins_by_resource
mgr = self._get_provider_manager('REPOSITORY')
lookup_session = mgr.get_repository_lookup_session()
return lookup_session.get_repositories_by_ids(
self.get_repository_ids_by_asset(asset_id))
class AssetRepositoryAssignmentSession(abc_repository_sessions.AssetRepositoryAssignmentSession, osid_sessions.OsidSession):
"""This session provides methods to re-assign ``Assets`` to ``Repositories``.
An ``Asset`` may map to multiple ``Repository`` objects and removing
the last reference to an ``Asset`` is the equivalent of deleting it.
Each ``Repository`` may have its own authorizations governing who is
allowed to operate on it.
Moving or adding a reference of an ``Asset`` to another
    ``Repository`` is not a copy operation (e.g., it does not change its
    ``Id``).
"""
def __init__(self, proxy=None, runtime=None, **kwargs):
OsidSession._init_catalog(self, proxy, runtime)
self._session_name = 'AssetRepositoryAssignmentSession'
self._catalog_name = 'Repository'
self._forms = dict()
self._kwargs = kwargs
def can_assign_assets(self):
"""Tests if this user can alter asset/repository mappings.
A return of true does not guarantee successful authorization. A
return of false indicates that it is known mapping methods in
this session will result in a ``PermissionDenied``. This is
intended as a hint to an application that may opt not to offer
assignment operations to unauthorized users.
return: (boolean) - ``false`` if mapping is not authorized,
``true`` otherwise
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceBinAssignmentSession.can_assign_resources
# NOTE: It is expected that real authentication hints will be
# handled in a service adapter above the pay grade of this impl.
return True
@utilities.arguments_not_none
def can_assign_assets_to_repository(self, repository_id):
"""Tests if this user can alter asset/repository mappings.
A return of true does not guarantee successful authorization. A
return of false indicates that it is known mapping methods in
this session will result in a ``PermissionDenied``. This is
intended as a hint to an application that may opt not to offer
assignment operations to unauthorized users.
arg: repository_id (osid.id.Id): the ``Id`` of the
``Repository``
return: (boolean) - ``false`` if mapping is not authorized,
``true`` otherwise
raise: NullArgument - ``repository_id`` is ``null``
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceBinAssignmentSession.can_assign_resources_to_bin
# NOTE: It is expected that real authentication hints will be
# handled in a service adapter above the pay grade of this impl.
if repository_id.get_identifier() == '000000000000000000000000':
return False
return True
@utilities.arguments_not_none
def get_assignable_repository_ids(self, repository_id):
"""Gets a list of repositories including and under the given repository node in which any asset can be assigned.
arg: repository_id (osid.id.Id): the ``Id`` of the
``Repository``
return: (osid.id.IdList) - list of assignable repository ``Ids``
raise: NullArgument - ``repository_id`` is ``null``
raise: OperationFailed - unable to complete request
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceBinAssignmentSession.get_assignable_bin_ids
# This will likely be overridden by an authorization adapter
mgr = self._get_provider_manager('REPOSITORY', local=True)
lookup_session = mgr.get_repository_lookup_session()
        repositories = lookup_session.get_repositories()
        id_list = []
        for repository in repositories:
            id_list.append(repository.get_id())
return IdList(id_list)
@utilities.arguments_not_none
def get_assignable_repository_ids_for_asset(self, repository_id, asset_id):
"""Gets a list of repositories including and under the given repository node in which a specific asset can be
assigned.
arg: repository_id (osid.id.Id): the ``Id`` of the
``Repository``
arg: asset_id (osid.id.Id): the ``Id`` of the ``Asset``
return: (osid.id.IdList) - list of assignable repository ``Ids``
raise: NullArgument - ``repository_id`` or ``asset_id`` is
``null``
raise: OperationFailed - unable to complete request
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceBinAssignmentSession.get_assignable_bin_ids_for_resource
# This will likely be overridden by an authorization adapter
        return self.get_assignable_repository_ids(repository_id)
@utilities.arguments_not_none
def assign_asset_to_repository(self, asset_id, repository_id):
"""Adds an existing ``Asset`` to a ``Repository``.
arg: asset_id (osid.id.Id): the ``Id`` of the ``Asset``
arg: repository_id (osid.id.Id): the ``Id`` of the
``Repository``
raise: AlreadyExists - ``asset_id`` already assigned to
``repository_id``
raise: NotFound - ``asset_id`` or ``repository_id`` not found
raise: NullArgument - ``asset_id`` or ``repository_id`` is
``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceBinAssignmentSession.assign_resource_to_bin
mgr = self._get_provider_manager('REPOSITORY', local=True)
lookup_session = mgr.get_repository_lookup_session()
lookup_session.get_repository(repository_id) # to raise NotFound
self._assign_object_to_catalog(asset_id, repository_id)
@utilities.arguments_not_none
def unassign_asset_from_repository(self, asset_id, repository_id):
"""Removes an ``Asset`` from a ``Repository``.
arg: asset_id (osid.id.Id): the ``Id`` of the ``Asset``
arg: repository_id (osid.id.Id): the ``Id`` of the
``Repository``
raise: NotFound - ``asset_id`` or ``repository_id`` not found
or ``asset_id`` not assigned to ``repository_id``
raise: NullArgument - ``asset_id`` or ``repository_id`` is
``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceBinAssignmentSession.unassign_resource_from_bin
mgr = self._get_provider_manager('REPOSITORY', local=True)
lookup_session = mgr.get_repository_lookup_session()
        lookup_session.get_repository(repository_id)  # to raise NotFound
self._unassign_object_from_catalog(asset_id, repository_id)
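    # Illustrative round trip for this session (hedged sketch; `session`,
    # `asset_id` and `other_repository_id` are placeholders):
    #
    #     session.assign_asset_to_repository(asset_id, other_repository_id)
    #     # the asset's 'assignedRepositoryIds' now includes the new catalog
    #     session.unassign_asset_from_repository(asset_id, other_repository_id)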
class AssetCompositionSession(abc_repository_sessions.AssetCompositionSession, osid_sessions.OsidSession):
"""This session defines methods for looking up ``Asset`` to ``Composition`` mappings.
A ``Composition`` represents a collection of ``Assets``.
This lookup session defines several views:
* comparative view: elements may be silently omitted or re-ordered
* plenary view: provides a complete result set or is an error
condition
* isolated repository view: All lookup methods in this session
      operate, retrieve and pertain to assets and compositions defined
explicitly in the current repository. Using an isolated view is
useful for managing compositions with the
CompositionAdminSession.
* federated repository view: All lookup methods in this session
operate, retrieve and pertain to all compositions and assets
defined in this repository and any other compositions implicitly
      available in this repository through repository inheritance.
The methods ``use_federated_asset_composition_view()`` and
    ``use_isolated_asset_composition_view()`` behave as a radio group
and one should be selected before invoking any lookup methods.
"""
def __init__(self, catalog_id=None, proxy=None, runtime=None, **kwargs):
OsidSession.__init__(self)
self._catalog_class = objects.Repository
self._session_name = 'AssetCompositionSession'
self._catalog_name = 'Repository'
OsidSession._init_object(
self,
catalog_id,
proxy,
runtime,
db_name='repository',
cat_name='Repository',
cat_class=objects.Repository)
self._kwargs = kwargs
def get_repository_id(self):
"""Gets the ``Repository`` ``Id`` associated with this session.
return: (osid.id.Id) - the ``Repository Id`` associated with
this session
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for osid.resource.ResourceLookupSession.get_bin_id
return self._catalog_id
repository_id = property(fget=get_repository_id)
def get_repository(self):
"""Gets the ``Repository`` associated with this session.
return: (osid.repository.Repository) - the ``Repository``
associated with this session
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for osid.resource.ResourceLookupSession.get_bin
return self._catalog
repository = property(fget=get_repository)
def can_access_asset_compositions(self):
"""Tests if this user can perform composition lookups.
A return of true does not guarantee successful authorization. A
return of false indicates that it is known all methods in this
session will result in a ``PermissionDenied``. This is intended
as a hint to an application that may opt not to offer lookup
operations to unauthorized users.
return: (boolean) - ``false`` if lookup methods are not
authorized, ``true`` otherwise
*compliance: mandatory -- This method must be implemented.*
"""
raise errors.Unimplemented()
def use_comparative_asset_composition_view(self):
"""The returns from the lookup methods may omit or translate elements based on this session, such as
authorization, and not result in an error.
This view is used when greater interoperability is desired at
the expense of precision.
        *compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceLookupSession.use_comparative_resource_view
self._use_comparative_object_view()
def use_plenary_asset_composition_view(self):
"""A complete view of the returns is desired.
Methods will return what is requested or result in an error.
This view is used when greater precision is desired at the
expense of interoperability.
        *compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceLookupSession.use_plenary_resource_view
self._use_plenary_object_view()
def use_federated_repository_view(self):
"""Federates the view for methods in this session.
A federated view will include compositions in repositories which
are children of this repository in the repository hierarchy.
        *compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceLookupSession.use_federated_bin_view
self._use_federated_catalog_view()
def use_isolated_repository_view(self):
"""Isolates the view for methods in this session.
An isolated view restricts lookups to this repository only.
        *compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceLookupSession.use_isolated_bin_view
self._use_isolated_catalog_view()
@utilities.arguments_not_none
def get_composition_assets(self, composition_id):
"""Gets the list of assets mapped to the given ``Composition``.
arg: composition_id (osid.id.Id): ``Id`` of the
``Composition``
return: (osid.repository.AssetList) - list of assets
raise: NotFound - ``composition_id`` not found
raise: NullArgument - ``composition_id`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
        *compliance: mandatory -- This method must be implemented.*
"""
collection = MongoClientValidated('repository',
collection='Composition',
runtime=self._runtime)
composition = collection.find_one(
dict({'_id': ObjectId(composition_id.get_identifier())},
**self._view_filter()))
if 'assetIds' not in composition:
raise errors.NotFound('no Assets are assigned to this Composition')
asset_ids = []
for idstr in composition['assetIds']:
asset_ids.append(Id(idstr))
mgr = self._get_provider_manager('REPOSITORY')
als = mgr.get_asset_lookup_session()
als.use_federated_repository_view()
return als.get_assets_by_ids(asset_ids)
@utilities.arguments_not_none
def get_compositions_by_asset(self, asset_id):
"""Gets a list of compositions including the given asset.
arg: asset_id (osid.id.Id): ``Id`` of the ``Asset``
return: (osid.repository.CompositionList) - the returned
``Composition list``
raise: NotFound - ``asset_id`` is not found
raise: NullArgument - ``asset_id`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
collection = MongoClientValidated('repository',
collection='Composition',
runtime=self._runtime)
result = collection.find(
dict({'assetIds': {'$in': [str(asset_id)]}},
**self._view_filter())).sort('_id', DESCENDING)
return objects.CompositionList(result, runtime=self._runtime)
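    # For reference, the filter built above merges the membership test with
    # the active view filter; with the default views it looks roughly like
    # the dict below (an approximation, since _view_filter() contents vary
    # with the federated/isolated view):
    #
    #     {'assetIds': {'$in': ['<asset id string>']},
    #      'assignedRepositoryIds': {'$in': ['<catalog id string>']}}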
class AssetCompositionDesignSession(abc_repository_sessions.AssetCompositionDesignSession, osid_sessions.OsidSession):
"""This session provides the means for adding assets to an asset composiiton.
The asset is identified inside a composition using its own Id. To
add the same asset to the composition, multiple compositions should
be used and placed at the same level in the ``Composition``
hierarchy.
"""
def __init__(self, catalog_id=None, proxy=None, runtime=None, **kwargs):
OsidSession.__init__(self)
self._catalog_class = objects.Repository
self._session_name = 'AssetCompositionDesignSession'
self._catalog_name = 'Repository'
OsidSession._init_object(
self,
catalog_id,
proxy,
runtime,
db_name='repository',
cat_name='Repository',
cat_class=objects.Repository)
self._kwargs = kwargs
def get_repository_id(self):
"""Gets the ``Repository`` ``Id`` associated with this session.
return: (osid.id.Id) - the ``Repository Id`` associated with
this session
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for osid.resource.ResourceLookupSession.get_bin_id
return self._catalog_id
repository_id = property(fget=get_repository_id)
def get_repository(self):
"""Gets the ``Repository`` associated with this session.
return: (osid.repository.Repository) - the ``Repository``
associated with this session
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for osid.resource.ResourceLookupSession.get_bin
return self._catalog
repository = property(fget=get_repository)
def can_compose_assets(self):
"""Tests if this user can manage mapping of ``Assets`` to ``Compositions``.
A return of true does not guarantee successful authorization. A
return of false indicates that it is known all methods in this
session will result in a ``PermissionDenied``. This is intended
        as a hint to an application that may opt not to offer composition
        operations to unauthorized users.
        return: (boolean) - ``false`` if asset composition is not
authorized, ``true`` otherwise
*compliance: mandatory -- This method must be implemented.*
"""
return True
@utilities.arguments_not_none
def add_asset(self, asset_id, composition_id):
"""Appends an asset to a composition.
arg: asset_id (osid.id.Id): ``Id`` of the ``Asset``
arg: composition_id (osid.id.Id): ``Id`` of the
``Composition``
        raise: AlreadyExists - ``asset_id`` already part of
``composition_id``
raise: NotFound - ``asset_id`` or ``composition_id`` not found
raise: NullArgument - ``asset_id`` or ``composition_id`` is
``null``
raise: OperationFailed - unable to complete request
        raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# This asset found check may want to be run through _get_provider_manager
        # so as to ensure access control:
from ...abstract_osid.id.primitives import Id as ABCId
if not isinstance(asset_id, ABCId):
raise errors.InvalidArgument('the argument is not a valid OSID Id')
if asset_id.get_identifier_namespace() != 'repository.Asset':
if asset_id.get_authority() != self._authority:
raise errors.InvalidArgument()
else:
mgr = self._get_provider_manager('REPOSITORY')
admin_session = mgr.get_asset_admin_session_for_repository(self._catalog_id)
asset_id = admin_session._get_asset_id_with_enclosure(asset_id)
collection = MongoClientValidated('repository',
collection='Asset',
runtime=self._runtime)
asset = collection.find_one({'_id': ObjectId(asset_id.get_identifier())})
collection = MongoClientValidated('repository',
collection='Composition',
runtime=self._runtime)
composition = collection.find_one({'_id': ObjectId(composition_id.get_identifier())})
if 'assetIds' in composition:
if str(asset_id) not in composition['assetIds']:
composition['assetIds'].append(str(asset_id))
else:
composition['assetIds'] = [str(asset_id)]
collection.save(composition)
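    # Example flow (hypothetical ids; `design_session` would come from a
    # RepositoryManager, e.g. an asset composition design session for the
    # target repository):
    #
    #     design_session.add_asset(asset_id, composition_id)
    #     # re-adding the same asset is a silent no-op in this
    #     # implementation, even though the docstring lists AlreadyExists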
@utilities.arguments_not_none
def move_asset_ahead(self, asset_id, composition_id, reference_id):
"""Reorders assets in a composition by moving the specified asset in front of a reference asset.
arg: asset_id (osid.id.Id): ``Id`` of the ``Asset``
arg: composition_id (osid.id.Id): ``Id`` of the
``Composition``
arg: reference_id (osid.id.Id): ``Id`` of the reference
``Asset``
raise: NotFound - ``asset_id`` or ``reference_id`` ``not found
in composition_id``
raise: NullArgument - ``asset_id, reference_id`` or
``composition_id`` is ``null``
raise: OperationFailed - unable to complete request
        raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
collection = MongoClientValidated('repository',
collection='Composition',
runtime=self._runtime)
composition = collection.find_one({'_id': ObjectId(composition_id.get_identifier())})
if 'assetIds' not in composition:
raise errors.NotFound('no Assets are assigned to this Composition')
composition['assetIds'] = move_id_ahead(asset_id, reference_id, composition['assetIds'])
collection.save(composition)
@utilities.arguments_not_none
def move_asset_behind(self, asset_id, composition_id, reference_id):
"""Reorders assets in a composition by moving the specified asset behind of a reference asset.
arg: asset_id (osid.id.Id): ``Id`` of the ``Asset``
arg: composition_id (osid.id.Id): ``Id`` of the
``Composition``
arg: reference_id (osid.id.Id): ``Id`` of the reference
``Asset``
raise: NotFound - ``asset_id`` or ``reference_id`` ``not found
in composition_id``
raise: NullArgument - ``asset_id, reference_id`` or
``composition_id`` is ``null``
raise: OperationFailed - unable to complete request
        raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
collection = MongoClientValidated('repository',
collection='Composition',
runtime=self._runtime)
composition = collection.find_one({'_id': ObjectId(composition_id.get_identifier())})
if 'assetIds' not in composition:
raise errors.NotFound('no Assets are assigned to this Composition')
composition['assetIds'] = move_id_behind(asset_id, reference_id, composition['assetIds'])
collection.save(composition)
@utilities.arguments_not_none
def order_assets(self, asset_ids, composition_id):
"""Reorders a set of assets in a composition.
arg: asset_ids (osid.id.Id[]): ``Ids`` for a set of
``Assets``
arg: composition_id (osid.id.Id): ``Id`` of the
``Composition``
raise: NotFound - ``composition_id`` not found or, an
``asset_id`` not related to ``composition_id``
        raise: NullArgument - ``asset_ids`` or ``composition_id`` is
``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
collection = MongoClientValidated('repository',
collection='Composition',
runtime=self._runtime)
composition = collection.find_one({'_id': ObjectId(composition_id.get_identifier())})
if 'assetIds' not in composition:
raise errors.NotFound('no Assets are assigned to this Composition')
composition['assetIds'] = order_ids(asset_ids, composition['assetIds'])
collection.save(composition)
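    # The three reordering methods above all rewrite the stored 'assetIds'
    # list via the move_id_ahead / move_id_behind / order_ids helpers.
    # Usage sketch with placeholder ids:
    #
    #     design_session.order_assets([asset_2_id, asset_1_id], composition_id)
    #     # the composition's 'assetIds' is rewritten to honor that order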
@utilities.arguments_not_none
def remove_asset(self, asset_id, composition_id):
"""Removes an ``Asset`` from a ``Composition``.
arg: asset_id (osid.id.Id): ``Id`` of the ``Asset``
arg: composition_id (osid.id.Id): ``Id`` of the
``Composition``
raise: NotFound - ``asset_id`` ``not found in composition_id``
raise: NullArgument - ``asset_id`` or ``composition_id`` is
``null``
raise: OperationFailed - unable to complete request
        raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
collection = MongoClientValidated('repository',
collection='Composition',
runtime=self._runtime)
composition = collection.find_one({'_id': ObjectId(composition_id.get_identifier())})
try:
composition['assetIds'].remove(str(asset_id))
except (KeyError, ValueError):
raise errors.NotFound()
collection.save(composition)
class CompositionLookupSession(abc_repository_sessions.CompositionLookupSession, osid_sessions.OsidSession):
"""This session provides methods for retrieving ``Composition`` objects.
The ``Composition`` represents a collection of ``Assets``.
This session defines views that offer differing behaviors when
retrieving multiple objects.
* comparative view: elements may be silently omitted or re-ordered
* plenary view: provides a complete and ordered result set or is
an error condition
* isolated repository view: All lookup methods in this session
operate, retrieve and pertain to compositions defined explicitly
in the current repository. Using an isolated view is useful for
managing compositions with the ``CompositionAdminSession.``
* federated repository view: All composition methods in this
session operate, retrieve and pertain to all compositions
defined in this repository and any other compositions implicitly
available in this repository through repository inheritence.
* active composition view: All composition lookup methods return
active compositions.
* any status composition view: Compositions of any active or
inactive status are returned from methods.
    * sequestered composition view: All composition methods suppress
sequestered compositions.
* unsequestered composition view: All composition methods return
all compositions.
Generally, the comparative view should be used for most applications
as it permits operation even if there is data that cannot be
accessed. For example, a browsing application may only need to
examine the ``Composition`` it can access, without breaking
execution. However, an administrative application may require a
complete set of ``Composition`` objects to be returned.
    Compositions may have additional records indicated by their
respective record types. The record may not be accessed through a
cast of the ``Composition``.
"""
def __init__(self, catalog_id=None, proxy=None, runtime=None, **kwargs):
OsidSession.__init__(self)
self._catalog_class = objects.Repository
self._session_name = 'CompositionLookupSession'
self._catalog_name = 'Repository'
OsidSession._init_object(
self,
catalog_id,
proxy,
runtime,
db_name='repository',
cat_name='Repository',
cat_class=objects.Repository)
self._kwargs = kwargs
self._status_view = ACTIVE
self._sequestered_view = SEQUESTERED
def _view_filter(self):
"""
Overrides OsidSession._view_filter to add sequestering filter.
"""
view_filter = OsidSession._view_filter(self)
if self._sequestered_view == SEQUESTERED:
view_filter['sequestered'] = False
return view_filter
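    # With the default SEQUESTERED view the resulting filter looks roughly
    # like the dict below; the catalog portion comes from
    # OsidSession._view_filter() and is an approximation here:
    #
    #     {'assignedRepositoryIds': {'$in': ['<catalog id string>']},
    #      'sequestered': False}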
def get_repository_id(self):
"""Gets the ``Repository`` ``Id`` associated with this session.
return: (osid.id.Id) - the ``Repository Id`` associated with
this session
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for osid.resource.ResourceLookupSession.get_bin_id
return self._catalog_id
repository_id = property(fget=get_repository_id)
def get_repository(self):
"""Gets the ``Repository`` associated with this session.
return: (osid.repository.Repository) - the ``Repository``
associated with this session
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for osid.resource.ResourceLookupSession.get_bin
return self._catalog
repository = property(fget=get_repository)
def can_lookup_compositions(self):
"""Tests if this user can perform ``Composition`` lookups.
A return of true does not guarantee successful authorization. A
return of false indicates that it is known all methods in this
session will result in a ``PermissionDenied``. This is intended
as a hint to an application that may opt not to offer lookup
operations to unauthorized users.
return: (boolean) - ``false`` if lookup methods are not
authorized, ``true`` otherwise
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceLookupSession.can_lookup_resources
# NOTE: It is expected that real authentication hints will be
# handled in a service adapter above the pay grade of this impl.
return True
def use_comparative_composition_view(self):
"""The returns from the lookup methods may omit or translate elements based on this session, such as
authorization, and not result in an error.
This view is used when greater interoperability is desired at
the expense of precision.
        *compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceLookupSession.use_comparative_resource_view
self._use_comparative_object_view()
def use_plenary_composition_view(self):
"""A complete view of the ``Composition`` returns is desired.
Methods will return what is requested or result in an error.
This view is used when greater precision is desired at the
expense of interoperability.
        *compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceLookupSession.use_plenary_resource_view
self._use_plenary_object_view()
def use_federated_repository_view(self):
"""Federates the view for methods in this session.
A federated view will include compositions in repositories which
are children of this repository in the repository hierarchy.
        *compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceLookupSession.use_federated_bin_view
self._use_federated_catalog_view()
def use_isolated_repository_view(self):
"""Isolates the view for methods in this session.
An isolated view restricts lookups to this repository only.
        *compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceLookupSession.use_isolated_bin_view
self._use_isolated_catalog_view()
def use_active_composition_view(self):
"""Only active compositions are returned by methods in this session.
        *compliance: mandatory -- This method must be implemented.*
"""
self._status_view = ACTIVE
def use_any_status_composition_view(self):
"""All active and inactive compositions are returned by methods in this session.
        *compliance: mandatory -- This method must be implemented.*
"""
self._status_view = ANY_STATUS
def use_sequestered_composition_view(self):
"""The methods in this session omit sequestered compositions.
        *compliance: mandatory -- This method must be implemented.*
"""
self._sequestered_view = SEQUESTERED
def use_unsequestered_composition_view(self):
"""The methods in this session return all compositions, including sequestered compositions.
        *compliance: mandatory -- This method must be implemented.*
"""
self._sequestered_view = UNSEQUESTERED
@utilities.arguments_not_none
def get_composition(self, composition_id):
"""Gets the ``Composition`` specified by its ``Id``.
arg: composition_id (osid.id.Id): ``Id`` of the
        ``Composition``
        return: (osid.repository.Composition) - the composition
        raise: NotFound - ``composition_id`` not found
        raise: NullArgument - ``composition_id`` is ``null``
        raise: OperationFailed - unable to complete request
        raise: PermissionDenied - authorization failure
        *compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceLookupSession.get_resource
# NOTE: This implementation currently ignores plenary view
collection = MongoClientValidated('repository',
collection='Composition',
runtime=self._runtime)
result = collection.find_one(
dict({'_id': ObjectId(self._get_id(composition_id, 'repository').get_identifier())},
**self._view_filter()))
return objects.Composition(result, runtime=self._runtime)
@utilities.arguments_not_none
def get_compositions_by_ids(self, composition_ids):
"""Gets a ``CompositionList`` corresponding to the given ``IdList``.
arg: composition_ids (osid.id.IdList): the list of ``Ids`` to
retrieve
return: (osid.repository.CompositionList) - the returned
``Composition list``
raise: NotFound - an ``Id`` was not found
raise: NullArgument - ``composition_ids`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceLookupSession.get_resources_by_ids
# NOTE: This implementation currently ignores plenary view
collection = MongoClientValidated('repository',
collection='Composition',
runtime=self._runtime)
object_id_list = []
for i in composition_ids:
object_id_list.append(ObjectId(self._get_id(i, 'repository').get_identifier()))
result = collection.find(
dict({'_id': {'$in': object_id_list}},
**self._view_filter()))
result = list(result)
sorted_result = []
for object_id in object_id_list:
for object_map in result:
if object_map['_id'] == object_id:
sorted_result.append(object_map)
break
return objects.CompositionList(sorted_result, runtime=self._runtime)
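    # Note: the re-sort above exists because Mongo's $in does not preserve
    # input order. Hedged sketch with a placeholder `composition_ids`:
    #
    #     compositions = lookup_session.get_compositions_by_ids(composition_ids)
    #     # results iterate in the same order as composition_ids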
@utilities.arguments_not_none
def get_compositions_by_genus_type(self, composition_genus_type):
"""Gets a ``CompositionList`` corresponding to the given composition genus ``Type`` which does not include
compositions of types derived from the specified ``Type``.
arg: composition_genus_type (osid.type.Type): a composition
genus type
return: (osid.repository.CompositionList) - the returned
``Composition list``
raise: NullArgument - ``composition_genus_type`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceLookupSession.get_resources_by_genus_type
# NOTE: This implementation currently ignores plenary view
collection = MongoClientValidated('repository',
collection='Composition',
runtime=self._runtime)
result = collection.find(
dict({'genusTypeId': str(composition_genus_type)},
**self._view_filter())).sort('_id', DESCENDING)
return objects.CompositionList(result, runtime=self._runtime)
@utilities.arguments_not_none
def get_compositions_by_parent_genus_type(self, composition_genus_type):
"""Gets a ``CompositionList`` corresponding to the given composition genus ``Type`` and include any additional
compositions with genus types derived from the specified ``Type``.
arg: composition_genus_type (osid.type.Type): a composition
genus type
return: (osid.repository.CompositionList) - the returned
``Composition list``
raise: NullArgument - ``composition_genus_type`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceLookupSession.get_resources_by_parent_genus_type
return objects.CompositionList([])
@utilities.arguments_not_none
def get_compositions_by_record_type(self, composition_record_type):
"""Gets a ``CompositionList`` containing the given composition record ``Type``.
arg: composition_record_type (osid.type.Type): a composition
record type
return: (osid.repository.CompositionList) - the returned
``Composition list``
raise: NullArgument - ``composition_record_type`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceLookupSession.get_resources_by_record_type
# STILL NEED TO IMPLEMENT!!!
return objects.CompositionList([])
@utilities.arguments_not_none
def get_compositions_by_provider(self, resource_id):
"""Gets a ``CompositionList`` from the given provider ````.
In plenary mode, the returned list contains all known
compositions or an error results. Otherwise, the returned list
may contain only those compositions that are accessible through
this session.
In sequestered mode, no sequestered compositions are returned.
In unsequestered mode, all compositions are returned.
arg: resource_id (osid.id.Id): a resource ``Id``
return: (osid.repository.CompositionList) - the returned
``Composition list``
raise: NullArgument - ``resource_id`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
raise errors.Unimplemented()
def get_compositions(self):
"""Gets all ``Compositions``.
return: (osid.repository.CompositionList) - a list of
``Compositions``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceLookupSession.get_resources
# NOTE: This implementation currently ignores plenary view
collection = MongoClientValidated('repository',
collection='Composition',
runtime=self._runtime)
result = collection.find(self._view_filter()).sort('_id', DESCENDING)
return objects.CompositionList(result, runtime=self._runtime)
compositions = property(fget=get_compositions)
class CompositionQuerySession(abc_repository_sessions.CompositionQuerySession, osid_sessions.OsidSession):
"""This session provides methods for searching among ``Composition`` objects.
The search query is constructed using the ``CompositionQuery``.
This session defines views that offer differing behaviors when
searching.
* federated repository view: searches include compositions in
repositories of which this repository is an ancestor in the
repository hierarchy
* isolated repository view: searches are restricted to subjects in
this repository
    * sequestered composition view: All composition methods suppress
sequestered compositions.
* unsequestered composition view: All composition methods return
all compositions.
Compositions may have a query record indicated by their respective
record types. The query record is accessed via the
``CompositionQuery``.
"""
def __init__(self, catalog_id=None, proxy=None, runtime=None, **kwargs):
OsidSession.__init__(self)
self._catalog_class = objects.Repository
self._session_name = 'CompositionQuerySession'
self._catalog_name = 'Repository'
OsidSession._init_object(
self,
catalog_id,
proxy,
runtime,
db_name='repository',
cat_name='Repository',
cat_class=objects.Repository)
self._kwargs = kwargs
self._status_view = ACTIVE
self._sequestered_view = SEQUESTERED
def _view_filter(self):
"""
Overrides OsidSession._view_filter to add sequestering filter.
"""
view_filter = OsidSession._view_filter(self)
if self._sequestered_view == SEQUESTERED:
view_filter['sequestered'] = False
return view_filter
def get_repository_id(self):
"""Gets the ``Repository`` ``Id`` associated with this session.
return: (osid.id.Id) - the ``Repository Id`` associated with
this session
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for osid.resource.ResourceLookupSession.get_bin_id
return self._catalog_id
repository_id = property(fget=get_repository_id)
def get_repository(self):
"""Gets the ``Repository`` associated with this session.
return: (osid.repository.Repository) - the ``Repository``
associated with this session
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for osid.resource.ResourceLookupSession.get_bin
return self._catalog
repository = property(fget=get_repository)
def can_search_compositions(self):
"""Tests if this user can perform ``Composition`` searches.
A return of true does not guarantee successful authorization. A
return of false indicates that it is known all methods in this
session will result in a ``PermissionDenied``. This is intended
as a hint to an application that may opt not to offer search
operations to unauthorized users.
return: (boolean) - ``false`` if search methods are not
authorized, ``true`` otherwise
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceQuerySession.can_search_resources
# NOTE: It is expected that real authentication hints will be
# handled in a service adapter above the pay grade of this impl.
return True
def use_federated_repository_view(self):
"""Federates the view for methods in this session.
A federated view will include compositions in repositories which
are children of this repository in the repository hierarchy.
        *compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceLookupSession.use_federated_bin_view
self._use_federated_catalog_view()
def use_isolated_repository_view(self):
"""Isolates the view for methods in this session.
An isolated view restricts lookups to this repository only.
        *compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceLookupSession.use_isolated_bin_view
self._use_isolated_catalog_view()
def use_sequestered_composition_view(self):
"""The methods in this session omit sequestered compositions.
        *compliance: mandatory -- This method must be implemented.*
"""
self._sequestered_view = SEQUESTERED
def use_unsequestered_composition_view(self):
"""The methods in this session return all compositions, including sequestered compositions.
        *compliance: mandatory -- This method must be implemented.*
"""
self._sequestered_view = UNSEQUESTERED
def get_composition_query(self):
"""Gets a composition query.
return: (osid.repository.CompositionQuery) - the composition
query
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceQuerySession.get_resource_query_template
return queries.CompositionQuery(runtime=self._runtime)
composition_query = property(fget=get_composition_query)
@utilities.arguments_not_none
def get_compositions_by_query(self, composition_query):
"""Gets a list of ``Compositions`` matching the given composition query.
arg: composition_query (osid.repository.CompositionQuery):
the composition query
return: (osid.repository.CompositionList) - the returned
``CompositionList``
raise: NullArgument - ``composition_query`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
raise: Unsupported - ``composition_query`` is not of this
service
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceQuerySession.get_resources_by_query
and_list = list()
or_list = list()
for term in composition_query._query_terms:
and_list.append({term: composition_query._query_terms[term]})
for term in composition_query._keyword_terms:
or_list.append({term: composition_query._keyword_terms[term]})
if or_list:
and_list.append({'$or': or_list})
view_filter = self._view_filter()
if view_filter:
and_list.append(view_filter)
        if and_list:
            query_terms = {'$and': and_list}
        else:
            query_terms = {}
collection = MongoClientValidated('repository',
collection='Composition',
runtime=self._runtime)
result = collection.find(query_terms).sort('_id', DESCENDING)
return objects.CompositionList(result, runtime=self._runtime)
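    # Shape of the final filter assembled above, as a hedged example for a
    # query carrying one exact term and one keyword term (the per-term
    # encodings come from CompositionQuery and are assumptions here):
    #
    #     {'$and': [{'displayName.text': 'foo'},
    #               {'$or': [{'description.text': 'bar'}]},
    #               {'sequestered': False}]}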
class CompositionSearchSession(abc_repository_sessions.CompositionSearchSession, CompositionQuerySession):
"""This session provides methods for searching among ``Composition`` objects.
The search query is constructed using the ``CompositionQuery``.
``get_compositions_by_query()`` is the basic search method and
returns a list of ``Compositions``. A more advanced search may be
    performed with ``get_compositions_by_search()``. It accepts a
    ``CompositionSearch`` in addition to the query for the purpose of
    specifying additional options affecting the entire search, such as
    ordering. ``get_compositions_by_search()`` returns a
    ``CompositionSearchResults`` that can be used to access the
    resulting ``CompositionList`` or be used to perform a search within
    the result set through ``CompositionSearch``.
This session defines views that offer differing behaviors when
searching.
* federated repository view: searches include compositions in
repositories of which this repository is an ancestor in the
repository hierarchy
* isolated repository view: searches are restricted to subjects in
this repository
Compositions may have a query record indicated by their respective
record types. The query record is accessed via the
``CompositionQuery``.
"""
def get_composition_search(self):
"""Gets a composition search.
return: (osid.repository.CompositionSearch) - the composition
search
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceSearchSession.get_resource_search_template
return searches.CompositionSearch(runtime=self._runtime)
composition_search = property(fget=get_composition_search)
def get_composition_search_order(self):
"""Gets a composition search order.
The ``CompositionSearchOrder`` is supplied to an
``CompositionSearch`` to specify the ordering of results.
return: (osid.repository.CompositionSearchOrder) - the
composition search order
*compliance: mandatory -- This method must be implemented.*
"""
raise errors.Unimplemented()
composition_search_order = property(fget=get_composition_search_order)
@utilities.arguments_not_none
def get_compositions_by_search(self, composition_query, composition_search):
"""Gets the search results matching the given search query using the given search.
arg: composition_query (osid.repository.CompositionQuery):
the composition query
arg: composition_search (osid.repository.CompositionSearch):
the composition search
return: (osid.repository.CompositionSearchResults) - the
composition search results
raise: NullArgument - ``composition_query`` or
``composition_search`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
raise: Unsupported - ``composition_query`` or
``composition_search`` is not of this service
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceSearchSession.get_resources_by_search_template
# Copied from osid.resource.ResourceQuerySession.get_resources_by_query_template
and_list = list()
or_list = list()
for term in composition_query._query_terms:
and_list.append({term: composition_query._query_terms[term]})
for term in composition_query._keyword_terms:
or_list.append({term: composition_query._keyword_terms[term]})
if composition_search._id_list is not None:
identifiers = [ObjectId(i.identifier) for i in composition_search._id_list]
and_list.append({'_id': {'$in': identifiers}})
if or_list:
and_list.append({'$or': or_list})
view_filter = self._view_filter()
if view_filter:
and_list.append(view_filter)
        if and_list:
            query_terms = {'$and': and_list}
        else:
            query_terms = {}
collection = MongoClientValidated('repository',
collection='Composition',
runtime=self._runtime)
if composition_search.start is not None and composition_search.end is not None:
result = collection.find(query_terms)[composition_search.start:composition_search.end]
else:
result = collection.find(query_terms)
return searches.CompositionSearchResults(result, runtime=self._runtime)
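    # Sketch of paging via the search object's start/end (assumes the
    # OsidSearch.limit_result_set(start, end) API sets them; placeholder
    # names throughout):
    #
    #     search = session.get_composition_search()
    #     search.limit_result_set(1, 25)
    #     results = session.get_compositions_by_search(query, search)
    #     compositions = results.get_compositions()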
@utilities.arguments_not_none
def get_composition_query_from_inspector(self, composition_query_inspector):
"""Gets a composition query from an inspector.
The inspector is available from a ``CompositionSearchResults``.
arg: composition_query_inspector
(osid.repository.CompositionQueryInspector): a
composition query inspector
return: (osid.repository.CompositionQuery) - the composition
query
raise: NullArgument - ``composition_query_inspector`` is
``null``
raise: Unsupported - ``composition_query_inspector`` is not of
this service
*compliance: mandatory -- This method must be implemented.*
"""
raise errors.Unimplemented()
class CompositionAdminSession(abc_repository_sessions.CompositionAdminSession, osid_sessions.OsidSession):
"""This session creates, updates, and deletes ``Compositions``.
The data for create and update is provided by the consumer via the
form object. ``OsidForms`` are requested for each create or update
and may not be reused.
Create and update operations differ in their usage. To create a
``Composition,`` a ``CompositionForm`` is requested using
``get_composition_form_for_create()`` specifying the desired record
``Types`` or none if no record ``Types`` are needed. The returned
``CompositionForm`` will indicate that it is to be used with a
    create operation and can be used to examine metadata or validate data
    prior to creation. Once the ``CompositionForm`` is submitted to a
create operation, it cannot be reused with another create operation
unless the first operation was unsuccessful. Each
``CompositionForm`` corresponds to an attempted transaction.
For updates, ``CompositionForms`` are requested to the
``Composition`` ``Id`` that is to be updated using
``getCompositionFormForUpdate()``. Similarly, the
``CompositionForm`` has metadata about the data that can be updated
and it can perform validation before submitting the update. The
``CompositionForm`` can only be used once for a successful update
and cannot be reused.
The delete operations delete ``Compositions``. To unmap a
``Composition`` from the current ``Repository,`` the
``CompositionRepositoryAssignmentSession`` should be used. These
    delete operations attempt to remove the ``Composition`` itself thus removing
it from all known ``Repository`` catalogs.
This session includes an ``Id`` aliasing mechanism to assign an
external ``Id`` to an internally assigned Id.
"""
def __init__(self, catalog_id=None, proxy=None, runtime=None, **kwargs):
OsidSession.__init__(self)
self._catalog_class = objects.Repository
self._session_name = 'CompositionAdminSession'
self._catalog_name = 'Repository'
OsidSession._init_object(
self,
catalog_id,
proxy,
runtime,
db_name='repository',
cat_name='Repository',
cat_class=objects.Repository)
self._forms = dict()
self._kwargs = kwargs
def get_repository_id(self):
"""Gets the ``Repository`` ``Id`` associated with this session.
return: (osid.id.Id) - the ``Repository Id`` associated with
this session
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for osid.resource.ResourceLookupSession.get_bin_id
return self._catalog_id
repository_id = property(fget=get_repository_id)
def get_repository(self):
"""Gets the ``Repository`` associated with this session.
return: (osid.repository.Repository) - the ``Repository``
associated with this session
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for osid.resource.ResourceLookupSession.get_bin
return self._catalog
repository = property(fget=get_repository)
def can_create_compositions(self):
"""Tests if this user can create ``Compositions``.
A return of true does not guarantee successful authorization. A
return of false indicates that it is known creating a
``Composition`` will result in a ``PermissionDenied``. This is
intended as a hint to an application that may not wish to offer
create operations to unauthorized users.
return: (boolean) - ``false`` if ``Composition`` creation is not
authorized, ``true`` otherwise
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceAdminSession.can_create_resources
# NOTE: It is expected that real authentication hints will be
# handled in a service adapter above the pay grade of this impl.
return True
@utilities.arguments_not_none
def can_create_composition_with_record_types(self, composition_record_types):
"""Tests if this user can create a single ``Composition`` using the desired record types.
While ``RepositoryManager.getCompositionRecordTypes()`` can be
used to examine which records are supported, this method tests
which record(s) are required for creating a specific
``Composition``. Providing an empty array tests if a
``Composition`` can be created with no records.
arg: composition_record_types (osid.type.Type[]): array of
composition record types
return: (boolean) - ``true`` if ``Composition`` creation using
the specified ``Types`` is supported, ``false``
otherwise
raise: NullArgument - ``composition_record_types`` is ``null``
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceAdminSession.can_create_resource_with_record_types
# NOTE: It is expected that real authentication hints will be
# handled in a service adapter above the pay grade of this impl.
return True
@utilities.arguments_not_none
def get_composition_form_for_create(self, composition_record_types):
"""Gets the composition form for creating new compositions.
A new form should be requested for each create transaction.
arg: composition_record_types (osid.type.Type[]): array of
composition record types
return: (osid.repository.CompositionForm) - the composition form
raise: NullArgument - ``composition_record_types`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
raise: Unsupported - unable to get form for requested record
types
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceAdminSession.get_resource_form_for_create_template
for arg in composition_record_types:
if not isinstance(arg, ABCType):
raise errors.InvalidArgument('one or more argument array elements is not a valid OSID Type')
if composition_record_types == []:
obj_form = objects.CompositionForm(
repository_id=self._catalog_id,
runtime=self._runtime,
effective_agent_id=self.get_effective_agent_id())
else:
obj_form = objects.CompositionForm(
repository_id=self._catalog_id,
record_types=composition_record_types,
runtime=self._runtime,
effective_agent_id=self.get_effective_agent_id())
self._forms[obj_form.get_id().get_identifier()] = not CREATED
return obj_form
@utilities.arguments_not_none
    def create_composition(self, composition_form):
"""Creates a new ``Composition``.
        arg: composition_form (osid.repository.CompositionForm): the
form for this ``Composition``
return: (osid.repository.Composition) - the new ``Composition``
raise: IllegalState - ``composition_form`` already used in a
create transaction
raise: InvalidArgument - one or more of the form elements is
invalid
raise: NullArgument - ``composition_form`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
raise: Unsupported - ``composition_form`` did not originate
from ``get_composition_form_for_create()``
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceAdminSession.create_resource_template
collection = MongoClientValidated('repository',
collection='Composition',
runtime=self._runtime)
        if not isinstance(composition_form, ABCCompositionForm):
            raise errors.InvalidArgument('argument type is not a CompositionForm')
        if composition_form.is_for_update():
            raise errors.InvalidArgument('the CompositionForm is for update only, not create')
        try:
            if self._forms[composition_form.get_id().get_identifier()] == CREATED:
                raise errors.IllegalState('composition_form already used in a create transaction')
        except KeyError:
            raise errors.Unsupported('composition_form did not originate from this session')
        if not composition_form.is_valid():
            raise errors.InvalidArgument('one or more of the form elements is invalid')
        insert_result = collection.insert_one(composition_form._my_map)
        self._forms[composition_form.get_id().get_identifier()] = CREATED
result = objects.Composition(
collection.find_one({'_id': insert_result.inserted_id}),
runtime=self._runtime)
return result
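    # End-to-end create sketch (placeholder names; assumes the standard
    # OsidObjectForm setters):
    #
    #     form = admin_session.get_composition_form_for_create([])
    #     form.set_display_name('My Composition')
    #     composition = admin_session.create_composition(form)
    #     admin_session.create_composition(form)  # raises IllegalState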
def can_update_compositions(self):
"""Tests if this user can update ``Compositions``.
A return of true does not guarantee successful authorization. A
return of false indicates that it is known updating a
``Composition`` will result in a ``PermissionDenied``. This is
intended as a hint to an application that may not wish to offer
update operations to unauthorized users.
return: (boolean) - ``false`` if ``Composition`` modification is
not authorized, ``true`` otherwise
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceAdminSession.can_create_resources
# NOTE: It is expected that real authentication hints will be
# handled in a service adapter above the pay grade of this impl.
return True
@utilities.arguments_not_none
def get_composition_form_for_update(self, composition_id):
"""Gets the composition form for updating an existing composition.
A new composition form should be requested for each update
transaction.
arg: composition_id (osid.id.Id): the ``Id`` of the
``Composition``
return: (osid.repository.CompositionForm) - the composition form
raise: NotFound - ``composition_id`` is not found
raise: NullArgument - ``composition_id`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceAdminSession.get_resource_form_for_update_template
collection = MongoClientValidated('repository',
collection='Composition',
runtime=self._runtime)
if not isinstance(composition_id, ABCId):
raise errors.InvalidArgument('the argument is not a valid OSID Id')
if composition_id.get_identifier_namespace() != 'repository.Composition':
if composition_id.get_authority() != self._authority:
raise errors.InvalidArgument()
else:
composition_id = self._get_composition_id_with_enclosure(composition_id)
result = collection.find_one({'_id': ObjectId(composition_id.get_identifier())})
obj_form = objects.CompositionForm(result, runtime=self._runtime)
self._forms[obj_form.get_id().get_identifier()] = not UPDATED
return obj_form
def _get_composition_id_with_enclosure(self, enclosure_id):
"""Create an Composition with an enclosed foreign object.
return: (osid.id.Id) - the id of the new Composition
"""
mgr = self._get_provider_manager('REPOSITORY')
query_session = mgr.get_composition_query_session_for_repository(self._catalog_id)
query_form = query_session.get_composition_query()
query_form.match_enclosed_object_id(enclosure_id)
query_result = query_session.get_compositions_by_query(query_form)
if query_result.available() > 0:
composition_id = query_result.next().get_id()
else:
create_form = self.get_composition_form_for_create([ENCLOSURE_RECORD_TYPE])
create_form.set_enclosed_object(enclosure_id)
composition_id = self.create_composition(create_form).get_id()
return composition_id
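    # This helper is effectively get-or-create: repeated calls with the
    # same enclosure_id yield the same enclosing Composition. Sketch:
    #
    #     cid_1 = self._get_composition_id_with_enclosure(foreign_id)
    #     cid_2 = self._get_composition_id_with_enclosure(foreign_id)
    #     assert str(cid_1) == str(cid_2)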
@utilities.arguments_not_none
def duplicate_composition(self, composition_id):
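        """Duplicates an existing ``Composition``, assigning the copy to this session's repository."""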
collection = MongoClientValidated('repository',
collection='Composition',
runtime=self._runtime)
mgr = self._get_provider_manager('REPOSITORY')
lookup_session = mgr.get_composition_lookup_session()
lookup_session.use_federated_repository_view()
try:
lookup_session.use_unsequestered_composition_view()
except AttributeError:
pass
composition_map = dict(lookup_session.get_composition(composition_id)._my_map)
del composition_map['_id']
if 'repositoryId' in composition_map:
composition_map['repositoryId'] = str(self._catalog_id)
if 'assignedRepositoryIds' in composition_map:
composition_map['assignedRepositoryIds'] = [str(self._catalog_id)]
insert_result = collection.insert_one(composition_map)
result = objects.Composition(
collection.find_one({'_id': insert_result.inserted_id}),
runtime=self._runtime)
return result
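    # Hedged usage sketch: the duplicate gets a fresh '_id' and is re-homed
    # to this session's catalog (placeholder names):
    #
    #     copy = admin_session.duplicate_composition(composition_id)
    #     assert copy.get_id() != composition_id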
@utilities.arguments_not_none
    def update_composition(self, composition_form):
"""Updates an existing composition.
        arg: composition_form (osid.repository.CompositionForm): the
form containing the elements to be updated
raise: IllegalState - ``composition_form`` already used in an
update transaction
raise: InvalidArgument - the form contains an invalid value
raise: NullArgument - ``composition_form`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
raise: Unsupported - ``composition_form`` did not originate
from ``get_composition_form_for_update()``
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceAdminSession.update_resource_template
collection = MongoClientValidated('repository',
collection='Composition',
runtime=self._runtime)
        if not isinstance(composition_form, ABCCompositionForm):
            raise errors.InvalidArgument('argument type is not a CompositionForm')
        if not composition_form.is_for_update():
            raise errors.InvalidArgument('the CompositionForm is for create only, not update')
        try:
            if self._forms[composition_form.get_id().get_identifier()] == UPDATED:
                raise errors.IllegalState('composition_form already used in an update transaction')
        except KeyError:
            raise errors.Unsupported('composition_form did not originate from this session')
        if not composition_form.is_valid():
            raise errors.InvalidArgument('one or more of the form elements is invalid')
        collection.save(composition_form._my_map)
        self._forms[composition_form.get_id().get_identifier()] = UPDATED
        # Note: this is out of spec. The OSIDs don't require an object to be returned:
        return objects.Composition(
            composition_form._my_map,
            runtime=self._runtime)
def can_delete_compositions(self):
"""Tests if this user can delete ``Compositions``.
A return of true does not guarantee successful authorization. A
return of false indicates that it is known deleting a
``Composition`` will result in a ``PermissionDenied``. This is
intended as a hint to an application that may not wish to offer
delete operations to unauthorized users.
return: (boolean) - ``false`` if ``Composition`` deletion is not
authorized, ``true`` otherwise
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceAdminSession.can_create_resources
# NOTE: It is expected that real authentication hints will be
# handled in a service adapter above the pay grade of this impl.
return True
@utilities.arguments_not_none
def delete_composition(self, composition_id):
"""Deletes a ``Composition``.
arg: composition_id (osid.id.Id): the ``Id`` of the
``Composition`` to remove
raise: NotFound - ``composition_id`` not found
raise: NullArgument - ``composition_id`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceAdminSession.delete_resource_template
collection = MongoClientValidated('repository',
collection='Composition',
runtime=self._runtime)
if not isinstance(composition_id, ABCId):
raise errors.InvalidArgument('the argument is not a valid OSID Id')
composition_map = collection.find_one(
dict({'_id': ObjectId(composition_id.get_identifier())},
**self._view_filter()))
objects.Composition(composition_map, runtime=self._runtime)._delete()
collection.delete_one({'_id': ObjectId(composition_id.get_identifier())})
@utilities.arguments_not_none
def delete_composition_node(self, composition_id):
"""Deletes a ``Composition`` and all contained children.
arg: composition_id (osid.id.Id): the ``Id`` of the
``Composition`` to remove
raise: NotFound - ``composition_id`` not found
raise: NullArgument - ``composition_id`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
raise errors.Unimplemented()
@utilities.arguments_not_none
def add_composition_child(self, composition_id, child_composition_id):
"""Adds a composition to a parent composition.
arg: composition_id (osid.id.Id): the ``Id`` of a parent
``Composition``
arg: child_composition_id (osid.id.Id): the ``Id`` of a child
``Composition``
raise: AlreadyExists - ``child_composition_id`` is already a
child of ``composition_id``
raise: NotFound - ``composition_id`` or
``child_composition_id`` is not found
raise: NullArgument - ``composition_id`` or
``child_composition_id`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
raise errors.Unimplemented()
@utilities.arguments_not_none
def remove_composition_child(self, composition_id, child_composition_id):
"""Removes a composition from a parent composition.
arg: composition_id (osid.id.Id): the ``Id`` of a parent
``Composition``
arg: child_composition_id (osid.id.Id): the ``Id`` of a child
``Composition``
raise: NotFound - ``composition_id`` or
``child_composition_id`` is not found or not related
raise: NullArgument - ``composition_id`` or
``child_composition_id`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
raise errors.Unimplemented()
def can_manage_composition_aliases(self):
"""Tests if this user can manage ``Id`` aliases for ``Compositions``.
A return of true does not guarantee successful authorization. A
return of false indicates that it is known changing an alias
will result in a ``PermissionDenied``. This is intended as a
hint to an application that may opt not to offer alias
operations to an unauthorized user.
return: (boolean) - ``false`` if ``Composition`` aliasing is not
authorized, ``true`` otherwise
*compliance: mandatory -- This method must be implemented.*
"""
raise errors.Unimplemented()
@utilities.arguments_not_none
def alias_composition(self, composition_id, alias_id):
"""Adds an ``Id`` to a ``Composition`` for the purpose of creating compatibility.
The primary ``Id`` of the ``Composition`` is determined by the
provider. The new ``Id`` is an alias to the primary ``Id``. If
the alias is a pointer to another composition, it is reassigned
to the given composition ``Id``.
arg: composition_id (osid.id.Id): the ``Id`` of a
``Composition``
arg: alias_id (osid.id.Id): the alias ``Id``
raise: AlreadyExists - ``alias_id`` is in use as a primary
``Id``
raise: NotFound - ``composition_id`` not found
raise: NullArgument - ``composition_id`` or ``alias_id`` is
``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceAdminSession.alias_resources_template
self._alias_id(primary_id=composition_id, equivalent_id=alias_id)
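    # Aliasing sketch (comment only): once self._alias_id() has run, lookups by
    # the alias are expected to resolve to the primary composition. Assuming a
    # lookup session from the same provider manager:
    #
    #     admin_session.alias_composition(composition_id, alias_id)
    #     same = lookup_session.get_composition(alias_id)
    #     assert str(same.get_id()) == str(composition_id)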
class CompositionRepositorySession(abc_repository_sessions.CompositionRepositorySession, osid_sessions.OsidSession):
"""This session provides methods to retrieve ``Composition`` to ``Repository`` mappings.
A ``Composition`` may appear in multiple ``Repository`` objects.
Each ``Repository`` may have its own authorizations governing who is
allowed to look at it.
This lookup session defines several views:
* comparative view: elements may be silently omitted or re-ordered
* plenary view: provides a complete result set or is an error
condition
"""
_session_name = 'CompositionRepositorySession'
def __init__(self, proxy=None, runtime=None, **kwargs):
OsidSession._init_catalog(self, proxy, runtime)
self._catalog_view = COMPARATIVE
self._kwargs = kwargs
def use_comparative_composition_repository_view(self):
"""The returns from the lookup methods may omit or translate elements based on this session, such as
authorization, and not result in an error.
This view is used when greater interoperability is desired at
the expense of precision.
        *compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.BinLookupSession.use_comparative_bin_view
self._catalog_view = COMPARATIVE
def use_plenary_composition_repository_view(self):
"""A complete view of the ``Composition`` and ``Repository`` returns is desired.
Methods will return what is requested or result in an error.
This view is used when greater precision is desired at the
expense of interoperability.
        *compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.BinLookupSession.use_plenary_bin_view
self._catalog_view = PLENARY
def can_lookup_composition_repository_mappings(self):
"""Tests if this user can perform lookups of composition/repository mappings.
A return of true does not guarantee successful authorization. A
return of false indicates that it is known lookup methods in
this session will result in a ``PermissionDenied``. This is
intended as a hint to an application that may opt not to offer
lookup operations to unauthorized users.
return: (boolean) - ``false`` if looking up mappings is not
authorized, ``true`` otherwise
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceBinSession.can_lookup_resource_bin_mappings
# NOTE: It is expected that real authentication hints will be
# handled in a service adapter above the pay grade of this impl.
return True
@utilities.arguments_not_none
def get_composition_ids_by_repository(self, repository_id):
"""Gets the list of ``Composition`` ``Ids`` associated with a ``Repository``.
arg: repository_id (osid.id.Id): ``Id`` of the ``Repository``
return: (osid.id.IdList) - list of related composition ``Ids``
raise: NotFound - ``repository_id`` is not found
raise: NullArgument - ``repository_id`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceBinSession.get_resource_ids_by_bin
id_list = []
for composition in self.get_compositions_by_repository(repository_id):
id_list.append(composition.get_id())
return IdList(id_list)
@utilities.arguments_not_none
def get_compositions_by_repository(self, repository_id):
"""Gets the list of ``Compositions`` associated with a ``Repository``.
arg: repository_id (osid.id.Id): ``Id`` of the ``Repository``
return: (osid.repository.CompositionList) - list of related
compositions
raise: NotFound - ``repository_id`` is not found
raise: NullArgument - ``repository_id`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceBinSession.get_resources_by_bin
mgr = self._get_provider_manager('REPOSITORY')
lookup_session = mgr.get_composition_lookup_session_for_repository(repository_id)
lookup_session.use_isolated_repository_view()
return lookup_session.get_compositions()
@utilities.arguments_not_none
def get_composition_ids_by_repositories(self, repository_ids):
"""Gets the list of ``Composition`` ``Ids`` corresponding to a list of ``Repository`` objects.
arg: repository_ids (osid.id.IdList): list of repository
``Ids``
return: (osid.id.IdList) - list of composition ``Ids``
raise: NullArgument - ``repository_ids`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceBinSession.get_resource_ids_by_bins
id_list = []
for composition in self.get_compositions_by_repositories(repository_ids):
id_list.append(composition.get_id())
return IdList(id_list)
@utilities.arguments_not_none
    def get_compositions_by_repositories(self, repository_ids):
"""Gets the list of ``Compositions`` corresponding to a list of ``Repository`` objects.
arg: repository_ids (osid.id.IdList): list of repository
``Ids``
return: (osid.repository.CompositionList) - list of Compositions
raise: NullArgument - ``repository_ids`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceBinSession.get_resources_by_bins
composition_list = []
for repository_id in repository_ids:
composition_list += list(
self.get_compositions_by_repository(repository_id))
return objects.CompositionList(composition_list)
@utilities.arguments_not_none
def get_repository_ids_by_composition(self, composition_id):
"""Gets the ``Repository`` ``Ids`` mapped to a ``Composition``.
arg: composition_id (osid.id.Id): ``Id`` of a ``Composition``
return: (osid.id.IdList) - list of repository ``Ids``
raise: NotFound - ``composition_id`` is not found
raise: NullArgument - ``composition_id`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
mgr = self._get_provider_manager('REPOSITORY', local=True)
lookup_session = mgr.get_composition_lookup_session()
lookup_session.use_federated_repository_view()
lookup_session.use_unsequestered_composition_view()
composition = lookup_session.get_composition(composition_id)
id_list = []
if 'assignedRepositoryIds' in composition._my_map:
for idstr in composition._my_map['assignedRepositoryIds']:
id_list.append(Id(idstr))
return IdList(id_list)
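    # The mapping is read straight off the composition document rather than from
    # a join table; a stored composition map might look like this (illustrative
    # values only):
    #
    #     {'_id': ObjectId('...'),
    #      'assignedRepositoryIds': ['repository.Repository%3A...%40ODL.MIT.EDU']}
    #
    # Each id string is parsed back into an osid Id in the loop above.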
@utilities.arguments_not_none
def get_repositories_by_composition(self, composition_id):
"""Gets the ``Repository`` objects mapped to a ``Composition``.
arg: composition_id (osid.id.Id): ``Id`` of a ``Composition``
return: (osid.repository.RepositoryList) - list of repositories
raise: NotFound - ``composition_id`` is not found
raise: NullArgument - ``composition_id`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceBinSession.get_bins_by_resource
mgr = self._get_provider_manager('REPOSITORY')
lookup_session = mgr.get_repository_lookup_session()
return lookup_session.get_repositories_by_ids(
self.get_repository_ids_by_composition(composition_id))
class CompositionRepositoryAssignmentSession(abc_repository_sessions.CompositionRepositoryAssignmentSession, osid_sessions.OsidSession):
"""This session provides methods to re-assign ``Compositions`` to ``Repository`` objects.
A ``Composition`` may be associated with multiple ``Repository``
objects. Removing the last reference to a ``Composition`` is the
equivalent of deleting it. Each ``Repository`` may have its own
authorizations governing who is allowed to operate on it.
Moving or adding a reference of a ``Composition`` to another
``Repository`` is not a copy operation (eg: does not change its
``Id`` ).
"""
def __init__(self, proxy=None, runtime=None, **kwargs):
OsidSession._init_catalog(self, proxy, runtime)
self._session_name = 'CompositionRepositoryAssignmentSession'
self._catalog_name = 'Repository'
self._forms = dict()
self._kwargs = kwargs
def can_assign_compositions(self):
"""Tests if this user can alter composition/repository mappings.
A return of true does not guarantee successful authorization. A
return of false indicates that it is known mapping methods in
this session will result in a ``PermissionDenied``. This is
intended as a hint to an application that may opt not to offer
assignment operations to unauthorized users.
return: (boolean) - ``false`` if mapping is not authorized,
``true`` otherwise
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceBinAssignmentSession.can_assign_resources
# NOTE: It is expected that real authentication hints will be
# handled in a service adapter above the pay grade of this impl.
return True
@utilities.arguments_not_none
def can_assign_compositions_to_repository(self, repository_id):
"""Tests if this user can alter composition/repository mappings.
A return of true does not guarantee successful authorization. A
return of false indicates that it is known mapping methods in
this session will result in a ``PermissionDenied``. This is
intended as a hint to an application that may opt not to offer
assignment operations to unauthorized users.
arg: repository_id (osid.id.Id): the ``Id`` of the
``Repository``
return: (boolean) - ``false`` if mapping is not authorized,
``true`` otherwise
raise: NullArgument - ``repository_id`` is ``null``
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceBinAssignmentSession.can_assign_resources_to_bin
# NOTE: It is expected that real authentication hints will be
# handled in a service adapter above the pay grade of this impl.
if repository_id.get_identifier() == '000000000000000000000000':
return False
return True
@utilities.arguments_not_none
def get_assignable_repository_ids(self, repository_id):
"""Gets a list of repositories including and under the given repository node in which any composition can be
assigned.
arg: repository_id (osid.id.Id): the ``Id`` of the
``Repository``
return: (osid.id.IdList) - list of assignable repository ``Ids``
raise: NullArgument - ``repository_id`` is ``null``
raise: OperationFailed - unable to complete request
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceBinAssignmentSession.get_assignable_bin_ids
# This will likely be overridden by an authorization adapter
mgr = self._get_provider_manager('REPOSITORY', local=True)
lookup_session = mgr.get_repository_lookup_session()
        repositories = lookup_session.get_repositories()
        id_list = []
        for repository in repositories:
            id_list.append(repository.get_id())
return IdList(id_list)
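    # Note on the result: with the flat backing store used here, every known
    # repository is treated as assignable, so the method simply enumerates all
    # repository Ids; an authorization adapter layered above this implementation
    # would be the place to narrow that list.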
@utilities.arguments_not_none
def get_assignable_repository_ids_for_composition(self, repository_id, composition_id):
"""Gets a list of repositories including and under the given repository node in which a specific composition can
be assigned.
arg: repository_id (osid.id.Id): the ``Id`` of the
``Repository``
arg: composition_id (osid.id.Id): the ``Id`` of the
``Composition``
return: (osid.id.IdList) - list of assignable repository ``Ids``
raise: NullArgument - ``repository_id`` or ``composition_id``
is ``null``
raise: OperationFailed - unable to complete request
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceBinAssignmentSession.get_assignable_bin_ids_for_resource
# This will likely be overridden by an authorization adapter
        return self.get_assignable_repository_ids(repository_id)
@utilities.arguments_not_none
def assign_composition_to_repository(self, composition_id, repository_id):
"""Adds an existing ``Composition`` to a ``Repository``.
arg: composition_id (osid.id.Id): the ``Id`` of the
``Composition``
arg: repository_id (osid.id.Id): the ``Id`` of the
``Repository``
raise: AlreadyExists - ``composition_id`` already assigned to
``repository_id``
raise: NotFound - ``composition_id`` or ``repository_id`` not
found
raise: NullArgument - ``composition_id`` or ``repository_id``
is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceBinAssignmentSession.assign_resource_to_bin
mgr = self._get_provider_manager('REPOSITORY', local=True)
lookup_session = mgr.get_repository_lookup_session()
lookup_session.get_repository(repository_id) # to raise NotFound
self._assign_object_to_catalog(composition_id, repository_id)
@utilities.arguments_not_none
def unassign_composition_from_repository(self, composition_id, repository_id):
"""Removes ``Composition`` from a ``Repository``.
arg: composition_id (osid.id.Id): the ``Id`` of the
``Composition``
arg: repository_id (osid.id.Id): the ``Id`` of the
``Repository``
raise: NotFound - ``composition_id`` or ``repository_id`` not
found or ``composition_id`` not assigned to
``repository_id``
raise: NullArgument - ``composition_id`` or ``repository_id``
is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceBinAssignmentSession.unassign_resource_from_bin
mgr = self._get_provider_manager('REPOSITORY', local=True)
lookup_session = mgr.get_repository_lookup_session()
        lookup_session.get_repository(repository_id)  # to raise NotFound
self._unassign_object_from_catalog(composition_id, repository_id)
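    # Assignment round-trip sketch (comment only; session names are illustrative):
    #
    #     assignment_session.assign_composition_to_repository(composition_id,
    #                                                         repository_id)
    #     # ... the composition now appears in lookups over repository_id
    #     assignment_session.unassign_composition_from_repository(composition_id,
    #                                                             repository_id)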
class RepositoryLookupSession(abc_repository_sessions.RepositoryLookupSession, osid_sessions.OsidSession):
"""This session provides methods for retrieving ``Repository`` objects.
The ``Repository`` represents a collection of ``Assets`` and
``Compositions``.
This session defines views that offer differing behaviors when
retrieving multiple objects.
* comparative view: elements may be silently omitted or re-ordered
* plenary view: provides a complete set or is an error condition
Generally, the comparative view should be used for most applications
as it permits operation even if there is data that cannot be
accessed. For example, a browsing application may only need to
examine the ``Repositories`` it can access, without breaking
execution. However, an administrative application may require all
``Repository`` elements to be available.
    Repositories may have additional records indicated by their
respective record types. The record may not be accessed through a
cast of the ``Repository``.
"""
_session_name = 'RepositoryLookupSession'
def __init__(self, proxy=None, runtime=None, **kwargs):
OsidSession._init_catalog(self, proxy, runtime)
self._catalog_view = COMPARATIVE
self._kwargs = kwargs
def can_lookup_repositories(self):
"""Tests if this user can perform ``Repository`` lookups.
A return of true does not guarantee successful authorization. A
return of false indicates that it is known all methods in this
session will result in a ``PermissionDenied``. This is intended
as a hint to an application that may opt not to offer lookup
operations to unauthorized users.
return: (boolean) - ``false`` if lookup methods are not
authorized, ``true`` otherwise
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceLookupSession.can_lookup_resources
# NOTE: It is expected that real authentication hints will be
# handled in a service adapter above the pay grade of this impl.
return True
def use_comparative_repository_view(self):
"""The returns from the lookup methods may omit or translate elements based on this session, such as
authorization, and not result in an error.
This view is used when greater interoperability is desired at
the expense of precision.
        *compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.BinLookupSession.use_comparative_bin_view
self._catalog_view = COMPARATIVE
def use_plenary_repository_view(self):
"""A complete view of the ``Repository`` returns is desired.
Methods will return what is requested or result in an error.
This view is used when greater precision is desired at the
expense of interoperability.
        *compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.BinLookupSession.use_plenary_bin_view
self._catalog_view = PLENARY
@utilities.arguments_not_none
def get_repository(self, repository_id):
"""Gets the ``Repository`` specified by its ``Id``.
In plenary mode, the exact ``Id`` is found or a ``NotFound``
results. Otherwise, the returned ``Repository`` may have a
different ``Id`` than requested, such as the case where a
duplicate ``Id`` was assigned to a ``Repository`` and retained
for compatibility.
arg: repository_id (osid.id.Id): ``Id`` of the ``Repository``
return: (osid.repository.Repository) - the repository
raise: NotFound - ``repository_id`` not found
raise: NullArgument - ``repository_id`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
        *compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.BinLookupSession.get_bin
collection = MongoClientValidated('repository',
collection='Repository',
runtime=self._runtime)
# Need to consider how to best deal with the "phantom root" catalog issue
if repository_id.get_identifier() == '000000000000000000000000':
return self._get_phantom_root_catalog(cat_class=objects.Repository, cat_name='Repository')
try:
result = collection.find_one({'_id': ObjectId(repository_id.get_identifier())})
except errors.NotFound:
# Try creating an orchestrated Repository. Let it raise errors.NotFound()
result = self._create_orchestrated_cat(repository_id, 'repository', 'Repository')
return objects.Repository(result, runtime=self._runtime)
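    # Retrieval sketch: the all-zeros identifier short-circuits to the phantom
    # root catalog, and an unknown Id falls through to _create_orchestrated_cat(),
    # which mirrors a catalog of the same Id from a sibling service; only if that
    # also fails does NotFound propagate:
    #
    #     repo = lookup_session.get_repository(repository_id)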
@utilities.arguments_not_none
def get_repositories_by_ids(self, repository_ids):
"""Gets a ``RepositoryList`` corresponding to the given ``IdList``.
In plenary mode, the returned list contains all of the
repositories specified in the ``Id`` list, in the order of the
list, including duplicates, or an error results if an ``Id`` in
the supplied list is not found or inaccessible. Otherwise,
inaccessible ``Repositories`` may be omitted from the list and
may present the elements in any order including returning a
unique set.
arg: repository_ids (osid.id.IdList): the list of ``Ids`` to
retrieve
return: (osid.repository.RepositoryList) - the returned
``Repository list``
raise: NotFound - an ``Id`` was not found
raise: NullArgument - ``repository_ids`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.BinLookupSession.get_bins_by_ids_template
# NOTE: This implementation currently ignores plenary view
        # Also, this should be implemented to use get_repository() instead of going direct to the database
catalog_id_list = []
for i in repository_ids:
catalog_id_list.append(ObjectId(i.get_identifier()))
collection = MongoClientValidated('repository',
collection='Repository',
runtime=self._runtime)
result = collection.find({'_id': {'$in': catalog_id_list}}).sort('_id', DESCENDING)
return objects.RepositoryList(result, runtime=self._runtime)
@utilities.arguments_not_none
def get_repositories_by_genus_type(self, repository_genus_type):
"""Gets a ``RepositoryList`` corresponding to the given repository genus ``Type`` which does not include
repositories of types derived from the specified ``Type``.
In plenary mode, the returned list contains all known
repositories or an error results. Otherwise, the returned list
may contain only those repositories that are accessible through
this session.
arg: repository_genus_type (osid.type.Type): a repository
genus type
return: (osid.repository.RepositoryList) - the returned
``Repository list``
raise: NullArgument - ``repository_genus_type`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
raise errors.Unimplemented()
@utilities.arguments_not_none
def get_repositories_by_parent_genus_type(self, repository_genus_type):
"""Gets a ``RepositoryList`` corresponding to the given repository genus ``Type`` and include any additional
repositories with genus types derived from the specified ``Type``.
In plenary mode, the returned list contains all known
repositories or an error results. Otherwise, the returned list
may contain only those repositories that are accessible through
this session.
arg: repository_genus_type (osid.type.Type): a repository
genus type
return: (osid.repository.RepositoryList) - the returned
``Repository list``
raise: NullArgument - ``repository_genus_type`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
raise errors.Unimplemented()
@utilities.arguments_not_none
def get_repositories_by_record_type(self, repository_record_type):
"""Gets a ``RepositoryList`` containing the given repository record ``Type``.
In plenary mode, the returned list contains all known
repositories or an error results. Otherwise, the returned list
may contain only those repositories that are accessible through
this session.
arg: repository_record_type (osid.type.Type): a repository
record type
return: (osid.repository.RepositoryList) - the returned
``Repository list``
raise: NullArgument - ``repository_record_type`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
raise errors.Unimplemented()
@utilities.arguments_not_none
def get_repositories_by_provider(self, resource_id):
"""Gets a ``RepositoryList`` from the given provider ````.
In plenary mode, the returned list contains all known
repositories or an error results. Otherwise, the returned list
may contain only those repositories that are accessible through
this session.
arg: resource_id (osid.id.Id): a resource ``Id``
return: (osid.repository.RepositoryList) - the returned
``Repository list``
raise: NullArgument - ``resource_id`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
raise errors.Unimplemented()
def get_repositories(self):
"""Gets all ``Repositories``.
In plenary mode, the returned list contains all known
repositories or an error results. Otherwise, the returned list
may contain only those repositories that are accessible through
this session.
return: (osid.repository.RepositoryList) - a list of
``Repositories``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.BinLookupSession.get_bins_template
# NOTE: This implementation currently ignores plenary view
collection = MongoClientValidated('repository',
collection='Repository',
runtime=self._runtime)
result = collection.find().sort('_id', DESCENDING)
return objects.RepositoryList(result, runtime=self._runtime)
repositories = property(fget=get_repositories)
class RepositoryQuerySession(abc_repository_sessions.RepositoryQuerySession, osid_sessions.OsidSession):
"""This session provides methods for searching among ``Repository`` objects.
The search query is constructed using the ``RepositoryQuery``.
Repositories may have a query record indicated by their respective
record types. The query record is accessed via the
``RepositoryQuery``.
"""
_session_name = 'RepositoryQuerySession'
def __init__(self, proxy=None, runtime=None, **kwargs):
OsidSession._init_catalog(self, proxy, runtime)
self._forms = dict()
self._kwargs = kwargs
def can_search_repositories(self):
"""Tests if this user can perform ``Repository`` searches.
A return of true does not guarantee successful authorization. A
return of false indicates that it is known all methods in this
session will result in a ``PermissionDenied``. This is intended
as a hint to an application that may opt not to offer search
operations to unauthorized users.
return: (boolean) - ``false`` if search methods are not
authorized, ``true`` otherwise
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceQuerySession.can_search_resources
# NOTE: It is expected that real authentication hints will be
# handled in a service adapter above the pay grade of this impl.
return True
def get_repository_query(self):
"""Gets a repository query.
return: (osid.repository.RepositoryQuery) - the repository query
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.BinQuerySession.get_bin_query_template
return queries.RepositoryQuery(runtime=self._runtime)
repository_query = property(fget=get_repository_query)
@utilities.arguments_not_none
def get_repositories_by_query(self, repository_query):
"""Gets a list of ``Repositories`` matching the given repository query.
arg: repository_query (osid.repository.RepositoryQuery): the
repository query
return: (osid.repository.RepositoryList) - the returned
``RepositoryList``
raise: NullArgument - ``repository_query`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
raise: Unsupported - ``repository_query`` is not of this
service
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.BinQuerySession.get_bins_by_query_template
query_terms = dict(repository_query._query_terms)
collection = MongoClientValidated('repository',
collection='Repository',
runtime=self._runtime)
result = collection.find(query_terms).sort('_id', DESCENDING)
return objects.RepositoryList(result, runtime=self._runtime)
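    # Query sketch: the session hands RepositoryQuery._query_terms to Mongo as a
    # filter document, so matchers compose into a single find(). The matcher shown
    # is the standard OsidObjectQuery display-name matcher (illustrative here):
    #
    #     query = query_session.get_repository_query()
    #     query.match_display_name('Physics*', wildcard_string_match_type, True)
    #     matching = query_session.get_repositories_by_query(query)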
class RepositoryAdminSession(abc_repository_sessions.RepositoryAdminSession, osid_sessions.OsidSession):
"""This session creates, updates, and deletes ``Repositories``.
The data for create and update is provided by the consumer via the
form object. ``OsidForms`` are requested for each create or update
and may not be reused.
Create and update operations differ in their usage. To create a
``Repository,`` a ``RepositoryForm`` is requested using
``get_repository_form_for_create()`` specifying the desired record
``Types`` or none if no record ``Types`` are needed. The returned
``RepositoryForm`` will indicate that it is to be used with a create
    operation and can be used to examine metadata or validate data prior
    to creation. Once the ``RepositoryForm`` is submitted to a create
operation, it cannot be reused with another create operation unless
the first operation was unsuccessful. Each ``RepositoryForm``
corresponds to an attempted transaction.
For updates, ``RepositoryForms`` are requested to the ``Repository``
``Id`` that is to be updated using ``getRepositoryFormForUpdate()``.
Similarly, the ``RepositoryForm`` has metadata about the data that
can be updated and it can perform validation before submitting the
update. The ``RepositoryForm`` can only be used once for a
successful update and cannot be reused.
The delete operations delete ``Repositories``. This session includes
an ``Id`` aliasing mechanism to assign an external ``Id`` to an
internally assigned Id.
"""
_session_name = 'RepositoryAdminSession'
def __init__(self, proxy=None, runtime=None, **kwargs):
OsidSession._init_catalog(self, proxy, runtime)
self._forms = dict()
self._kwargs = kwargs
def can_create_repositories(self):
"""Tests if this user can create ``Repositories``.
A return of true does not guarantee successful authorization. A
return of false indicates that it is known creating a
``Repository`` will result in a ``PermissionDenied``. This is
intended as a hint to an application that may not wish to offer
create operations to unauthorized users.
return: (boolean) - ``false`` if ``Repository`` creation is not
authorized, ``true`` otherwise
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceAdminSession.can_create_resources
# NOTE: It is expected that real authentication hints will be
# handled in a service adapter above the pay grade of this impl.
return True
@utilities.arguments_not_none
def can_create_repository_with_record_types(self, repository_record_types):
"""Tests if this user can create a single ``Repository`` using the desired record types.
While ``RepositoryManager.getRepositoryRecordTypes()`` can be
used to examine which records are supported, this method tests
which record(s) are required for creating a specific
``Repository``. Providing an empty array tests if a
``Repository`` can be created with no records.
arg: repository_record_types (osid.type.Type[]): array of
repository record types
return: (boolean) - ``true`` if ``Repository`` creation using
the specified ``Types`` is supported, ``false``
otherwise
raise: NullArgument - ``repository_record_types`` is ``null``
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceAdminSession.can_create_resource_with_record_types
# NOTE: It is expected that real authentication hints will be
# handled in a service adapter above the pay grade of this impl.
return True
@utilities.arguments_not_none
def get_repository_form_for_create(self, repository_record_types):
"""Gets the repository form for creating new repositories.
A new form should be requested for each create transaction.
arg: repository_record_types (osid.type.Type[]): array of
repository record types
return: (osid.repository.RepositoryForm) - the repository form
raise: NullArgument - ``repository_record_types`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
raise: Unsupported - unable to get form for requested record
types
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.BinAdminSession.get_bin_form_for_create_template
for arg in repository_record_types:
if not isinstance(arg, ABCType):
raise errors.InvalidArgument('one or more argument array elements is not a valid OSID Type')
if repository_record_types == []:
result = objects.RepositoryForm(
runtime=self._runtime,
effective_agent_id=self.get_effective_agent_id())
else:
result = objects.RepositoryForm(
record_types=repository_record_types,
runtime=self._runtime,
effective_agent_id=self.get_effective_agent_id())
self._forms[result.get_id().get_identifier()] = not CREATED
return result
@utilities.arguments_not_none
def create_repository(self, repository_form):
"""Creates a new ``Repository``.
arg: repository_form (osid.repository.RepositoryForm): the
form for this ``Repository``
return: (osid.repository.Repository) - the new ``Repository``
raise: IllegalState - ``repository_form`` already used in a
create transaction
raise: InvalidArgument - one or more of the form elements is
invalid
raise: NullArgument - ``repository_form`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
raise: Unsupported - ``repository_form`` did not originate from
``get_repository_form_for_create()``
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.BinAdminSession.create_bin_template
collection = MongoClientValidated('repository',
collection='Repository',
runtime=self._runtime)
if not isinstance(repository_form, ABCRepositoryForm):
            raise errors.InvalidArgument('argument type is not a RepositoryForm')
if repository_form.is_for_update():
raise errors.InvalidArgument('the RepositoryForm is for update only, not create')
try:
if self._forms[repository_form.get_id().get_identifier()] == CREATED:
raise errors.IllegalState('repository_form already used in a create transaction')
except KeyError:
raise errors.Unsupported('repository_form did not originate from this session')
if not repository_form.is_valid():
raise errors.InvalidArgument('one or more of the form elements is invalid')
insert_result = collection.insert_one(repository_form._my_map)
self._forms[repository_form.get_id().get_identifier()] = CREATED
result = objects.Repository(
collection.find_one({'_id': insert_result.inserted_id}),
runtime=self._runtime)
return result
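    # One-shot create flow enforced above (comment sketch; `admin_session` is
    # illustrative):
    #
    #     form = admin_session.get_repository_form_for_create([])
    #     new_repo = admin_session.create_repository(form)
    #     admin_session.create_repository(form)  # raises IllegalState: form reused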
def can_update_repositories(self):
"""Tests if this user can update ``Repositories``.
A return of true does not guarantee successful authorization. A
return of false indicates that it is known updating a
``Repository`` will result in a ``PermissionDenied``. This is
intended as a hint to an application that may not wish to offer
update operations to unauthorized users.
return: (boolean) - ``false`` if ``Repository`` modification is
not authorized, ``true`` otherwise
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceAdminSession.can_create_resources
# NOTE: It is expected that real authentication hints will be
# handled in a service adapter above the pay grade of this impl.
return True
@utilities.arguments_not_none
def get_repository_form_for_update(self, repository_id):
"""Gets the repository form for updating an existing repository.
A new repository form should be requested for each update
transaction.
arg: repository_id (osid.id.Id): the ``Id`` of the
``Repository``
return: (osid.repository.RepositoryForm) - the repository form
raise: NotFound - ``repository_id`` is not found
raise: NullArgument - ``repository_id`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.BinAdminSession.get_bin_form_for_update_template
collection = MongoClientValidated('repository',
collection='Repository',
runtime=self._runtime)
if not isinstance(repository_id, ABCId):
raise errors.InvalidArgument('the argument is not a valid OSID Id')
result = collection.find_one({'_id': ObjectId(repository_id.get_identifier())})
cat_form = objects.RepositoryForm(result, runtime=self._runtime)
self._forms[cat_form.get_id().get_identifier()] = not UPDATED
return cat_form
@utilities.arguments_not_none
def update_repository(self, repository_form):
"""Updates an existing repository.
arg: repository_form (osid.repository.RepositoryForm): the
form containing the elements to be updated
raise: IllegalState - ``repository_form`` already used in an
update transaction
raise: InvalidArgument - the form contains an invalid value
raise: NullArgument - ``repository_form`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
raise: Unsupported - ``repository_form`` did not originate from
``get_repository_form_for_update()``
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.BinAdminSession.update_bin_template
collection = MongoClientValidated('repository',
collection='Repository',
runtime=self._runtime)
        if not isinstance(repository_form, ABCRepositoryForm):
            raise errors.InvalidArgument('argument type is not a RepositoryForm')
        if not repository_form.is_for_update():
            raise errors.InvalidArgument('the RepositoryForm is for create only, not update')
try:
if self._forms[repository_form.get_id().get_identifier()] == UPDATED:
raise errors.IllegalState('repository_form already used in an update transaction')
except KeyError:
raise errors.Unsupported('repository_form did not originate from this session')
if not repository_form.is_valid():
raise errors.InvalidArgument('one or more of the form elements is invalid')
        # replaces the deprecated collection.save(), per the earlier TODO
        collection.replace_one({'_id': repository_form._my_map['_id']},
                               repository_form._my_map)
self._forms[repository_form.get_id().get_identifier()] = UPDATED
# Note: this is out of spec. The OSIDs don't require an object to be returned
return objects.Repository(repository_form._my_map, runtime=self._runtime)
def can_delete_repositories(self):
"""Tests if this user can delete ``Repositories``.
A return of true does not guarantee successful authorization. A
return of false indicates that it is known deleting a
``Repository`` will result in a ``PermissionDenied``. This is
intended as a hint to an application that may not wish to offer
delete operations to unauthorized users.
return: (boolean) - ``false`` if ``Repository`` deletion is not
authorized, ``true`` otherwise
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceAdminSession.can_create_resources
# NOTE: It is expected that real authentication hints will be
# handled in a service adapter above the pay grade of this impl.
return True
@utilities.arguments_not_none
def delete_repository(self, repository_id):
"""Deletes a ``Repository``.
arg: repository_id (osid.id.Id): the ``Id`` of the
``Repository`` to remove
raise: NotFound - ``repository_id`` not found
raise: NullArgument - ``repository_id`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.BinAdminSession.delete_bin_template
collection = MongoClientValidated('repository',
collection='Repository',
runtime=self._runtime)
if not isinstance(repository_id, ABCId):
raise errors.InvalidArgument('the argument is not a valid OSID Id')
for object_catalog in ['Asset', 'Composition', 'Repository']:
obj_collection = MongoClientValidated('repository',
collection=object_catalog,
runtime=self._runtime)
if obj_collection.find({'assignedRepositoryIds': {'$in': [str(repository_id)]}}).count() != 0:
raise errors.IllegalState('catalog is not empty')
collection.delete_one({'_id': ObjectId(repository_id.get_identifier())})
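    # delete_repository() refuses to remove a non-empty catalog: any Asset,
    # Composition, or child Repository still carrying this repository's Id in
    # 'assignedRepositoryIds' raises IllegalState. A caller is expected to clear
    # the catalog first, e.g. (illustrative sketch):
    #
    #     for composition in composition_lookup_session.get_compositions():
    #         composition_admin_session.delete_composition(composition.get_id())
    #     repository_admin_session.delete_repository(repository_id)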
def can_manage_repository_aliases(self):
"""Tests if this user can manage ``Id`` aliases for repositories.
A return of true does not guarantee successful authorization. A
return of false indicates that it is known changing an alias
will result in a ``PermissionDenied``. This is intended as a
hint to an application that may opt not to offer alias
operations to an unauthorized user.
return: (boolean) - ``false`` if ``Repository`` aliasing is not
authorized, ``true`` otherwise
*compliance: mandatory -- This method must be implemented.*
"""
raise errors.Unimplemented()
@utilities.arguments_not_none
def alias_repository(self, repository_id, alias_id):
"""Adds an ``Id`` to a ``Repository`` for the purpose of creating compatibility.
The primary ``Id`` of the ``Repository`` is determined by the
provider. The new ``Id`` is an alias to the primary ``Id``. If
the alias is a pointer to another repository, it is reassigned
to the given repository ``Id``.
arg: repository_id (osid.id.Id): the ``Id`` of a
``Repository``
arg: alias_id (osid.id.Id): the alias ``Id``
raise: AlreadyExists - ``alias_id`` is in use as a primary
``Id``
raise: NotFound - ``repository_id`` not found
raise: NullArgument - ``repository_id`` or ``alias_id`` is
``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.BinLookupSession.alias_bin_template
# NEED TO FIGURE OUT HOW TO IMPLEMENT THIS SOMEDAY
raise errors.Unimplemented()
class RepositoryHierarchySession(abc_repository_sessions.RepositoryHierarchySession, osid_sessions.OsidSession):
"""This session defines methods for traversing a hierarchy of ``Repository`` objects.
Each node in the hierarchy is a unique ``Repository``. The hierarchy
may be traversed recursively to establish the tree structure through
``get_parents()`` and ``getChildren()``. To relate these ``Ids`` to
another OSID, ``get_ancestors()`` and ``get_descendants()`` can be
used for retrievals that can be used for bulk lookups in other
OSIDs. Any ``Repository`` available in the Repository OSID is known
to this hierarchy but does not appear in the hierarchy traversal
until added as a root node or a child of another node.
A user may not be authorized to traverse the entire hierarchy. Parts
of the hierarchy may be made invisible through omission from the
returns of ``get_parents()`` or ``get_children()`` in lieu of a
``PermissionDenied`` error that may disrupt the traversal through
authorized pathways.
This session defines views that offer differing behaviors when
retrieving multiple objects.
* comparative view: repository elements may be silently omitted or
re-ordered
* plenary view: provides a complete set or is an error condition
"""
_session_name = 'RepositoryHierarchySession'
def __init__(self, proxy=None, runtime=None, **kwargs):
OsidSession.__init__(self)
OsidSession._init_catalog(self, proxy, runtime)
self._forms = dict()
self._kwargs = kwargs
hierarchy_mgr = self._get_provider_manager('HIERARCHY')
self._hierarchy_session = hierarchy_mgr.get_hierarchy_traversal_session_for_hierarchy(
Id(authority='REPOSITORY',
namespace='CATALOG',
identifier='REPOSITORY')
)
def get_repository_hierarchy_id(self):
"""Gets the hierarchy ``Id`` associated with this session.
return: (osid.id.Id) - the hierarchy ``Id`` associated with this
session
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceHierarchySession.get_bin_hierarchy_id
return self._hierarchy_session.get_hierarchy_id()
repository_hierarchy_id = property(fget=get_repository_hierarchy_id)
def get_repository_hierarchy(self):
"""Gets the hierarchy associated with this session.
return: (osid.hierarchy.Hierarchy) - the hierarchy associated
with this session
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceHierarchySession.get_bin_hierarchy
return self._hierarchy_session.get_hierarchy()
repository_hierarchy = property(fget=get_repository_hierarchy)
def can_access_repository_hierarchy(self):
"""Tests if this user can perform hierarchy queries.
A return of true does not guarantee successful authorization. A
return of false indicates that it is known all methods in this
session will result in a ``PermissionDenied``. This is intended
as a hint to an application that may opt not to offer lookup
operations.
return: (boolean) - ``false`` if hierarchy traversal methods are
not authorized, ``true`` otherwise
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceHierarchySession.can_access_bin_hierarchy
# NOTE: It is expected that real authentication hints will be
# handled in a service adapter above the pay grade of this impl.
return True
def use_comparative_repository_view(self):
"""The returns from the repository methods may omit or translate elements based on this session, such as
authorization, and not result in an error.
This view is used when greater interoperability is desired at
the expense of precision.
        *compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.BinLookupSession.use_comparative_bin_view
self._catalog_view = COMPARATIVE
def use_plenary_repository_view(self):
"""A complete view of the ``Repository`` returns is desired.
Methods will return what is requested or result in an error.
This view is used when greater precision is desired at the
expense of interoperability.
        *compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.BinLookupSession.use_plenary_bin_view
self._catalog_view = PLENARY
def get_root_repository_ids(self):
"""Gets the root repository ``Ids`` in this hierarchy.
return: (osid.id.IdList) - the root repository ``Ids``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceHierarchySession.get_root_bin_ids
return self._hierarchy_session.get_roots()
root_repository_ids = property(fget=get_root_repository_ids)
def get_root_repositories(self):
"""Gets the root repositories in the repository hierarchy.
A node with no parents is an orphan. While all repository
``Ids`` are known to the hierarchy, an orphan does not appear in
the hierarchy unless explicitly added as a root node or child of
another node.
return: (osid.repository.RepositoryList) - the root repositories
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
        *compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceHierarchySession.get_root_bins
return RepositoryLookupSession(
self._proxy,
self._runtime).get_repositories_by_ids(list(self.get_root_repository_ids()))
root_repositories = property(fget=get_root_repositories)
@utilities.arguments_not_none
def has_parent_repositories(self, repository_id):
"""Tests if the ``Repository`` has any parents.
arg: repository_id (osid.id.Id): a repository ``Id``
return: (boolean) - ``true`` if the repository has parents,
``false`` otherwise
raise: NotFound - ``repository_id`` is not found
raise: NullArgument - ``repository_id`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceHierarchySession.has_parent_bins
return self._hierarchy_session.has_parents(id_=repository_id)
@utilities.arguments_not_none
def is_parent_of_repository(self, id_, repository_id):
"""Tests if an ``Id`` is a direct parent of a repository.
arg: id (osid.id.Id): an ``Id``
arg: repository_id (osid.id.Id): the ``Id`` of a repository
return: (boolean) - ``true`` if this ``id`` is a parent of
``repository_id,`` ``false`` otherwise
raise: NotFound - ``repository_id`` is not found
raise: NullArgument - ``id`` or ``repository_id`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
*implementation notes*: If ``id`` not found return ``false``.
"""
# Implemented from template for
# osid.resource.ResourceHierarchySession.is_parent_of_bin
return self._hierarchy_session.is_parent(id_=repository_id, parent_id=id_)
@utilities.arguments_not_none
def get_parent_repository_ids(self, repository_id):
"""Gets the parent ``Ids`` of the given repository.
arg: repository_id (osid.id.Id): a repository ``Id``
return: (osid.id.IdList) - the parent ``Ids`` of the repository
raise: NotFound - ``repository_id`` is not found
raise: NullArgument - ``repository_id`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceHierarchySession.get_parent_bin_ids
return self._hierarchy_session.get_parents(id_=repository_id)
@utilities.arguments_not_none
def get_parent_repositories(self, repository_id):
"""Gets the parents of the given repository.
arg: repository_id (osid.id.Id): the ``Id`` to query
return: (osid.repository.RepositoryList) - the parents of the
repository
raise: NotFound - ``repository_id`` not found
raise: NullArgument - ``repository_id`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceHierarchySession.get_parent_bins
return RepositoryLookupSession(
self._proxy,
self._runtime).get_repositories_by_ids(
list(self.get_parent_repository_ids(repository_id)))
@utilities.arguments_not_none
def is_ancestor_of_repository(self, id_, repository_id):
"""Tests if an ``Id`` is an ancestor of a repository.
arg: id (osid.id.Id): an ``Id``
arg: repository_id (osid.id.Id): the Id of a repository
return: (boolean) - ``true`` if this ``id`` is an ancestor of
``repository_id,`` ``false`` otherwise
raise: NotFound - ``repository_id`` not found
raise: NullArgument - ``repository_id`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
*implementation notes*: If ``id`` not found return ``false``.
"""
# Implemented from template for
# osid.resource.ResourceHierarchySession.is_ancestor_of_bin
        # argument order mirrors is_parent_of_repository() above: the candidate
        # ancestor is passed as ancestor_id
        return self._hierarchy_session.is_ancestor(id_=repository_id, ancestor_id=id_)
@utilities.arguments_not_none
def has_child_repositories(self, repository_id):
"""Tests if a repository has any children.
arg: repository_id (osid.id.Id): a repository ``Id``
return: (boolean) - ``true`` if the ``repository_id`` has
children, ``false`` otherwise
raise: NotFound - ``repository_id`` not found
raise: NullArgument - ``repository_id`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceHierarchySession.has_child_bins
return self._hierarchy_session.has_children(id_=repository_id)
@utilities.arguments_not_none
def is_child_of_repository(self, id_, repository_id):
"""Tests if a node is a direct child of another.
arg: id (osid.id.Id): an ``Id``
arg: repository_id (osid.id.Id): the ``Id`` of a repository
return: (boolean) - ``true`` if the ``id`` is a child of
``repository_id,`` ``false`` otherwise
raise: NotFound - ``repository_id`` not found
raise: NullArgument - ``repository_id`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
*implementation notes*: If ``id`` not found return ``false``.
"""
# Implemented from template for
# osid.resource.ResourceHierarchySession.is_child_of_bin
return self._hierarchy_session.is_child(id_=repository_id, child_id=id_)
@utilities.arguments_not_none
def get_child_repository_ids(self, repository_id):
"""Gets the ``Ids`` of the children of the given repository.
arg: repository_id (osid.id.Id): the ``Id`` to query
return: (osid.id.IdList) - the children of the repository
raise: NotFound - ``repository_id`` not found
raise: NullArgument - ``repository_id`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceHierarchySession.get_child_bin_ids
return self._hierarchy_session.get_children(id_=repository_id)
@utilities.arguments_not_none
def get_child_repositories(self, repository_id):
"""Gets the children of the given repository.
arg: repository_id (osid.id.Id): the ``Id`` to query
return: (osid.repository.RepositoryList) - the children of the
repository
raise: NotFound - ``repository_id`` not found
raise: NullArgument - ``repository_id`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceHierarchySession.get_child_bins
return RepositoryLookupSession(
self._proxy,
self._runtime).get_repositories_by_ids(
list(self.get_child_repository_ids(repository_id)))
@utilities.arguments_not_none
def is_descendant_of_repository(self, id_, repository_id):
"""Tests if an ``Id`` is a descendant of a repository.
arg: id (osid.id.Id): an ``Id``
arg: repository_id (osid.id.Id): the ``Id`` of a repository
return: (boolean) - ``true`` if the ``id`` is a descendant of
the ``repository_id,`` ``false`` otherwise
raise: NotFound - ``repository_id`` not found
raise: NullArgument - ``repository_id`` or ``id`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
*implementation notes*: If ``id`` is not found return ``false``.
"""
# Implemented from template for
# osid.resource.ResourceHierarchySession.is_descendant_of_bin
return self._hierarchy_session.is_descendant(id_=id_, descendant_id=repository_id)
@utilities.arguments_not_none
def get_repository_node_ids(self, repository_id, ancestor_levels, descendant_levels, include_siblings):
"""Gets a portion of the hierarchy for the given repository.
arg: repository_id (osid.id.Id): the ``Id`` to query
arg: ancestor_levels (cardinal): the maximum number of
ancestor levels to include. A value of 0 returns no
parents in the node.
arg: descendant_levels (cardinal): the maximum number of
descendant levels to include. A value of 0 returns no
children in the node.
arg: include_siblings (boolean): ``true`` to include the
siblings of the given node, ``false`` to omit the
siblings
return: (osid.hierarchy.Node) - the specified repository node
raise: NotFound - ``repository_id`` not found
raise: NullArgument - ``repository_id`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceHierarchySession.get_bin_node_ids
return self._hierarchy_session.get_nodes(
id_=repository_id,
ancestor_levels=ancestor_levels,
descendant_levels=descendant_levels,
include_siblings=include_siblings)
@utilities.arguments_not_none
def get_repository_nodes(self, repository_id, ancestor_levels, descendant_levels, include_siblings):
"""Gets a portion of the hierarchy for the given repository.
arg: repository_id (osid.id.Id): the ``Id`` to query
arg: ancestor_levels (cardinal): the maximum number of
ancestor levels to include. A value of 0 returns no
parents in the node.
arg: descendant_levels (cardinal): the maximum number of
descendant levels to include. A value of 0 returns no
children in the node.
arg: include_siblings (boolean): ``true`` to include the
siblings of the given node, ``false`` to omit the
siblings
return: (osid.repository.RepositoryNode) - the specified
repository node
raise: NotFound - ``repository_id`` not found
raise: NullArgument - ``repository_id`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceHierarchySession.get_bin_nodes
return objects.RepositoryNode(self.get_repository_node_ids(
repository_id=repository_id,
ancestor_levels=ancestor_levels,
descendant_levels=descendant_levels,
include_siblings=include_siblings)._my_map, runtime=self._runtime, proxy=self._proxy)
class RepositoryHierarchyDesignSession(abc_repository_sessions.RepositoryHierarchyDesignSession, osid_sessions.OsidSession):
"""This session defines methods for managing a hierarchy of ``Repository`` objects.
Each node in the hierarchy is a unique ``Repository``.
"""
_session_name = 'RepositoryHierarchyDesignSession'
def __init__(self, proxy=None, runtime=None, **kwargs):
OsidSession._init_catalog(self, proxy, runtime)
self._forms = dict()
self._kwargs = kwargs
hierarchy_mgr = self._get_provider_manager('HIERARCHY')
self._hierarchy_session = hierarchy_mgr.get_hierarchy_design_session_for_hierarchy(
Id(authority='REPOSITORY',
namespace='CATALOG',
identifier='REPOSITORY')
)
def get_repository_hierarchy_id(self):
"""Gets the hierarchy ``Id`` associated with this session.
return: (osid.id.Id) - the hierarchy ``Id`` associated with this
session
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceHierarchySession.get_bin_hierarchy_id
return self._hierarchy_session.get_hierarchy_id()
repository_hierarchy_id = property(fget=get_repository_hierarchy_id)
def get_repository_hierarchy(self):
"""Gets the hierarchy associated with this session.
return: (osid.hierarchy.Hierarchy) - the hierarchy associated
with this session
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceHierarchySession.get_bin_hierarchy
return self._hierarchy_session.get_hierarchy()
repository_hierarchy = property(fget=get_repository_hierarchy)
def can_modify_repository_hierarchy(self):
"""Tests if this user can change the hierarchy.
A return of true does not guarantee successful authorization. A
return of false indicates that it is known performing any update
will result in a ``PermissionDenied``. This is intended as a
hint to an application that may opt not to offer these
operations to an unauthorized user.
return: (boolean) - ``false`` if changing this hierarchy is not
authorized, ``true`` otherwise
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceHierarchyDesignSession.can_modify_objective_bank_hierarchy
# NOTE: It is expected that real authentication hints will be
# handled in a service adapter above the pay grade of this impl.
return True
@utilities.arguments_not_none
def add_root_repository(self, repository_id):
"""Adds a root repository.
arg: repository_id (osid.id.Id): the ``Id`` of a repository
raise: AlreadyExists - ``repository_id`` is already in
hierarchy
raise: NotFound - ``repository_id`` not found
raise: NullArgument - ``repository_id`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceHierarchyDesignSession.add_root_bin_template
return self._hierarchy_session.add_root(id_=repository_id)
@utilities.arguments_not_none
def remove_root_repository(self, repository_id):
"""Removes a root repository.
arg: repository_id (osid.id.Id): the ``Id`` of a repository
raise: NotFound - ``repository_id`` not a root
raise: NullArgument - ``repository_id`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceHierarchyDesignSession.remove_root_bin_template
return self._hierarchy_session.remove_root(id_=repository_id)
@utilities.arguments_not_none
def add_child_repository(self, repository_id, child_id):
"""Adds a child to a repository.
arg: repository_id (osid.id.Id): the ``Id`` of a repository
arg: child_id (osid.id.Id): the ``Id`` of the new child
raise: AlreadyExists - ``repository_id`` is already a parent of
``child_id``
raise: NotFound - ``repository_id`` or ``child_id`` not found
raise: NullArgument - ``repository_id`` or ``child_id`` is
``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceHierarchyDesignSession.add_child_bin_template
return self._hierarchy_session.add_child(id_=repository_id, child_id=child_id)
@utilities.arguments_not_none
def remove_child_repository(self, repository_id, child_id):
"""Removes a child from a repository.
arg: repository_id (osid.id.Id): the ``Id`` of a repository
arg: child_id (osid.id.Id): the ``Id`` of the new child
raise: NotFound - ``repository_id`` not a parent of
``child_id``
raise: NullArgument - ``repository_id`` or ``child_id`` is
``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceHierarchyDesignSession.remove_child_bin_template
return self._hierarchy_session.remove_child(id_=repository_id, child_id=child_id)
@utilities.arguments_not_none
def remove_child_repositories(self, repository_id):
"""Removes all children from a repository.
arg: repository_id (osid.id.Id): the ``Id`` of a repository
raise: NotFound - ``repository_id`` not in hierarchy
raise: NullArgument - ``repository_id`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceHierarchyDesignSession.remove_child_bin_template
return self._hierarchy_session.remove_children(id_=repository_id)
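# Hedged usage sketch (not part of the original module): wiring a two-level
# repository hierarchy with this session. `design_session` and the two Ids
# are assumed to come from the surrounding service manager.
#
#   design_session.add_root_repository(parent_repo_id)
#   design_session.add_child_repository(parent_repo_id, child_repo_id)
#   # later, tear it down in reverse order:
#   design_session.remove_child_repository(parent_repo_id, child_repo_id)
#   design_session.remove_root_repository(parent_repo_id)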
|
birdland/dlkit-doc
|
dlkit/mongo/repository/sessions.py
|
Python
|
mit
| 237,798
|
# -*- coding: utf-8 -*-
"""
Created on Tue May 31 10:57:02 2016
@author: noore
"""
import re
import bioservices.kegg
import pandas as pd
import settings  # assumed: local module providing KEGG2CHEBI_FNAME (Python 2 implicit relative import)
kegg = bioservices.kegg.KEGG()
cid2name = kegg.list('cpd')
cid2name = filter(lambda x: len(x) == 2, map(lambda l : l.split('\t'), cid2name.split('\n')))
cid_df = pd.DataFrame(cid2name, columns=['cpd', 'names'])
cid_df['cpd'] = cid_df['cpd'].apply(lambda x: x[4:])
cid_df['name'] = cid_df['names'].apply(lambda s: s.split(';')[0])
cid_df.set_index('cpd', inplace=True)
cid_df['ChEBI'] = None
for cid in cid_df.index[0:10]:
ChEBI = re.findall('ChEBI: ([\d\s]+)\n', kegg.get(cid))
if len(ChEBI) == 0:
print 'Cannot find a ChEBI for %s' % cid
elif len(ChEBI) > 1:
print 'Error parsing compound %s' % cid
else:
        cid_df.at[cid, 'ChEBI'] = ChEBI[0]
cid_df.to_csv(settings.KEGG2CHEBI_FNAME)
|
dmccloskey/component-contribution
|
component_contribution/kegg_database.py
|
Python
|
mit
| 860
|
"""The tests for the Graphite component."""
import socket
import unittest
from unittest import mock
import blumate.core as ha
import blumate.components.graphite as graphite
from blumate.const import (
EVENT_STATE_CHANGED,
EVENT_BLUMATE_START, EVENT_BLUMATE_STOP,
STATE_ON, STATE_OFF)
from tests.common import get_test_home_assistant
class TestGraphite(unittest.TestCase):
"""Test the Graphite component."""
def setup_method(self, method):
"""Setup things to be run when tests are started."""
self.hass = get_test_home_assistant()
self.hass.config.latitude = 32.87336
self.hass.config.longitude = 117.22743
self.gf = graphite.GraphiteFeeder(self.hass, 'foo', 123, 'bm')
def teardown_method(self, method):
"""Stop everything that was started."""
self.hass.stop()
@mock.patch('blumate.components.graphite.GraphiteFeeder')
def test_minimal_config(self, mock_gf):
"""Test setup with minimal configuration."""
self.assertTrue(graphite.setup(self.hass, {}))
mock_gf.assert_called_once_with(self.hass, 'localhost', 2003, 'bm')
@mock.patch('blumate.components.graphite.GraphiteFeeder')
def test_full_config(self, mock_gf):
"""Test setup with full configuration."""
config = {
'graphite': {
'host': 'foo',
'port': 123,
'prefix': 'me',
}
}
self.assertTrue(graphite.setup(self.hass, config))
mock_gf.assert_called_once_with(self.hass, 'foo', 123, 'me')
@mock.patch('blumate.components.graphite.GraphiteFeeder')
def test_config_bad_port(self, mock_gf):
"""Test setup with invalid port."""
config = {
'graphite': {
'host': 'foo',
'port': 'wrong',
}
}
self.assertFalse(graphite.setup(self.hass, config))
self.assertFalse(mock_gf.called)
def test_subscribe(self):
"""Test the subscription."""
fake_hass = mock.MagicMock()
gf = graphite.GraphiteFeeder(fake_hass, 'foo', 123, 'bm')
        fake_hass.bus.listen_once.assert_has_calls([
            mock.call(EVENT_BLUMATE_START, gf.start_listen),
            mock.call(EVENT_BLUMATE_STOP, gf.shutdown),
        ])
fake_hass.bus.listen.assert_called_once_with(
EVENT_STATE_CHANGED, gf.event_listener)
def test_start(self):
"""Test the start."""
with mock.patch.object(self.gf, 'start') as mock_start:
self.gf.start_listen('event')
mock_start.assert_called_once_with()
def test_shutdown(self):
"""Test the shutdown."""
with mock.patch.object(self.gf, '_queue') as mock_queue:
self.gf.shutdown('event')
mock_queue.put.assert_called_once_with(self.gf._quit_object)
def test_event_listener(self):
"""Test the event listener."""
with mock.patch.object(self.gf, '_queue') as mock_queue:
self.gf.event_listener('foo')
mock_queue.put.assert_called_once_with('foo')
@mock.patch('time.time')
def test_report_attributes(self, mock_time):
"""Test the reporting with attributes."""
mock_time.return_value = 12345
attrs = {'foo': 1,
'bar': 2.0,
'baz': True,
'bat': 'NaN',
}
expected = [
'bm.entity.state 0.000000 12345',
'bm.entity.foo 1.000000 12345',
'bm.entity.bar 2.000000 12345',
'bm.entity.baz 1.000000 12345',
]
state = mock.MagicMock(state=0, attributes=attrs)
with mock.patch.object(self.gf, '_send_to_graphite') as mock_send:
self.gf._report_attributes('entity', state)
actual = mock_send.call_args_list[0][0][0].split('\n')
self.assertEqual(sorted(expected), sorted(actual))
@mock.patch('time.time')
def test_report_with_string_state(self, mock_time):
"""Test the reporting with strings."""
mock_time.return_value = 12345
expected = [
'bm.entity.foo 1.000000 12345',
'bm.entity.state 1.000000 12345',
]
state = mock.MagicMock(state='above_horizon', attributes={'foo': 1.0})
with mock.patch.object(self.gf, '_send_to_graphite') as mock_send:
self.gf._report_attributes('entity', state)
actual = mock_send.call_args_list[0][0][0].split('\n')
self.assertEqual(sorted(expected), sorted(actual))
@mock.patch('time.time')
def test_report_with_binary_state(self, mock_time):
"""Test the reporting with binary state."""
mock_time.return_value = 12345
state = ha.State('domain.entity', STATE_ON, {'foo': 1.0})
with mock.patch.object(self.gf, '_send_to_graphite') as mock_send:
self.gf._report_attributes('entity', state)
expected = ['bm.entity.foo 1.000000 12345',
'bm.entity.state 1.000000 12345']
actual = mock_send.call_args_list[0][0][0].split('\n')
self.assertEqual(sorted(expected), sorted(actual))
state.state = STATE_OFF
with mock.patch.object(self.gf, '_send_to_graphite') as mock_send:
self.gf._report_attributes('entity', state)
expected = ['bm.entity.foo 1.000000 12345',
'bm.entity.state 0.000000 12345']
actual = mock_send.call_args_list[0][0][0].split('\n')
self.assertEqual(sorted(expected), sorted(actual))
@mock.patch('time.time')
def test_send_to_graphite_errors(self, mock_time):
"""Test the sending with errors."""
mock_time.return_value = 12345
state = ha.State('domain.entity', STATE_ON, {'foo': 1.0})
with mock.patch.object(self.gf, '_send_to_graphite') as mock_send:
mock_send.side_effect = socket.error
self.gf._report_attributes('entity', state)
mock_send.side_effect = socket.gaierror
self.gf._report_attributes('entity', state)
@mock.patch('socket.socket')
def test_send_to_graphite(self, mock_socket):
"""Test the sending of data."""
self.gf._send_to_graphite('foo')
mock_socket.assert_called_once_with(socket.AF_INET,
socket.SOCK_STREAM)
sock = mock_socket.return_value
sock.connect.assert_called_once_with(('foo', 123))
sock.sendall.assert_called_once_with('foo'.encode('ascii'))
sock.send.assert_called_once_with('\n'.encode('ascii'))
sock.close.assert_called_once_with()
def test_run_stops(self):
"""Test the stops."""
with mock.patch.object(self.gf, '_queue') as mock_queue:
mock_queue.get.return_value = self.gf._quit_object
self.assertEqual(None, self.gf.run())
mock_queue.get.assert_called_once_with()
mock_queue.task_done.assert_called_once_with()
def test_run(self):
"""Test the running."""
runs = []
event = mock.MagicMock(event_type=EVENT_STATE_CHANGED,
data={'entity_id': 'entity',
'new_state': mock.MagicMock()})
def fake_get():
if len(runs) >= 2:
return self.gf._quit_object
elif runs:
runs.append(1)
return mock.MagicMock(event_type='somethingelse',
data={'new_event': None})
else:
runs.append(1)
return event
with mock.patch.object(self.gf, '_queue') as mock_queue:
with mock.patch.object(self.gf, '_report_attributes') as mock_r:
mock_queue.get.side_effect = fake_get
self.gf.run()
# Twice for two events, once for the stop
self.assertEqual(3, mock_queue.task_done.call_count)
mock_r.assert_called_once_with(
'entity',
event.data['new_state'])
|
bdfoster/blumate
|
tests/components/test_graphite.py
|
Python
|
mit
| 8,152
|
# -*- coding: utf-8 -*-
"""Init and utils."""
from zope.i18nmessageid import MessageFactory
_ = MessageFactory('dakhli.sitecontent')
def initialize(context):
"""Initializer called when used as a Zope 2 product."""
|
a25kk/dakhli
|
src/dakhli.sitecontent/dakhli/sitecontent/__init__.py
|
Python
|
mit
| 222
|
# Generated by Django 2.2.5 on 2019-09-25 17:40
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
('testproducts', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='ProductRequest',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created_date', models.DateTimeField(auto_now_add=True)),
('email', models.EmailField(blank=True, help_text='Optional email of the customer who made the request', max_length=254, null=True)),
('variant', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='requests', to='testproducts.ProductVariant')),
],
),
]
|
JamesRamm/longclaw
|
longclaw/contrib/productrequests/migrations/0001_initial.py
|
Python
|
mit
| 886
|
from datasift import (
DataSiftUser,
DataSiftDefinition,
DataSiftStream,
DataSiftStreamListener
)
|
msmathers/datasift-python
|
__init__.py
|
Python
|
mit
| 105
|
__author__ = 'bdeutsch'
import re
import numpy as np
import pandas as pd
# List cards drawn by me and played by opponent
def get_cards(filename):
# Open the file
with open(filename) as f:
mycards = []
oppcards = []
for line in f:
# Generate my revealed card list
m = re.search('name=(.+)id.+to FRIENDLY HAND', line)
if m:
mycards.append(m.group(1))
n = re.search('name=(.+)id.+to OPPOSING PLAY(?! \(Hero)', line)
if n:
oppcards.append(n.group(1))
for item in mycards:
print item
print '\n'
for item in oppcards:
print item
# make a list of card IDs and names
def get_ids():
# Create an empty list of IDs
idlist = []
with open('test_game') as f:
# For each line
for line in f:
# Find the entity ids
m = re.search('[\[ ]id=(\d+) ', line)
# if one is found
if m:
# Check that we haven't found it yet, convert to an integer
id = int(m.group(1))
# Add it to the list
if id not in idlist:
idlist.append(id)
# Sort the ids
idlist.sort()
# Convert to dataframe
d = pd.DataFrame(index=idlist)
# Rename the index
d.index.name = "Entity ID"
# Create an empty column for names
d["Name"] = np.nan
#print d
return d
# make a list of card names only if followed by id
def get_names():
with open('test_game') as f:
for line in f:
# Find the entity ids
m = re.search('[\[ ]name=([\w ]+?) id=', line)
if m:
print m.group(1)
def get_ids_names(df):
with open('test_game') as f:
namedict = {}
for line in f:
# Find combinations of entities and names
m = re.search('[\[ ]name=([\w ]+?) id=(\d+)', line)
if m:
ent_id = int(m.group(2))
name = m.group(1)
df.ix[ent_id, 'Name'] = name
#print m.group(2), m.group(1)
return df
idlist = []
with open('test_game') as f:
# For each line
for line in f:
# Find the entity ids
m = re.search('[\[ ]id=(\d+) ', line)
# if one is found
if m:
# Check that we haven't found it yet, convert to an integer
id = int(m.group(1))
# Add it to the list
if id not in idlist:
idlist.append(id)
# Sort the ids
idlist.sort()
# Convert to dataframe
df = pd.DataFrame(index=idlist)
# Rename the index
df.index.name = "Entity ID"
# Create an empty column for names
df["Name"] = np.nan
df["CardId"] = np.nan
df["Player"] = np.nan
with open('test_game') as f:
updates = []
for line in f:
# Find lists of the innermost nested brackets
m = re.findall(r"\[([^\[]+?)]", line)
# If it's not just the command designation bracket ("zone", e.g.)
if len(m)>1:
# for each set of bracket contents
for item in m[1:]:
# add to the list of updates
updates.append(item)
for item in updates:
# find the id
m = re.search("id=(\d+)", item)
if m:
# Assign ID variable
id = int(m.group(1))
# find name and assign
n = re.search("name=(.+?) \w+?=", item)
if n:
name = n.group(1)
df.ix[id, "Name"] = name
# find cardId and assign
n = re.search("cardId=(\w.+?) ", item)
if n:
cardId = n.group(1)
df.ix[id, "CardId"] = cardId
# find player
n = re.search("player=(\d)", item)
if n:
player = n.group(1)
df.ix[id, "Player"] = player
# update the dataframe for each update
# get rid of the "zone" and "power" markers.
# collect the entries into a list
# Put card IDs into a DataFrame
#df = get_ids_names(get_ids())
pd.set_option('display.max_rows', 200)
print df
# get_cards('test_game')
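# Hedged illustration (synthetic log line, not taken from a real Hearthstone
# log): how the name/id pattern above decomposes an entity entry.
#
#   m = re.search('[\[ ]name=([\w ]+?) id=(\d+)', '[name=Fireball id=12]')
#   m.group(1)  # => 'Fireball'
#   m.group(2)  # => '12'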
|
aspera1631/hs_logreader
|
logreader.py
|
Python
|
mit
| 4,183
|
"""
Created on 1 May 2019
@author: Bruno Beloff (bruno.beloff@southcoastscience.com)
https://www.sensirion.com/en/environmental-sensors/particulate-matter-sensors-pm25/
https://bytes.com/topic/python/answers/171354-struct-ieee-754-internal-representation
Firmware report:
89667EE8A8B34BC0
"""
import time
from scs_core.data.datetime import LocalizedDatetime
from scs_core.data.datum import Decode, Encode
from scs_core.particulate.sps_datum import SPSDatum, SPSDatumCounts
from scs_dfe.particulate.opc import OPC
from scs_host.bus.i2c import I2C
# --------------------------------------------------------------------------------------------------------------------
class SPS30(OPC):
"""
    Sensirion SPS30 particulate matter sensor, connected via I2C.
"""
SOURCE = 'S30'
MIN_SAMPLE_PERIOD = 1.0 # seconds
MAX_SAMPLE_PERIOD = 10.0 # seconds
DEFAULT_SAMPLE_PERIOD = 10.0 # seconds
DEFAULT_ADDR = 0x69
# ----------------------------------------------------------------------------------------------------------------
__BOOT_TIME = 4.0 # seconds
__POWER_CYCLE_TIME = 2.0 # seconds
__FAN_START_TIME = 2.0 # seconds
__FAN_STOP_TIME = 2.0 # seconds
__CLEANING_TIME = 10.0 # seconds
__MAX_PERMITTED_ZERO_READINGS = 4
__CMD_START_MEASUREMENT = 0x0010
__CMD_STOP_MEASUREMENT = 0x0104
__CMD_READ_DATA_READY_FLAG = 0x0202
__CMD_READ_MEASURED_VALUES = 0x0300
__CMD_AUTO_CLEANING_INTERVAL = 0x8004
__CMD_START_FAN_CLEANING = 0x5607
__CMD_READ_ARTICLE_CODE = 0xd025
__CMD_READ_SERIAL_NUMBER = 0xd033
__CMD_RESET = 0xd304
__POST_WRITE_DELAY = 0.020 # seconds
__LOCK_TIMEOUT = 2.0
# ----------------------------------------------------------------------------------------------------------------
@classmethod
def source(cls):
return cls.SOURCE
@classmethod
def uses_spi(cls):
return False
@classmethod
def datum_class(cls):
return SPSDatum
# ----------------------------------------------------------------------------------------------------------------
@classmethod
def __decode(cls, chars):
decoded = []
for i in range(0, len(chars), 3):
group = chars[i:i + 2]
decoded.extend(group)
actual_crc = chars[i + 2]
required_crc = cls.__crc(group)
if actual_crc != required_crc:
raise ValueError("bad checksum: required: 0x%02x actual: 0x%02x" % (required_crc, actual_crc))
return decoded
@classmethod
def __encode(cls, chars):
encoded = []
for i in range(0, len(chars), 2):
group = chars[i:i + 2]
encoded.extend(group)
encoded.append(cls.__crc(group))
return encoded
@staticmethod
def __crc(data):
crc = 0xff
for datum in data:
crc ^= datum
for bit in range(8, 0, -1):
crc = ((crc << 1) ^ 0x31 if crc & 0x80 else (crc << 1)) & 0xff
return crc
# ----------------------------------------------------------------------------------------------------------------
@classmethod
def lock_timeout(cls):
return cls.__LOCK_TIMEOUT
@classmethod
def boot_time(cls):
return cls.__BOOT_TIME
@classmethod
def power_cycle_time(cls):
return cls.__POWER_CYCLE_TIME
@classmethod
def max_permitted_zero_readings(cls):
return cls.__MAX_PERMITTED_ZERO_READINGS
# ----------------------------------------------------------------------------------------------------------------
def __init__(self, interface, i2c_bus, i2c_addr):
"""
Constructor
"""
super().__init__(interface)
self.__i2c_bus = i2c_bus
self.__i2c_addr = i2c_addr
# ----------------------------------------------------------------------------------------------------------------
def operations_on(self):
self.__write(self.__CMD_START_MEASUREMENT, self.__FAN_START_TIME, 0x03, 0x00)
def operations_off(self):
self.__read(self.__CMD_STOP_MEASUREMENT, self.__FAN_STOP_TIME)
def reset(self):
self.__read(self.__CMD_RESET, self.__BOOT_TIME)
# ----------------------------------------------------------------------------------------------------------------
def clean(self):
self.__read(self.__CMD_START_FAN_CLEANING, self.__CLEANING_TIME)
@property
def cleaning_interval(self):
r = self.__read(self.__CMD_AUTO_CLEANING_INTERVAL, 0, 6)
interval = Decode.unsigned_long(r[0:4], '>')
return interval
@cleaning_interval.setter
def cleaning_interval(self, interval):
values = Encode.unsigned_long(interval, '>')
self.__write(self.__CMD_AUTO_CLEANING_INTERVAL, self.__POST_WRITE_DELAY, *values)
# ----------------------------------------------------------------------------------------------------------------
def data_ready(self):
chars = self.__read(self.__CMD_READ_DATA_READY_FLAG, 0, 3)
return chars[1] == 0x01
def sample(self):
r = self.__read(self.__CMD_READ_MEASURED_VALUES, 0, 60)
# density...
pm1 = Decode.float(r[0:4], '>')
pm2p5 = Decode.float(r[4:8], '>')
pm4 = Decode.float(r[8:12], '>')
pm10 = Decode.float(r[12:16], '>')
# count...
pm0p5_count = Decode.float(r[16:20], '>')
pm1_count = Decode.float(r[20:24], '>')
pm2p5_count = Decode.float(r[24:28], '>')
pm4_count = Decode.float(r[28:32], '>')
pm10_count = Decode.float(r[32:36], '>')
# typical size...
tps = Decode.float(r[36:40], '>')
# time...
rec = LocalizedDatetime.now().utc()
# report...
counts = SPSDatumCounts(pm0p5_count, pm1_count, pm2p5_count, pm4_count, pm10_count)
return SPSDatum(self.SOURCE, rec, pm1, pm2p5, pm4, pm10, counts, tps)
# ----------------------------------------------------------------------------------------------------------------
def version(self):
r = self.__read(self.__CMD_READ_ARTICLE_CODE, 0, 48)
version = ''.join(chr(byte) for byte in r)
return version
def serial_no(self):
r = self.__read(self.__CMD_READ_SERIAL_NUMBER, 0, 48)
serial_no = ''.join(chr(byte) for byte in r)
return serial_no
def firmware(self):
return self.serial_no()
# ----------------------------------------------------------------------------------------------------------------
def get_firmware_conf(self):
raise NotImplementedError
def set_firmware_conf(self, jdict):
raise NotImplementedError
def commit_firmware_conf(self):
raise NotImplementedError
# ----------------------------------------------------------------------------------------------------------------
@property
def bus(self):
return self.__i2c_bus
@property
def address(self):
return self.__i2c_addr
# ----------------------------------------------------------------------------------------------------------------
@property
def lock_name(self):
return self.__class__.__name__ + '-' + str(self.__i2c_bus) + '-' + ("0x%02x" % self.__i2c_addr)
# ----------------------------------------------------------------------------------------------------------------
def __read(self, command, wait, count=0):
try:
self.obtain_lock()
try:
I2C.Sensors.start_tx(self.__i2c_addr)
encoded = I2C.Sensors.read_cmd16(command, count)
values = self.__decode(encoded)
finally:
I2C.Sensors.end_tx()
time.sleep(wait)
return values
finally:
self.release_lock()
def __write(self, command, wait, *values):
try:
self.obtain_lock()
try:
I2C.Sensors.start_tx(self.__i2c_addr)
encoded = self.__encode(values)
I2C.Sensors.write_addr16(command, *encoded)
finally:
I2C.Sensors.end_tx()
time.sleep(wait)
finally:
self.release_lock()
# ----------------------------------------------------------------------------------------------------------------
def __str__(self, *args, **kwargs):
return "SPS30:{interface:%s, i2c_bus:%d i2c_addr:0x%02x}" % \
(self.interface, self.__i2c_bus, self.__i2c_addr)
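# ----------------------------------------------------------------------------------------------------------------
# Hedged, self-contained sketch of the Sensirion CRC-8 mirrored by the private
# __crc above (polynomial 0x31, init 0xff, applied to each two-byte group).
# The test vector (0xbe, 0xef) -> 0x92 is the one published in Sensirion
# datasheets.
def _sensirion_crc8_demo(data):
    crc = 0xff
    for byte in data:
        crc ^= byte
        for _ in range(8):
            crc = ((crc << 1) ^ 0x31 if crc & 0x80 else (crc << 1)) & 0xff
    return crc

assert _sensirion_crc8_demo((0xbe, 0xef)) == 0x92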
|
south-coast-science/scs_dfe_eng
|
src/scs_dfe/particulate/sps_30/sps_30.py
|
Python
|
mit
| 8,991
|
# Using the loop variable 'i' outside the loop is restricted by polyphony's name scope rule
from polyphony import testbench
def loop_var01():
for i in range(10):
pass
return i
@testbench
def test():
loop_var01()
test()
|
ktok07b6/polyphony
|
tests/error/loop_var01.py
|
Python
|
mit
| 222
|
"""38. Count and Say
https://leetcode.com/problems/count-and-say/description/
The count-and-say sequence is the sequence of integers with the first five
terms as following:
1. 1
2. 11
3. 21
4. 1211
5. 111221
1 is read off as "one 1" or 11.
11 is read off as "two 1s" or 21.
21 is read off as "one 2, then one 1" or 1211.
Given an integer n where 1 ≤ n ≤ 30, generate the n^th term of the
count-and-say sequence.
Note: Each term of the sequence of integers will be represented as a
string.
Example 1:
Input: 1
Output: "1"
Example 2:
Input: 4
Output: "1211"
"""
class Solution:
def count_and_say(self, n: int) -> str:
assert 1 <= n <= 30
if n == 1:
return "1"
def say(num_str: str) -> str:
res = ""
cur_digit = num_str[0]
cur_digit_count = 1
for i in range(1, len(num_str)):
if num_str[i] == cur_digit:
cur_digit_count += 1
else:
res += str(cur_digit_count) + cur_digit
cur_digit = num_str[i]
cur_digit_count = 1
res += str(cur_digit_count) + cur_digit
return res
ans = "1"
for i in range(1, n):
ans = say(ans)
return ans
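if __name__ == '__main__':
    # Hedged usage example (not part of the original solution file): the first
    # five terms of the sequence, matching the docstring above.
    s = Solution()
    assert [s.count_and_say(n) for n in range(1, 6)] == ["1", "11", "21", "1211", "111221"]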
|
isudox/leetcode-solution
|
python-algorithm/leetcode/problem_38.py
|
Python
|
mit
| 1,320
|
#!/usr/bin/env python
# convertAWT.py - Mass Table Conversion Utility
import os
massFile = 'AWTMass-2003.dat'
newFile = os.path.join('..', 'nmrfreq', 'masstable.py')
def main():
with open(massFile, 'r') as file:
massDict = extractMasses(file)
writeToFile(newFile, massDict, massFile)
def extractMasses(file):
massdict = {}
for line in file:
line = adjustLine(line)
if line is not None:
isotope, Z, mass = getValuesFrom(line)
mass = convertMass(mass)
massdict[isotope] = (Z, mass)
return massdict
def adjustLine(line):
    line = line.strip()
    if line and line[0] != '#' and line[-1] != '#':
        line = line[9:].strip()
        return line.split()
    # Comment and blank lines yield None, matching the check in extractMasses
    return None
def getValuesFrom(splitline):
isotope = '{0}{1}'.format(splitline[2], splitline[1])
isotope = isotope.upper()
Z = int(splitline[0])
mass = '{0}{1}'.format(splitline[-3], splitline[-2])
return isotope, Z, mass
def convertMass(mass):
mass = float(mass) / 1000000.0
return mass
def writeToFile(filename, massdict, massFile):
with open(filename, 'w') as f:
f.write('# Mass table for use in nmrfreq from {0}\n'.format(massFile))
f.write('table = {\n')
f.write(createIsotopesString(massdict))
f.write('}\n')
def createIsotopesString(massdict):
string = ''
for key in sorted(massdict.iterkeys()):
string = '{2} "{0}": {1},\n'.format(key, massdict[key], string)
return string
if __name__ == '__main__':
main()
|
mmoran0032/NMRpy
|
data/AWTconvertTable.py
|
Python
|
mit
| 1,558
|
import json
from django.test import TestCase
from rest_framework.test import APIRequestFactory
from rest_framework.test import force_authenticate
from rest_framework import status
from rest_framework.authtoken.models import Token
from core.models import Profile, User
from api.views import ProfileDetail
from api.serializers import UserSerializer
class ProfileDetailViewTestCase(TestCase):
"""Test suite for the api profile list view."""
def setUp(self):
"""Define the test global variables."""
if not User.objects.filter(username='testadmin').exists():
self.admin_user = User.objects.create_superuser(username='testadmin', password='123', email='')
Token.objects.create(user=self.admin_user)
user = User.objects.create(username='testuser1', email='test@user1.com', password='sometestpass')
self.test_profile = Profile.objects.create(user=user)
self.factory = APIRequestFactory()
self.view = ProfileDetail.as_view()
def test_dont_get_profile_data_without_authorization(self):
"""Test dont get profile data without authorization"""
request = self.factory.get('/core/api/profile/')
response = self.view(request, pk=self.test_profile.id)
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
def test_get_profile_data(self):
"""Test get profile data"""
request = self.factory.get('/core/api/profile/')
force_authenticate(request, user=self.admin_user, token=self.admin_user.auth_token)
response = self.view(request, pk=self.test_profile.id)
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertTrue('user' in response.data)
def test_update_profile_data(self):
"""Test update profile data"""
new_email = 'just@some.test'
data = json.dumps({'user': {'email': new_email}})
request = self.factory.patch('/core/api/profile/',
data=data,
content_type='application/json')
force_authenticate(request, user=self.admin_user, token=self.admin_user.auth_token)
response = self.view(request, pk=self.test_profile.id)
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertTrue('user' in response.data)
self.assertEqual(response.data['user']['email'], new_email)
def test_delete_profile(self):
"""Test delete profile"""
request = self.factory.delete('/core/api/profile/',
content_type='application/json')
force_authenticate(request, user=self.admin_user, token=self.admin_user.auth_token)
response = self.view(request, pk=self.test_profile.id)
self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
self.assertFalse(Profile.objects.filter(pk=self.test_profile.id).exists())
|
desenho-sw-g5/service_control
|
Trabalho_1/api/tests/views_test/profile_detail_view_test.py
|
Python
|
mit
| 2,944
|
#!/usr/bin/env python2
# Copyright (c) 2014-2015 The Aureus Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
from test_framework.test_framework import AureusTestFramework
from test_framework.util import *
from binascii import a2b_hex, b2a_hex
from hashlib import sha256
from struct import pack
def check_array_result(object_array, to_match, expected):
"""
Pass in array of JSON objects, a dictionary with key/value pairs
to match against, and another dictionary with expected key/value
pairs.
"""
num_matched = 0
for item in object_array:
all_match = True
for key,value in to_match.items():
if item[key] != value:
all_match = False
if not all_match:
continue
for key,value in expected.items():
if item[key] != value:
raise AssertionError("%s : expected %s=%s"%(str(item), str(key), str(value)))
num_matched = num_matched+1
if num_matched == 0:
raise AssertionError("No objects matched %s"%(str(to_match)))
def b2x(b):
return b2a_hex(b).decode('ascii')
# NOTE: This does not work for signed numbers (set the high bit) or zero (use b'\0')
def encodeUNum(n):
s = bytearray(b'\1')
while n > 127:
s[0] += 1
s.append(n % 256)
n //= 256
s.append(n)
return bytes(s)
def varlenEncode(n):
if n < 0xfd:
return pack('<B', n)
if n <= 0xffff:
return b'\xfd' + pack('<H', n)
if n <= 0xffffffff:
return b'\xfe' + pack('<L', n)
return b'\xff' + pack('<Q', n)
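# Hedged sanity check of the CompactSize boundaries encoded above (not part
# of the original test):
#   varlenEncode(0xfc)    == b'\xfc'
#   varlenEncode(0xfd)    == b'\xfd\xfd\x00'
#   varlenEncode(0x10000) == b'\xfe\x00\x00\x01\x00'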
def dblsha(b):
return sha256(sha256(b).digest()).digest()
def genmrklroot(leaflist):
cur = leaflist
while len(cur) > 1:
n = []
if len(cur) & 1:
cur.append(cur[-1])
for i in range(0, len(cur), 2):
n.append(dblsha(cur[i] + cur[i+1]))
cur = n
return cur[0]
def template_to_bytes(tmpl, txlist):
blkver = pack('<L', tmpl['version'])
mrklroot = genmrklroot(list(dblsha(a) for a in txlist))
timestamp = pack('<L', tmpl['curtime'])
nonce = b'\0\0\0\0'
blk = blkver + a2b_hex(tmpl['previousblockhash'])[::-1] + mrklroot + timestamp + a2b_hex(tmpl['bits'])[::-1] + nonce
blk += varlenEncode(len(txlist))
for tx in txlist:
blk += tx
return blk
def template_to_hex(tmpl, txlist):
return b2x(template_to_bytes(tmpl, txlist))
def assert_template(node, tmpl, txlist, expect):
rsp = node.getblocktemplate({'data':template_to_hex(tmpl, txlist),'mode':'proposal'})
if rsp != expect:
raise AssertionError('unexpected: %s' % (rsp,))
class GetBlockTemplateProposalTest(AureusTestFramework):
'''
Test block proposals with getblocktemplate.
'''
def run_test(self):
node = self.nodes[0]
node.generate(1) # Mine a block to leave initial block download
tmpl = node.getblocktemplate()
if 'coinbasetxn' not in tmpl:
rawcoinbase = encodeUNum(tmpl['height'])
rawcoinbase += b'\x01-'
hexcoinbase = b2x(rawcoinbase)
hexoutval = b2x(pack('<Q', tmpl['coinbasevalue']))
tmpl['coinbasetxn'] = {'data': '01000000' + '01' + '0000000000000000000000000000000000000000000000000000000000000000ffffffff' + ('%02x' % (len(rawcoinbase),)) + hexcoinbase + 'fffffffe' + '01' + hexoutval + '00' + '00000000'}
txlist = list(bytearray(a2b_hex(a['data'])) for a in (tmpl['coinbasetxn'],) + tuple(tmpl['transactions']))
# Test 0: Capability advertised
assert('proposal' in tmpl['capabilities'])
# NOTE: This test currently FAILS (regtest mode doesn't enforce block height in coinbase)
## Test 1: Bad height in coinbase
#txlist[0][4+1+36+1+1] += 1
#assert_template(node, tmpl, txlist, 'FIXME')
#txlist[0][4+1+36+1+1] -= 1
# Test 2: Bad input hash for gen tx
txlist[0][4+1] += 1
assert_template(node, tmpl, txlist, 'bad-cb-missing')
txlist[0][4+1] -= 1
# Test 3: Truncated final tx
lastbyte = txlist[-1].pop()
try:
assert_template(node, tmpl, txlist, 'n/a')
except JSONRPCException:
pass # Expected
txlist[-1].append(lastbyte)
# Test 4: Add an invalid tx to the end (duplicate of gen tx)
txlist.append(txlist[0])
assert_template(node, tmpl, txlist, 'bad-txns-duplicate')
txlist.pop()
# Test 5: Add an invalid tx to the end (non-duplicate)
txlist.append(bytearray(txlist[0]))
txlist[-1][4+1] = b'\xff'
assert_template(node, tmpl, txlist, 'bad-txns-inputs-missingorspent')
txlist.pop()
# Test 6: Future tx lock time
txlist[0][-4:] = b'\xff\xff\xff\xff'
assert_template(node, tmpl, txlist, 'bad-txns-nonfinal')
txlist[0][-4:] = b'\0\0\0\0'
# Test 7: Bad tx count
txlist.append(b'')
try:
assert_template(node, tmpl, txlist, 'n/a')
except JSONRPCException:
pass # Expected
txlist.pop()
# Test 8: Bad bits
realbits = tmpl['bits']
tmpl['bits'] = '1c0000ff' # impossible in the real world
assert_template(node, tmpl, txlist, 'bad-diffbits')
tmpl['bits'] = realbits
# Test 9: Bad merkle root
rawtmpl = template_to_bytes(tmpl, txlist)
rawtmpl[4+32] = (rawtmpl[4+32] + 1) % 0x100
rsp = node.getblocktemplate({'data':b2x(rawtmpl),'mode':'proposal'})
if rsp != 'bad-txnmrklroot':
raise AssertionError('unexpected: %s' % (rsp,))
# Test 10: Bad timestamps
realtime = tmpl['curtime']
tmpl['curtime'] = 0x7fffffff
assert_template(node, tmpl, txlist, 'time-too-new')
tmpl['curtime'] = 0
assert_template(node, tmpl, txlist, 'time-too-old')
tmpl['curtime'] = realtime
# Test 11: Valid block
assert_template(node, tmpl, txlist, None)
# Test 12: Orphan block
tmpl['previousblockhash'] = 'ff00' * 16
assert_template(node, tmpl, txlist, 'inconclusive-not-best-prevblk')
if __name__ == '__main__':
GetBlockTemplateProposalTest().main()
|
hideoussquid/aureus-12-bitcore
|
qa/rpc-tests/getblocktemplate_proposals.py
|
Python
|
mit
| 6,330
|
#!/usr/bin/env python
# import keystoneclient.v2_0.client as ksclient
# import glanceclient.v2.client as glclient
# import novaclient.client as nvclient
# import neutronclient.v2_0.client as ntclient
# import cinderclient.v2.client as cdclient
# import swiftclient.client as sftclient
__author__ = 'Yuvv'
OS_PROJECT_DOMAIN_ID = 'default'
OS_USER_DOMAIN_ID = 'default'
OS_PROJECT_NAME = 'admin'
OS_TENANT_NAME = 'admin'
OS_USERNAME = 'admin'
OS_PASSWORD = 'yuvv'
# OS_AUTH_URL = 'http://controller:35357/v3'
OS_AUTH_URL = 'http://controller:5000/v2.0/'
OS_IDENTITY_API_VERSION = 3
OS_IMAGE_API_VERSION = 2
OS_AUTH_VERSION = 3
'''
keystone = ksclient.Client(auth_url=OS_AUTH_URL,
username=OS_USERNAME,
password=OS_PASSWORD,
tenant_name=OS_TENANT_NAME)
glance = glclient.Client(endpoint=keystone.service_catalog.url_for(service_type='image'),
token=keystone.auth_token)
neutron = ntclient.Client(endpoint_url=keystone.service_catalog.url_for(service_type='network'),
token=keystone.auth_token)
cinder = cdclient.Client(auth_url=OS_AUTH_URL,
username=OS_USERNAME,
api_key=OS_PASSWORD,
project_id=OS_TENANT_NAME)
nova = nvclient.Client('2.1', auth_url=OS_AUTH_URL,
username=OS_USERNAME,
api_key=OS_PASSWORD,
project_id=OS_TENANT_NAME)
'''
def get_keystone_credits():
cred = dict()
cred['username'] = OS_USERNAME
cred['password'] = OS_PASSWORD
cred['auth_url'] = OS_AUTH_URL
cred['tenant_name'] = OS_TENANT_NAME
return cred
def get_neutron_credits():
cred = dict()
cred['username'] = OS_USERNAME
cred['password'] = OS_PASSWORD
cred['auth_url'] = OS_AUTH_URL
cred['tenant_name'] = OS_TENANT_NAME
return cred
def get_cinder_credits():
cred = dict()
cred['username'] = OS_USERNAME
cred['api_key'] = OS_PASSWORD
cred['auth_url'] = OS_AUTH_URL
cred['project_id'] = OS_TENANT_NAME
return cred
def get_nova_credits():
cred = dict()
cred['username'] = OS_USERNAME
cred['api_key'] = OS_PASSWORD
cred['auth_url'] = OS_AUTH_URL
cred['project_id'] = OS_TENANT_NAME
return cred
def get_swift_credits():
cred = dict()
cred['user'] = OS_USERNAME
cred['key'] = OS_PASSWORD
cred['authurl'] = OS_AUTH_URL
return cred
'''
+----------------------------------+----------+--------------+
| ID | Name | Type |
+----------------------------------+----------+--------------+
| 02e5b5c270784e76bf5c144f0fa54030 | cinder | volume |
| 3a7ecbf5069d42d784fdf3ebe9deb745 | swift | object-store |
| 8e185002e3fe4028bda5c6cd910d31f6 | nova | compute |
| aaf1a49b4a1e463990880ddf9c8fb658 | glance | image |
| b3600985814247558a289c332ad62f09 | keystone | identity |
| bc4d28242d3a466ebce7663b28465a99 | neutron | network |
| cb799b0f7447401fb15821cffb103e74 | cinderv2 | volumev2 |
+----------------------------------+----------+--------------+
'''
|
Yuvv/LearnTestDemoTempMini
|
py-django/DataBackup/ops/credentials.py
|
Python
|
mit
| 3,202
|
# -*- coding: utf-8 -*-
__author__ = 'Yacine Haddad'
__email__ = 'yhaddad@cern.ch'
__version__ = '2.0.0'
|
yhaddad/Heppi
|
tests/__init__.py
|
Python
|
mit
| 110
|
#!/usr/bin/env python
# Copyright 2007 The Closure Linter Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Error codes for JavaScript style checker."""
__author__ = ('robbyw@google.com (Robert Walker)',
'ajp@google.com (Andy Perelson)')
def ByName(name):
"""Get the error code for the given error name.
Args:
name: The name of the error
Returns:
The error code
"""
return globals()[name]
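# Hedged example: ByName('MISSING_SEMICOLON') returns 10, the module-level
# constant defined below.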
# "File-fatal" errors - these errors stop further parsing of a single file
FILE_NOT_FOUND = -1
FILE_DOES_NOT_PARSE = -2
# Spacing
EXTRA_SPACE = 1
MISSING_SPACE = 2
EXTRA_LINE = 3
MISSING_LINE = 4
ILLEGAL_TAB = 5
WRONG_INDENTATION = 6
WRONG_BLANK_LINE_COUNT = 7
# Semicolons
MISSING_SEMICOLON = 10
MISSING_SEMICOLON_AFTER_FUNCTION = 11
ILLEGAL_SEMICOLON_AFTER_FUNCTION = 12
REDUNDANT_SEMICOLON = 13
# Miscellaneous
ILLEGAL_PROTOTYPE_MEMBER_VALUE = 100
LINE_TOO_LONG = 110
LINE_STARTS_WITH_OPERATOR = 120
COMMA_AT_END_OF_LITERAL = 121
MULTI_LINE_STRING = 130
UNNECESSARY_DOUBLE_QUOTED_STRING = 131
UNUSED_PRIVATE_MEMBER = 132
UNUSED_LOCAL_VARIABLE = 133
# Requires, provides
GOOG_REQUIRES_NOT_ALPHABETIZED = 140
GOOG_PROVIDES_NOT_ALPHABETIZED = 141
MISSING_GOOG_REQUIRE = 142
MISSING_GOOG_PROVIDE = 143
EXTRA_GOOG_REQUIRE = 144
EXTRA_GOOG_PROVIDE = 145
# JsDoc
INVALID_JSDOC_TAG = 200
INVALID_USE_OF_DESC_TAG = 201
NO_BUG_NUMBER_AFTER_BUG_TAG = 202
MISSING_PARAMETER_DOCUMENTATION = 210
EXTRA_PARAMETER_DOCUMENTATION = 211
WRONG_PARAMETER_DOCUMENTATION = 212
MISSING_JSDOC_TAG_TYPE = 213
MISSING_JSDOC_TAG_DESCRIPTION = 214
MISSING_JSDOC_PARAM_NAME = 215
OUT_OF_ORDER_JSDOC_TAG_TYPE = 216
MISSING_RETURN_DOCUMENTATION = 217
UNNECESSARY_RETURN_DOCUMENTATION = 218
MISSING_BRACES_AROUND_TYPE = 219
MISSING_MEMBER_DOCUMENTATION = 220
MISSING_PRIVATE = 221
EXTRA_PRIVATE = 222
INVALID_OVERRIDE_PRIVATE = 223
INVALID_INHERIT_DOC_PRIVATE = 224
MISSING_JSDOC_TAG_THIS = 225
UNNECESSARY_BRACES_AROUND_INHERIT_DOC = 226
INVALID_AUTHOR_TAG_DESCRIPTION = 227
JSDOC_PREFER_QUESTION_TO_PIPE_NULL = 230
JSDOC_ILLEGAL_QUESTION_WITH_PIPE = 231
JSDOC_MISSING_OPTIONAL_TYPE = 232
JSDOC_MISSING_OPTIONAL_PREFIX = 233
JSDOC_MISSING_VAR_ARGS_TYPE = 234
JSDOC_MISSING_VAR_ARGS_NAME = 235
# TODO(robbyw): Split this in to more specific syntax problems.
INCORRECT_SUPPRESS_SYNTAX = 250
INVALID_SUPPRESS_TYPE = 251
UNNECESSARY_SUPPRESS = 252
# File ending
FILE_MISSING_NEWLINE = 300
FILE_IN_BLOCK = 301
# Interfaces
INTERFACE_CONSTRUCTOR_CANNOT_HAVE_PARAMS = 400
INTERFACE_METHOD_CANNOT_HAVE_CODE = 401
# Comments
MISSING_END_OF_SCOPE_COMMENT = 500
MALFORMED_END_OF_SCOPE_COMMENT = 501
# goog.scope - Namespace aliasing
# TODO(nnaze) Add additional errors here and in aliaspass.py
INVALID_USE_OF_GOOG_SCOPE = 600
EXTRA_GOOG_SCOPE_USAGE = 601
# ActionScript specific errors:
# TODO(user): move these errors to their own file and move all JavaScript
# specific errors to their own file as well.
# All ActionScript specific errors should have error number at least 1000.
FUNCTION_MISSING_RETURN_TYPE = 1132
PARAMETER_MISSING_TYPE = 1133
VAR_MISSING_TYPE = 1134
PARAMETER_MISSING_DEFAULT_VALUE = 1135
IMPORTS_NOT_ALPHABETIZED = 1140
IMPORT_CONTAINS_WILDCARD = 1141
UNUSED_IMPORT = 1142
INVALID_TRACE_SEVERITY_LEVEL = 1250
MISSING_TRACE_SEVERITY_LEVEL = 1251
MISSING_TRACE_MESSAGE = 1252
REMOVE_TRACE_BEFORE_SUBMIT = 1253
REMOVE_COMMENT_BEFORE_SUBMIT = 1254
# End of list of ActionScript specific errors.
NEW_ERRORS = frozenset([
# Errors added after 2.0.2:
WRONG_INDENTATION,
MISSING_SEMICOLON,
# Errors added after 2.3.4:
MISSING_END_OF_SCOPE_COMMENT,
MALFORMED_END_OF_SCOPE_COMMENT,
UNUSED_PRIVATE_MEMBER,
# Errors added after 2.3.5:
INVALID_USE_OF_GOOG_SCOPE,
EXTRA_GOOG_SCOPE_USAGE,
# Errors added after 2.3.9:
JSDOC_MISSING_VAR_ARGS_TYPE,
JSDOC_MISSING_VAR_ARGS_NAME,
# Errors added after 2.3.12:
])
|
Gobie/gjslinter
|
tools/closure_linter/errors.py
|
Python
|
mit
| 4,404
|
import pytest, sys, os
sys.path.append(os.path.dirname(os.path.realpath(__file__)) + "/../")
from unittest import TestCase
from pylogic.case import Case
class TestBaseOperand(TestCase):
def test_eq_case(self):
case1 = Case("parent", "homer", "bart")
case2 = Case("parent", "homer", "bart")
assert case1 == case2
def test_not_eq_case1(self):
case1 = Case("parent", "homer", "bart")
case2 = Case("parent", "homer", "lisa")
assert case1 != case2
def test_not_eq_case2(self):
case1 = Case("parent", "homer", "bart")
case2 = Case("brother", "homer", "lisa")
assert case1 != case2
|
fran-bravo/pylogic-module
|
test/test_case_operands.py
|
Python
|
mit
| 666
|
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
# Copyright (c) 2011 Chris D. Lasher & Phillip Whisenhunt
#
# This software is released under the MIT License. Please see
# LICENSE.txt for details.
"""A program to detect Process Linkage Networks using
Simulated Annealing.
"""
import collections
import itertools
import sys
from convutils import convutils
import bpn.cli
import bpn.structures
from defaults import (
SUPERDEBUG,
SUPERDEBUG_MODE,
LINKS_FIELDNAMES,
PARAMETERS_FIELDNAMES,
TRANSITIONS_FIELDNAMES,
DETAILED_TRANSITIONS_FIELDNAMES
)
# Configure all the logging stuff
import logging
logger = logging.getLogger('bpn.sabpn')
if SUPERDEBUG_MODE:
# A logging level below logging.DEBUG
logging.addLevelName(SUPERDEBUG, 'SUPERDEBUG')
logger.setLevel(SUPERDEBUG)
#stream_handler.setLevel(SUPERDEBUG)
import simulatedannealing
import states
import recorders
def main(argv=None):
cli_parser = bpn.cli.SaCli()
input_data = cli_parser.parse_args(argv)
logger.info("Constructing supporting data structures; this may "
"take a while...")
annotated_interactions = bpn.structures.AnnotatedInteractionsArray(
input_data.interactions_graph,
input_data.annotations_dict
)
logger.info("Considering %d candidate links in total." %
annotated_interactions.calc_num_links())
logger.info("Constructing Simulated Annealing")
if input_data.free_parameters:
logger.info("Using free parameter transitions.")
parameters_state_class = states.RandomTransitionParametersState
else:
parameters_state_class = states.PLNParametersState
if input_data.disable_swaps:
logger.info("Disabling swap transitions.")
links_state_class = states.NoSwapArrayLinksState
else:
links_state_class = states.ArrayLinksState
if input_data.detailed_transitions:
logger.info("Recording extra information for each state.")
transitions_csvfile = convutils.make_csv_dict_writer(
input_data.transitions_outfile,
DETAILED_TRANSITIONS_FIELDNAMES
)
else:
transitions_csvfile = convutils.make_csv_dict_writer(
input_data.transitions_outfile,
TRANSITIONS_FIELDNAMES
)
sa = simulatedannealing.ArraySimulatedAnnealing(
annotated_interactions,
input_data.activity_threshold,
input_data.transition_ratio,
num_steps=input_data.steps,
temperature=input_data.temperature,
end_temperature=input_data.end_temperature,
parameters_state_class=parameters_state_class,
links_state_class=links_state_class
)
logger.info("Beginning to Anneal. This may take a while...")
sa.run()
logger.info("Run completed.")
logger.info("Writing link results to %s" %
input_data.links_outfile.name)
links_out_csvwriter = convutils.make_csv_dict_writer(
input_data.links_outfile, LINKS_FIELDNAMES)
logger.info("Writing parameter results to %s" % (
input_data.parameters_outfile.name))
parameters_out_csvwriter = convutils.make_csv_dict_writer(
input_data.parameters_outfile, PARAMETERS_FIELDNAMES)
logger.info("Writing transitions data to %s." % (
input_data.transitions_outfile.name))
logger.info("Finished.")
if __name__ == '__main__':
main()
|
gotgenes/BiologicalProcessNetworks
|
bpn/mcmc/sabpn.py
|
Python
|
mit
| 3,485
|
# coding=utf-8
import os
import csv
import time
import random
import operator
import numpy as np
import tensorflow as tf
from tensorflow.examples.tutorials.mnist import input_data as mnist_input_data
def main():
# Test classification using the following code:
#
# classifier = Classifier("model.pb")
# dataset = KatakanaDataSet()
# x, y_truth = dataset.get_training_batch(1)
# print max_index(classifier.classify(x)[0]), max_index(y_truth[0])
cnn = ConvolutionalNeuralNetwork([
ConvolutionLayer(5, 1, 64),
PoolingLayer(2),
ConvolutionLayer(5, 1, 128),
PoolingLayer(2),
FullyConnectedLayer(1024),
DropoutLayer()
], KatakanaDataSet())
cnn.build_graph()
cnn.train_model(steps=500,
training_batch_size=100,
evaluation_batch_size=100,
learning_rate=0.001,
file_path="model.pb")
# Method definitions
def relative_path(path):
return os.path.dirname(os.path.realpath(__file__)) + '/' + path
def max_index(array):
return max(enumerate(array), key=operator.itemgetter(1))[0]
class ConvolutionalNeuralNetwork:
def __init__(self, layers, dataset):
self.layers = layers
self.dataset = dataset
layers.append(ReadoutLayer())
def build_graph(self):
input_shape = self.dataset.get_input_shape()
output_size = self.dataset.get_output_size()
readout = self.layers[-1]
readout.output_size = output_size
self.x = tf.placeholder(tf.float32, [None] + input_shape, 'x')
self.y_truth = tf.placeholder(tf.float32, [None, output_size], 'y_truth')
tensor = self.x
for layer in self.layers:
tensor = layer.build_training_node(tensor)
self.y = tensor
self.cross_entropy = tf.nn.softmax_cross_entropy_with_logits(labels=self.y_truth, logits=self.y)
self.loss = tf.reduce_mean(self.cross_entropy)
# TODO adapt learning rate each step to improve learning speed
self.learning_rate = tf.placeholder(tf.float32, name='learning_rate')
self.train_step = tf.train.AdamOptimizer(self.learning_rate).minimize(self.loss)
self.accuracy = tf.reduce_mean(tf.cast(tf.equal(tf.argmax(self.y, 1), tf.argmax(self.y_truth, 1)), tf.float32))
# Prepare TensorBoard
tf.summary.image('inputs', self.x, 10)
tf.summary.scalar('loss', self.loss)
tf.summary.scalar('accuracy', self.accuracy)
self.summary = tf.summary.merge_all()
def train_model(self, steps=500, learning_rate=0.005, training_batch_size=None, evaluation_batch_size=None, file_path=False):
with tf.Session() as session:
summary_path = relative_path('data/training_summaries/run_{}'.format(str(int(time.time()))))
summary_writer = tf.summary.FileWriter(summary_path, session.graph)
session.run(tf.global_variables_initializer())
for step in range(steps):
# Train
x, y_truth = self.dataset.get_training_batch(training_batch_size)
self.train_step.run(feed_dict=self._feed(0.6, {self.x: x,
self.y_truth: y_truth,
self.learning_rate: learning_rate}))
# Send data to TensorBoard
x, y_truth = self.dataset.get_test_batch(evaluation_batch_size)
summary_run = session.run(self.summary, feed_dict=self._feed(1, {self.x: x,
self.y_truth: y_truth,
self.learning_rate: learning_rate}))
summary_writer.add_summary(summary_run, step)
if file_path:
self._save_graph(session, file_path)
def _save_graph(self, session, file_path):
prediction_graph = tf.Graph()
with prediction_graph.as_default():
input_shape = self.dataset.get_input_shape()
output_size = self.dataset.get_output_size()
readout = self.layers[-1]
readout.output_size = output_size
input_placeholder = tf.placeholder(tf.float32, [None] + input_shape, 'input')
tensor = input_placeholder
for layer in self.layers:
if not isinstance(layer, DropoutLayer):
tensor = layer.build_prediction_node(tensor, session)
tf.identity(tf.nn.softmax(tensor), 'prediction')
tf.train.write_graph(prediction_graph, os.path.dirname(file_path), os.path.basename(file_path), as_text=False)
def _feed(self, keep_probability, feed):
for layer in self.layers:
if isinstance(layer, DropoutLayer):
feed[layer.placeholder] = keep_probability
return feed
class CNNLayer:
def weight_variables(self, shape):
initial = tf.truncated_normal(shape, stddev=0.1)
return tf.Variable(initial)
def bias_variables(self, shape):
initial = tf.constant(0.1, shape=shape)
return tf.Variable(initial)
def build_training_node(self, input_tensor):
pass
def build_prediction_node(self, input_tensor, session):
pass
class ConvolutionLayer(CNNLayer):
def __init__(self, patch, stride, features):
self.patch = patch
self.stride = stride
self.features = features
self._weights = False
self._biases = False
def build_training_node(self, input_tensor):
if not self._weights and not self._biases:
input_size = input_tensor.shape[-1].value
self._weights = self.weight_variables([self.patch, self.patch, input_size, self.features])
self._biases = self.bias_variables([self.features])
return self._build_node(input_tensor, self._weights, self._biases)
def build_prediction_node(self, input_tensor, session):
return self._build_node(input_tensor,
tf.constant(session.run(self._weights)),
tf.constant(session.run(self._biases)))
    def _conv2d(self, input_tensor, weights):
        # strides is [batch, height, width, channels]; tf.nn.conv2d requires
        # the batch and channel strides to be 1.
        return tf.nn.conv2d(input_tensor, weights,
                            strides=[1, self.stride, self.stride, 1],
                            padding='SAME')
def _build_node(self, input_tensor, weights, biases):
return tf.nn.relu(self._conv2d(input_tensor, weights) + biases)
class PoolingLayer(CNNLayer):
def __init__(self, pooling):
self.pooling = pooling
def build_training_node(self, input_tensor):
return self._build_node(input_tensor)
def build_prediction_node(self, input_tensor, session):
return self._build_node(input_tensor)
def _build_node(self, input_tensor):
return tf.nn.max_pool(input_tensor,
ksize=[1, self.pooling, self.pooling, 1],
strides=[1, self.pooling, self.pooling, 1],
padding='SAME')
class FullyConnectedLayer(CNNLayer):
def __init__(self, neurons):
self.neurons = neurons
self._weights = False
self._biases = False
def build_training_node(self, input_tensor):
flattened_size = self._flattened_size(input_tensor)
if not self._weights and not self._biases:
self._weights = self.weight_variables([flattened_size, self.neurons])
self._biases = self.bias_variables([self.neurons])
return self._build_node(input_tensor, self._weights, self._biases)
def build_prediction_node(self, input_tensor, session):
return self._build_node(input_tensor,
tf.constant(session.run(self._weights)),
tf.constant(session.run(self._biases)))
def _flattened_size(self, input_tensor):
return reduce(lambda a, b: a*b, input_tensor.shape[1:]).value
def _build_node(self, input_tensor, weights, biases):
flattened_size = self._flattened_size(input_tensor)
flattened_tensor = tf.reshape(input_tensor, [-1, flattened_size])
return tf.nn.relu(tf.matmul(flattened_tensor, weights) + biases)
class DropoutLayer(CNNLayer):
def __init__(self):
self.placeholder = tf.placeholder(tf.float32, name='dropout_probability')
def build_training_node(self, input_tensor):
return self._build_node(input_tensor)
def build_prediction_node(self, input_tensor, session):
return self._build_node(input_tensor)
def _build_node(self, input_tensor):
return tf.nn.dropout(input_tensor, self.placeholder)
class ReadoutLayer(CNNLayer):
def __init__(self, output_size=None):
self.output_size = output_size
        self._weights = None
        self._biases = None
    def build_training_node(self, input_tensor):
        if self._weights is None and self._biases is None:
input_size = input_tensor.shape[-1].value
self._weights = self.weight_variables([input_size, self.output_size])
self._biases = self.bias_variables([self.output_size])
return self._build_node(input_tensor, self._weights, self._biases)
def build_prediction_node(self, input_tensor, session):
return self._build_node(input_tensor,
tf.constant(session.run(self._weights)),
tf.constant(session.run(self._biases)))
def _build_node(self, input_tensor, weights, biases):
return tf.matmul(input_tensor, weights) + biases
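# Illustrative layer stack (a sketch only; the CNN driver class that consumes
# such a list is defined earlier in this file, and these hyperparameters are
# assumptions, not values used by the original author):
#   layers = [
#       ConvolutionLayer(patch=5, stride=1, features=32),
#       PoolingLayer(pooling=2),
#       FullyConnectedLayer(neurons=1024),
#       DropoutLayer(),
#       ReadoutLayer(),
#   ]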
class DataSet():
def get_input_shape(self):
pass
def get_output_size(self):
pass
def get_validation_batch(self, size=None):
pass
def get_training_batch(self, size=None):
pass
def get_test_batch(self, size=None):
pass
class MNISTDataSet(DataSet):
def __init__(self):
self._mnist = mnist_input_data.read_data_sets("data/MNIST_data/", one_hot=True)
def get_input_shape(self):
return [28, 28, 1]
def get_output_size(self):
return 10
def get_validation_batch(self, size=None):
return self._get_batch(self._mnist.validation, size, 5000)
def get_training_batch(self, size=None):
return self._get_batch(self._mnist.train, size, 55000)
def get_test_batch(self, size=None):
return self._get_batch(self._mnist.test, size, 10000)
def _get_batch(self, data, size, default_size):
        if size is None:
size = default_size
batch = data.next_batch(size)
inputs = batch[0].reshape((-1, 28, 28, 1))
classification = batch[1]
return inputs, classification
class KatakanaDataSet(DataSet):
def __init__(self):
self.categories = []
self.categories_display = {}
with open(relative_path('data/katakana/categories.csv')) as file:
reader = csv.reader(file)
            next(reader)
for category, display in reader:
self.categories_display[int(category)] = display
self.categories.append(int(category))
self.classification = []
with open(relative_path('data/katakana/classification.csv')) as file:
reader = csv.reader(file)
            next(reader)
for position, category in reader:
self.classification.append((int(position), int(category)))
def get_input_shape(self):
return [64, 64, 1]
def get_output_size(self):
return len(self.categories)
    def get_validation_batch(self, size=None):
        if size is None:
            size = int(len(self.classification)*0.2)
        return self._get_batch(0, len(self.classification)*0.2, size)
    def get_training_batch(self, size=None):
        if size is None:
            size = int(len(self.classification)*0.6)
        return self._get_batch(len(self.classification)*0.2, len(self.classification)*0.8, size)
    def get_test_batch(self, size=None):
        if size is None:
            size = int(len(self.classification)*0.2)
        return self._get_batch(len(self.classification)*0.8, len(self.classification), size)
def _get_batch(self, start, end, length):
inputs = []
classification = []
categories_size = len(self.categories)
with open(relative_path('data/katakana/data')) as data_file:
            for i in random.sample(range(int(start), int(end)), int(length)):
position, category = self.classification[i]
inputs.append(self._image_data(data_file, position))
classification.append(self._one_hot(self.categories.index(category), categories_size))
return inputs, classification
def _image_data(self, file, position):
file.seek(position * 512)
data = np.unpackbits(np.frombuffer(file.read(512), dtype=np.uint8))
data = data.reshape([64, 64, 1])
return data
def _one_hot(self, index, length):
vector = np.zeros(length)
vector[index] = 1
return vector
class Classifier:
def __init__(self, file_path):
graph_def = tf.GraphDef()
graph_def.ParseFromString(open(file_path, 'rb').read())
tf.import_graph_def(graph_def, name='')
graph = tf.get_default_graph()
self.input_placeholder = graph.get_tensor_by_name('input:0')
self.prediction = graph.get_tensor_by_name('prediction:0')
def classify(self, input):
with tf.Session() as session:
return session.run(self.prediction, feed_dict={ self.input_placeholder: input})
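# Usage sketch (illustrative; the file name and input shape are assumptions):
#   classifier = Classifier('model.pb')
#   batch = np.zeros((1, 64, 64, 1))  # one blank 64x64 grayscale image
#   probabilities = classifier.classify(batch)
#   print(probabilities[0].argmax())  # index of the most likely class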
# Runtime
if __name__ == '__main__':
main()
|
NoelDeMartin/Japanese-Character-Recognition
|
train_model.py
|
Python
|
mit
| 11,858
|
from django.shortcuts import render
from django.views import View
class SiteUpdateNotifier(View):
def get(self, request):
pass
|
k00n/site_update_notifier
|
siteUpdateNotifier/sun/views.py
|
Python
|
mit
| 141
|
#!/usr/bin/env python
"""
make_a_star_cluster.py creates a model star cluster,
which can then be used in N-body simulations or for other purposes.
It requires AMUSE, which can be downloaded from http://amusecode.org or
https://github.com/amusecode/amuse.
Currently not feature-complete yet, and function/argument names are
subject to change.
-- Steven Rieder steven at rieder punt nl
"""
import logging
import numpy
from amuse.units import (
units,
nbody_system,
generic_unit_converter,
)
from amuse.units.trigo import sin, cos
from amuse.datamodel.particles import Particles
from amuse.ic.plummer import new_plummer_sphere
from amuse.ic.kingmodel import new_king_model
try:
from amuse.ic.fractalcluster import new_fractal_cluster_model
except ImportError:
new_fractal_cluster_model = None
def new_masses(
stellar_mass=False,
initial_mass_function="salpeter",
upper_mass_limit=125. | units.MSun,
lower_mass_limit=0.1 | units.MSun,
number_of_stars=1024,
exceed_mass=True,
):
imf_name = initial_mass_function.lower()
if imf_name == "salpeter":
from amuse.ic.salpeter import new_salpeter_mass_distribution
initial_mass_function = new_salpeter_mass_distribution
elif imf_name == "kroupa":
from amuse.ic.brokenimf import new_kroupa_mass_distribution
initial_mass_function = new_kroupa_mass_distribution
elif imf_name == "flat":
from amuse.ic.flatimf import new_flat_mass_distribution
initial_mass_function = new_flat_mass_distribution
elif imf_name == "fixed":
from amuse.ic.flatimf import new_flat_mass_distribution
def new_fixed_mass_distribution(
number_of_particles, *list_arguments, **keyword_arguments
):
return new_flat_mass_distribution(
number_of_particles,
mass_min=stellar_mass/number_of_stars,
mass_max=stellar_mass/number_of_stars,
)
initial_mass_function = new_fixed_mass_distribution
if stellar_mass:
        # it is best to underestimate mean_mass a bit for faster results
mean_mass = 0.25 | units.MSun
mass = initial_mass_function(
int(stellar_mass / mean_mass),
mass_min=lower_mass_limit,
mass_max=upper_mass_limit,
)
previous_number_of_stars = len(mass)
if exceed_mass:
# Allow one final star to exceed stellar_mass
final_star = 1+numpy.argmax(mass.cumsum() > stellar_mass)
if (final_star > 1 and final_star < len(mass)):
mass = mass[:final_star]
else:
# Limit to stars not exceeding stellar_mass
mass = mass[mass.cumsum() < stellar_mass]
additional_mass = [] | units.MSun
while True:
if previous_number_of_stars + len(additional_mass) > len(mass):
break
            # We don't have enough stars yet, or haven't confirmed that we do
additional_mass = initial_mass_function(
int(stellar_mass / mean_mass),
mass_min=lower_mass_limit,
mass_max=upper_mass_limit,
)
if exceed_mass:
# Allow one final star to exceed stellar_mass
final_star = 1+numpy.argmax(
mass.sum() + additional_mass.cumsum() > stellar_mass
)
                if (final_star > 1 and final_star < len(additional_mass)):
additional_mass = additional_mass[:final_star]
else:
# Limit to stars not exceeding stellar_mass
additional_mass = additional_mass[
mass.sum() + additional_mass.cumsum() < stellar_mass
]
mass.append(additional_mass)
number_of_stars = len(mass)
else:
# Give stars their mass
mass = initial_mass_function(
number_of_stars,
mass_min=lower_mass_limit,
mass_max=upper_mass_limit,
)
return mass
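# Example (illustrative): draw 100 masses from a Kroupa IMF, or keep drawing
# stars until a total of roughly 1000 MSun is reached.
#   masses = new_masses(number_of_stars=100, initial_mass_function="kroupa")
#   masses = new_masses(stellar_mass=1000. | units.MSun,
#                       initial_mass_function="kroupa")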
def new_star_cluster(
stellar_mass=False,
initial_mass_function="salpeter",
upper_mass_limit=125. | units.MSun,
lower_mass_limit=0.1 | units.MSun,
number_of_stars=1024,
effective_radius=3.0 | units.parsec,
star_distribution="plummer",
star_distribution_w0=7.0,
star_distribution_fd=2.0,
star_metallicity=0.01,
# initial_binary_fraction=0,
**kwargs
):
"""
Create stars.
When using an IMF, either the stellar mass is fixed (within
stochastic error) or the number of stars is fixed. When using
equal-mass stars, both are fixed.
"""
mass = new_masses(
stellar_mass=stellar_mass,
initial_mass_function=initial_mass_function,
upper_mass_limit=upper_mass_limit,
lower_mass_limit=lower_mass_limit,
number_of_stars=number_of_stars,
)
total_mass = mass.sum()
number_of_stars = len(mass)
print(number_of_stars, total_mass, effective_radius)
converter = generic_unit_converter.ConvertBetweenGenericAndSiUnits(
total_mass,
1. | units.kms,
effective_radius,
)
# Give stars a position and velocity, based on the distribution model.
if star_distribution == "plummer":
stars = new_plummer_sphere(
number_of_stars,
convert_nbody=converter,
)
elif star_distribution == "king":
stars = new_king_model(
number_of_stars,
star_distribution_w0,
convert_nbody=converter,
)
elif star_distribution == "fractal":
stars = new_fractal_cluster_model(
number_of_stars,
fractal_dimension=star_distribution_fd,
convert_nbody=converter,
)
else:
return -1, "No stellar distribution"
# set the stellar mass.
stars.mass = mass
# set other stellar parameters.
stars.metallicity = star_metallicity
# Virialize the star cluster if > 1 star
if len(stars) > 1:
stars.move_to_center()
stars.scale_to_standard(
convert_nbody=converter,
# virial_ratio=virial_ratio,
# smoothing_length_squared= ...,
)
# Record the cluster's initial parameters to the particle distribution
stars.collection_attributes.initial_mass_function = \
initial_mass_function.lower()
stars.collection_attributes.upper_mass_limit = upper_mass_limit
stars.collection_attributes.lower_mass_limit = lower_mass_limit
stars.collection_attributes.number_of_stars = number_of_stars
stars.collection_attributes.effective_radius = effective_radius
stars.collection_attributes.star_distribution = star_distribution
stars.collection_attributes.star_distribution_w0 = star_distribution_w0
stars.collection_attributes.star_distribution_fd = star_distribution_fd
stars.collection_attributes.star_metallicity = star_metallicity
# Derived/legacy values
stars.collection_attributes.converter_mass = \
converter.to_si(1 | nbody_system.mass)
stars.collection_attributes.converter_length =\
converter.to_si(1 | nbody_system.length)
stars.collection_attributes.converter_speed =\
converter.to_si(1 | nbody_system.speed)
return stars
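# Usage sketch (illustrative parameter values, not from the original module):
#   stars = new_star_cluster(
#       number_of_stars=1000,
#       initial_mass_function="kroupa",
#       effective_radius=1.0 | units.parsec,
#       star_distribution="plummer",
#   )
#   print(stars.total_mass().in_(units.MSun))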
def new_stars_from_sink(
origin,
upper_mass_limit=125 | units.MSun,
lower_mass_limit=0.1 | units.MSun,
default_radius=0.25 | units.pc,
velocity_dispersion=1 | units.kms,
logger=None,
initial_mass_function="kroupa",
distribution="random",
randomseed=None,
**keyword_arguments
):
"""
Form stars from an origin particle that keeps track of the properties of
this region.
"""
logger = logger or logging.getLogger(__name__)
if randomseed is not None:
logger.info("setting random seed to %i", randomseed)
numpy.random.seed(randomseed)
try:
initialised = origin.initialised
except AttributeError:
initialised = False
if not initialised:
logger.debug(
"Initialising origin particle %i for star formation",
origin.key
)
next_mass = new_star_cluster(
initial_mass_function=initial_mass_function,
upper_mass_limit=upper_mass_limit,
lower_mass_limit=lower_mass_limit,
number_of_stars=1,
**keyword_arguments
)
origin.next_primary_mass = next_mass[0].mass
origin.initialised = True
if origin.mass < origin.next_primary_mass:
logger.debug(
"Not enough in star forming region %i to form the next star",
origin.key
)
return Particles()
mass_reservoir = origin.mass - origin.next_primary_mass
stellar_masses = new_star_cluster(
stellar_mass=mass_reservoir,
upper_mass_limit=upper_mass_limit,
lower_mass_limit=lower_mass_limit,
        initial_mass_function=initial_mass_function,
).mass
number_of_stars = len(stellar_masses)
new_stars = Particles(number_of_stars)
new_stars.age = 0 | units.yr
new_stars[0].mass = origin.next_primary_mass
new_stars[1:].mass = stellar_masses[:-1]
origin.next_primary_mass = stellar_masses[-1]
new_stars.position = origin.position
new_stars.velocity = origin.velocity
try:
radius = origin.radius
except AttributeError:
radius = default_radius
rho = numpy.random.random(number_of_stars) * radius
theta = (
numpy.random.random(number_of_stars)
* (2 * numpy.pi | units.rad)
)
phi = (
numpy.random.random(number_of_stars) * numpy.pi | units.rad
)
x = rho * sin(phi) * cos(theta)
y = rho * sin(phi) * sin(theta)
z = rho * cos(phi)
new_stars.x += x
new_stars.y += y
new_stars.z += z
velocity_magnitude = numpy.random.normal(
scale=velocity_dispersion.value_in(units.kms),
size=number_of_stars,
) | units.kms
velocity_theta = (
numpy.random.random(number_of_stars)
* (2 * numpy.pi | units.rad)
)
velocity_phi = (
numpy.random.random(number_of_stars)
* (numpy.pi | units.rad)
)
vx = velocity_magnitude * sin(velocity_phi) * cos(velocity_theta)
vy = velocity_magnitude * sin(velocity_phi) * sin(velocity_theta)
vz = velocity_magnitude * cos(velocity_phi)
new_stars.vx += vx
new_stars.vy += vy
new_stars.vz += vz
new_stars.origin = origin.key
origin.mass -= new_stars.total_mass()
return new_stars
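# Usage sketch (illustrative; assumes an AMUSE particle with mass, position
# and velocity attributes acting as the star-forming sink):
#   sink = Particles(1)[0]
#   sink.mass = 100 | units.MSun
#   sink.position = [0, 0, 0] | units.parsec
#   sink.velocity = [0, 0, 0] | units.kms
#   stars = new_stars_from_sink(sink, randomseed=42)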
|
rieder/MASC
|
src/amuse/ext/masc/cluster.py
|
Python
|
mit
| 10,655
|
from setuptools import setup, find_packages
setup(
name="tomorrow",
version="0.2.4",
author="Madison May",
author_email="madison@indico.io",
packages=find_packages(
exclude=[
'tests'
]
),
install_requires=[
"futures >= 2.2.0"
],
description="""
Magic decorator syntax for asynchronous code.
""",
license="MIT License (See LICENSE)",
long_description=open("README.rst").read(),
url="https://github.com/madisonmay/tomorrow"
)
|
madisonmay/Tomorrow
|
setup.py
|
Python
|
mit
| 521
|
import os
import MySQLdb
import logging
_log = logging.getLogger('thanatos')
def execute_sql(sql, db_connection, fetch='all'):
"""
:param sql:
:param db_connection:
:param fetch: A string of either 'all' or 'one'.
:return:
"""
cursor = db_connection.cursor()
cursor.execute(sql)
if fetch == 'all':
results = [x for x in cursor.fetchall()]
elif fetch == 'one':
results = cursor.fetchone()
else:
results = None
cursor.close()
return results
def get_stored_procs(db_connection):
"""
:param db_connection:
:return:
"""
sql = "SHOW PROCEDURE STATUS;"
procs = execute_sql(sql, db_connection)
return [x[1] for x in procs]
def drop_proc(proc_name, db_connection):
"""
:param proc_name:
:param db_connection:
:return:
"""
sql = "DROP PROCEDURE {};".format(proc_name)
execute_sql(sql, db_connection)
def update_sql_stored_procs(db_connection):
"""
:param db_connection:
:return:
"""
procs_dir_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'procs')
existing_procs = get_stored_procs(db_connection)
for proc_file_name in os.listdir(procs_dir_path):
_log.info('Running {}'.format(proc_file_name))
proc_file_path = os.path.join(procs_dir_path, proc_file_name)
proc_name = proc_file_name.split('.')[1]
if proc_name in existing_procs:
drop_proc(proc_name, db_connection)
with open(proc_file_path, 'r') as sql_file:
sql = " ".join(sql_file.readlines())
execute_sql(sql, db_connection, fetch=None)
def load_tables_from_files(db_connection):
""" Looks in the current working directory for all required tables. """
_log.info('Loading tables from disk to DB.')
sde_dir_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'sde')
for sde_file_name in os.listdir(sde_dir_path):
_log.info('Loading the following table: {}'.format(sde_file_name))
sde_file_path = os.path.join(sde_dir_path, sde_file_name)
with open(sde_file_path, 'r') as sde_file:
sql = sde_file.read()
execute_sql(sql, db_connection)
_log.info('Finished loading all requested tables.')
def get_connection(connection_details=None):
""" Creates a connection to the MySQL DB. """
if connection_details is None:
connection_details = get_default_connection_details()
return MySQLdb.connect(
connection_details['host'],
connection_details['user'],
connection_details['password'],
connection_details['database']
)
def get_default_connection_details():
""" Gets the connection details based on environment vars or Thanatos default settings.
:return: Returns a dictionary of connection details.
:rtype: dict
"""
return {
'host': os.environ.get('MYSQL_HOST', '127.0.0.1'),
'user': os.environ.get('MYSQL_USER', 'vagrant'),
'password': os.environ.get('MYSQL_PASSWORD', 'vagrant'),
'database': os.environ.get('MYSQL_DB', 'thanatos'),
}
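# Usage sketch (illustrative; assumes a reachable MySQL instance with the
# default credentials above):
#   connection = get_connection()
#   rows = execute_sql("SELECT 1;", connection)
#   update_sql_stored_procs(connection)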
|
evetrivia/thanatos
|
thanatos/database/db_utils.py
|
Python
|
mit
| 3,157
|
__version__ = "0.0.2"
from .samplesubmod import *
|
hwong/samplesubmod
|
__init__.py
|
Python
|
mit
| 51
|
from w3lib.html import remove_tags
from requests import session, codes
from bs4 import BeautifulSoup
# Net/gross calculator for student under 26 years
class Student:
_hours = 0
_wage = 0
_tax_rate = 18
_cost = 20
def __init__(self, hours, wage, cost):
self._hours = hours
self._wage = wage
self._cost = cost
def _get_real_tax_rate(self):
tax_from = (100 - self._cost) / 100
return tax_from * self._tax_rate / 100
def get_net(self):
return self._wage * self._hours
def get_gross(self):
value = self.get_net() / (1 - self._get_real_tax_rate())
return int(value + 0.5)
def get_tax_base(self):
return self.get_gross() - self.get_cost()
def get_cost(self):
return self.get_gross() - self.get_gross() * (100 - self._cost) / 100
def get_tax(self):
return self.get_gross() - self.get_net()
def get_cost_percentage(self):
return self._cost
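# Example (illustrative numbers): 10 hours at 20/h with 20% deductible costs.
#   student = Student(10, 20, 20)
#   student.get_net()    # 200
#   student.get_gross()  # 234, i.e. 200 / (1 - 0.8 * 0.18) rounded
#   student.get_tax()    # 34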
# Net/gross calculator using web client with optional fallback
class WebCalculator:
_data = None
_calculator = None
_cost = 0
def __init__(self, hours, wage, cost):
from tools import Config
self._cost = cost
self._data = Config.get_calculator_bot().parse(hours * wage, 1 if cost == 50 else 0)
# Check if bot returned some data
        if self._data is None:
            self._calculator = Config.get_fallback_calculator()(hours, wage, cost)
    def get_net(self):
        if self._data is None:
            return self._calculator.get_net()
        return self._data['net']
    def get_gross(self):
        if self._data is None:
            return self._calculator.get_gross()
        return self._data['gross']
    def get_tax_base(self):
        if self._data is None:
            return self._calculator.get_tax_base()
        return self._data['tax_base']
    def get_cost(self):
        if self._data is None:
            return self._calculator.get_cost()
        return self._data['cost']
    def get_tax(self):
        if self._data is None:
            return self._calculator.get_tax()
        return self._data['tax']
def get_cost_percentage(self):
return self._cost
# Bot finding invoice values on wfirma.pl calculator page
class WfirmaPlBot:
_url = 'https://poradnik.wfirma.pl/staff_contract_headers/evaluate/errand'
# Send needed data
@staticmethod
def parse(net, copyright):
from tools import Config
# Prepare data for request
form_data = Config.get('wfirma.pl')
header_data = {
'quota_type': form_data['quota_type'],
'quota': net,
'company_incidental': form_data['company_incidental'],
}
form_data['copyright'] = copyright
with session() as c:
# convert data to format viable for url-encoding
data = {}
for k, v in form_data.items():
data['data[StaffContractErrand][%s]' % k] = v
for k, v in header_data.items():
data['data[StaffContractHeader][%s]' % k] = v
# Send the request to the server
try:
request = c.post(WfirmaPlBot._url, data=data, timeout=3)
            except Exception:
                print('Timed out waiting for a response from the server')
return None
# There was some error (most likely server-side), so use offline fallback
if request.status_code != codes.ok:
                print('An error occurred while fetching the invoice data')
return None
return WfirmaPlBot._parse_results(request.text)
# Parse data returned on request
@staticmethod
def _parse_results(request_body):
# extract wanted data
soup = BeautifulSoup(request_body.replace('\n', ''), 'xml')
interesting_columns = soup.findAll('td')[1:15:2]
# convert to floats
interesting_columns = list(map(lambda x: float(x.get_text().replace(' ', '').replace(',', '.')), interesting_columns))
column_names = [
'net', 'gross', 'all_cost', 'insurance_base', 'cost', 'tax_base', 'tax',
]
result = {}
for i in range(0, 7):
result[column_names[i]] = interesting_columns[i]
return result
# @todo the deductible-cost percentage cannot be set on this calculator
class KalkulatoryNfBot:
_url = 'http://kalkulatory.nf.pl/kalkulator/wynagrodzenie/zlecenie'
# Send needed data
@staticmethod
def parse(net, copyright):
return None
from tools import Config
form_data = Config.get('kalkulatory.nf.pl')
form_data = {**form_data, **{
'stawka': 'net',
'kwota': net,
'_method': 'POST',
}}
with session() as c:
# Fix data format
data = {}
for k, v in form_data.items():
data['data[Calculator][%s]' % k] = v
# Try to make a request
try:
request = c.post(KalkulatoryNfBot._url, data=data, timeout=3)
            except Exception:
                print('Timed out waiting for a response from the server')
return None
# There was some error (most likely server-side), so use offline fallback
if request.status_code != codes.ok:
                print('An error occurred while fetching the invoice data')
return None
return KalkulatoryNfBot._parse_results(request.text)
# Parse data returned on request
@staticmethod
def _parse_results(request_body):
# extract wanted data
        soup = BeautifulSoup(request_body, 'html.parser')
        table = soup.select('div.calc-body.clr')[0].find_next_sibling().findAll('td')[4:]
        del table[3:7] # remove unneeded columns
table = list(map(lambda x: float(x.get_text().replace(' zł', '').replace(' ', '').replace(',', '.')), table))
column_names = [
'cost', 'tax_base', 'tax', 'gross', 'net'
]
result = {}
for i in range(0, 5):
result[column_names[i]] = table[i]
return result
|
tomekby/miscellaneous
|
jira-invoices/calculator.py
|
Python
|
mit
| 6,443
|
#!/usr/bin/env python3
"""
Automatically test a given treewidth solver:
./autotest-tw-solver.py path/to/my/solver
The test is run on some corner cases, and on graphs were past PACE submissions
exhibited bugs.
Optional argument:
--full run test on all graphs with min-degree 3 and at most 8 vertices
Requires python3-networkx
Copyright 2016, Holger Dell
Licensed under GPLv3.
"""
import argparse
import glob
import os
import subprocess
import tempfile
import networkx
def read_tw_from_td(ifstream):
"""Return the reported treewidth from a .td file"""
for line in ifstream:
if line[0] == "s":
treewidth = int(line.split(" ")[3]) - 1
return treewidth
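# For example, given a PACE-format solution line "s td 4 3 10" (4 bags,
# largest bag size 3, 10 vertices), read_tw_from_td reports 3 - 1 = 2.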
def test_case_generator(full=False):
"""
Return a generator for all test cases.
Each test case is a tuple (name, grfilestream, treewidth)
where
- name is a string indicating the name of the test case
- grfilestream is a stream from which the grfile can be read
- treewidth is the known treewidth of the graph (or None if we don't care)
"""
# This covers some corner cases (comments, empty graphs, etc)
for grfile in glob.glob("test/valid/*.gr"):
yield grfile, open(grfile, "r"), None
# Test cases where some tw-solvers were buggy in the past
for grfile in glob.glob("test/tw-solver-bugs/*.gr"):
treewidth = None
with open(grfile[:-3] + ".td") as td_stream:
treewidth = read_tw_from_td(td_stream)
yield grfile, open(grfile, "r"), treewidth
# More test cases where some tw-solvers were buggy in the past
tests = ["test/tw-solver-bugs.graph6"]
if full:
tests.append("test/n_upto_8.graph6")
for fname in tests:
with open(fname) as tests:
for line in tests:
line = line.strip().split(" ")
graph6 = line[0]
treewidth = int(line[1])
G = networkx.from_graph6_bytes(bytes(graph6, "utf-8"))
n = G.order()
m = G.size()
with tempfile.TemporaryFile("w+") as tmp:
tmp.write("p tw {:d} {:d}\n".format(n, m))
for (u, v) in G.edges(data=False):
tmp.write("{:d} {:d}\n".format(u + 1, v + 1))
tmp.flush()
tmp.seek(0)
yield graph6 + " from " + fname, tmp, treewidth
tw_executable = ""
FNULL = open(os.devnull, "w")
def td_validate(grstream, tdstream):
with tempfile.NamedTemporaryFile("w+") as tmp_td:
for line in tdstream:
tmp_td.write(line)
tmp_td.flush()
tmp_td.seek(0)
with tempfile.NamedTemporaryFile("w+") as tmp_gr:
for line in grstream:
tmp_gr.write(line)
tmp_gr.flush()
tmp_gr.seek(0)
p = subprocess.Popen(["./td-validate", tmp_gr.name, tmp_td.name])
p.wait()
return p.returncode == 0
def run_one_testcase(arg):
"""given the name of a testcase, the input stream for a .gr file, and the
correct treewidth, this function runs the test"""
global tw_executable
name, ifstream, treewidth = arg
with tempfile.TemporaryFile("w+") as tmp_td:
p = subprocess.Popen(
[tw_executable], stdin=ifstream, stdout=tmp_td, stderr=FNULL
)
try:
p.wait(timeout=5)
except subprocess.TimeoutExpired:
p.terminate()
try:
p.wait(timeout=5)
except subprocess.TimeoutExpired:
p.kill()
ifstream.seek(0)
tmp_td.flush()
tmp_td.seek(0)
print(name)
valid = td_validate(ifstream, tmp_td)
ifstream.close()
tmp_td.seek(0)
computed_tw = read_tw_from_td(tmp_td)
    if treewidth is not None and computed_tw is not None:
if treewidth > computed_tw:
print(
"!! your program said tw={:d} but we thought it was {:d} -- please send your .td file to the developer of td-validate".format(
computed_tw, treewidth
)
)
elif treewidth < computed_tw:
print(
"non-optimal (your_tw={:d}, optimal_tw={:d})".format(
computed_tw, treewidth
)
)
    nonoptimal = (
        treewidth is not None and computed_tw is not None and treewidth < computed_tw
    )
print()
return valid, nonoptimal
def main():
parser = argparse.ArgumentParser(
description="Automatically test a given treewidth solver"
)
parser.add_argument(
"twsolver", help="path to the treewidth solver you want to test"
)
parser.add_argument(
"--full",
help="run test on all 2753 graphs with min-degree 3 and at most 8 vertices (this could take a while)",
action="store_true",
)
args = parser.parse_args()
global tw_executable
tw_executable = args.twsolver
f = "./td-validate"
if not os.path.isfile(f):
print("File {:s} not found. Run 'make' first!\n".format(f))
return
print("Automatically testing {:s}...\n".format(tw_executable))
results = list(map(run_one_testcase, test_case_generator(args.full)))
total = len(results)
total_valid = 0
total_nonoptimal = 0
for valid, nonoptimal in results:
if valid:
total_valid += 1
if nonoptimal:
total_nonoptimal += 1
print()
if total == total_valid:
print("Produced a valid .td on all {:d} instances.".format(total))
else:
print("{:d} out of {:d} tests produced a valid .td".format(total_valid, total))
if total_nonoptimal == 0:
print("All tree decompositions were optimal")
else:
print("{:d} tree decompositions were not optimal".format(total_nonoptimal))
if __name__ == "__main__":
main()
|
holgerdell/td-validate
|
autotest-tw-solver.py
|
Python
|
mit
| 6,041
|
from common.persistence import from_pickle
NWORDS = from_pickle('../data/en_dict.pkl')
print(len(NWORDS))
print(NWORDS['word'])
print(NWORDS['spell'])
alphabet = 'abcdefghijklmnopqrstuvwxyz'
def edits1(word):
s = [(word[:i], word[i:]) for i in range(len(word) + 1)]
deletes = [a + b[1:] for a, b in s if b]
transposes = [a + b[1] + b[0] + b[2:] for a, b in s if len(b) > 1]
replaces = [a + c + b[1:] for a, b in s for c in alphabet if b]
inserts = [a + c + b for a, b in s for c in alphabet]
return set(deletes + transposes + replaces + inserts)
def known_edits2(word):
return set(e2 for e1 in edits1(word) for e2 in edits1(e1) if e2 in NWORDS)
def known(words): return set(w for w in words if w in NWORDS)
def correct(word):
candidates = known([word]) or known(edits1(word)) or known_edits2(word) or [word]
return max(candidates, key=NWORDS.get)
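# Example (illustrative; results depend on the loaded NWORDS frequencies):
#   correct('speling')    # -> 'spelling'
#   correct('korrecter')  # -> 'corrected'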
################ Testing code from here on ################
def spelltest(tests, bias=None, verbose=False):
import time
n, bad, unknown, start = 0, 0, 0, time.clock()
if bias:
for target in tests: NWORDS[target] += bias
for target, wrongs in tests.items():
for wrong in wrongs.split():
n += 1
w = correct(wrong)
if w != target:
bad += 1
unknown += (target not in NWORDS)
if verbose:
print 'correct(%r) => %r (%d); expected %r (%d)' % (
wrong, w, NWORDS[w], target, NWORDS[target])
return dict(bad=bad, n=n, bias=bias, pct=int(100. - 100. * bad / n),
unknown=unknown, secs=int(time.clock() - start))
tests1 = {'access': 'acess', 'accessing': 'accesing', 'accommodation':
'accomodation acommodation acomodation', 'account': 'acount', 'address':
'adress adres', 'addressable': 'addresable', 'arranged': 'aranged arrainged',
'arrangeing': 'aranging', 'arrangement': 'arragment', 'articles': 'articals',
'aunt': 'annt anut arnt', 'auxiliary': 'auxillary', 'available': 'avaible',
'awful': 'awfall afful', 'basically': 'basicaly', 'beginning': 'begining',
'benefit': 'benifit', 'benefits': 'benifits', 'between': 'beetween', 'bicycle':
'bicycal bycicle bycycle', 'biscuits':
'biscits biscutes biscuts bisquits buiscits buiscuts', 'built': 'biult',
'cake': 'cak', 'career': 'carrer',
'cemetery': 'cemetary semetary', 'centrally': 'centraly', 'certain': 'cirtain',
'challenges': 'chalenges chalenges', 'chapter': 'chaper chaphter chaptur',
'choice': 'choise', 'choosing': 'chosing', 'clerical': 'clearical',
'committee': 'comittee', 'compare': 'compair', 'completely': 'completly',
'consider': 'concider', 'considerable': 'conciderable', 'contented':
'contenpted contende contended contentid', 'curtains':
'cartains certans courtens cuaritains curtans curtians curtions', 'decide': 'descide', 'decided':
'descided', 'definitely': 'definately difinately', 'definition': 'defenition',
'definitions': 'defenitions', 'description': 'discription', 'desiccate':
'desicate dessicate dessiccate', 'diagrammatically': 'diagrammaticaally',
'different': 'diffrent', 'driven': 'dirven', 'ecstasy': 'exstacy ecstacy',
'embarrass': 'embaras embarass', 'establishing': 'astablishing establising',
'experience': 'experance experiance', 'experiences': 'experances', 'extended':
'extented', 'extremely': 'extreamly', 'fails': 'failes', 'families': 'familes',
'february': 'febuary', 'further': 'futher', 'gallery': 'galery gallary gallerry gallrey',
'hierarchal': 'hierachial', 'hierarchy': 'hierchy', 'inconvenient':
'inconvienient inconvient inconvinient', 'independent': 'independant independant',
'initial': 'intial', 'initials': 'inetials inistals initails initals intials',
'juice': 'guic juce jucie juise juse', 'latest': 'lates latets latiest latist',
'laugh': 'lagh lauf laught lugh', 'level': 'leval',
'levels': 'levals', 'liaison': 'liaision liason', 'lieu': 'liew', 'literature':
'litriture', 'loans': 'lones', 'locally': 'localy', 'magnificent':
'magnificnet magificent magnifcent magnifecent magnifiscant magnifisent magnificant',
'management': 'managment', 'meant': 'ment', 'minuscule': 'miniscule',
'minutes': 'muinets', 'monitoring': 'monitering', 'necessary':
'neccesary necesary neccesary necassary necassery neccasary', 'occurrence':
'occurence occurence', 'often': 'ofen offen offten ofton', 'opposite':
'opisite oppasite oppesite oppisit oppisite opposit oppossite oppossitte', 'parallel':
'paralel paralell parrallel parralell parrallell', 'particular': 'particulaur',
'perhaps': 'perhapse', 'personnel': 'personnell', 'planned': 'planed', 'poem':
'poame', 'poems': 'poims pomes', 'poetry': 'poartry poertry poetre poety powetry',
'position': 'possition', 'possible': 'possable', 'pretend':
'pertend protend prtend pritend', 'problem': 'problam proble promblem proplen',
'pronunciation': 'pronounciation', 'purple': 'perple perpul poarple',
'questionnaire': 'questionaire', 'really': 'realy relley relly', 'receipt':
'receit receite reciet recipt', 'receive': 'recieve', 'refreshment':
'reafreshment refreshmant refresment refressmunt', 'remember': 'rember remeber rememmer rermember',
'remind': 'remine remined', 'scarcely': 'scarcly scarecly scarely scarsely',
'scissors': 'scisors sissors', 'separate': 'seperate',
'singular': 'singulaur', 'someone': 'somone', 'sources': 'sorces', 'southern':
'southen', 'special': 'speaical specail specal speical', 'splendid':
'spledid splended splened splended', 'standardizing': 'stanerdizing', 'stomach':
'stomac stomache stomec stumache', 'supersede': 'supercede superceed', 'there': 'ther',
'totally': 'totaly', 'transferred': 'transfred', 'transportability':
'transportibility', 'triangular': 'triangulaur', 'understand': 'undersand undistand',
'unexpected': 'unexpcted unexpeted unexspected', 'unfortunately':
'unfortunatly', 'unique': 'uneque', 'useful': 'usefull', 'valuable': 'valubale valuble',
'variable': 'varable', 'variant': 'vairiant', 'various': 'vairious',
'visited': 'fisited viseted vistid vistied', 'visitors': 'vistors',
'voluntary': 'volantry', 'voting': 'voteing', 'wanted': 'wantid wonted',
'whether': 'wether', 'wrote': 'rote wote'}
tests2 = {'forbidden': 'forbiden', 'decisions': 'deciscions descisions',
'supposedly': 'supposidly', 'embellishing': 'embelishing', 'technique':
'tecnique', 'permanently': 'perminantly', 'confirmation': 'confermation',
'appointment': 'appoitment', 'progression': 'progresion', 'accompanying':
'acompaning', 'applicable': 'aplicable', 'regained': 'regined', 'guidelines':
'guidlines', 'surrounding': 'serounding', 'titles': 'tittles', 'unavailable':
'unavailble', 'advantageous': 'advantageos', 'brief': 'brif', 'appeal':
'apeal', 'consisting': 'consisiting', 'clerk': 'cleark clerck', 'component':
'componant', 'favourable': 'faverable', 'separation': 'seperation', 'search':
'serch', 'receive': 'recieve', 'employees': 'emploies', 'prior': 'piror',
'resulting': 'reulting', 'suggestion': 'sugestion', 'opinion': 'oppinion',
'cancellation': 'cancelation', 'criticism': 'citisum', 'useful': 'usful',
'humour': 'humor', 'anomalies': 'anomolies', 'would': 'whould', 'doubt':
'doupt', 'examination': 'eximination', 'therefore': 'therefoe', 'recommend':
'recomend', 'separated': 'seperated', 'successful': 'sucssuful succesful',
'apparent': 'apparant', 'occurred': 'occureed', 'particular': 'paerticulaur',
'pivoting': 'pivting', 'announcing': 'anouncing', 'challenge': 'chalange',
'arrangements': 'araingements', 'proportions': 'proprtions', 'organized':
'oranised', 'accept': 'acept', 'dependence': 'dependance', 'unequalled':
'unequaled', 'numbers': 'numbuers', 'sense': 'sence', 'conversely':
'conversly', 'provide': 'provid', 'arrangement': 'arrangment',
'responsibilities': 'responsiblities', 'fourth': 'forth', 'ordinary':
'ordenary', 'description': 'desription descvription desacription',
'inconceivable': 'inconcievable', 'data': 'dsata', 'register': 'rgister',
'supervision': 'supervison', 'encompassing': 'encompasing', 'negligible':
'negligable', 'allow': 'alow', 'operations': 'operatins', 'executed':
'executted', 'interpretation': 'interpritation', 'hierarchy': 'heiarky',
'indeed': 'indead', 'years': 'yesars', 'through': 'throut', 'committee':
'committe', 'inquiries': 'equiries', 'before': 'befor', 'continued':
'contuned', 'permanent': 'perminant', 'choose': 'chose', 'virtually':
'vertually', 'correspondence': 'correspondance', 'eventually': 'eventully',
'lonely': 'lonley', 'profession': 'preffeson', 'they': 'thay', 'now': 'noe',
'desperately': 'despratly', 'university': 'unversity', 'adjournment':
'adjurnment', 'possibilities': 'possablities', 'stopped': 'stoped', 'mean':
'meen', 'weighted': 'wagted', 'adequately': 'adequattly', 'shown': 'hown',
'matrix': 'matriiix', 'profit': 'proffit', 'encourage': 'encorage', 'collate':
'colate', 'disaggregate': 'disaggreagte disaggreaget', 'receiving':
'recieving reciving', 'proviso': 'provisoe', 'umbrella': 'umberalla', 'approached':
'aproached', 'pleasant': 'plesent', 'difficulty': 'dificulty', 'appointments':
'apointments', 'base': 'basse', 'conditioning': 'conditining', 'earliest':
'earlyest', 'beginning': 'begining', 'universally': 'universaly',
'unresolved': 'unresloved', 'length': 'lengh', 'exponentially':
'exponentualy', 'utilized': 'utalised', 'set': 'et', 'surveys': 'servays',
'families': 'familys', 'system': 'sysem', 'approximately': 'aproximatly',
'their': 'ther', 'scheme': 'scheem', 'speaking': 'speeking', 'repetitive':
'repetative', 'inefficient': 'ineffiect', 'geneva': 'geniva', 'exactly':
'exsactly', 'immediate': 'imediate', 'appreciation': 'apreciation', 'luckily':
'luckeley', 'eliminated': 'elimiated', 'believe': 'belive', 'appreciated':
'apreciated', 'readjusted': 'reajusted', 'were': 'wer where', 'feeling':
'fealing', 'and': 'anf', 'false': 'faulse', 'seen': 'seeen', 'interrogating':
'interogationg', 'academically': 'academicly', 'relatively': 'relativly relitivly',
'traditionally': 'traditionaly', 'studying': 'studing',
'majority': 'majorty', 'build': 'biuld', 'aggravating': 'agravating',
'transactions': 'trasactions', 'arguing': 'aurguing', 'sheets': 'sheertes',
'successive': 'sucsesive sucessive', 'segment': 'segemnt', 'especially':
'especaily', 'later': 'latter', 'senior': 'sienior', 'dragged': 'draged',
'atmosphere': 'atmospher', 'drastically': 'drasticaly', 'particularly':
'particulary', 'visitor': 'vistor', 'session': 'sesion', 'continually':
'contually', 'availability': 'avaiblity', 'busy': 'buisy', 'parameters':
'perametres', 'surroundings': 'suroundings seroundings', 'employed':
'emploied', 'adequate': 'adiquate', 'handle': 'handel', 'means': 'meens',
'familiar': 'familer', 'between': 'beeteen', 'overall': 'overal', 'timing':
'timeing', 'committees': 'comittees commitees', 'queries': 'quies',
'econometric': 'economtric', 'erroneous': 'errounous', 'decides': 'descides',
'reference': 'refereence refference', 'intelligence': 'inteligence',
'edition': 'ediion ediition', 'are': 'arte', 'apologies': 'appologies',
'thermawear': 'thermawere thermawhere', 'techniques': 'tecniques',
'voluntary': 'volantary', 'subsequent': 'subsequant subsiquent', 'currently':
'curruntly', 'forecast': 'forcast', 'weapons': 'wepons', 'routine': 'rouint',
'neither': 'niether', 'approach': 'aproach', 'available': 'availble',
'recently': 'reciently', 'ability': 'ablity', 'nature': 'natior',
'commercial': 'comersial', 'agencies': 'agences', 'however': 'howeverr',
'suggested': 'sugested', 'career': 'carear', 'many': 'mony', 'annual':
'anual', 'according': 'acording', 'receives': 'recives recieves',
'interesting': 'intresting', 'expense': 'expence', 'relevant':
'relavent relevaant', 'table': 'tasble', 'throughout': 'throuout', 'conference':
'conferance', 'sensible': 'sensable', 'described': 'discribed describd',
'union': 'unioun', 'interest': 'intrest', 'flexible': 'flexable', 'refered':
'reffered', 'controlled': 'controled', 'sufficient': 'suficient',
'dissension': 'desention', 'adaptable': 'adabtable', 'representative':
'representitive', 'irrelevant': 'irrelavent', 'unnecessarily': 'unessasarily',
'applied': 'upplied', 'apologised': 'appologised', 'these': 'thees thess',
'choices': 'choises', 'will': 'wil', 'procedure': 'proceduer', 'shortened':
'shortend', 'manually': 'manualy', 'disappointing': 'dissapoiting',
'excessively': 'exessively', 'comments': 'coments', 'containing': 'containg',
'develop': 'develope', 'credit': 'creadit', 'government': 'goverment',
'acquaintances': 'aquantences', 'orientated': 'orentated', 'widely': 'widly',
'advise': 'advice', 'difficult': 'dificult', 'investigated': 'investegated',
'bonus': 'bonas', 'conceived': 'concieved', 'nationally': 'nationaly',
'compared': 'comppared compased', 'moving': 'moveing', 'necessity':
'nessesity', 'opportunity': 'oppertunity oppotunity opperttunity', 'thoughts':
'thorts', 'equalled': 'equaled', 'variety': 'variatry', 'analysis':
'analiss analsis analisis', 'patterns': 'pattarns', 'qualities': 'quaties', 'easily':
'easyly', 'organization': 'oranisation oragnisation', 'the': 'thw hte thi',
'corporate': 'corparate', 'composed': 'compossed', 'enormously': 'enomosly',
'financially': 'financialy', 'functionally': 'functionaly', 'discipline':
'disiplin', 'announcement': 'anouncement', 'progresses': 'progressess',
'except': 'excxept', 'recommending': 'recomending', 'mathematically':
'mathematicaly', 'source': 'sorce', 'combine': 'comibine', 'input': 'inut',
'careers': 'currers carrers', 'resolved': 'resoved', 'demands': 'diemands',
'unequivocally': 'unequivocaly', 'suffering': 'suufering', 'immediately':
'imidatly imediatly', 'accepted': 'acepted', 'projects': 'projeccts',
'necessary': 'necasery nessasary nessisary neccassary', 'journalism':
'journaism', 'unnecessary': 'unessessay', 'night': 'nite', 'output':
'oputput', 'security': 'seurity', 'essential': 'esential', 'beneficial':
'benificial benficial', 'explaining': 'explaning', 'supplementary':
'suplementary', 'questionnaire': 'questionare', 'employment': 'empolyment',
'proceeding': 'proceding', 'decision': 'descisions descision', 'per': 'pere',
'discretion': 'discresion', 'reaching': 'reching', 'analysed': 'analised',
'expansion': 'expanion', 'although': 'athough', 'subtract': 'subtrcat',
'analysing': 'aalysing', 'comparison': 'comparrison', 'months': 'monthes',
'hierarchal': 'hierachial', 'misleading': 'missleading', 'commit': 'comit',
'auguments': 'aurgument', 'within': 'withing', 'obtaining': 'optaning',
'accounts': 'acounts', 'primarily': 'pimarily', 'operator': 'opertor',
'accumulated': 'acumulated', 'extremely': 'extreemly', 'there': 'thear',
'summarys': 'sumarys', 'analyse': 'analiss', 'understandable':
'understadable', 'safeguard': 'safegaurd', 'consist': 'consisit',
'declarations': 'declaratrions', 'minutes': 'muinutes muiuets', 'associated':
'assosiated', 'accessibility': 'accessability', 'examine': 'examin',
'surveying': 'servaying', 'politics': 'polatics', 'annoying': 'anoying',
'again': 'agiin', 'assessing': 'accesing', 'ideally': 'idealy', 'scrutinized':
'scrutiniesed', 'simular': 'similar', 'personnel': 'personel', 'whereas':
'wheras', 'when': 'whn', 'geographically': 'goegraphicaly', 'gaining':
'ganing', 'requested': 'rquested', 'separate': 'seporate', 'students':
'studens', 'prepared': 'prepaired', 'generated': 'generataed', 'graphically':
'graphicaly', 'suited': 'suted', 'variable': 'varible vaiable', 'building':
'biulding', 'required': 'reequired', 'necessitates': 'nessisitates',
'together': 'togehter', 'profits': 'proffits'}
if __name__ == '__main__':
print spelltest(tests1, verbose=True)
print spelltest(tests2, verbose=True)
|
anderscui/spellchecker
|
simple_checker/checker_tests_google_dict.py
|
Python
|
mit
| 17,844
|
"""
mpstest7.py
A test of manipulating matrix product states with numpy.
2014-08-25
"""
import numpy as np
import matplotlib.pyplot as plt
from cmath import *
from mpl_toolkits.mplot3d import Axes3D
from matplotlib import cm
def main():
test3()
def test3():
""" Test MPS conversion functions by computing fidelity between
    generated MPS and original, with new and old bond dimensions
chi0 and chi1 varied.
"""
print("*** Started testing MPS ***")
N = 5
d = 2
# Points to plot on 3d graph
(X,Y,Z) = ([],[],[])
for chi0 in xrange(1,8):
for chi1 in xrange(1,8):
F = 0
# Run random test for 20 points and take average fidelity
for i in xrange(20):
mps0 = randomMPS(N,chi0,d) # Make random MPS
state0 = getState(mps0) # Convert to state
mps1 = getMPS(state0,chi1) # Convert back to MPS with new bond dimension
state1 = getState(mps1) # Convert back to state
F += fidelityMPS(mps0,mps1) # Compute fidelity and add to sum
# F += fidelity(state0,state1) # Uncomment this to try with vectors
X.append(chi0)
Y.append(chi1)
Z.append(F/20)
X = np.array(X)
Y = np.array(Y)
Z = np.array(Z)
# Plot the surface
fig = plt.figure()
ax = fig.gca(projection='3d')
ax.plot_trisurf(X, Y, Z, cmap=cm.jet, linewidth=0.2)
ax.set_xlabel('chi0')
ax.set_ylabel('chi1')
ax.set_zlabel('fidelity')
plt.show()
print("*** Finished testing MPS ***")
def fidelityMPS(A,B):
""" Fidelity of two MPS representations
f = <A|B><B|A>/(<A|A><B|B>).
"""
return innerProduct(A,B)*innerProduct(B,A)\
/innerProduct(A,A)/innerProduct(B,B)
def fidelity(a,b):
""" Fidelity of two state vectors
f = <a|b><b|a>/(<a|a><b|b>).
"""
return np.inner(np.conj(a),b)*np.inner(np.conj(b),a)\
/np.inner(np.conj(a),a)/np.inner(np.conj(b),b)
def randomMPS(N,chi,d):
""" Returns a random MPS given parameters N, chi, d."""
A = []
for i in xrange(N):
# Each real part of each value varies between -0.5 and 0.5.
A.append((np.random.rand(chi,d,chi)-.5)+1j*(np.random.rand(chi,d,chi)-.5))
return np.array(A)
def getState(A):
""" State vector of a MPS by contracting MPS."""
N = len(A) # Number of spins
chi = A[0].shape[0] # Bond dimension
d = A[0].shape[1] # d = 2 for qubits
c = A[0]
for i in xrange(1,N):
c = np.tensordot(c,A[i],axes=(-1,0))
c = np.trace(c,axis1=0,axis2=-1)
return np.reshape(c,d**N)
def getMPS(state,chi):
""" MPS of a state."""
d = 2 # Qubits have 2 states each
N = int(np.log2(len(state))) # Number of qubits
c = np.reshape(state,cShape(d,N)) # State amplitudes tensor c.
A = [] # List of N matrices of MPS, each of shape (chi,d,chi)
# Start left end with a vector of size (d,chi)
c = np.reshape(c,(d,d**(N-1))) # Reshape c
(ap,sv,c) = np.linalg.svd(c) # Apply SVD
s = np.zeros((d,chi),dtype=complex) # Construct singular value matrix shape
s[:d,:d] = np.diag(sv[:chi]) # Fill s with singular values
# Trim c or fill rest of c with zeros
newc = np.zeros((chi,d**(N-1)),dtype=complex)
newc[:min(chi,d**(N-1)),:] = c[:chi,:]
c = newc
A.append(np.tensordot(ap,s,axes=(-1,0))) # Contract and append to A
# Sweep through the middle, creating matrix products each with
# shape (chi,d,chi)
for i in xrange(1,N-2):
c = np.reshape(c,(d*chi,d**(N-i-1)))
(ap,sv,c) = np.linalg.svd(c)
s = np.zeros((d*chi,chi),dtype=complex)
s[:min(chi,len(sv)),:min(chi,len(sv))] = np.diag(sv[:chi])
A.append(np.reshape(np.dot(ap,s),(chi,d,chi)))
newc = np.zeros((chi,d**(N-i-1)),dtype=complex)
newc[:min(chi,len(sv)),:] = c[:chi,:]
c = newc
# Finish right end with the remaining vector
c = np.reshape(c,(d*chi,d))
(ap,sv,c) = np.linalg.svd(c)
s = np.zeros((chi,d),dtype=complex)
s[:d,:d] = np.diag(sv[:chi])
A.append(np.reshape(ap[:chi,:],(chi,d,chi)))
c = np.dot(s,c)
A.append(c)
# Fix up ends by filling first row of correctly shaped zeros with
# end vectors such that the trace is preserved.
start = np.zeros((chi,d,chi),dtype=complex)
start[0,:,:] = A[0]
A[0] = start
finish = np.zeros((chi,d,chi),dtype=complex)
finish[:,:,0] = A[-1]
A[-1] = finish
# Return MPS as numpy array with shape (N,chi,d,chi)
return np.array(A)
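# Round-trip sanity check (illustrative): with a sufficient bond dimension the
# conversion should be near-lossless.
#   psi = randomState(2, 4)
#   print(fidelity(psi, getState(getMPS(psi, 4))))  # should be close to 1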
def innerProduct(A,B):
""" Inner product <A|B> using transfer matrices
    where A and B are MPS representations of |A> and |B>.
"""
N = len(A) # Number of qubits
chiA = A.shape[1] # bond dimension of MPS in A
chiB = B.shape[1] # bond dimension of MPS in B
d = A.shape[2] # d = 2 for qubits
# Take adjoint of |A> to get <A|
A = np.conj(A)
# Construct list of transfer matrices by contracting pairs of
# tensors from A and B.
transfer = []
for i in xrange(N):
t = np.tensordot(A[i],B[i],axes=(1,1))
t = np.transpose(t,axes=(0,2,1,3))
t = np.reshape(t,(chiA*chiB,chiA*chiB))
transfer.append(t)
# Contract the transfer matrices.
prod = transfer[0]
for i in xrange(1,len(transfer)):
prod = np.tensordot(prod,transfer[i],axes=(-1,0))
return np.trace(prod)
def randomState(d,N):
state = (np.random.rand(d**N)-.5) + (np.random.rand(d**N)-.5)*1j
state = state/np.linalg.norm(state)
return state
def cShape(d,N):
""" Returns the shape of c tensor representation.
I.e. simply just (d,d,...,d) N times.
"""
return tuple([d for i in xrange(N)])
if __name__ == "__main__":
main()
|
ehua7365/RibbonOperators
|
TEBD/mpstest7.py
|
Python
|
mit
| 5,808
|
# -*- coding: utf-8 -*-
from __future__ import (print_function, unicode_literals, absolute_import,
division)
import json
import re
import six
import sys
channel_name_re = re.compile(r'\A[-a-zA-Z0-9_=@,.;]+\Z')
app_id_re = re.compile(r'\A[0-9]+\Z')
pusher_url_re = re.compile(r'\A(http|https)://(.*):(.*)@(.*)/apps/([0-9]+)\Z')
socket_id_re = re.compile(r'\A\d+\.\d+\Z')
if sys.version_info < (3,):
text = 'a unicode string'
else:
text = 'a string'
def ensure_text(obj, name):
if isinstance(obj, six.text_type):
return obj
if isinstance(obj, six.string_types):
return six.text_type(obj)
raise TypeError("%s should be %s" % (name, text))
def validate_channel(channel):
channel = ensure_text(channel, "channel")
if len(channel) > 200:
raise ValueError("Channel too long: %s" % channel)
if not channel_name_re.match(channel):
raise ValueError("Invalid Channel: %s" % channel)
return channel
def validate_socket_id(socket_id):
socket_id = ensure_text(socket_id, "socket_id")
if not socket_id_re.match(socket_id):
raise ValueError("Invalid socket ID: %s" % socket_id)
return socket_id
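# Examples (illustrative):
#   validate_channel(u'presence-chat')  # returns u'presence-chat'
#   validate_socket_id('1234.5678')     # returns '1234.5678'
#   validate_channel('no spaces!')      # raises ValueError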
|
hkjallbring/pusher-http-python
|
pusher/util.py
|
Python
|
mit
| 1,209
|
import contextlib
import functools
import socket
import ssl
import tempfile
import time
from typing import (
Any,
Callable,
Container,
Dict,
Generic,
Hashable,
Iterable,
Iterator,
List,
Optional,
Set,
Tuple,
Type,
TypeVar,
Union,
cast,
)
import pytest
from . import basecontrollers, client_mock, patma, runner, tls
from .authentication import Authentication
from .basecontrollers import TestCaseControllerConfig
from .exceptions import ConnectionClosed
from .irc_utils import capabilities, message_parser
from .irc_utils.message_parser import Message
from .irc_utils.sasl import sasl_plain_blob
from .numerics import (
ERR_BADCHANNELKEY,
ERR_BANNEDFROMCHAN,
ERR_INVITEONLYCHAN,
ERR_NEEDREGGEDNICK,
ERR_NOSUCHCHANNEL,
ERR_TOOMANYCHANNELS,
RPL_HELLO,
)
from .specifications import Capabilities, IsupportTokens, Specifications
__tracebackhide__ = True # Hide from pytest tracebacks on test failure.
CHANNEL_JOIN_FAIL_NUMERICS = frozenset(
[
ERR_NOSUCHCHANNEL,
ERR_TOOMANYCHANNELS,
ERR_BADCHANNELKEY,
ERR_INVITEONLYCHAN,
ERR_BANNEDFROMCHAN,
ERR_NEEDREGGEDNICK,
]
)
# typevar for decorators
TCallable = TypeVar("TCallable", bound=Callable)
TClass = TypeVar("TClass", bound=Type)
# typevar for the client name used by tests (usually int or str)
TClientName = TypeVar("TClientName", bound=Union[Hashable, int])
TController = TypeVar("TController", bound=basecontrollers._BaseController)
# general-purpose typevar
T = TypeVar("T")
class ChannelJoinException(Exception):
def __init__(self, code: str, params: List[str]):
super().__init__(f"Failed to join channel ({code}): {params}")
self.code = code
self.params = params
class _IrcTestCase(Generic[TController]):
"""Base class for test cases.
    It implements various `assert*` methods that look like unittest's,
    but are actually based on the `assert` statement, so derived classes are
pytest-style rather than unittest-style.
It also calls setUp() and tearDown() like unittest would."""
# Will be set by __main__.py
controllerClass: Type[TController]
show_io: bool
controller: TController
__new__ = object.__new__ # pytest won't collect Generic subclasses otherwise
@staticmethod
def config() -> TestCaseControllerConfig:
"""Some configuration to pass to the controllers.
For example, Oragono only enables its MySQL support if
config()["chathistory"]=True.
"""
return TestCaseControllerConfig()
def setUp(self) -> None:
if self.controllerClass is not None:
self.controller = self.controllerClass(self.config())
if self.show_io:
print("---- new test ----")
def tearDown(self) -> None:
pass
def setup_method(self, method: Callable) -> None:
self.setUp()
def teardown_method(self, method: Callable) -> None:
self.tearDown()
def assertMessageMatch(self, msg: Message, **kwargs: Any) -> None:
"""Helper for partially comparing a message.
Takes the message as first arguments, and comparisons to be made
as keyword arguments.
Uses patma.match_list on the params argument.
"""
error = self.messageDiffers(msg, **kwargs)
if error:
raise AssertionError(error)
def messageEqual(self, msg: Message, **kwargs: Any) -> bool:
"""Boolean negation of `messageDiffers` (returns a boolean,
not an optional string)."""
return not self.messageDiffers(msg, **kwargs)
def messageDiffers(
self,
msg: Message,
params: Optional[List[Union[str, None, patma.Operator]]] = None,
target: Optional[str] = None,
tags: Optional[
Dict[Union[str, patma.Operator], Union[str, patma.Operator, None]]
] = None,
nick: Optional[str] = None,
fail_msg: Optional[str] = None,
extra_format: Tuple = (),
**kwargs: Any,
) -> Optional[str]:
"""Returns an error message if the message doesn't match the given arguments,
or None if it matches."""
for (key, value) in kwargs.items():
if getattr(msg, key) != value:
fail_msg = (
fail_msg or "expected {param} to be {expects}, got {got}: {msg}"
)
return fail_msg.format(
*extra_format,
got=getattr(msg, key),
expects=value,
param=key,
msg=msg,
)
if params and not patma.match_list(list(msg.params), params):
fail_msg = (
fail_msg or "expected params to match {expects}, got {got}: {msg}"
)
return fail_msg.format(
*extra_format, got=msg.params, expects=params, msg=msg
)
if tags and not patma.match_dict(msg.tags, tags):
fail_msg = fail_msg or "expected tags to match {expects}, got {got}: {msg}"
return fail_msg.format(*extra_format, got=msg.tags, expects=tags, msg=msg)
if nick:
got_nick = msg.prefix.split("!")[0] if msg.prefix else None
if nick != got_nick:
fail_msg = (
fail_msg
or "expected nick to be {expects}, got {got} instead: {msg}"
)
                return fail_msg.format(
                    *extra_format, got=got_nick, expects=nick, msg=msg
                )
return None
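    # Usage sketch (illustrative): assertMessageMatch delegates to
    # messageDiffers, so a received PRIVMSG can be checked with e.g.:
    #   self.assertMessageMatch(msg, command="PRIVMSG",
    #                           params=["#chan", "hello"])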
def assertIn(
self,
member: Any,
container: Union[Iterable[Any], Container[Any]],
msg: Optional[str] = None,
fail_msg: Optional[str] = None,
extra_format: Tuple = (),
) -> None:
if fail_msg:
msg = fail_msg.format(*extra_format, item=member, list=container, msg=msg)
assert member in container, msg # type: ignore
def assertNotIn(
self,
member: Any,
container: Union[Iterable[Any], Container[Any]],
msg: Optional[str] = None,
fail_msg: Optional[str] = None,
extra_format: Tuple = (),
) -> None:
if fail_msg:
msg = fail_msg.format(*extra_format, item=member, list=container, msg=msg)
assert member not in container, msg # type: ignore
def assertEqual(
self,
got: T,
expects: T,
msg: Any = None,
fail_msg: Optional[str] = None,
extra_format: Tuple = (),
) -> None:
if fail_msg:
msg = fail_msg.format(*extra_format, got=got, expects=expects, msg=msg)
assert got == expects, msg
def assertNotEqual(
self,
got: T,
expects: T,
msg: Any = None,
fail_msg: Optional[str] = None,
extra_format: Tuple = (),
) -> None:
if fail_msg:
msg = fail_msg.format(*extra_format, got=got, expects=expects, msg=msg)
assert got != expects, msg
def assertGreater(
self,
got: T,
expects: T,
msg: Any = None,
fail_msg: Optional[str] = None,
extra_format: Tuple = (),
) -> None:
if fail_msg:
msg = fail_msg.format(*extra_format, got=got, expects=expects, msg=msg)
        assert got > expects, msg # type: ignore
def assertGreaterEqual(
self,
got: T,
expects: T,
msg: Any = None,
fail_msg: Optional[str] = None,
extra_format: Tuple = (),
) -> None:
if fail_msg:
msg = fail_msg.format(*extra_format, got=got, expects=expects, msg=msg)
assert got >= expects, msg # type: ignore
def assertLess(
self,
got: T,
expects: T,
msg: Any = None,
fail_msg: Optional[str] = None,
extra_format: Tuple = (),
) -> None:
if fail_msg:
msg = fail_msg.format(*extra_format, got=got, expects=expects, msg=msg)
assert got < expects, msg # type: ignore
def assertLessEqual(
self,
got: T,
expects: T,
msg: Any = None,
fail_msg: Optional[str] = None,
extra_format: Tuple = (),
) -> None:
if fail_msg:
msg = fail_msg.format(*extra_format, got=got, expects=expects, msg=msg)
assert got <= expects, msg # type: ignore
def assertTrue(
self,
got: T,
msg: Any = None,
fail_msg: Optional[str] = None,
extra_format: Tuple = (),
) -> None:
if fail_msg:
msg = fail_msg.format(*extra_format, got=got, msg=msg)
assert got, msg
def assertFalse(
self,
got: T,
msg: Any = None,
fail_msg: Optional[str] = None,
extra_format: Tuple = (),
) -> None:
if fail_msg:
msg = fail_msg.format(*extra_format, got=got, msg=msg)
assert not got, msg
@contextlib.contextmanager
def assertRaises(self, exception: Type[Exception]) -> Iterator[None]:
with pytest.raises(exception):
yield
class BaseClientTestCase(_IrcTestCase[basecontrollers.BaseClientController]):
"""Basic class for client tests. Handles spawning a client and exchanging
messages with it."""
conn: Optional[socket.socket]
nick: Optional[str] = None
user: Optional[List[str]] = None
server: socket.socket
    protocol_version: Optional[int] = None
    acked_capabilities: Optional[Set[str]] = None
__new__ = object.__new__ # pytest won't collect Generic[] subclasses otherwise
def setUp(self) -> None:
super().setUp()
self.conn = None
self._setUpServer()
def tearDown(self) -> None:
if self.conn:
try:
self.conn.sendall(b"QUIT :end of test.")
except BrokenPipeError:
pass # client already disconnected
except OSError:
pass # the conn was already closed by the test, or something
self.controller.kill()
if self.conn:
self.conn_file.close()
self.conn.close()
self.server.close()
def _setUpServer(self) -> None:
"""Creates the server and make it listen."""
self.server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.server.bind(("", 0)) # Bind any free port
self.server.listen(1)
# Used to check if the client is alive from time to time
self.server.settimeout(1)
def acceptClient(
self,
tls_cert: Optional[str] = None,
tls_key: Optional[str] = None,
server: Optional[socket.socket] = None,
) -> None:
"""Make the server accept a client connection. Blocking."""
server = server or self.server
assert server
# Wait for the client to connect
while True:
try:
(self.conn, addr) = server.accept()
except socket.timeout:
self.controller.check_is_alive()
else:
break
if tls_cert is None and tls_key is None:
pass
else:
assert (
tls_cert and tls_key
), "tls_cert must be provided if and only if tls_key is."
with tempfile.NamedTemporaryFile(
"at"
) as certfile, tempfile.NamedTemporaryFile("at") as keyfile:
certfile.write(tls_cert)
certfile.seek(0)
keyfile.write(tls_key)
keyfile.seek(0)
context = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH)
context.load_cert_chain(certfile=certfile.name, keyfile=keyfile.name)
self.conn = context.wrap_socket(self.conn, server_side=True)
self.conn_file = self.conn.makefile(newline="\r\n", encoding="utf8")
def getLine(self) -> str:
line = self.conn_file.readline()
if self.show_io:
print("{:.3f} C: {}".format(time.time(), line.strip()))
return line
def getMessage(
self, *args: Any, filter_pred: Optional[Callable[[Message], bool]] = None
) -> Message:
"""Gets a message and returns it. If a filter predicate is given,
fetches messages until the predicate returns a False on a message,
and returns this message."""
while True:
line = self.getLine(*args)
if not line:
raise ConnectionClosed()
msg = message_parser.parse_message(line)
if not filter_pred or filter_pred(msg):
return msg
def sendLine(self, line: str) -> None:
assert self.conn
self.conn.sendall(line.encode())
if not line.endswith("\r\n"):
self.conn.sendall(b"\r\n")
if self.show_io:
print("{:.3f} S: {}".format(time.time(), line.strip()))
def readCapLs(
        self, auth: Optional[Authentication] = None, tls_config: Optional[tls.TlsConfig] = None
) -> None:
(hostname, port) = self.server.getsockname()
self.controller.run(
hostname=hostname, port=port, auth=auth, tls_config=tls_config
)
self.acceptClient()
m = self.getMessage()
self.assertEqual(m.command, "CAP", "First message is not CAP LS.")
if m.params == ["LS"]:
self.protocol_version = 301
elif m.params == ["LS", "302"]:
self.protocol_version = 302
elif m.params == ["END"]:
self.protocol_version = None
else:
raise AssertionError("Unknown CAP params: {}".format(m.params))
def userNickPredicate(self, msg: Message) -> bool:
"""Predicate to be used with getMessage to handle NICK/USER
transparently."""
if msg.command == "NICK":
self.assertEqual(len(msg.params), 1, msg=msg)
self.nick = msg.params[0]
return False
elif msg.command == "USER":
self.assertEqual(len(msg.params), 4, msg=msg)
self.user = msg.params
return False
else:
return True
def negotiateCapabilities(
self,
caps: List[str],
cap_ls: bool = True,
auth: Optional[Authentication] = None,
) -> Optional[Message]:
"""Performes a complete capability negociation process, without
ending it, so the caller can continue the negociation."""
if cap_ls:
self.readCapLs(auth)
if not self.protocol_version:
# No negotiation.
return None
self.sendLine("CAP * LS :{}".format(" ".join(caps)))
capability_names = frozenset(capabilities.cap_list_to_dict(caps))
self.acked_capabilities = set()
while True:
m = self.getMessage(filter_pred=self.userNickPredicate)
if m.command != "CAP":
return m
self.assertGreater(len(m.params), 0, m)
if m.params[0] == "REQ":
self.assertEqual(len(m.params), 2, m)
requested = frozenset(m.params[1].split())
if not requested.issubset(capability_names):
self.sendLine(
"CAP {} NAK :{}".format(self.nick or "*", m.params[1][0:100])
)
else:
self.sendLine(
"CAP {} ACK :{}".format(self.nick or "*", m.params[1])
)
self.acked_capabilities.update(requested) # type: ignore
else:
return m
class BaseServerTestCase(
_IrcTestCase[basecontrollers.BaseServerController], Generic[TClientName]
):
"""Basic class for server tests. Handles spawning a server and exchanging
messages with it."""
show_io: bool # set by conftest.py
password: Optional[str] = None
ssl = False
valid_metadata_keys: Set[str] = set()
invalid_metadata_keys: Set[str] = set()
server_support: Optional[Dict[str, Optional[str]]]
run_services = False
__new__ = object.__new__ # pytest won't collect Generic[] subclasses otherwise
def setUp(self) -> None:
super().setUp()
self.server_support = None
(self.hostname, self.port) = self.controller.get_hostname_and_port()
self.controller.run(
self.hostname,
self.port,
password=self.password,
valid_metadata_keys=self.valid_metadata_keys,
invalid_metadata_keys=self.invalid_metadata_keys,
ssl=self.ssl,
run_services=self.run_services,
)
self.clients: Dict[TClientName, client_mock.ClientMock] = {}
def tearDown(self) -> None:
self.controller.kill()
for client in list(self.clients):
self.removeClient(client)
def addClient(
self, name: Optional[TClientName] = None, show_io: Optional[bool] = None
) -> TClientName:
"""Connects a client to the server and adds it to the dict.
If 'name' is not given, uses the lowest unused non-negative integer."""
self.controller.wait_for_port()
if self.run_services:
self.controller.wait_for_services()
if not name:
new_name: int = (
max(
[int(name) for name in self.clients if isinstance(name, (int, str))]
+ [0]
)
+ 1
)
name = cast(TClientName, new_name)
show_io = show_io if show_io is not None else self.show_io
self.clients[name] = client_mock.ClientMock(name=name, show_io=show_io)
self.clients[name].connect(self.hostname, self.port)
return name
def removeClient(self, name: TClientName) -> None:
"""Disconnects the client, without QUIT."""
assert name in self.clients
self.clients[name].disconnect()
del self.clients[name]
def getMessages(self, client: TClientName, **kwargs: Any) -> List[Message]:
return self.clients[client].getMessages(**kwargs)
def getMessage(self, client: TClientName, **kwargs: Any) -> Message:
return self.clients[client].getMessage(**kwargs)
def getRegistrationMessage(self, client: TClientName) -> Message:
"""Filter notices, do not send pings."""
while True:
msg = self.getMessage(
client,
synchronize=False,
filter_pred=lambda m: m.command not in ("NOTICE", RPL_HELLO),
)
if msg.command == "PING":
# Hi Unreal
self.sendLine(client, "PONG :" + msg.params[0])
else:
return msg
def sendLine(self, client: TClientName, line: Union[str, bytes]) -> None:
return self.clients[client].sendLine(line)
def getCapLs(
self, client: TClientName, as_list: bool = False
) -> Union[List[str], Dict[str, Optional[str]]]:
"""Waits for a CAP LS block, parses all CAP LS messages, and return
the dict capabilities, with their values.
If as_list is given, returns the raw list (ie. key/value not split)
in case the order matters (but it shouldn't)."""
caps = []
while True:
m = self.getRegistrationMessage(client)
self.assertMessageMatch(m, command="CAP")
self.assertEqual(m.params[1], "LS", fail_msg="Expected CAP * LS, got {got}")
if m.params[2] == "*":
caps.extend(m.params[3].split())
else:
caps.extend(m.params[2].split())
if not as_list:
return capabilities.cap_list_to_dict(caps)
return caps
def assertDisconnected(self, client: TClientName) -> None:
try:
self.getMessages(client)
self.getMessages(client)
except (socket.error, ConnectionClosed):
del self.clients[client]
return
else:
raise AssertionError("Client not disconnected.")
def skipToWelcome(self, client: TClientName) -> List[Message]:
"""Skip to the point where we are registered
<https://tools.ietf.org/html/rfc2812#section-3.1>
"""
result = []
while True:
m = self.getMessage(client, synchronize=False)
result.append(m)
if m.command == "001":
return result
elif m.command == "PING":
# Hi, Unreal
self.sendLine(client, "PONG :" + m.params[0])
def requestCapabilities(
self,
client: TClientName,
capabilities: List[str],
skip_if_cap_nak: bool = False,
) -> None:
self.sendLine(client, "CAP REQ :{}".format(" ".join(capabilities)))
m = self.getRegistrationMessage(client)
try:
self.assertMessageMatch(
m, command="CAP", fail_msg="Expected CAP ACK, got: {msg}"
)
self.assertEqual(
m.params[1], "ACK", m, fail_msg="Expected CAP ACK, got: {msg}"
)
except AssertionError:
if skip_if_cap_nak:
raise runner.CapabilityNotSupported(" or ".join(capabilities))
else:
raise
def connectClient(
self,
nick: str,
        name: Optional[TClientName] = None,
capabilities: Optional[List[str]] = None,
skip_if_cap_nak: bool = False,
show_io: Optional[bool] = None,
account: Optional[str] = None,
password: Optional[str] = None,
ident: str = "username",
) -> List[Message]:
"""Connections a new client, does the cap negotiation
and connection registration, and skips to the end of the MOTD.
Returns the list of all messages received after registration,
just like `skipToWelcome`."""
client = self.addClient(name, show_io=show_io)
if capabilities:
self.sendLine(client, "CAP LS 302")
m = self.getRegistrationMessage(client)
self.requestCapabilities(client, capabilities, skip_if_cap_nak)
if password is not None:
if "sasl" not in (capabilities or ()):
                raise ValueError("Used 'password' option without the sasl capability")
self.sendLine(client, "AUTHENTICATE PLAIN")
m = self.getRegistrationMessage(client)
self.assertMessageMatch(m, command="AUTHENTICATE", params=["+"])
self.sendLine(client, sasl_plain_blob(account or nick, password))
m = self.getRegistrationMessage(client)
self.assertIn(m.command, ["900", "903"], str(m))
self.sendLine(client, "NICK {}".format(nick))
self.sendLine(client, "USER %s * * :Realname" % (ident,))
if capabilities:
self.sendLine(client, "CAP END")
welcome = self.skipToWelcome(client)
self.sendLine(client, "PING foo")
# Skip all that happy welcoming stuff
self.server_support = {}
while True:
m = self.getMessage(client)
if m.command == "PONG":
break
elif m.command == "005":
for param in m.params[1:-1]:
if "=" in param:
(key, value) = param.split("=")
self.server_support[key] = value
else:
self.server_support[param] = None
welcome.append(m)
return welcome
def joinClient(self, client: TClientName, channel: str) -> None:
self.sendLine(client, "JOIN {}".format(channel))
received = {m.command for m in self.getMessages(client)}
self.assertIn(
"366",
received,
fail_msg="Join to {} failed, {item} is not in the set of "
"received responses: {list}",
extra_format=(channel,),
)
def joinChannel(self, client: TClientName, channel: str) -> None:
self.sendLine(client, "JOIN {}".format(channel))
# wait until we see them join the channel
joined = False
while not joined:
for msg in self.getMessages(client):
if (
msg.command == "JOIN"
and 0 < len(msg.params)
and msg.params[0].lower() == channel.lower()
):
joined = True
break
elif msg.command in CHANNEL_JOIN_FAIL_NUMERICS:
raise ChannelJoinException(msg.command, msg.params)
_TSelf = TypeVar("_TSelf", bound="OptionalityHelper")
_TReturn = TypeVar("_TReturn")
class OptionalityHelper(Generic[TController]):
controller: TController
def checkSaslSupport(self) -> None:
if self.controller.supported_sasl_mechanisms:
return
raise runner.NotImplementedByController("SASL")
def checkMechanismSupport(self, mechanism: str) -> None:
if mechanism in self.controller.supported_sasl_mechanisms:
return
raise runner.OptionalSaslMechanismNotSupported(mechanism)
@staticmethod
def skipUnlessHasMechanism(
mech: str,
) -> Callable[[Callable[[_TSelf], _TReturn]], Callable[[_TSelf], _TReturn]]:
# Just a function returning a function that takes functions and
# returns functions, nothing to see here.
# If Python didn't have such an awful syntax for callables, it would be:
# str -> ((TSelf -> TReturn) -> (TSelf -> TReturn))
def decorator(f: Callable[[_TSelf], _TReturn]) -> Callable[[_TSelf], _TReturn]:
@functools.wraps(f)
def newf(self: _TSelf) -> _TReturn:
self.checkMechanismSupport(mech)
return f(self)
return newf
return decorator
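    # Example usage (hypothetical test method):
    #     @OptionalityHelper.skipUnlessHasMechanism("PLAIN")
    #     def testSaslPlain(self): ...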
@staticmethod
def skipUnlessHasSasl(
f: Callable[[_TSelf], _TReturn]
) -> Callable[[_TSelf], _TReturn]:
@functools.wraps(f)
def newf(self: _TSelf) -> _TReturn:
self.checkSaslSupport()
return f(self)
return newf
def mark_services(cls: TClass) -> TClass:
cls.run_services = True
return pytest.mark.services(cls) # type: ignore
def mark_specifications(
*specifications_str: str, deprecated: bool = False, strict: bool = False
) -> Callable[[TCallable], TCallable]:
specifications = frozenset(
Specifications.from_name(s) if isinstance(s, str) else s
for s in specifications_str
)
if None in specifications:
raise ValueError("Invalid set of specifications: {}".format(specifications))
def decorator(f: TCallable) -> TCallable:
for specification in specifications:
f = getattr(pytest.mark, specification.value)(f)
if strict:
f = pytest.mark.strict(f)
if deprecated:
f = pytest.mark.deprecated(f)
return f
return decorator
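# Example usage (hypothetical specification name and test):
#     @mark_specifications("RFC1459", strict=True)
#     def testNick(self): ...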
def mark_capabilities(
*capabilities_str: str, deprecated: bool = False, strict: bool = False
) -> Callable[[TCallable], TCallable]:
capabilities = frozenset(
Capabilities.from_name(c) if isinstance(c, str) else c for c in capabilities_str
)
if None in capabilities:
raise ValueError("Invalid set of capabilities: {}".format(capabilities))
def decorator(f: TCallable) -> TCallable:
for capability in capabilities:
f = getattr(pytest.mark, capability.value)(f)
# Support for any capability implies IRCv3
f = pytest.mark.IRCv3(f)
return f
return decorator
def mark_isupport(
*tokens_str: str, deprecated: bool = False, strict: bool = False
) -> Callable[[TCallable], TCallable]:
tokens = frozenset(
IsupportTokens.from_name(c) if isinstance(c, str) else c for c in tokens_str
)
if None in tokens:
raise ValueError("Invalid set of isupport tokens: {}".format(tokens))
def decorator(f: TCallable) -> TCallable:
for token in tokens:
f = getattr(pytest.mark, token.value)(f)
return f
return decorator
|
ProgVal/irctest
|
irctest/cases.py
|
Python
|
mit
| 28,335
|
PLACES = ["Virginia", "Lexington", "Washington", "New York"]
|
jl4ge/cs3240-labdemo
|
places.py
|
Python
|
mit
| 61
|
from onlineticket.generated.ticket import Ticket
from onlineticket.section import SectionParser
class TicketParser:
def parse(self, filename):
parsed = self._parse_kaitai(filename)
return self._map(parsed)
def _parse_kaitai(self, filename):
return Ticket.from_file(filename)
def _map(self, parsed):
p = parsed
return {
'header': {
'version': p.version,
'issuer': p.issuer,
'signature_key_id': p.key_id,
'signature': p.signature.hex(),
'payload_size': p.payload_size
},
'payload': self._map_payload(parsed)
}
def _map_payload(self, parsed):
section_parser = SectionParser()
return list(map(lambda section: section_parser.parse(section), parsed.payload.section))
|
joushx/Online-Ticket-Code
|
onlineticket/ticketparser.py
|
Python
|
mit
| 860
|
from __future__ import unicode_literals
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
import docker
@python_2_unicode_compatible
class DockerServer(models.Model):
name = models.CharField(max_length=255, unique=True)
version = models.CharField(max_length=255, default='auto')
docker_host = models.CharField(max_length=255)
docker_tls_verify = models.BooleanField(default=True)
docker_cert_path = models.CharField(max_length=255, null=True, blank=True)
def get_env(self):
env = {
'DOCKER_HOST': self.docker_host
}
if self.docker_tls_verify:
env['DOCKER_TLS_VERIFY'] = self.docker_tls_verify
if self.docker_cert_path:
env['DOCKER_CERT_PATH'] = self.docker_cert_path
return env
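    # Example: with docker_host='tcp://10.0.0.2:2376' (hypothetical address) and
    # docker_tls_verify=True, get_env() returns
    # {'DOCKER_HOST': 'tcp://10.0.0.2:2376', 'DOCKER_TLS_VERIFY': True}.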
def get_client(self):
client = docker.from_env(
version=self.version,
environment=self.get_env()
)
return client
def __str__(self):
return 'Docker Server: %s' % self.name
|
sourcelair/castor
|
castor/docker_servers/models.py
|
Python
|
mit
| 1,063
|
from flask import Blueprint
main = Blueprint('main', __name__)
from . import errors, views
from ..models import Permission
@main.app_context_processor
def inject_permissions():
return dict(Permission=Permission)
|
caser789/xuejiao-blog
|
app/main/__init__.py
|
Python
|
mit
| 219
|
from .models import Donor
from django.forms import ModelForm, TextInput
class DonorForm(ModelForm):
class Meta:
model = Donor
fields = [
"email",
"donation_date",
"phone_number",
"address",
"observations"
]
widgets = {
'donation_date': TextInput(attrs={'class': 'form-control'}),
'phone_number': TextInput(attrs={'class': 'form-control'}),
'address': TextInput(attrs={'class': 'form-control'}),
'email': TextInput(attrs={'class': 'form-control'}),
'observations': TextInput(attrs={'class': 'form-control'}),
}
|
amigos-do-gesiel/iespv-administrativo
|
users/forms.py
|
Python
|
mit
| 597
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.3 on 2016-12-16 12:59
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('courses', '0012_userlesson'),
]
operations = [
migrations.AddField(
model_name='exercise',
name='next_exercise',
field=models.OneToOneField(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='next', to='courses.Exercise'),
),
migrations.AddField(
model_name='exercise',
name='prev_exercise',
field=models.OneToOneField(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='prev', to='courses.Exercise'),
),
]
|
RubenSchmidt/giscademy
|
courses/migrations/0013_auto_20161216_1359.py
|
Python
|
mit
| 845
|
import argparse, json
import boto3
from boto.mturk.connection import MTurkConnection
from boto.mturk.qualification import *
from jinja2 import Environment, FileSystemLoader
"""
A bunch of free functions that we use in all scripts.
"""
def get_jinja_env(config):
"""
Get a jinja2 Environment object that we can use to find templates.
"""
return Environment(loader=FileSystemLoader(config['template_directories']))
def json_file(filename):
with open(filename, 'r') as f:
return json.load(f)
def get_parent_parser():
"""
Get an argparse parser with arguments that are always needed
"""
parser = argparse.ArgumentParser(add_help=False)
parser.add_argument('--prod', action='store_false', dest='sandbox',
default=True,
help="Whether to run on the production AMT site.")
parser.add_argument('--hit_ids_file')
parser.add_argument('--config', default='config.json',
type=json_file)
return parser
def get_mturk_connection_from_args(args):
"""
Utility method to get an MTurkConnection from argparse args.
"""
aws_access_key = args.config.get('aws_access_key')
aws_secret_key = args.config.get('aws_secret_key')
return get_mturk_connection(sandbox=args.sandbox,
aws_access_key=aws_access_key,
aws_secret_key=aws_secret_key)
def get_mturk_connection(sandbox=True, aws_access_key=None,
aws_secret_key=None):
"""
Get a boto mturk connection. This is a thin wrapper over the
MTurkConnection constructor; the only difference is a boolean
flag to indicate sandbox or not.
"""
kwargs = {}
if aws_access_key is not None:
kwargs['aws_access_key_id'] = aws_access_key
if aws_secret_key is not None:
kwargs['aws_secret_access_key'] = aws_secret_key
if sandbox:
host = 'mechanicalturk.sandbox.amazonaws.com'
else:
    host = 'mechanicalturk.amazonaws.com'
return MTurkConnection(host=host, **kwargs)
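# Example: get_mturk_connection(sandbox=True) targets the AMT sandbox host,
# while sandbox=False targets the production host.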
def setup_qualifications(hit_properties):
"""
Replace some of the human-readable keys from the raw HIT properties
JSON data structure with boto-specific objects.
"""
qual = Qualifications()
if 'country' in hit_properties:
qual.add(LocaleRequirement('In', hit_properties['country']))
del hit_properties['country']
if 'hits_approved' in hit_properties:
qual.add(NumberHitsApprovedRequirement('GreaterThan',
hit_properties['hits_approved']))
del hit_properties['hits_approved']
if 'percent_approved' in hit_properties:
qual.add(PercentAssignmentsApprovedRequirement('GreaterThan',
hit_properties['percent_approved']))
del hit_properties['percent_approved']
# qual.add(Requirement(qualification_type_id="3TDQPWMDS877YXAXCWP6LHT0FJRANT",comparator='GreaterThan',integer_value=9))
# 3TDQPWMDS877YXAXCWP6LHT0FJRANT
hit_properties['qualifications'] = qual
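# Example: hit_properties of {'country': 'US', 'hits_approved': 100}
# (hypothetical values) becomes a Qualifications object requiring US-based
# workers with more than 100 approved HITs, stored back under
# hit_properties['qualifications'].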
|
choltz95/story-understanding-amt
|
simpleamt.py
|
Python
|
mit
| 2,932
|
import json
import requests
import logging
logger = logging.getLogger(__name__)
import warnings
import time
import sys
from counterpartylib.lib import config
from counterpartylib.lib import util
from counterpartylib.lib import exceptions
from counterpartylib.lib import backend
from counterpartylib.lib import database
CONSENSUS_HASH_SEED = 'We can only see a short distance ahead, but we can see plenty there that needs to be done.'
CONSENSUS_HASH_VERSION_MAINNET = 2
CHECKPOINTS_MAINNET = {
config.BLOCK_FIRST_MAINNET: {'ledger_hash': '766ff0a9039521e3628a79fa669477ade241fc4c0ae541c3eae97f34b547b0b7', 'txlist_hash': '766ff0a9039521e3628a79fa669477ade241fc4c0ae541c3eae97f34b547b0b7'},
280000: {'ledger_hash': '265719e2770d5a6994f6fe49839069183cd842ee14f56c2b870e56641e8a8725', 'txlist_hash': 'a59b33b4633649db4f14586af47e258ed9b8884dbb7aa308fb1f49a653ee60f4'},
290000: {'ledger_hash': '4612ed7034474b4ff1727eb0e216d533ebe7ac755fb015e0f9a170c063f3e84c', 'txlist_hash': 'c15423c849fd360d38cbd6c6c3ea37a07fece723da92353f3056facc2676d9e7'},
300000: {'ledger_hash': '9a3dd4949780404d61e5ca1929f94a43f08eb0fa19ccb4b5d6a61cafd7943199', 'txlist_hash': 'efa02dbdcc4158a598e3b476ece5ba9cc8d26f3abc8ac3777ac6dde0f0afc7e6'},
310000: {'ledger_hash': '45e43d5cc77ea01129df01d7f55b0c89b2d4e18cd3d626fd92f30bfb37a85f4d', 'txlist_hash': '83cdcf75833d828ded09979b601fde87e2fdb0f5eb1cc6ab5d2042b7ec85f90e'},
320000: {'ledger_hash': '91c1d33626669e8098bc762b1a9e3f616884e4d1cadda4881062c92b0d3d3e98', 'txlist_hash': '761793042d8e7c80e14a16c15bb9d40e237c468a87c207a59730b616bdfde7d4'},
330000: {'ledger_hash': 'dd56aa97e5ca15841407f383ce1d7814536a594d7cfffcb4cf60bee8b362065a', 'txlist_hash': '3c45b4377a99e020550a198daa45c378c488a72ba199b53deb90b320d55a897b'},
334000: {'ledger_hash': '24c4fa4097106031267439eb9fbe8ce2a18560169c67726652b608908c1ca9bb', 'txlist_hash': '764ca9e8d3b9546d1c4ff441a39594548989f60daefc6f28e046996e76a273bf'},
335000: {'ledger_hash': 'e57c9d606a615e7e09bf99148596dd28e64b25cd8b081e226d535a64c1ed08d1', 'txlist_hash': '437d9507185b5e193627edf4998aad2264755af8d13dd3948ce119b32dd50ce2'},
336000: {'ledger_hash': '1329ff5b80d034b64f6ea3481b7c7176437a8837b2a7cb7b8a265fdd1397572d', 'txlist_hash': '33eb8cacd4c750f8132d81e8e43ca13bd565f1734d7d182346364847414da52f'},
337000: {'ledger_hash': '607e6a93e8d97cefea9bd55384898ee90c8477ded8a46017f2294feedbc83409', 'txlist_hash': '20b535a55abcc902ca70c19dd648cbe5149af8b4a4157b94f41b71fc422d428e'},
338000: {'ledger_hash': 'f043914c71e4b711abb1c1002767b9a4e7d605e249facaaf7a2046b0e9741204', 'txlist_hash': 'fa2c3f7f76345278271ed5ec391d582858e10b1f154d9b44e5a1f4896400ee46'},
339000: {'ledger_hash': '49f7240bc90ebc2f242dd599c7d2c427b9d2ac844992131e6e862b638ae4393a', 'txlist_hash': 'c1e3b497c054dcf67ddd0dc223e8b8a6e09a1a05bacb9fef5c03e48bd01e64e7'},
340000: {'ledger_hash': '255760e2abfb79fdd76b65759f1590f582c1747f3eeccc4b2ae37d23e30e0729', 'txlist_hash': '8502004bb63e699b243ac8af072d704c69b817905e74787c2031af971e8cd87c'},
341000: {'ledger_hash': '1369cba3909e564d2e725879a8b2cd987df075db121d1d421c8ce16b65f4bf04', 'txlist_hash': 'd217d0bed190cb27f58fcb96b255f8006bc4b9ed739e1bb08507201c49c426c8'},
342000: {'ledger_hash': '9e7e9b8620717189ccea697ff2f84fe71bc4ae8d991481ff235164d72a9e6e4f', 'txlist_hash': 'adf75d023760101b2b337f6359dd811b12521c83837eb3f7db3bbfd0b095aa54'},
343000: {'ledger_hash': 'aa47312ebe94b35504bec6c74713e404e5f36854e0836839344d13debe50558c', 'txlist_hash': '6bdbbc96364b3c92cea132fe66a0925f9445a249f7062326bdcc4ad4711f0c01'},
344000: {'ledger_hash': '40187263aa96d1362bf7b19c8ba0fff7f0c0f3eb132a40fc90601b5926c7e6e3', 'txlist_hash': '98da8efe705c4b54275bfd25f816a7e7a4ff1f67647e17d7a0aaa2a3fef8bda0'},
345000: {'ledger_hash': 'e4a1e1be4beea63d9740ca166b75bb4e3ffa2af33e1fe282e5b09c4952a7448c', 'txlist_hash': '777f163eaa5ad79dcb738871d4318a0699defec469d8afe91ab6277ff8d3e8b8'},
350000: {'ledger_hash': '6a67e9f2e9d07e7bb3277cf9c24f84c857ed1b8fff4a37e589cd56ade276dd95', 'txlist_hash': '96bcbdbce74b782a845d4fda699846d2d3744044c2870a413c018642b8c7c3bf'},
355000: {'ledger_hash': 'a84b17992217c7845e133a8597dac84eba1ee8c48bcc7f74bcf512837120f463', 'txlist_hash': '210d96b42644432b9e1a3433a29af9acb3bad212b67a7ae1dbc011a11b04bc24'},
360000: {'ledger_hash': 'ddca07ea43b336b703fb8ebab6c0dc30582eb360d6f0eb0446e1fe58b53dee0a', 'txlist_hash': '31d0ff3e3782cf9464081829c5595b3de5ac477290dc069d98672f3f552767f8'},
365000: {'ledger_hash': '2d55b126cca3eca15c07b5da683988f9e01d7346d2ca430e940fd7c07ce84fd7', 'txlist_hash': '7988a823cc1e3234953cc87d261d3c1fede8493d0a31b103357eb23cc7dc2eda'},
366000: {'ledger_hash': '64ce274df2784f9ca88a8d7071613ec6527e506ec31cd434eca64c6a3345a6b7', 'txlist_hash': '0d4374da6100e279b24f4ba4a2d6afbfc4fb0fc2d312330a515806e8c5f49404'},
370000: {'ledger_hash': 'fabb2a2e91fad3fe7734169d554cca396c1030243044cef42fcf65717cf0fa61', 'txlist_hash': '41d1732868c9ac25951ace5ca9f311a15d5eca9bf8d548e0d988c050bd2aff87'},
375000: {'ledger_hash': 'a7ac4e2948cea0c426c8fc201cf57d9c313027ea7bff2b32a25ed28d3dbaa581', 'txlist_hash': '96118a7aa2ca753488755b7419a0f44a7fbc371bc58dcc7ab083c70fc14ef8b3'},
380000: {'ledger_hash': '70453ba04c1c0198c4771e7964cffa25f9456c2f71456a8b05dfe935d5fcdc88', 'txlist_hash': '8bf2070103cca6f0bde507b7d20b0ba0630da6349beb560fa64c926d08dbcaef'},
385000: {'ledger_hash': '93eb0a6e820bee197e7591edbc5ead7bfa38f32c88aabf4785f080fd6ae96c4c', 'txlist_hash': '1f8f17fd5766382a8c10a2a0e995a7d5a5d1bcd5fc0220d1e2691b2a94dcc78f'},
390000: {'ledger_hash': '7d42b98eecbc910a67a5f4ac8dc7d6d9b6995ebc5bdf53663b414965fe7d2c5e', 'txlist_hash': 'b50efc4a4241bf3ec33a38c3b5f34756a9f305fe5fa9a80f7f9b70d5d7b2a780'},
395000: {'ledger_hash': '89f9ac390b35e69dd75d6c34854ba501dce2f662fc707aee63cad5822c7660f2', 'txlist_hash': '2151dd2f0aa14685f3d041727a689d5d242578072a049123b317724fc4f1100c'},
400000: {'ledger_hash': 'eb681a305125e04b6f044b36045e23ee248ce4eb68433cea2b36d15e7e74d5f1', 'txlist_hash': 'b48e9501e8d6f1f1b4127d868860885d3db76698c2c31a567777257df101cf61'},
405000: {'ledger_hash': '3725055b37a8958ade6ca1c277cf50fee6036b4a92befb8da2f7c32f0b210881', 'txlist_hash': '871b2adfd246e3fe69f0fe9098e3251045ed6e9712c4cf90ea8dfdd1eb330ed6'},
410000: {'ledger_hash': '1fa9a34f233695ebd7ebb08703bf8d99812fa099f297efc5d307d1ebef902ffd', 'txlist_hash': 'ee3bd84c728a37e2bbe061c1539c9ee6d71db18733b1ed53ee8d320481f55030'},
415000: {'ledger_hash': '6772a8a1c784db14c0bf111e415919c9da4e5ca142be0b9e323c82c1b13c74e0', 'txlist_hash': 'cfb81785cd48e9ba0e54fee4d62f49b347489da82139fd5e1555ae0bc11a33d5'},
420000: {'ledger_hash': '42167117e16943f44bb8117aa0a39bed2d863a454cd694d0bc5006a7aab23b06', 'txlist_hash': 'a1139870bef8eb9bbe60856029a4f01fce5432eb7aeacd088ba2e033757b86e3'},
}
CONSENSUS_HASH_VERSION_TESTNET = 6
CHECKPOINTS_TESTNET = {
config.BLOCK_FIRST_TESTNET: {'ledger_hash': '3e2cd73017159fdc874453f227e9d0dc4dabba6d10e03458f3399f1d340c4ad1', 'txlist_hash': '3e2cd73017159fdc874453f227e9d0dc4dabba6d10e03458f3399f1d340c4ad1'},
313000: {'ledger_hash': 'f9aa095bef80a768c68912e387268c14a2d16ced915a71d9f0c58fbf8d9554ef', 'txlist_hash': '4e0a83016b0e51df3fb905755f3ff82ae160fa1d0dce63375cf71d594d14d054'},
316000: {'ledger_hash': 'eede57604aab218b5d94c087cc5d1b3b1c3ad92b8d583cc73a249cd31865ab73', 'txlist_hash': '988e569c0452a21efc8f3d2a2553cb7122aa574e3658c67f195262699df76c39'},
319000: {'ledger_hash': '08145b9709f74c3faf7186884b76ace877349571870d8d40a4a185c7bdff31a6', 'txlist_hash': 'a635c17858960679b8a5787648f59f08de5fa6b362c45a66125e6bc55705a6f4'},
322000: {'ledger_hash': 'a029da7e7f25721dd111f3fb3a3791074aaf276a26c4ef5858a0ddfc82360938', 'txlist_hash': '7da60d3080a3a74027be619106e1b9c3f963880344c26a5f93f13bc48b8a76e9'},
325000: {'ledger_hash': '94a5d06bf7c815fac477b075893d8bb5aaabdf2a6e28ca77274bbcafaefa874e', 'txlist_hash': '2db52b2b1dae5132f14e65a97c8c95a5375630916f5129eaa9057efabd18e808'},
329000: {'ledger_hash': 'e4f5f8c330e8d2f515d2cfef92759aef7f92dd397df5869d9ffcfe2749b45c5c', 'txlist_hash': 'c002aada68aae93cd2670d04317caf7de6a7935d8f8b71d4044e359804108d00'},
350000: {'ledger_hash': '03000561ca9871223836a214ec1200fb035b70388fbd108bb9351d891844cd9e', 'txlist_hash': '0716337ad4b354823aab46e46f316161adab4fc083f315d4b2c2e7c7b17e0a67'},
400000: {'ledger_hash': '7a1bbf50517d098afbb3ecdc77d41f8bd35555e0937a71c2a2b1a4d072416f4e', 'txlist_hash': 'e28fbecaac4d82ed1d9a8eb2a4a43ab9b2b32c1ca8ce20ca300cc8848a690966'},
450000: {'ledger_hash': 'ce34985ad5400195edc90a5cd50aaa07c3fb746b663aafefb4ff3bb5990fa837', 'txlist_hash': '1667c7a08471cffcccb55056a8e080d0141486b430b673bee5b7cda54ee2387c'},
500000: {'ledger_hash': '703632461af220490f6f9cb006a4741ed07d54dd8d5f0da81297308934745819', 'txlist_hash': '5f32a0d9c49c7788ce0f154c72e9e227c42f7d1ab8a2ff5031701fd46c15eec5'},
550000: {'ledger_hash': '042f52c7944512e4386dd4a3a5c4666ae1ba6234ef9d7d7c14bcba1b39bd75c7', 'txlist_hash': '362613cc234336cb30f645894f3587db636c8b9cba45a01e74934e349063714c'},
600000: {'ledger_hash': '5dfb8ca53d6820f268378cc4313890c14e86ed12623616bfce3800b288749847', 'txlist_hash': 'f7c4f03135d68d139ad4152d46f0a9ffa44c4f2a562d2b7abe15055a43b91dc2'},
650000: {'ledger_hash': '451a04386454fe0f9f1ff81770f70c762be675b88d1cec4fc1ec3d1fbe74d08c', 'txlist_hash': '7ac0d7fa2ec4553ca83c3687b9e16367f468c315df838aef55ae4fd1135adae9'},
700000: {'ledger_hash': 'aecd0c7e882b1770402678a96c413a3c7eb0141c3832e1a2f0ec13a0fa2e7e15', 'txlist_hash': '14acc1dd95aff8348b47866487f85e689dd40920178cf56cdd455d768dbad5cd'},
750000: {'ledger_hash': '955fb9d714eb6c05e3d64f05ddb2f2ff18423e1e16c05dfc6aea8838ce71b807', 'txlist_hash': '54f06faab3e94d30fda98df2a95f3f0a7ad7527b7301bfc824ba847c1b06bd17'},
800000: {'ledger_hash': '0a29c190507ba4cfbeb028e79060352788317143bb907532b0c202c25bf9b2b8', 'txlist_hash': 'd251bb3f1b7be7d6e912bbcb83e3bd554ce82e2e5fd9e725114107a4e2864602'},
850000: {'ledger_hash': '681aa15524dc39d864787362d1eac60a48b10a1d8a689fed4ef33e6f2780ba4d', 'txlist_hash': '94a37d05251ab4749722976929b888853b1b7662595955f94dd8dc97c50cb032'},
}
class ConsensusError(Exception):
pass
def consensus_hash(db, field, previous_consensus_hash, content):
cursor = db.cursor()
block_index = util.CURRENT_BLOCK_INDEX
# Initialise previous hash on first block.
if block_index <= config.BLOCK_FIRST:
assert not previous_consensus_hash
previous_consensus_hash = util.dhash_string(CONSENSUS_HASH_SEED)
# Get previous hash.
if not previous_consensus_hash:
try:
previous_consensus_hash = list(cursor.execute('''SELECT * FROM blocks WHERE block_index = ?''', (block_index - 1,)))[0][field]
except IndexError:
previous_consensus_hash = None
if not previous_consensus_hash:
raise ConsensusError('Empty previous {} for block {}. Please launch a `reparse`.'.format(field, block_index))
# Calculate current hash.
consensus_hash_version = CONSENSUS_HASH_VERSION_TESTNET if config.TESTNET else CONSENSUS_HASH_VERSION_MAINNET
calculated_hash = util.dhash_string(previous_consensus_hash + '{}{}'.format(consensus_hash_version, ''.join(content)))
# Verify hash (if already in database) or save hash (if not).
# NOTE: do not enforce this for messages_hashes, those are more informational (for now at least)
found_hash = list(cursor.execute('''SELECT * FROM blocks WHERE block_index = ?''', (block_index,)))[0][field] or None
if found_hash and field != 'messages_hash':
# Check against existing value.
if calculated_hash != found_hash:
raise ConsensusError('Inconsistent {} for block {} (calculated {}, vs {} in database).'.format(
field, block_index, calculated_hash, found_hash))
else:
# Save new hash.
cursor.execute('''UPDATE blocks SET {} = ? WHERE block_index = ?'''.format(field), (calculated_hash, block_index))
# Check against checkpoints.
checkpoints = CHECKPOINTS_TESTNET if config.TESTNET else CHECKPOINTS_MAINNET
if field != 'messages_hash' and block_index in checkpoints and checkpoints[block_index][field] != calculated_hash:
raise ConsensusError('Incorrect {} for block {}.'.format(field, block_index))
return calculated_hash, found_hash
class SanityError(Exception):
pass
def asset_conservation(db):
logger.debug('Checking for conservation of assets.')
supplies = util.supplies(db)
held = util.held(db)
for asset in supplies.keys():
asset_issued = supplies[asset]
asset_held = held[asset] if asset in held and held[asset] != None else 0
if asset_issued != asset_held:
raise SanityError('{} {} issued ≠ {} {} held'.format(util.value_out(db, asset_issued, asset), asset, util.value_out(db, asset_held, asset), asset))
logger.debug('{} has been conserved ({} {} both issued and held)'.format(asset, util.value_out(db, asset_issued, asset), asset))
class VersionError(Exception):
pass
class VersionUpdateRequiredError(VersionError):
pass
def check_change(protocol_change, change_name):
# Check client version.
passed = True
if config.VERSION_MAJOR < protocol_change['minimum_version_major']:
passed = False
elif config.VERSION_MAJOR == protocol_change['minimum_version_major']:
if config.VERSION_MINOR < protocol_change['minimum_version_minor']:
passed = False
elif config.VERSION_MINOR == protocol_change['minimum_version_minor']:
if config.VERSION_REVISION < protocol_change['minimum_version_revision']:
passed = False
if not passed:
explanation = 'Your version of {} is v{}, but, as of block {}, the minimum version is v{}.{}.{}. Reason: ‘{}’. Please upgrade to the latest version and restart the server.'.format(
config.APP_NAME, config.VERSION_STRING, protocol_change['block_index'], protocol_change['minimum_version_major'], protocol_change['minimum_version_minor'],
protocol_change['minimum_version_revision'], change_name)
if util.CURRENT_BLOCK_INDEX >= protocol_change['block_index']:
raise VersionUpdateRequiredError(explanation)
else:
warnings.warn(explanation)
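# Example (hypothetical numbers): if a protocol change requires v9.50.0 starting
# at block 400000 and the client is v9.49.4, check_change() only warns while
# CURRENT_BLOCK_INDEX < 400000, then raises VersionUpdateRequiredError after it.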
def software_version():
if config.FORCE:
return
logger.debug('Checking version.')
try:
host = 'https://counterpartyxcp.github.io/counterparty-lib/counterpartylib/protocol_changes.json'
response = requests.get(host, headers={'cache-control': 'no-cache'})
versions = json.loads(response.text)
except (requests.exceptions.ConnectionError, ConnectionRefusedError, ValueError) as e:
logger.warning('Unable to check version! ' + str(sys.exc_info()[1]))
return
for change_name in versions:
protocol_change = versions[change_name]
try:
check_change(protocol_change, change_name)
except VersionUpdateRequiredError as e:
logger.error("Version Update Required", exc_info=sys.exc_info())
sys.exit(config.EXITCODE_UPDATE_REQUIRED)
logger.debug('Version check passed.')
class DatabaseVersionError(Exception):
def __init__(self, message, reparse_block_index):
super(DatabaseVersionError, self).__init__(message)
self.reparse_block_index = reparse_block_index
def database_version(db):
if config.FORCE:
return
logger.debug('Checking database version.')
version_major, version_minor = database.version(db)
if version_major != config.VERSION_MAJOR:
# Rollback database if major version has changed.
raise DatabaseVersionError('Client major version number mismatch ({} ≠ {}).'.format(version_major, config.VERSION_MAJOR), config.BLOCK_FIRST)
elif version_minor != config.VERSION_MINOR:
# Reparse all transactions if minor version has changed.
raise DatabaseVersionError('Client minor version number mismatch ({} ≠ {}).'.format(version_minor, config.VERSION_MINOR), None)
# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4
|
F483/counterparty-lib
|
counterpartylib/lib/check.py
|
Python
|
mit
| 16,154
|
# -*- coding: utf-8 -*-
from django import forms
from giza.models import Giza
class GizaEditForm(forms.ModelForm):
"""Giza edit form"""
class Meta:
"""Meta for GizaEditForm"""
model = Giza
exclude = ('user',)
def __init__(self, *args, **kwargs):
"""Init"""
self.user = kwargs.pop('user', None)
super(GizaEditForm, self).__init__(*args, **kwargs)
|
genonfire/portality
|
giza/forms.py
|
Python
|
mit
| 412
|
#!/usr/bin/env python
"""
Project-wide application configuration.
DO NOT STORE SECRETS, PASSWORDS, ETC. IN THIS FILE.
They will be exposed to users. Use environment variables instead.
See get_secrets() below for a fast way to access them.
"""
import os
"""
NAMES
"""
# Project name to be used in urls
# Use dashes, not underscores!
PROJECT_SLUG = 'lunchbox'
# Project name to be used in file paths
PROJECT_FILENAME = 'lunchbox'
# The name of the repository containing the source
REPOSITORY_NAME = 'lunchbox'
GITHUB_USERNAME = 'nprapps'
REPOSITORY_URL = 'git@github.com:%s/%s.git' % (GITHUB_USERNAME, REPOSITORY_NAME)
REPOSITORY_ALT_URL = None # 'git@bitbucket.org:nprapps/%s.git' % REPOSITORY_NAME
DEV_CONTACT = 'Nathan Lawrence, KBIA Digital Content Director'
"""
DEPLOYMENT
"""
PRODUCTION_S3_BUCKET = 'lunchbox.kbia.org'
STAGING_S3_BUCKET = 'stage-lunchbox.kbia.org'
DEFAULT_MAX_AGE = 20
FILE_SERVER_USER = 'ubuntu'
FILE_SERVER = 'tools.apps.npr.org'
FILE_SERVER_PATH = '~/www'
# These variables will be set at runtime. See configure_targets() below
S3_BUCKET = None
S3_BASE_URL = None
S3_DEPLOY_URL = None
DEBUG = True
"""
Utilities
"""
def get_secrets():
"""
A method for accessing our secrets.
"""
secrets_dict = {}
for k,v in os.environ.items():
if k.startswith(PROJECT_SLUG):
k = k[len(PROJECT_SLUG) + 1:]
secrets_dict[k] = v
return secrets_dict
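# Example: with an environment variable lunchbox_AWS_KEY=abc (hypothetical),
# get_secrets() returns {'AWS_KEY': 'abc'}; the 'lunchbox_' prefix is stripped.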
def configure_targets(deployment_target):
"""
Configure deployment targets. Abstracted so this can be
overriden for rendering before deployment.
"""
global S3_BUCKET
global S3_BASE_URL
global S3_DEPLOY_URL
global DEBUG
global DEPLOYMENT_TARGET
global ASSETS_MAX_AGE
if deployment_target == 'electron':
S3_BUCKET = None
S3_BASE_URL = None
S3_DEPLOY_URL = None
DEBUG = False
ASSETS_MAX_AGE = 0
if deployment_target == 'fileserver':
S3_BUCKET = None
S3_BASE_URL = None
S3_DEPLOY_URL = None
DEBUG = False
ASSETS_MAX_AGE = 0
if deployment_target == 'production':
S3_BUCKET = PRODUCTION_S3_BUCKET
S3_BASE_URL = 'http://%s/%s' % (S3_BUCKET, PROJECT_SLUG)
S3_DEPLOY_URL = 's3://%s/%s' % (S3_BUCKET, PROJECT_SLUG)
DEBUG = False
ASSETS_MAX_AGE = 86400
elif deployment_target == 'staging':
S3_BUCKET = STAGING_S3_BUCKET
S3_BASE_URL = 'http://%s/%s' % (S3_BUCKET, PROJECT_SLUG)
S3_DEPLOY_URL = 's3://%s/%s' % (S3_BUCKET, PROJECT_SLUG)
DEBUG = True
ASSETS_MAX_AGE = 20
else:
S3_BUCKET = None
S3_BASE_URL = 'http://127.0.0.1:8000'
S3_DEPLOY_URL = None
DEBUG = True
ASSETS_MAX_AGE = 20
DEPLOYMENT_TARGET = deployment_target
"""
Run automated configuration
"""
DEPLOYMENT_TARGET = os.environ.get('DEPLOYMENT_TARGET', None)
configure_targets(DEPLOYMENT_TARGET)
|
NathanLawrence/lunchbox
|
app_config.py
|
Python
|
mit
| 2,936
|
# encoding: utf8
|
CooperLuan/sasoup
|
examples/__init__.py
|
Python
|
mit
| 16
|
import theano
import numpy as np
from sklearn.preprocessing import OneHotEncoder
from sklearn import cross_validation, metrics, datasets
from neupy import algorithms, layers, environment
environment.reproducible()
theano.config.floatX = 'float32'
mnist = datasets.fetch_mldata('MNIST original')
target_scaler = OneHotEncoder()
target = mnist.target.reshape((-1, 1))
target = target_scaler.fit_transform(target).todense()
data = mnist.data / 255.
data = data - data.mean(axis=0)
x_train, x_test, y_train, y_test = cross_validation.train_test_split(
data.astype(np.float32),
target.astype(np.float32),
train_size=(6 / 7.)
)
network = algorithms.Momentum(
[
layers.Relu(784),
layers.Relu(500),
layers.Softmax(300),
layers.ArgmaxOutput(10),
],
error='categorical_crossentropy',
step=0.01,
verbose=True,
shuffle_data=True,
momentum=0.99,
nesterov=True,
)
network.train(x_train, y_train, x_test, y_test, epochs=20)
y_predicted = network.predict(x_test)
y_test = np.asarray(y_test.argmax(axis=1)).reshape(len(y_test))
print(metrics.classification_report(y_test, y_predicted))
score = metrics.accuracy_score(y_test, y_predicted)
print("Validation accuracy: {:.2f}%".format(100 * score))
|
stczhc/neupy
|
examples/gd/mnist_mlp.py
|
Python
|
mit
| 1,266
|
import numpy as np
import tensorflow as tf
from .module import Module
class RBFExpansion(Module):
def __init__(self, low, high, gap, dim=1, name=None):
self.low = low
self.high = high
self.gap = gap
self.dim = dim
xrange = high - low
self.centers = np.linspace(low, high, int(np.ceil(xrange / gap)))
self.centers = self.centers[:, np.newaxis]
self.n_centers = len(self.centers)
self.fan_out = self.dim * self.n_centers
super(RBFExpansion, self).__init__(name)
def _forward(self, d):
cshape = tf.shape(d)
CS = d.get_shape()
centers = self.centers.reshape((1, -1)).astype(np.float32)
d -= tf.constant(centers)
rbf = tf.exp(-(d ** 2) / self.gap)
# rbf = tf.reshape(rbf, (
# cshape[0], cshape[1], cshape[2],
# self.dim * centers.shape[-1]))
rbf.set_shape([CS[0], self.fan_out])
return rbf
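# Example (hypothetical parameters): RBFExpansion(low=0.0, high=5.0, gap=0.1)
# expands each scalar distance d into ceil(5.0 / 0.1) = 50 Gaussian features
# exp(-(d - mu)**2 / gap), with centers mu evenly spaced on [0, 5].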
|
atomistic-machine-learning/SchNet
|
src/schnet/nn/layers/rbf.py
|
Python
|
mit
| 966
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.6 on 2017-10-15 08:06
from __future__ import unicode_literals
import django.contrib.auth.validators
from django.db import migrations, models
import users.managers
class Migration(migrations.Migration):
initial = True
dependencies = [
('auth', '0008_alter_user_username_max_length'),
]
operations = [
migrations.CreateModel(
name='User',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('password', models.CharField(max_length=128, verbose_name='password')),
('is_superuser', models.BooleanField(default=False, help_text='Designates that this user has all permissions without explicitly assigning them.', verbose_name='superuser status')),
('first_name', models.CharField(blank=True, max_length=30, verbose_name='first name')),
('last_name', models.CharField(blank=True, max_length=30, verbose_name='last name')),
('is_staff', models.BooleanField(default=False, help_text='Designates whether the user can log into this admin site.', verbose_name='staff status')),
('is_active', models.BooleanField(default=True, help_text='Designates whether this user should be treated as active. Unselect this instead of deleting accounts.', verbose_name='active')),
('email', models.EmailField(max_length=255, unique=True, verbose_name='email')),
('last_login', models.DateTimeField(auto_now=True)),
('date_joined', models.DateTimeField(auto_now_add=True)),
('username', models.CharField(error_messages={'unique': 'A user with that username already exists.'}, help_text='Required. 150 characters or fewer. Letters, digits and @/./+/-/_ only.', max_length=150, unique=True, validators=[django.contrib.auth.validators.ASCIIUsernameValidator()], verbose_name='username')),
('groups', models.ManyToManyField(blank=True, help_text='The groups this user belongs to. A user will get all permissions granted to each of their groups.', related_name='user_set', related_query_name='user', to='auth.Group', verbose_name='groups')),
('user_permissions', models.ManyToManyField(blank=True, help_text='Specific permissions for this user.', related_name='user_set', related_query_name='user', to='auth.Permission', verbose_name='user permissions')),
],
options={
'verbose_name': 'user',
'verbose_name_plural': 'users',
},
managers=[
('objects', users.managers.UserManager()),
],
),
]
|
spiralsyzygy/django-drf-base-app
|
users/migrations/0001_initial.py
|
Python
|
mit
| 2,755
|
import os
DIRNAME = os.path.dirname(__file__)
DATABASES = {
"default": {
"ENGINE": "django.db.backends.sqlite3"
}
}
INSTALLED_APPS = (
"django.contrib.auth",
"django.contrib.contenttypes",
"pqauth.pqauth_django_server"
)
SECRET_KEY = "chicken butt"
PQAUTH_SERVER_KEY = os.path.join(DIRNAME, "server.key")
ROOT_URLCONF = "pqauth.pqauth_django_server.urls"
TEST_CLIENT_KEY = os.path.join(DIRNAME, "client.key")
TEST_EVIL_KEY = os.path.join(DIRNAME, "evil.key")
|
teddziuba/pqauth
|
python/pqauth/pqauth_django_server/tests/settings.py
|
Python
|
mit
| 495
|
from twitter import *
import simplejson
import serial
import datetime
import time
import threading
QUIT = 0
prevtweet = ""
def centerstring(string,width):
""" Pad a string to a specific width """
return " "*((width-len(string))/2)+string
def padstring(string,width):
"""pad a string to a maximum length"""
if len(string) > width:
result = string[0:width]
else:
result = string + " "*(width-len(string))
return result
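# Examples: padstring("hi", 5) -> "hi   "; padstring("hello!", 5) -> "hello".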
def runtime():
rangthebell = 0
while QUIT == 0:
# dates = centerstring(datetime.datetime.now().strftime("%B %d, %Y"),20)
# times = centerstring(datetime.datetime.now().strftime("%I:%M:%S %p"),20)
#
# p.write("\x80")
# p.write("%s\r%s" % (dates,times))
dates = datetime.datetime.now().isoformat(' ')[0:19]
p.write("\x80") # move to 0,0 on the display
p.write(padstring(dates,20)) # make sure to have a nice clean line by filling it all out
if datetime.datetime.now().strftime("%M")[-1:] == "5":
if rangthebell == 0:
p.write("\xD2\xE1\xD1\xE4\xD2\xE1") # do an anoying beep at the minute mark
rangthebell = 1
else:
rangthebell = 0
time.sleep(1)
def checktweet():
turl = 'http://api.twitter.com/1.1/search/tweets.json?q='
CONSUMER_KEY = 'xxx'
CONSUMER_SECRET = 'xxx'
OAUTH_TOKEN = 'XXX'
OAUTH_SECRET = 'XXX'
t = Twitter( auth=OAuth(OAUTH_TOKEN,OAUTH_SECRET,CONSUMER_KEY,CONSUMER_SECRET) )
prevtweet = ""
while QUIT == 0:
twitter_results = t.statuses.home_timeline()
tweet = twitter_results[0]['text'].encode('ascii','ignore') # convert to ascii and ignore unicode conv. errors
if prevtweet != tweet:
# p.write("\xA8") # second line 0 position (line 3 on the display)
p.write("\x94") # first line 0 position (line 2 on the display)
p.write(padstring(tweet,60))
p.write("\xD2\xE7\xD1\xE1\xD2\xE5")
print "-"*150
print "From: %s" % twitter_results[0]['user']['screen_name']
print tweet
print "-"*150
prevtweet = tweet
seconds = 0
while seconds < 180:
time.sleep (1)
seconds += 1
p.write("\xCD")
p.write("%03d" % (180-seconds))
if QUIT:
break
p.write("\xD0\xE7\xE2\xE2")
#time.sleep(60)
if __name__ == "__main__":
# open up the serial port
p = serial.Serial("/dev/ttyAMA0", baudrate=19200, timeout=2)
p.write("starting the clock!")
# clear the screen and get ready to display with backlight on
p.write("\x16") # turns it on with no cursor blink
p.write("\x11") # turn on the back light
p.write("\x0C") # clear the screen. Must wait 5ms before we move on
t1 = threading.Thread(target = runtime, args=())
t2 = threading.Thread(target = checktweet, args=())
t1.start()
t2.start()
try:
while 1:
time.sleep(.1)
except KeyboardInterrupt:
print "Quiting"
QUIT = 1
print "Exiting clock"
t1.join()
print "Exiting tweet"
t2.join()
print "Exits complete"
p.write("\x15") # turns display off, but not backlight
p.write("\x12") # turns backlight off
p.close()
QUIT = 1
pass
print 'exiting'
|
tachijuan/python
|
myscripts/timeandtweet.py
|
Python
|
mit
| 3,145
|
#encoding:utf-8
import pymongo
import yaml
import utils
from utils import SupplyResult
from utils.tech import get_dev_channel, short_sleep
subreddit = 'all'
t_channel = '@r_channels'
SETTING_NAME = 1
def send_post(submission, r2t):
config_filename = 'configs/prod.yml'
with open(config_filename) as config_file:
config = yaml.safe_load(config_file.read())
settings = pymongo.MongoClient(host=config['db']['host'])[config['db']['name']]['settings']
settings.ensure_index([('setting', pymongo.ASCENDING)])
last_update_doc = settings.find_one({
'setting': SETTING_NAME,
})
if last_update_doc is None:
last_update_doc = {
'last_update': 0
}
settings.insert_one({
'setting': SETTING_NAME,
'last_update': 0
})
updates = r2t.telegram_bot.get_updates(offset=last_update_doc['last_update'])
last_update = 0
for update in updates:
# print(update)
update = update.to_dict()
# short_sleep()
if 'qwerrty' in str(update):
print(update)
last_update = update['update_id']
if 'message' not in update:
continue
if 'chat' not in update['message']:
continue
if 'text' not in update['message']:
continue
# print(update)
user_id = update['message']['chat']['id']
if not isinstance(user_id, int) or user_id < 0:
continue
message_id = update['message']['message_id']
r2t.telegram_bot.forward_message(chat_id=get_dev_channel(), from_chat_id=user_id, message_id=message_id)
if int(update['message']['chat']['id']) == int(config['telegram']['papa']):
# print('>>>>>>>>>>>>>>>>>^^^^^^^^^^^^^^')
text = update['message']['text']
lines = text.split('\n')
if 'please' not in lines[0].lower():
continue
new_channel_name = lines[1].split(': ')[-1]
new_subreddit = lines[2].split('/')[-1]
new_tags = lines[3].split(': ')[-1]
utils.channels_stuff.set_new_channel(new_channel_name, subreddit=new_subreddit, tags=new_tags)
settings.find_one_and_update(
{
'setting': SETTING_NAME
},
{
'$set':
{
'last_update': last_update
}
}
)
# It's not a proper supply, so just stop.
return SupplyResult.STOP_THIS_SUPPLY
|
Fillll/reddit2telegram
|
reddit2telegram/channels/tech_receiver/app.py
|
Python
|
mit
| 2,506
|
# Imports assumed from the units and plotting calls below (astropy units,
# numpy math functions, matplotlib's plot):
import astropy.units as u
from numpy import arange, pi, sin, cos
from matplotlib.pyplot import plot

def diff_int(d=0.01*u.cm, a=0.001*u.cm, wl=400*u.nm):
    '''
    Plot the intensity of a double-slit interference pattern:
    the single-slit diffraction envelope times the two-slit interference term.
    '''
    theta = arange(-10, 10, 1e-5)*u.degree
    x = pi*a*sin(theta)/wl*u.radian
    xnew = x.decompose()
    i_single = (sin(xnew)/xnew)**2
    y = pi*d*sin(theta)/wl*u.radian
    ynew = y.decompose()
    i_double = (cos(ynew))**2
    I = i_single*i_double
    plot(theta, I)
    return
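# Example: diff_int() plots the pattern for slit separation d = 0.01 cm,
# slit width a = 0.001 cm, and 400 nm light over theta in (-10, 10) degrees.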
|
kfollette/ASTR200-Spring2017
|
Homework/diff_int.py
|
Python
|
mit
| 441
|
# encoding: UTF-8
'''
This module contains the backtesting engine for the CTA module. Its API is
identical to the live CTA engine, so the same code can be backtested and traded live.
'''
from datetime import datetime, timedelta
from collections import OrderedDict
from itertools import product
import pymongo
# import MySQLdb
import json
import os
import cPickle
import csv
from ctaBase import *
from ctaSetting import *
from eventEngine import *
from vtConstant import *
from vtGateway import VtOrderData, VtTradeData
from vtFunction import loadMongoSetting
import logging
import copy
import pandas as pd
from ctaBase import *
########################################################################
class BacktestingEngine(object):
"""
CTA回测引擎
函数接口和策略引擎保持一样,
从而实现同一套代码从回测到实盘。
# modified by IncenseLee:
1.增加Mysql数据库的支持;
2.修改装载数据为批量式后加载模式。
3.增加csv 读取bar的回测模式
4.增加csv 读取tick合并价差的回测模式
5.增加EventEngine,并对newBar增加发送OnBar事件,供外部的回测主体显示Bar线。
"""
TICK_MODE = 'tick' # 数据模式,逐Tick回测
BAR_MODE = 'bar' # 数据模式,逐Bar回测
REALTIME_MODE = 'RealTime' # 逐笔交易计算资金,供策略获取资金容量,计算开仓数量
FINAL_MODE = 'Final' # 最后才统计交易,不适合按照百分比等开仓数量计算
# ----------------------------------------------------------------------
    def __init__(self, eventEngine=None, initCapital=100000):
        """Constructor"""
        self.eventEngine = eventEngine
        # local stop-order counter
        self.stopOrderCount = 0
        # stopOrderID = STOPORDERPREFIX + str(stopOrderCount)
        # local stop-order dictionaries
        # key is stopOrderID, value is a stopOrder object
        self.stopOrderDict = {}             # cancelled stop orders are kept in this dict
        self.workingStopOrderDict = {}      # cancelled stop orders are removed from this dict
        # engine type: backtesting
        self.engineType = ENGINETYPE_BACKTESTING
        # backtesting state
        self.strategy = None            # strategy under test
        self.mode = self.BAR_MODE       # backtesting mode, bar-by-bar by default
        self.slippage = 0               # assumed slippage
        self.rate = 0                   # assumed commission rate (for percentage-based commission)
        self.size = 1                   # contract size, 1 by default
        self.dbClient = None            # database client
        self.dbCursor = None            # database cursor
        self.historyData = []           # list of historical data for backtesting
        self.initData = []              # data used for initialization
        self.backtestingData = []       # data used for backtesting
        self.dbName = ''                # backtesting database name
        self.symbol = ''                # backtesting collection name
        self.dataStartDate = None       # data start date, datetime object
        self.dataEndDate = None         # data end date, datetime object
        self.strategyStartDate = None   # strategy start date (earlier data initializes the strategy), datetime object
        self.limitOrderDict = OrderedDict()         # limit orders
        self.workingLimitOrderDict = OrderedDict()  # active limit orders, used for matching
        self.limitOrderCount = 0                    # limit order counter
        # position buffer dictionary
        # key is vtSymbol, value is a PositionBuffer object
        self.posBufferDict = {}
        self.tradeCount = 0             # trade counter
        self.tradeDict = OrderedDict()  # trades
        self.logList = []               # log records
        # latest market data, used to simulate fills
        self.tick = None
        self.bar = None
        self.dt = None                  # latest timestamp
        self.gatewayName = u'BackTest'
        # CSV-file settings
        self.barTimeInterval = 60       # for CSV bar data, the bar period in seconds; default is 1 minute
        # backtesting accounting
        self.calculateMode = self.FINAL_MODE
        self.usageCompounding = False   # whether to use simple compounding (only effective in FINAL_MODE)
        self.initCapital = initCapital      # initial capital
        self.capital = self.initCapital     # capital (equivalent to balance)
        self.maxCapital = self.initCapital  # peak net capital
        # costs and position limits
        self.avaliable = self.initCapital
        self.percent = EMPTY_FLOAT
        self.percentLimit = 30      # upper limit on the invested position ratio
        self.maxPnl = 0             # highest profit
        self.minPnl = 0             # largest loss
        self.maxVolume = 1          # maximum position size
        self.winningResult = 0      # number of winning trades
        self.losingResult = 0       # number of losing trades
        self.totalResult = 0        # total number of trades
        self.totalWinning = 0       # total profit
        self.totalLosing = 0        # total loss
        self.totalTurnover = 0      # total turnover (contract face value)
        self.totalCommission = 0    # total commission
        self.totalSlippage = 0      # total slippage
        self.timeList = []          # time series
        self.pnlList = []           # per-trade PnL series
        self.capitalList = []       # cumulative PnL time series
        self.drawdownList = []      # drawdown time series
        self.drawdownRateList = []  # maximum drawdown ratio time series
        self.exportTradeList = []   # trade records for export
        self.fixCommission = EMPTY_FLOAT    # fixed per-trade fee
def getAccountInfo(self):
"""返回账号的实时权益,可用资金,仓位比例,投资仓位比例上限"""
if self.capital == EMPTY_FLOAT:
self.percent = EMPTY_FLOAT
return self.capital, self.avaliable, self.percent, self.percentLimit
# ----------------------------------------------------------------------
def setStartDate(self, startDate='20100416', initDays=10):
"""设置回测的启动日期"""
self.dataStartDate = datetime.strptime(startDate, '%Y%m%d')
        # number of initialization days
initTimeDelta = timedelta(initDays)
self.strategyStartDate = self.dataStartDate + initTimeDelta
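        # Example: setStartDate('20100416', initDays=10) loads data from
        # 2010-04-16 and starts the strategy on 2010-04-26; the first 10 days
        # of data only initialize the strategy.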
# ----------------------------------------------------------------------
def setEndDate(self, endDate=''):
"""设置回测的结束日期"""
if endDate:
self.dataEndDate = datetime.strptime(endDate, '%Y%m%d')
else:
self.dataEndDate = datetime.now()
def setMinDiff(self, minDiff):
"""设置回测品种的最小跳价,用于修正数据"""
self.minDiff = minDiff
# ----------------------------------------------------------------------
def setBacktestingMode(self, mode):
"""设置回测模式"""
self.mode = mode
# ----------------------------------------------------------------------
def setDatabase(self, dbName, symbol):
"""设置历史数据所用的数据库"""
self.dbName = dbName
self.symbol = symbol
# ----------------------------------------------------------------------
    def loadHistoryDataFromMongo(self):
        """Load historical data from MongoDB."""
        host, port, log = loadMongoSetting()
        self.dbClient = pymongo.MongoClient(host, port)
        collection = self.dbClient[self.dbName][self.symbol]
        self.output(u'Started loading data')
        # Pick the data class and handler according to the backtesting mode.
        if self.mode == self.BAR_MODE:
            dataClass = CtaBarData
            func = self.newBar
        else:
            dataClass = CtaTickData
            func = self.newTick
        # Load the data needed for initialization.
        flt = {'datetime': {'$gte': self.dataStartDate,
                            '$lt': self.strategyStartDate}}
        initCursor = collection.find(flt)
        # Read the rows out of the query cursor into a list.
        for d in initCursor:
            data = dataClass()
            data.__dict__ = d
            self.initData.append(data)
        # Load the backtesting data.
        if not self.dataEndDate:
            flt = {'datetime': {'$gte': self.strategyStartDate}}  # data filter
        else:
            flt = {'datetime': {'$gte': self.strategyStartDate,
                                '$lte': self.dataEndDate}}
        self.dbCursor = collection.find(flt)
        self.output(u'Loading finished, record count: %s' % (initCursor.count() + self.dbCursor.count()))
# ----------------------------------------------------------------------
    def connectMysql(self):
        """Connect to MySQL."""
        # load the JSON settings file
        fileName = 'mysql_connect.json'
        try:
            f = file(fileName)
        except IOError:
            self.writeCtaLog(u'Backtesting engine failed to read mysql_connect.json')
            return
        # parse the JSON file
        setting = json.load(f)
        try:
            mysql_host = str(setting['host'])
            mysql_port = int(setting['port'])
            mysql_user = str(setting['user'])
            mysql_passwd = str(setting['passwd'])
            mysql_db = str(setting['db'])
        except KeyError:
            self.writeCtaLog(u'Connection settings in mysql_connect.json are missing fields, please check')
            return
        try:
            self.__mysqlConnection = MySQLdb.connect(host=mysql_host, user=mysql_user,
                                                     passwd=mysql_passwd, db=mysql_db, port=mysql_port)
            self.__mysqlConnected = True
            self.writeCtaLog(u'Backtesting engine connected to MySQL')
        except Exception:
            self.writeCtaLog(u'Backtesting engine failed to connect to MySQL')
# ----------------------------------------------------------------------
    def loadDataHistoryFromMysql(self, symbol, startDate, endDate):
        """Load historical tick data from MySQL.
        Loading too much data at once can fail; keep the interval under half a year.
        """
        if not endDate:
            endDate = datetime.today()
        # check whether a local cache exists
        if self.__loadDataHistoryFromLocalCache(symbol, startDate, endDate):
            self.writeCtaLog(u'Historical tick data loaded from cache')
            return
        # date span fetched per query
        intervalDays = 10
        for i in range(0, (endDate - startDate).days + 1, intervalDays):
            d1 = startDate + timedelta(days=i)
            if (endDate - d1).days > 10:
                d2 = startDate + timedelta(days=i + intervalDays - 1)
            else:
                d2 = endDate
            # fetch data from MySQL
            self.__qryDataHistoryFromMysql(symbol, d1, d2)
        self.writeCtaLog(u'Loaded {0} historical tick records in total'.format(len(self.historyData)))
        # save a local cache file
        self.__saveDataHistoryToLocalCache(symbol, startDate, endDate)
    def __loadDataHistoryFromLocalCache(self, symbol, startDate, endDate):
        """Check whether a local cache exists and load it if so.
        added by IncenseLee
        """
        # 'cache' subdirectory under the working directory
        cacheFolder = os.getcwd() + '/cache'
        # cache file
        cacheFile = u'{0}/{1}_{2}_{3}.pickle'. \
            format(cacheFolder, symbol, startDate.strftime('%Y-%m-%d'), endDate.strftime('%Y-%m-%d'))
        if not os.path.isfile(cacheFile):
            return False
        else:
            try:
                # load from the cache file
                cache = open(cacheFile, mode='r')
                self.historyData = cPickle.load(cache)
                cache.close()
                return True
            except Exception as e:
                self.writeCtaLog(u'Failed to read file {0}'.format(cacheFile))
                return False
    def __saveDataHistoryToLocalCache(self, symbol, startDate, endDate):
        """Save the historical data to a local cache.
        added by IncenseLee
        """
        # 'cache' subdirectory under the working directory
        cacheFolder = os.getcwd() + '/cache'
        # create the cache subdirectory if needed
        if not os.path.isdir(cacheFolder):
            os.mkdir(cacheFolder)
        # cache file name
        cacheFile = u'{0}/{1}_{2}_{3}.pickle'. \
            format(cacheFolder, symbol, startDate.strftime('%Y-%m-%d'), endDate.strftime('%Y-%m-%d'))
        # return if the file already exists
        if os.path.isfile(cacheFile):
            return False
        else:
            # write the cache file
            cache = open(cacheFile, mode='w')
            cPickle.dump(self.historyData, cache)
            cache.close()
            return True
# ----------------------------------------------------------------------
def __qryDataHistoryFromMysql(self, symbol, startDate, endDate):
"""从Mysql载入历史TICK数据
added by IncenseLee
"""
        try:
            self.connectMysql()
            if self.__mysqlConnected:
                # Get a dict cursor so rows come back keyed by column alias
                cur = self.__mysqlConnection.cursor(MySQLdb.cursors.DictCursor)
                if endDate:
                    # From startDate through endDate
sqlstring = ' select \'{0}\' as InstrumentID, str_to_date(concat(ndate,\' \', ntime),' \
'\'%Y-%m-%d %H:%i:%s\') as UpdateTime,price as LastPrice,vol as Volume, day_vol as DayVolume,' \
'position_vol as OpenInterest,bid1_price as BidPrice1,bid1_vol as BidVolume1, ' \
'sell1_price as AskPrice1, sell1_vol as AskVolume1 from TB_{0}MI ' \
'where ndate between cast(\'{1}\' as date) and cast(\'{2}\' as date) order by UpdateTime'. \
format(symbol, startDate, endDate)
                elif startDate:
                    # From startDate to now
                    sqlstring = ' select \'{0}\' as InstrumentID,str_to_date(concat(ndate,\' \', ntime),' \
                                '\'%Y-%m-%d %H:%i:%s\') as UpdateTime,price as LastPrice,vol as Volume, day_vol as DayVolume,' \
                                'position_vol as OpenInterest,bid1_price as BidPrice1,bid1_vol as BidVolume1, ' \
                                'sell1_price as AskPrice1, sell1_vol as AskVolume1 from TB_{0}MI ' \
                                'where ndate > cast(\'{1}\' as date) order by UpdateTime'. \
                        format(symbol, startDate)
                else:
                    # The whole table
                    sqlstring = ' select \'{0}\' as InstrumentID,str_to_date(concat(ndate,\' \', ntime),' \
                                '\'%Y-%m-%d %H:%i:%s\') as UpdateTime,price as LastPrice,vol as Volume, day_vol as DayVolume,' \
                                'position_vol as OpenInterest,bid1_price as BidPrice1,bid1_vol as BidVolume1, ' \
                                'sell1_price as AskPrice1, sell1_vol as AskVolume1 from TB_{0}MI order by UpdateTime'. \
                        format(symbol)
self.writeCtaLog(sqlstring)
                # Run the query
                count = cur.execute(sqlstring)
                self.writeCtaLog(u'历史TICK数据共{0}条'.format(count))
                # Fetch in batches to bound memory use
                fetch_counts = 0
                fetch_size = 1000
                while True:
                    results = cur.fetchmany(fetch_size)
                    if not results:
                        break
                    fetch_counts = fetch_counts + len(results)
                    if not self.historyData:
                        self.historyData = results
                    else:
                        self.historyData = self.historyData + results
                self.writeCtaLog(u'{1}~{2}历史TICK数据载入共{0}条'.format(fetch_counts, startDate, endDate))
else:
self.writeCtaLog(u'MysqlDB未连接,请检查')
except MySQLdb.Error as e:
self.writeCtaLog(u'MysqlDB载入数据失败,请检查.Error {0}'.format(e))
def __dataToTick(self, data):
"""
数据库查询返回的data结构,转换为tick对象
added by IncenseLee """
tick = CtaTickData()
symbol = data['InstrumentID']
tick.symbol = symbol
        # Populate the tick object from the DB row
tick.vtSymbol = symbol
# tick.openPrice = data['OpenPrice']
# tick.highPrice = data['HighestPrice']
# tick.lowPrice = data['LowestPrice']
tick.lastPrice = float(data['LastPrice'])
        # bug fix:
        # The volume CTP streams intraday is a cumulative total for the trading
        # day, while the database 'vol' column is recomputed by the data vendor.
        # Use DayVolume so backtests match live CTP behaviour.
        # tick.volume = data['Volume']
        tick.volume = data['DayVolume']
tick.openInterest = data['OpenInterest']
# tick.upperLimit = data['UpperLimitPrice']
# tick.lowerLimit = data['LowerLimitPrice']
tick.datetime = data['UpdateTime']
tick.date = tick.datetime.strftime('%Y-%m-%d')
tick.time = tick.datetime.strftime('%H:%M:%S')
        # The database has no tradingDay column; during backtesting fall back to date.
        tick.tradingDay = tick.date
tick.bidPrice1 = float(data['BidPrice1'])
# tick.bidPrice2 = data['BidPrice2']
# tick.bidPrice3 = data['BidPrice3']
# tick.bidPrice4 = data['BidPrice4']
# tick.bidPrice5 = data['BidPrice5']
tick.askPrice1 = float(data['AskPrice1'])
# tick.askPrice2 = data['AskPrice2']
# tick.askPrice3 = data['AskPrice3']
# tick.askPrice4 = data['AskPrice4']
# tick.askPrice5 = data['AskPrice5']
tick.bidVolume1 = data['BidVolume1']
# tick.bidVolume2 = data['BidVolume2']
# tick.bidVolume3 = data['BidVolume3']
# tick.bidVolume4 = data['BidVolume4']
# tick.bidVolume5 = data['BidVolume5']
tick.askVolume1 = data['AskVolume1']
# tick.askVolume2 = data['AskVolume2']
# tick.askVolume3 = data['AskVolume3']
# tick.askVolume4 = data['AskVolume4']
# tick.askVolume5 = data['AskVolume5']
return tick
# ----------------------------------------------------------------------
def getMysqlDeltaDate(self, symbol, startDate, decreaseDays):
"""从mysql库中获取交易日前若干天
added by IncenseLee
"""
try:
if self.__mysqlConnected:
                # Get a MySQL cursor
cur = self.__mysqlConnection.cursor()
sqlstring = 'select distinct ndate from TB_{0}MI where ndate < ' \
'cast(\'{1}\' as date) order by ndate desc limit {2},1'.format(symbol, startDate,
decreaseDays - 1)
# self.writeCtaLog(sqlstring)
count = cur.execute(sqlstring)
if count > 0:
                    # Fetch the single matching row
result = cur.fetchone()
return result[0]
else:
self.writeCtaLog(u'MysqlDB没有查询结果,请检查日期')
else:
self.writeCtaLog(u'MysqlDB未连接,请检查')
        except MySQLdb.Error as e:
            self.writeCtaLog(u'MysqlDB载入数据失败,请检查.Error {0}: {1}'.format(e.args[0], e.args[1]))
        # Default fallback on error
        return startDate - timedelta(days=3)
# ----------------------------------------------------------------------
def runBackTestingWithArbTickFile(self, mainPath, arbSymbol):
"""运行套利回测(使用本地tickcsv数据)
参数:套利代码 SP rb1610&rb1701
added by IncenseLee
原始的tick,分别存放在白天目录1和夜盘目录2中,每天都有各个合约的数据
Z:\ticks\SHFE\201606\RB\0601\
RB1610.txt
RB1701.txt
....
Z:\ticks\SHFE_night\201606\RB\0601
RB1610.txt
RB1701.txt
....
夜盘目录为自然日,不是交易日。
按照回测的开始日期,到结束日期,循环每一天。
每天优先读取日盘数据,再读取夜盘数据。
读取eg1(如RB1610),读取Leg2(如RB701),合并成价差tick,灌输到策略的onTick中。
"""
        self.capital = self.initCapital  # reset starting equity
        if len(arbSymbol) < 1:
            self.writeCtaLog(u'套利合约为空')
            return
        # Validate the spread symbol format, e.g. 'SP rb1610&rb1701'
        if not (arbSymbol.upper().startswith("SP") and " " in arbSymbol and "&" in arbSymbol):
            self.writeCtaLog(u'套利合约格式不符合')
            return
        # Split out leg1 and leg2
        legs = arbSymbol[arbSymbol.index(" "):]
        leg1 = legs[1:legs.index("&")]
        leg2 = legs[legs.index("&") + 1:]
self.writeCtaLog(u'Leg1:{0},Leg2:{1}'.format(leg1, leg2))
        if not self.dataStartDate:
            self.writeCtaLog(u'回测开始日期未设置。')
            return
        # e.g. RB
        if len(self.symbol) < 1:
            self.writeCtaLog(u'回测对象未设置。')
            return
        if not self.dataEndDate:
            self.dataEndDate = datetime.today()
        # This runner only supports tick mode
        if self.mode == self.BAR_MODE:
            self.writeCtaLog(u'本回测仅支持tick模式')
            return
        testdays = (self.dataEndDate - self.dataStartDate).days
        if testdays < 1:
            self.writeCtaLog(u'回测时间不足')
            return
        for i in range(0, testdays):
            testday = self.dataStartDate + timedelta(days=i)
            self.output(u'回测日期:{0}'.format(testday))
            # Day-session data
            self.__loadArbTicks(mainPath, testday, leg1, leg2)
            # Night-session data
            self.__loadArbTicks(mainPath + '_night', testday, leg1, leg2)
def __loadArbTicks(self, mainPath, testday, leg1, leg2):
self.writeCtaLog(u'加载回测日期:{0}\{1}的价差tick'.format(mainPath, testday))
cachefilename = u'{0}_{1}_{2}_{3}_{4}'.format(self.symbol, leg1, leg2, mainPath, testday.strftime('%Y%m%d'))
arbTicks = self.__loadArbTicksFromLocalCache(cachefilename)
dt = None
if len(arbTicks) < 1:
leg1File = u'z:\\ticks\\{0}\\{1}\\{2}\\{3}\\{4}.txt' \
.format(mainPath, testday.strftime('%Y%m'), self.symbol, testday.strftime('%m%d'), leg1)
if not os.path.isfile(leg1File):
self.writeCtaLog(u'{0}文件不存在'.format(leg1File))
return
leg2File = u'z:\\ticks\\{0}\\{1}\\{2}\\{3}\\{4}.txt' \
.format(mainPath, testday.strftime('%Y%m'), self.symbol, testday.strftime('%m%d'), leg2)
if not os.path.isfile(leg2File):
self.writeCtaLog(u'{0}文件不存在'.format(leg2File))
return
            # Read leg2 ticks into a dict first, keyed by datetime string
            leg2Ticks = {}
            leg2CsvReadFile = open(leg2File, 'rb')
            # reader = csv.DictReader((line.replace('\0',' ') for line in leg2CsvReadFile), delimiter=",")
            reader = csv.DictReader(leg2CsvReadFile, delimiter=",")
            self.writeCtaLog(u'加载{0}'.format(leg2File))
for row in reader:
tick = CtaTickData()
tick.vtSymbol = self.symbol
tick.symbol = self.symbol
tick.date = testday.strftime('%Y%m%d')
tick.tradingDay = tick.date
tick.time = row['Time']
try:
tick.datetime = datetime.strptime(tick.date + ' ' + tick.time, '%Y%m%d %H:%M:%S.%f')
except Exception as ex:
self.writeCtaError(u'日期转换错误:{0},{1}:{2}'.format(tick.date + ' ' + tick.time, Exception, ex))
continue
                # Fix milliseconds: source timestamps only resolve to the second
                if tick.datetime.replace(microsecond=0) == dt:
                    # Same second as the previous tick: stamp this one at .500 ms
                    # so the dict key stays unique and ordering is preserved
                    tick.datetime = tick.datetime.replace(microsecond=500)
                    tick.time = tick.datetime.strftime('%H:%M:%S.%f')
                else:
                    tick.datetime = tick.datetime.replace(microsecond=0)
                    tick.time = tick.datetime.strftime('%H:%M:%S.%f')
                dt = tick.datetime
                tick.lastPrice = float(row['LastPrice'])
                tick.volume = int(float(row['LVolume']))
                tick.bidPrice1 = float(row['BidPrice'])  # bid (lower price)
                tick.bidVolume1 = int(float(row['BidVolume']))
                tick.askPrice1 = float(row['AskPrice'])  # ask (higher price)
                tick.askVolume1 = int(float(row['AskVolume']))
                # Drop limit-up/limit-down placeholder rows (price printed as DBL_MAX)
                if (tick.bidPrice1 == float('1.79769E308') and tick.bidVolume1 == 0) \
                        or (tick.askPrice1 == float('1.79769E308') and tick.askVolume1 == 0):
                    continue
dtStr = tick.date + ' ' + tick.time
if dtStr in leg2Ticks:
self.writeCtaError(u'日内数据重复,异常,数据时间为:{0}'.format(dtStr))
else:
leg2Ticks[dtStr] = tick
            leg1CsvReadFile = open(leg1File, 'rb')
            # reader = csv.DictReader((line.replace('\0',' ') for line in leg1CsvReadFile), delimiter=",")
            reader = csv.DictReader(leg1CsvReadFile, delimiter=",")
            self.writeCtaLog(u'加载{0}'.format(leg1File))
dt = None
for row in reader:
arbTick = CtaTickData()
arbTick.date = testday.strftime('%Y%m%d')
arbTick.time = row['Time']
try:
arbTick.datetime = datetime.strptime(arbTick.date + ' ' + arbTick.time, '%Y%m%d %H:%M:%S.%f')
except Exception as ex:
self.writeCtaError(u'日期转换错误:{0},{1}:{2}'.format(arbTick.date + ' ' + arbTick.time, Exception, ex))
continue
                # Fix milliseconds, same scheme as for leg2 above
                if arbTick.datetime.replace(microsecond=0) == dt:
                    arbTick.datetime = arbTick.datetime.replace(microsecond=500)
                    arbTick.time = arbTick.datetime.strftime('%H:%M:%S.%f')
                else:
                    arbTick.datetime = arbTick.datetime.replace(microsecond=0)
                    arbTick.time = arbTick.datetime.strftime('%H:%M:%S.%f')
                dt = arbTick.datetime
dtStr = ' '.join([arbTick.date, arbTick.time])
if dtStr in leg2Ticks:
leg2Tick = leg2Ticks[dtStr]
arbTick.vtSymbol = self.symbol
arbTick.symbol = self.symbol
arbTick.lastPrice = EMPTY_FLOAT
arbTick.volume = EMPTY_INT
leg1AskPrice1 = float(row['AskPrice'])
leg1AskVolume1 = int(float(row['AskVolume']))
leg1BidPrice1 = float(row['BidPrice'])
leg1BidVolume1 = int(float(row['BidVolume']))
                    # Drop limit-up/limit-down placeholder rows
                    if ((leg1AskPrice1 == float('1.79769E308') or leg1AskPrice1 == 0) and leg1AskVolume1 == 0) \
                            or ((leg1BidPrice1 == float('1.79769E308') or leg1BidPrice1 == 0) and leg1BidVolume1 == 0):
                        continue
                    # Spread ask = leg1 ask - leg2 bid; size is the smaller of the two
                    arbTick.askPrice1 = leg1AskPrice1 - leg2Tick.bidPrice1
                    arbTick.askVolume1 = min(leg1AskVolume1, leg2Tick.bidVolume1)
                    # Spread bid = leg1 bid - leg2 ask; size is the smaller of the two
                    arbTick.bidPrice1 = leg1BidPrice1 - leg2Tick.askPrice1
                    arbTick.bidVolume1 = min(leg1BidVolume1, leg2Tick.askVolume1)
                    arbTicks.append(arbTick)
                    del leg2Ticks[dtStr]
            # Persist the merged spread ticks to the local cache
            if len(arbTicks) > 0:
                self.__saveArbTicksToLocalCache(cachefilename, arbTicks)
        for t in arbTicks:
            # Push each spread tick into the strategy
            self.newTick(t)
def __loadArbTicksFromLocalCache(self, filename):
"""从本地缓存中,加载数据"""
# 运行路径下cache子目录
cacheFolder = os.getcwd() + '/cache'
# cache文件
cacheFile = u'{0}/{1}.pickle'. \
format(cacheFolder, filename)
if not os.path.isfile(cacheFile):
return []
else:
# 从cache文件加载
cache = open(cacheFile, mode='r')
l = cPickle.load(cache)
cache.close()
return l
    def __saveArbTicksToLocalCache(self, filename, arbticks):
        """Save spread ticks to the local cache directory."""
        # cache subdirectory under the working directory
        cacheFolder = os.getcwd() + '/cache'
        # Create the cache directory if needed
        if not os.path.isdir(cacheFolder):
            os.mkdir(cacheFolder)
        # cache file path
        cacheFile = u'{0}/{1}.pickle'. \
            format(cacheFolder, filename)
        # Do not overwrite an existing cache file
        if os.path.isfile(cacheFile):
            return False
        else:
            # Write the cache file (binary mode for pickle)
            cache = open(cacheFile, mode='wb')
            cPickle.dump(arbticks, cache)
            cache.close()
            return True
def runBackTestingWithNonStrArbTickFile(self, leg1MainPath, leg2MainPath, leg1Symbol, leg2Symbol):
"""运行套利回测(使用本地tickcsv数据)
参数:
leg1MainPath: leg1合约所在的市场路径
leg2MainPath: leg2合约所在的市场路径
leg1Symbol: leg1合约
Leg2Symbol:leg2合约
added by IncenseLee
原始的tick,分别存放在白天目录1和夜盘目录2中,每天都有各个合约的数据
Z:\ticks\SHFE\201606\RB\0601\
RB1610.txt
RB1701.txt
....
Z:\ticks\SHFE_night\201606\RB\0601
RB1610.txt
RB1701.txt
....
夜盘目录为自然日,不是交易日。
按照回测的开始日期,到结束日期,循环每一天。
每天优先读取日盘数据,再读取夜盘数据。
读取eg1(如RB1610),读取Leg2(如RB701),根据两者tick的时间优先顺序,逐一tick灌输到策略的onTick中。
"""
        self.capital = self.initCapital  # reset starting equity
        if not self.dataStartDate:
            self.writeCtaLog(u'回测开始日期未设置。')
            return
        # e.g. RB
        if len(self.symbol) < 1:
            self.writeCtaLog(u'回测对象未设置。')
            return
        if not self.dataEndDate:
            self.dataEndDate = datetime.today()
        # This runner only supports tick mode
        if self.mode == self.BAR_MODE:
            self.writeCtaLog(u'本回测仅支持tick模式')
            return
        testdays = (self.dataEndDate - self.dataStartDate).days
        if testdays < 1:
            self.writeCtaLog(u'回测时间不足')
            return
        for i in range(0, testdays):
            testday = self.dataStartDate + timedelta(days=i)
            self.output(u'回测日期:{0}'.format(testday))
            # Load and replay the day session
            self.__loadNotStdArbTicks(leg1MainPath, leg2MainPath, testday, leg1Symbol, leg2Symbol)
            # Load and replay the night session
            self.__loadNotStdArbTicks(leg1MainPath + '_night', leg2MainPath + '_night', testday, leg1Symbol, leg2Symbol)
def __loadTicksFromFile(self, filepath, tickDate, vtSymbol):
"""从文件中读取tick"""
# 先读取数据到Dict,以日期时间为key
ticks = OrderedDict()
if not os.path.isfile(filepath):
self.writeCtaLog(u'{0}文件不存在'.format(filepath))
return ticks
dt = None
csvReadFile = file(filepath, 'rb')
reader = csv.DictReader(csvReadFile, delimiter=",")
self.writeCtaLog(u'加载{0}'.format(filepath))
for row in reader:
tick = CtaTickData()
tick.vtSymbol = vtSymbol
tick.symbol = vtSymbol
tick.date = tickDate.strftime('%Y%m%d')
tick.tradingDay = tick.date
tick.time = row['Time']
try:
tick.datetime = datetime.strptime(tick.date + ' ' + tick.time, '%Y%m%d %H:%M:%S.%f')
except Exception as ex:
self.writeCtaError(u'日期转换错误:{0},{1}:{2}'.format(tick.date + ' ' + tick.time, Exception, ex))
continue
            # Fix milliseconds, same scheme as in __loadArbTicks
            if tick.datetime.replace(microsecond=0) == dt:
                tick.datetime = tick.datetime.replace(microsecond=500)
                tick.time = tick.datetime.strftime('%H:%M:%S.%f')
            else:
                tick.datetime = tick.datetime.replace(microsecond=0)
                tick.time = tick.datetime.strftime('%H:%M:%S.%f')
            dt = tick.datetime
            tick.lastPrice = float(row['LastPrice'])
            tick.volume = int(float(row['LVolume']))
            tick.bidPrice1 = float(row['BidPrice'])  # bid (lower price)
            tick.bidVolume1 = int(float(row['BidVolume']))
            tick.askPrice1 = float(row['AskPrice'])  # ask (higher price)
            tick.askVolume1 = int(float(row['AskVolume']))
            # Drop limit-up/limit-down placeholder rows
            if (tick.bidPrice1 == float('1.79769E308') and tick.bidVolume1 == 0) \
                    or (tick.askPrice1 == float('1.79769E308') and tick.askVolume1 == 0):
                continue
dtStr = tick.date + ' ' + tick.time
if dtStr in ticks:
self.writeCtaError(u'日内数据重复,异常,数据时间为:{0}'.format(dtStr))
else:
ticks[dtStr] = tick
return ticks
def __loadNotStdArbTicks(self, leg1MainPath, leg2MainPath, testday, leg1Symbol, leg2Symbol):
self.writeCtaLog(u'加载回测日期:{0}的价差tick'.format(testday))
leg1File = u'z:\\ticks\\{0}\\{1}\\{2}\\{3}\\{4}.txt' \
.format(leg1MainPath, testday.strftime('%Y%m'), self.symbol, testday.strftime('%m%d'), leg1Symbol)
if not os.path.isfile(leg1File):
self.writeCtaLog(u'{0}文件不存在'.format(leg1File))
return
leg2File = u'z:\\ticks\\{0}\\{1}\\{2}\\{3}\\{4}.txt' \
.format(leg2MainPath, testday.strftime('%Y%m'), self.symbol, testday.strftime('%m%d'), leg2Symbol)
if not os.path.isfile(leg2File):
self.writeCtaLog(u'{0}文件不存在'.format(leg2File))
return
leg1Ticks = self.__loadTicksFromFile(filepath=leg1File, tickDate=testday, vtSymbol=leg1Symbol)
if len(leg1Ticks) == 0:
self.writeCtaLog(u'{0}读取tick数为空'.format(leg1File))
return
        leg2Ticks = self.__loadTicksFromFile(filepath=leg2File, tickDate=testday, vtSymbol=leg2Symbol)
        if len(leg2Ticks) == 0:
            self.writeCtaLog(u'{0}读取tick数为空'.format(leg2File))
            return
leg1_tick = None
leg2_tick = None
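        # Two-way merge: both OrderedDicts preserve insertion (timestamp)
        # order, so repeatedly pop the earliest remaining tick from either leg
        # and push it into the strategy; onTick then sees one chronologically
        # ordered stream. The loop ends as soon as either leg is exhausted.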
while not (len(leg1Ticks) == 0 or len(leg2Ticks) == 0):
if leg1_tick is None and len(leg1Ticks) > 0:
leg1_tick = leg1Ticks.popitem(last=False)
if leg2_tick is None and len(leg2Ticks) > 0:
leg2_tick = leg2Ticks.popitem(last=False)
if leg1_tick is None and leg2_tick is not None:
self.newTick(leg2_tick[1])
leg2_tick = None
elif leg1_tick is not None and leg2_tick is None:
self.newTick(leg1_tick[1])
leg1_tick = None
elif leg1_tick is not None and leg2_tick is not None:
leg1 = leg1_tick[1]
leg2 = leg2_tick[1]
if leg1.datetime <= leg2.datetime:
self.newTick(leg1)
leg1_tick = None
else:
self.newTick(leg2)
leg2_tick = None
def runBackTestingWithNonStrArbTickFile2(self, leg1MainPath, leg2MainPath, leg1Symbol, leg2Symbol):
"""运行套利回测(使用本地tickcsv数据,数据从taobao标普购买)
参数:
leg1MainPath: leg1合约所在的市场路径
leg2MainPath: leg2合约所在的市场路径
leg1Symbol: leg1合约
Leg2Symbol:leg2合约
added by IncenseLee
原始的tick,存放在相应市场下每天的目录中,目录包含市场各个合约的数据
E:\ticks\SQ\201606\20160601\
RB10.csv
RB01.csv
....
目录为交易日。
按照回测的开始日期,到结束日期,循环每一天。
读取eg1(如RB1610),读取Leg2(如RB701),根据两者tick的时间优先顺序,逐一tick灌输到策略的onTick中。
"""
        self.capital = self.initCapital  # reset starting equity
        if not self.dataStartDate:
            self.writeCtaLog(u'回测开始日期未设置。')
            return
        # e.g. RB
        if len(self.symbol) < 1:
            self.writeCtaLog(u'回测对象未设置。')
            return
        if not self.dataEndDate:
            self.dataEndDate = datetime.today()
        # This runner only supports tick mode
        if self.mode == self.BAR_MODE:
            self.writeCtaLog(u'本回测仅支持tick模式')
            return
        testdays = (self.dataEndDate - self.dataStartDate).days
        if testdays < 1:
            self.writeCtaLog(u'回测时间不足')
            return
        for i in range(0, testdays):
            testday = self.dataStartDate + timedelta(days=i)
            self.output(u'回测日期:{0}'.format(testday))
            # Load and replay this day's data
            self.__loadNotStdArbTicks2(leg1MainPath, leg2MainPath, testday, leg1Symbol, leg2Symbol)
def __loadTicksFromFile2(self, filepath, tickDate, vtSymbol):
"""从csv文件中UnicodeDictReader读取tick"""
# 先读取数据到Dict,以日期时间为key
ticks = OrderedDict()
if not os.path.isfile(filepath):
self.writeCtaLog(u'{0}文件不存在'.format(filepath))
return ticks
dt = None
csvReadFile = file(filepath, 'rb')
df = pd.read_csv(filepath, encoding='gbk')
df.columns = ['date', 'time', 'lastPrice', 'lastVolume', 'totalInterest', 'position',
'bidPrice1', 'bidVolume1', 'bidPrice2', 'bidVolume2', 'bidPrice3', 'bidVolume3',
'askPrice1', 'askVolume1', 'askPrice2', 'askVolume2', 'askPrice3', 'askVolume3', 'BS']
self.writeCtaLog(u'加载{0}'.format(filepath))
for i in range(0, len(df)):
            # Columns: date, time, last price, last volume, cumulative volume,
            # open-interest change, B1-B3 price/vol, S1-S3 price/vol, BS flag
row = df.iloc[i].to_dict()
tick = CtaTickData()
tick.vtSymbol = vtSymbol
tick.symbol = vtSymbol
tick.date = row['date']
tick.tradingDay = tickDate.strftime('%Y%m%d')
tick.time = row['time']
try:
tick.datetime = datetime.strptime(tick.date + ' ' + tick.time, '%Y-%m-%d %H:%M:%S')
except Exception as ex:
self.writeCtaError(u'日期转换错误:{0},{1}:{2}'.format(tick.date + ' ' + tick.time, Exception, ex))
continue
tick.date = tick.datetime.strftime('%Y%m%d')
            # Fix milliseconds, same scheme as in __loadTicksFromFile
            if tick.datetime.replace(microsecond=0) == dt:
                tick.datetime = tick.datetime.replace(microsecond=500)
                tick.time = tick.datetime.strftime('%H:%M:%S.%f')
            else:
                tick.datetime = tick.datetime.replace(microsecond=0)
                tick.time = tick.datetime.strftime('%H:%M:%S.%f')
            dt = tick.datetime
            tick.lastPrice = float(row['lastPrice'])
            tick.volume = int(float(row['lastVolume']))
            tick.bidPrice1 = float(row['bidPrice1'])  # bid (lower price)
            tick.bidVolume1 = int(float(row['bidVolume1']))
            tick.askPrice1 = float(row['askPrice1'])  # ask (higher price)
            tick.askVolume1 = int(float(row['askVolume1']))
            # Drop limit-up/limit-down placeholder rows
            if (tick.bidPrice1 == float('1.79769E308') and tick.bidVolume1 == 0) \
                    or (tick.askPrice1 == float('1.79769E308') and tick.askVolume1 == 0):
                continue
dtStr = tick.date + ' ' + tick.time
if dtStr in ticks:
self.writeCtaError(u'日内数据重复,异常,数据时间为:{0}'.format(dtStr))
else:
ticks[dtStr] = tick
return ticks
def __loadNotStdArbTicks2(self, leg1MainPath, leg2MainPath, testday, leg1Symbol, leg2Symbol):
self.writeCtaLog(u'加载回测日期:{0}的价差tick'.format(testday))
# E:\Ticks\SQ\2014\201401\20140102\ag01_20140102.csv
leg1File = u'e:\\ticks\\{0}\\{1}\\{2}\\{3}\\{4}{5}_{3}.csv' \
.format(leg1MainPath, testday.strftime('%Y'), testday.strftime('%Y%m'), testday.strftime('%Y%m%d'),
self.symbol, leg1Symbol[-2:])
if not os.path.isfile(leg1File):
self.writeCtaLog(u'{0}文件不存在'.format(leg1File))
return
leg2File = u'e:\\ticks\\{0}\\{1}\\{2}\\{3}\\{4}{5}_{3}.csv' \
.format(leg2MainPath, testday.strftime('%Y'), testday.strftime('%Y%m'), testday.strftime('%Y%m%d'),
self.symbol, leg2Symbol[-2:])
if not os.path.isfile(leg2File):
self.writeCtaLog(u'{0}文件不存在'.format(leg2File))
return
leg1Ticks = self.__loadTicksFromFile2(filepath=leg1File, tickDate=testday, vtSymbol=leg1Symbol)
if len(leg1Ticks) == 0:
self.writeCtaLog(u'{0}读取tick数为空'.format(leg1File))
return
        leg2Ticks = self.__loadTicksFromFile2(filepath=leg2File, tickDate=testday, vtSymbol=leg2Symbol)
        if len(leg2Ticks) == 0:
            self.writeCtaLog(u'{0}读取tick数为空'.format(leg2File))
            return
leg1_tick = None
leg2_tick = None
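        # Same two-way merge as in __loadNotStdArbTicks: pop the earliest tick
        # from either leg until one of them is exhausted.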
while not (len(leg1Ticks) == 0 or len(leg2Ticks) == 0):
if leg1_tick is None and len(leg1Ticks) > 0:
leg1_tick = leg1Ticks.popitem(last=False)
if leg2_tick is None and len(leg2Ticks) > 0:
leg2_tick = leg2Ticks.popitem(last=False)
if leg1_tick is None and leg2_tick is not None:
self.newTick(leg2_tick[1])
leg2_tick = None
elif leg1_tick is not None and leg2_tick is None:
self.newTick(leg1_tick[1])
leg1_tick = None
elif leg1_tick is not None and leg2_tick is not None:
leg1 = leg1_tick[1]
leg2 = leg2_tick[1]
if leg1.datetime <= leg2.datetime:
self.newTick(leg1)
leg1_tick = None
else:
self.newTick(leg2)
leg2_tick = None
# ----------------------------------------------------------------------
def runBackTestingWithBarFile(self, filename):
"""运行回测(使用本地csv数据)
added by IncenseLee
"""
        self.capital = self.initCapital  # reset starting equity
if not filename:
self.writeCtaLog(u'请指定回测数据文件')
return
if not self.dataStartDate:
self.writeCtaLog(u'回测开始日期未设置。')
return
if not self.dataEndDate:
self.dataEndDate = datetime.today()
        import os
        if not os.path.isfile(filename):
            self.writeCtaLog(u'{0}文件不存在'.format(filename))
            return
if len(self.symbol) < 1:
self.writeCtaLog(u'回测对象未设置。')
return
        # This runner only supports bar mode
        if not self.mode == self.BAR_MODE:
            self.writeCtaLog(u'文件仅支持bar模式,若扩展tick模式,需要修改本方法')
            return
self.output(u'开始回测')
# self.strategy.inited = True
self.strategy.onInit()
self.output(u'策略初始化完成')
self.strategy.trading = True
self.strategy.onStart()
self.output(u'策略启动完成')
self.output(u'开始回放数据')
        import csv
        csvfile = open(filename, 'rb')
        reader = csv.DictReader((line.replace('\0', '') for line in csvfile), delimiter=",")
for row in reader:
try:
bar = CtaBarData()
bar.symbol = self.symbol
bar.vtSymbol = self.symbol
                # Fields of a csv exported from TradeBlazer (tb)
                # bar.open = float(row['Open'])
                # bar.high = float(row['High'])
                # bar.low = float(row['Low'])
                # bar.close = float(row['Close'])
                # bar.volume = float(row['TotalVolume'])#
                # barEndTime = datetime.strptime(row['Date']+' ' + row['Time'], '%Y/%m/%d %H:%M:%S')
                # Fields of a csv exported from ricequant
                bar.open = float(row['open'])
                bar.high = float(row['high'])
                bar.low = float(row['low'])
                bar.close = float(row['close'])
                bar.volume = float(row['volume'])
                barEndTime = datetime.strptime(row['index'], '%Y-%m-%d %H:%M:%S')
                bar.tradingDay = row['trading_date']
                # Use the bar's start time as its datetime
                bar.datetime = barEndTime - timedelta(seconds=self.barTimeInterval)
                bar.date = bar.datetime.strftime('%Y-%m-%d')
                bar.time = bar.datetime.strftime('%H:%M:%S')
                if not (bar.datetime < self.dataStartDate or bar.datetime >= self.dataEndDate):
                    self.newBar(bar)
except Exception as ex:
self.writeCtaLog(u'{0}:{1}'.format(Exception, ex))
continue
# ----------------------------------------------------------------------
def runBacktestingWithMysql(self):
"""运行回测(使用Mysql数据)
added by IncenseLee
"""
        self.capital = self.initCapital  # reset starting equity
        if not self.dataStartDate:
            self.writeCtaLog(u'回测开始日期未设置。')
            return
        if not self.dataEndDate:
            self.dataEndDate = datetime.today()
        if len(self.symbol) < 1:
            self.writeCtaLog(u'回测对象未设置。')
            return
        # Pick the data class and the push callback according to the backtesting mode
        if self.mode == self.BAR_MODE:
            dataClass = CtaBarData
            func = self.newBar
        else:
            dataClass = CtaTickData
            func = self.newTick
        self.output(u'开始回测')
        # self.strategy.inited = True
        self.strategy.onInit()
        self.output(u'策略初始化完成')
        self.strategy.trading = True
        self.strategy.onStart()
        self.output(u'策略启动完成')
        self.output(u'开始回放数据')
        # Query in fixed-size date windows
        intervalDays = 10
        for i in range(0, (self.dataEndDate - self.dataStartDate).days + 1, intervalDays):
            d1 = self.dataStartDate + timedelta(days=i)
            if (self.dataEndDate - d1).days > intervalDays:
                d2 = self.dataStartDate + timedelta(days=i + intervalDays - 1)
            else:
                d2 = self.dataEndDate
            # Load this window's history
            self.loadDataHistoryFromMysql(self.symbol, d1, d2)
            self.output(u'数据日期:{0} => {1}'.format(d1, d2))
            # Push the ticks one by one
            for data in self.historyData:
                # Track the latest tick
                self.tick = self.__dataToTick(data)
                self.dt = self.tick.datetime
                # Cross pending limit and stop orders first
                self.crossLimitOrder()
                self.crossStopOrder()
                # Then push the tick into the strategy
                self.strategy.onTick(self.tick)
            # Clear the window before loading the next one
            self.historyData = []
self.output(u'数据回放结束')
# ----------------------------------------------------------------------
    def runBacktesting(self):
        """Run a backtest (MongoDB data)."""
        self.capital = self.initCapital  # reset starting equity
        # Load the history
        # self.loadHistoryData()
        self.loadHistoryDataFromMongo()
        # Pick the data class and the push callback according to the backtesting mode
        if self.mode == self.BAR_MODE:
            dataClass = CtaBarData
            func = self.newBar
        else:
            dataClass = CtaTickData
            func = self.newTick
        self.output(u'开始回测')
        self.strategy.inited = True
        self.strategy.onInit()
        self.output(u'策略初始化完成')
        self.strategy.trading = True
        self.strategy.onStart()
        self.output(u'策略启动完成')
        self.output(u'开始回放数据')
        for d in self.dbCursor:
            data = dataClass()
            data.__dict__ = d
            func(data)
        self.output(u'数据回放结束')
    def __sendOnBarEvent(self, bar):
        """Publish an onBar event to the event engine."""
        if self.eventEngine is not None:
            eventType = EVENT_ON_BAR + '_' + self.symbol
            event = Event(type_=eventType)
            event.dict_['data'] = bar
            self.eventEngine.put(event)
# ----------------------------------------------------------------------
    def newBar(self, bar):
        """Handle a new bar."""
        self.bar = bar
        self.dt = bar.datetime
        self.crossLimitOrder()  # cross limit orders first
        self.crossStopOrder()  # then stop orders
        self.strategy.onBar(bar)  # push the bar into the strategy
        self.__sendOnBarEvent(bar)  # publish the bar event
# ----------------------------------------------------------------------
    def newTick(self, tick):
        """Handle a new tick."""
        self.tick = tick
        self.dt = tick.datetime
        self.crossLimitOrder()
        self.crossStopOrder()
        self.strategy.onTick(tick)
# ----------------------------------------------------------------------
    def initStrategy(self, strategyClass, setting=None):
        """
        Instantiate the strategy.
        `setting` holds the strategy parameters; omit it to use the defaults
        defined on the strategy class.
        """
        self.strategy = strategyClass(self, setting)
        self.strategy.name = self.strategy.className
# ----------------------------------------------------------------------
def sendOrder(self, vtSymbol, orderType, price, volume, strategy):
"""发单"""
self.writeCtaLog(u'{0},{1},{2}@{3}'.format(vtSymbol, orderType, price, volume))
self.limitOrderCount += 1
orderID = str(self.limitOrderCount)
order = VtOrderData()
order.vtSymbol = vtSymbol
order.price = price
order.totalVolume = volume
        order.status = STATUS_NOTTRADED  # submitted, nothing filled yet
order.orderID = orderID
order.vtOrderID = orderID
order.orderTime = str(self.dt)
# added by IncenseLee
order.gatewayName = self.gatewayName
        # Map the CTA order type to direction/offset
if orderType == CTAORDER_BUY:
order.direction = DIRECTION_LONG
order.offset = OFFSET_OPEN
elif orderType == CTAORDER_SELL:
order.direction = DIRECTION_SHORT
order.offset = OFFSET_CLOSE
elif orderType == CTAORDER_SHORT:
order.direction = DIRECTION_SHORT
order.offset = OFFSET_OPEN
elif orderType == CTAORDER_COVER:
order.direction = DIRECTION_LONG
order.offset = OFFSET_CLOSE
# modified by IncenseLee
key = u'{0}.{1}'.format(order.gatewayName, orderID)
        # Register in the working and full limit-order books
self.workingLimitOrderDict[key] = order
self.limitOrderDict[key] = order
return key
# ----------------------------------------------------------------------
def cancelOrder(self, vtOrderID):
"""撤单"""
if vtOrderID in self.workingLimitOrderDict:
order = self.workingLimitOrderDict[vtOrderID]
order.status = STATUS_CANCELLED
order.cancelTime = str(self.dt)
del self.workingLimitOrderDict[vtOrderID]
def cancelOrders(self, symbol, offset=EMPTY_STRING):
"""撤销所有单"""
# Symbol参数:指定合约的撤单;
# OFFSET参数:指定Offset的撤单,缺省不填写时,为所有
self.writeCtaLog(u'从所有订单中撤销{0}\{1}'.format(offset, symbol))
for vtOrderID in self.workingLimitOrderDict.keys():
order = self.workingLimitOrderDict[vtOrderID]
if offset == EMPTY_STRING:
offsetCond = True
else:
offsetCond = order.offset == offset
if order.symbol == symbol and offsetCond:
self.writeCtaLog(
u'撤销订单:{0},{1} {2}@{3}'.format(vtOrderID, order.direction, order.price, order.totalVolume))
order.status = STATUS_CANCELLED
order.cancelTime = str(self.dt)
del self.workingLimitOrderDict[vtOrderID]
# ----------------------------------------------------------------------
def sendStopOrder(self, vtSymbol, orderType, price, volume, strategy):
"""发停止单(本地实现)"""
self.stopOrderCount += 1
stopOrderID = STOPORDERPREFIX + str(self.stopOrderCount)
so = StopOrder()
so.vtSymbol = vtSymbol
so.price = price
so.volume = volume
so.strategy = strategy
so.stopOrderID = stopOrderID
so.status = STOPORDER_WAITING
if orderType == CTAORDER_BUY:
so.direction = DIRECTION_LONG
so.offset = OFFSET_OPEN
elif orderType == CTAORDER_SELL:
so.direction = DIRECTION_SHORT
so.offset = OFFSET_CLOSE
elif orderType == CTAORDER_SHORT:
so.direction = DIRECTION_SHORT
so.offset = OFFSET_OPEN
elif orderType == CTAORDER_COVER:
so.direction = DIRECTION_LONG
so.offset = OFFSET_CLOSE
        # Register the stop order in both books
self.stopOrderDict[stopOrderID] = so
self.workingStopOrderDict[stopOrderID] = so
return stopOrderID
# ----------------------------------------------------------------------
def cancelStopOrder(self, stopOrderID):
"""撤销停止单"""
# 检查停止单是否存在
if stopOrderID in self.workingStopOrderDict:
so = self.workingStopOrderDict[stopOrderID]
so.status = STOPORDER_CANCELLED
del self.workingStopOrderDict[stopOrderID]
# ----------------------------------------------------------------------
    def crossLimitOrder(self):
        """Cross working limit orders against the latest data."""
        # First determine the prices at which orders can cross
        if self.mode == self.BAR_MODE:
            buyCrossPrice = self.bar.low  # a buy limit priced above this level fills
            sellCrossPrice = self.bar.high  # a sell limit priced below this level fills
            buyBestCrossPrice = self.bar.open  # best possible fill for buys submitted before this bar
            sellBestCrossPrice = self.bar.open  # best possible fill for sells submitted before this bar
            vtSymbol = self.bar.vtSymbol
        else:
            buyCrossPrice = self.tick.askPrice1
            sellCrossPrice = self.tick.bidPrice1
            buyBestCrossPrice = self.tick.askPrice1
            sellBestCrossPrice = self.tick.bidPrice1
            vtSymbol = self.tick.vtSymbol
        # Walk all working limit orders
        for orderID, order in self.workingLimitOrderDict.items():
            # Does this order cross?
            buyCross = order.direction == DIRECTION_LONG and order.price >= buyCrossPrice and vtSymbol == order.vtSymbol
            sellCross = order.direction == DIRECTION_SHORT and order.price <= sellCrossPrice and vtSymbol == order.vtSymbol
            # If it crossed, generate the fill
            if buyCross or sellCross:
                # Push the trade data
                self.tradeCount += 1  # next trade id
                tradeID = str(self.tradeCount)
                trade = VtTradeData()
                trade.vtSymbol = order.vtSymbol
                trade.tradeID = tradeID
                trade.vtTradeID = tradeID
                trade.orderID = order.orderID
                trade.vtOrderID = order.orderID
                trade.direction = order.direction
                trade.offset = order.offset
                # Buy-side example:
                # 1. suppose the bar's OHLC is 100, 125, 90, 110
                # 2. suppose the strategy submitted a buy limit at 105 as the bar opened
                # 3. the realistic fill is then 100, not 105, because 100 was the
                #    best market price when the order went out
                if buyCross:
                    trade.price = min(order.price, buyBestCrossPrice)
                    self.strategy.pos += order.totalVolume
                else:
                    trade.price = max(order.price, sellBestCrossPrice)
                    self.strategy.pos -= order.totalVolume
                trade.volume = order.totalVolume
                trade.tradeTime = str(self.dt)
                trade.dt = self.dt
                self.strategy.onTrade(trade)
                self.tradeDict[tradeID] = trade
                self.writeCtaLog(u'TradeId:{0}'.format(tradeID))
                # Push the order update
                order.tradedVolume = order.totalVolume
                order.status = STATUS_ALLTRADED
                self.strategy.onOrder(order)
                # Remove the filled order from the working book
                try:
                    del self.workingLimitOrderDict[orderID]
                except Exception as ex:
                    self.writeCtaError(u'{0}:{1}'.format(Exception, ex))
                # Realtime equity-calculation mode
                if self.calculateMode == self.REALTIME_MODE:
                    self.realtimeCalculate()
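        # Note: this simplified matcher always fills an order in full at the
        # crossing price; partial fills and queue position are not modeled.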
# ----------------------------------------------------------------------
    def crossStopOrder(self):
        """Cross working stop orders against the latest data."""
        # Determine the trigger prices; note the comparisons are the reverse
        # of the limit-order rules
        if self.mode == self.BAR_MODE:
            buyCrossPrice = self.bar.high  # a buy stop priced below this level triggers
            sellCrossPrice = self.bar.low  # a sell stop priced above this level triggers
            bestCrossPrice = self.bar.open  # best fill: buys no lower, sells no higher
            vtSymbol = self.bar.vtSymbol
        else:
            buyCrossPrice = self.tick.lastPrice
            sellCrossPrice = self.tick.lastPrice
            bestCrossPrice = self.tick.lastPrice
            vtSymbol = self.tick.vtSymbol
        # Walk all working stop orders
        for stopOrderID, so in self.workingStopOrderDict.items():
            # Does this stop trigger?
            buyCross = so.direction == DIRECTION_LONG and so.price <= buyCrossPrice and vtSymbol == so.vtSymbol
            sellCross = so.direction == DIRECTION_SHORT and so.price >= sellCrossPrice and vtSymbol == so.vtSymbol
            # If it triggered, generate the fill
            if buyCross or sellCross:
                # Push the trade data
                self.tradeCount += 1  # next trade id
                tradeID = str(self.tradeCount)
                trade = VtTradeData()
                trade.vtSymbol = so.vtSymbol
                trade.tradeID = tradeID
                trade.vtTradeID = tradeID
                if buyCross:
                    self.strategy.pos += so.volume
                    trade.price = max(bestCrossPrice, so.price)
                else:
                    self.strategy.pos -= so.volume
                    trade.price = min(bestCrossPrice, so.price)
                self.limitOrderCount += 1
                orderID = str(self.limitOrderCount)
                trade.orderID = orderID
                trade.vtOrderID = orderID
                trade.direction = so.direction
                trade.offset = so.offset
                trade.volume = so.volume
                trade.tradeTime = str(self.dt)
                trade.dt = self.dt
                self.strategy.onTrade(trade)
                self.tradeDict[tradeID] = trade
                # Push the order update
                so.status = STOPORDER_TRIGGERED
                order = VtOrderData()
                order.vtSymbol = so.vtSymbol
                order.symbol = so.vtSymbol
                order.orderID = orderID
                order.vtOrderID = orderID
                order.direction = so.direction
                order.offset = so.offset
                order.price = so.price
                order.totalVolume = so.volume
                order.tradedVolume = so.volume
                order.status = STATUS_ALLTRADED
                order.orderTime = trade.tradeTime
                self.strategy.onOrder(order)
                self.limitOrderDict[orderID] = order
                # Remove the triggered stop from the working book
                del self.workingStopOrderDict[stopOrderID]
                # Realtime equity-calculation mode
                if self.calculateMode == self.REALTIME_MODE:
                    self.realtimeCalculate()
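        # Note: a triggered buy stop fills at the higher of its stop price and
        # the reference price (bar open / last price), i.e. the worse side,
        # which mimics slipping through the stop in live trading.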
# ----------------------------------------------------------------------
    def insertData(self, dbName, collectionName, data):
        """No-op: backtesting must not write to the database; this guards
        against live-trading code paths that try to."""
        pass
# ----------------------------------------------------------------------
    def loadBar(self, dbName, collectionName, startDate):
        """Return the bars from the preloaded init data list."""
        return self.initData
# ----------------------------------------------------------------------
    def loadTick(self, dbName, collectionName, startDate):
        """Return the ticks from the preloaded init data list."""
        return self.initData
# ----------------------------------------------------------------------
    def writeCtaLog(self, content):
        """Write a log entry."""
        # log = str(self.dt) + ' ' + content
        # self.logList.append(log)
        # Write to the local log file
        logging.info(content)
    def writeCtaError(self, content):
        """Log an error (also echoed to stdout)."""
        self.output(content)
        self.writeCtaLog(content)
# ----------------------------------------------------------------------
    def output(self, content):
        """Print to stdout with a timestamp."""
        print str(datetime.now()) + "\t" + content
    # TODO: understand this function
    # This is where the strategy's trade results are settled
    def realtimeCalculate(self):
        """Compute trading results in real time."""
        resultDict = OrderedDict()  # closed-trade results
        longTrade = []  # open long trades not yet closed
        shortTrade = []  # open short trades not yet closed
        longid = EMPTY_STRING
        shortid = EMPTY_STRING
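        # Matching scheme: fills are replayed in order. Opens queue up per
        # direction (longTrade / shortTrade); each closing fill pops matching
        # opens for the same symbol FIFO. A close bigger than one open keeps
        # popping and folds the pieces into a grouped result (gr); a close
        # smaller than the open splits it and pushes the remainder back.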
        # Walk the recorded fills one by one
        for tradeid in self.tradeDict.keys():
            trade = self.tradeDict[tradeid]
            # Long fill
            if trade.direction == DIRECTION_LONG:
                # No open shorts yet: this is a new long open
                if not shortTrade:
                    longTrade.append(trade)
                    longid = tradeid
                # Otherwise this long fill covers shorts
                else:
                    gId = tradeid  # trade group (several closes settle as one group)
                    gr = None  # grouped trading result
                    coverVolume = trade.volume
                    while coverVolume > 0:
                        if len(shortTrade) == 0:
                            self.writeCtaError(u'异常,没有开空仓的数据')
                            break
                        pop_indexs = [i for i, val in enumerate(shortTrade) if val.vtSymbol == trade.vtSymbol]
                        if len(pop_indexs) < 1:
                            self.writeCtaLog(u'没有对应的symbol:{0}开空仓数据'.format(trade.vtSymbol))
                            break
                        pop_index = pop_indexs[0]
                        # Pop the oldest matching open short (FIFO)
                        entryTrade = shortTrade.pop(pop_index)
                        # Open-short volume does not exceed the cover volume
                        if coverVolume >= entryTrade.volume:
                            self.writeCtaLog(
                                u'coverVolume:{0} >= entryTrade.volume:{1}'.format(coverVolume, entryTrade.volume))
                            coverVolume = coverVolume - entryTrade.volume
result = TradingResult(entryTrade.price, trade.price, -entryTrade.volume,
self.rate, self.slippage, self.size,
groupId=gId, fixcommission=self.fixCommission)
t = {}
t['vtSymbol'] = entryTrade.vtSymbol
t['OpenTime'] = entryTrade.tradeTime
t['OpenPrice'] = entryTrade.price
t['Direction'] = u'Short'
t['CloseTime'] = trade.tradeTime
t['ClosePrice'] = trade.price
t['Volume'] = entryTrade.volume
t['Profit'] = result.pnl
self.exportTradeList.append(t)
self.writeCtaLog(u'{6} [{7}:开空{0},short:{1}]-[{8}:平空{2},cover:{3},vol:{4}],净盈亏:{5}'
.format(entryTrade.tradeTime, entryTrade.price,
trade.tradeTime, trade.price, entryTrade.volume, result.pnl,
gId, shortid, tradeid))
                            if gr is None:
                                if coverVolume > 0:
                                    # Part of a group: more opens still to consume
                                    gr = copy.deepcopy(result)
                                    # Remove the consumed open-short fill
                                    del self.tradeDict[entryTrade.tradeID]
                                else:
                                    # Standalone result, no grouping needed
                                    resultDict[entryTrade.dt] = result
                                    # Remove the cover (close) fill
                                    del self.tradeDict[trade.tradeID]
                                    # Remove the open-short fill
                                    del self.tradeDict[entryTrade.tradeID]
                            else:
                                # Accumulate into the grouped result
                                gr.turnover = gr.turnover + result.turnover
                                gr.commission = gr.commission + result.commission
                                gr.slippage = gr.slippage + result.slippage
                                gr.pnl = gr.pnl + result.pnl
                                # Remove the consumed open-short fill
                                del self.tradeDict[entryTrade.tradeID]
                                # Whole position flat: finalize the group
                                if coverVolume == 0:
                                    gr.volume = trade.volume
                                    resultDict[entryTrade.dt] = gr
                                    # Remove the cover (close) fill
                                    del self.tradeDict[trade.tradeID]
                        # Open-short volume exceeds the cover volume: split the
                        # open and push the remainder back
                        else:
                            self.writeCtaLog(
                                u'Short volume:{0} > Cover volume:{1},需要更新减少tradeDict的数量。'.format(entryTrade.volume,
                                                                                               coverVolume))
shortVolume = entryTrade.volume - coverVolume
result = TradingResult(entryTrade.price, trade.price, -coverVolume,
self.rate, self.slippage, self.size,
groupId=gId, fixcommission=self.fixCommission)
t = {}
t['vtSymbol'] = entryTrade.vtSymbol
t['OpenTime'] = entryTrade.tradeTime
t['OpenPrice'] = entryTrade.price
t['Direction'] = u'Short'
t['CloseTime'] = trade.tradeTime
t['ClosePrice'] = trade.price
t['Volume'] = coverVolume
t['Profit'] = result.pnl
self.exportTradeList.append(t)
self.writeCtaLog(u'{6} [{7}:开空{0},short:{1}]-[{8}:平空{2},cover:{3},vol:{4}],净盈亏:{5}'
.format(entryTrade.tradeTime, entryTrade.price,
trade.tradeTime, trade.price, coverVolume, result.pnl,
gId, shortid, tradeid))
                            # Shrink the open fill's volume and push it back
                            entryTrade.volume = shortVolume
                            shortTrade.append(entryTrade)
                            coverVolume = 0
                            if gr is None:
                                resultDict[entryTrade.dt] = result
                            else:
                                # Accumulate into the grouped result
                                gr.turnover = gr.turnover + result.turnover
                                gr.commission = gr.commission + result.commission
                                gr.slippage = gr.slippage + result.slippage
                                gr.pnl = gr.pnl + result.pnl
                                gr.volume = trade.volume
                                resultDict[entryTrade.dt] = gr
                            # Remove the cover (close) fill
                            del self.tradeDict[trade.tradeID]
                    if gr is not None:
self.writeCtaLog(u'组合净盈亏:{0}'.format(gr.pnl))
self.writeCtaLog(u'-------------')
            # Short fill
            else:
                # No open longs yet: this is a new short open
                if not longTrade:
                    shortTrade.append(trade)
                    shortid = tradeid
                # Otherwise this short fill closes longs
                else:
                    gId = tradeid  # trade group (several closes settle as one group)
                    gr = None  # grouped trading result
                    sellVolume = trade.volume
                    self.output(u'多平:{0}'.format(sellVolume))
                    self.writeCtaLog(u'多平:{0}'.format(sellVolume))
while sellVolume > 0:
if len(longTrade) == 0:
self.writeCtaError(u'异常,没有开多单')
break
pop_indexs = [i for i, val in enumerate(longTrade) if val.vtSymbol == trade.vtSymbol]
if len(pop_indexs) < 1:
self.writeCtaLog(u'没有对应的symbol{0}开多仓数据'.format(trade.vtSymbol))
break
                        pop_index = pop_indexs[0]
                        entryTrade = longTrade.pop(pop_index)
                        # Open-long volume does not exceed the sell volume
                        if sellVolume >= entryTrade.volume:
                            self.writeCtaLog(
                                u'Sell Volume:{0} >= Entry Volume:{1}'.format(sellVolume, entryTrade.volume))
                            sellVolume = sellVolume - entryTrade.volume
result = TradingResult(entryTrade.price, trade.price, entryTrade.volume,
self.rate, self.slippage, self.size,
groupId=gId, fixcommission=self.fixCommission)
t = {}
t['vtSymbol'] = entryTrade.vtSymbol
t['OpenTime'] = entryTrade.tradeTime
t['OpenPrice'] = entryTrade.price
t['Direction'] = u'Long'
t['CloseTime'] = trade.tradeTime
t['ClosePrice'] = trade.price
t['Volume'] = entryTrade.volume
t['Profit'] = result.pnl
self.exportTradeList.append(t)
self.writeCtaLog(u'{6} [{7}:开多{0},buy:{1}]-[{8}.平多{2},sell:{3},vol:{4}],净盈亏:{5}'
.format(entryTrade.tradeTime, entryTrade.price,
trade.tradeTime, trade.price, entryTrade.volume, result.pnl,
gId, longid, tradeid))
                            if gr is None:
                                if sellVolume > 0:
                                    # Part of a group: more opens still to consume
                                    gr = copy.deepcopy(result)
                                    # Remove the consumed open-long fill
                                    del self.tradeDict[entryTrade.tradeID]
                                else:
                                    # Standalone result, no grouping needed
                                    resultDict[entryTrade.dt] = result
                                    # Remove the sell (close) fill
                                    del self.tradeDict[trade.tradeID]
                                    # Remove the open-long fill
                                    del self.tradeDict[entryTrade.tradeID]
                            else:
                                # Accumulate into the grouped result
                                gr.turnover = gr.turnover + result.turnover
                                gr.commission = gr.commission + result.commission
                                gr.slippage = gr.slippage + result.slippage
                                gr.pnl = gr.pnl + result.pnl
                                # Remove the consumed open-long fill
                                del self.tradeDict[entryTrade.tradeID]
                                if sellVolume == 0:
                                    gr.volume = trade.volume
                                    resultDict[entryTrade.dt] = gr
                                    # Remove the sell (close) fill
                                    del self.tradeDict[trade.tradeID]
                        # Open-long volume exceeds the sell volume: split the
                        # open and push the remainder back
                        else:
                            longVolume = entryTrade.volume - sellVolume
                            self.writeCtaLog(u'Long Volume:{0} > sell Volume:{1}'.format(entryTrade.volume, sellVolume))
result = TradingResult(entryTrade.price, trade.price, sellVolume,
self.rate, self.slippage, self.size,
groupId=gId, fixcommission=self.fixCommission)
t = {}
t['vtSymbol'] = entryTrade.vtSymbol
t['OpenTime'] = entryTrade.tradeTime
t['OpenPrice'] = entryTrade.price
t['Direction'] = u'Long'
t['CloseTime'] = trade.tradeTime
t['ClosePrice'] = trade.price
t['Volume'] = sellVolume
t['Profit'] = result.pnl
self.exportTradeList.append(t)
self.writeCtaLog(u'{6} [{7}:开多{0},buy:{1}]-[{8}.平多{2},sell:{3},vol:{4}],净盈亏:{5}'
.format(entryTrade.tradeTime, entryTrade.price,
trade.tradeTime, trade.price, sellVolume, result.pnl,
gId, longid, tradeid))
                            # Shrink the open fill's volume and push it back
                            entryTrade.volume = longVolume
                            longTrade.append(entryTrade)
                            sellVolume = 0
                            if gr is None:
                                resultDict[entryTrade.dt] = result
                            else:
                                # Accumulate into the grouped result
                                gr.turnover = gr.turnover + result.turnover
                                gr.commission = gr.commission + result.commission
                                gr.slippage = gr.slippage + result.slippage
                                gr.pnl = gr.pnl + result.pnl
                                gr.volume = trade.volume
                                resultDict[entryTrade.dt] = gr
                            # Remove the sell (close) fill
                            del self.tradeDict[trade.tradeID]
                    if gr is not None:
self.writeCtaLog(u'组合净盈亏:{0}'.format(gr.pnl))
self.writeCtaLog(u'-------------')
        # Compute margin usage and position size
        occupyMoney = EMPTY_FLOAT
        occupyLongVolume = EMPTY_INT
        occupyShortVolume = EMPTY_INT
        if len(longTrade) > 0:  # open long positions remain
            for t in longTrade:
                # margin used = price * lots * contract size * margin ratio
                if t.vtSymbol in BZJ_DL:
                    occupyMoney += t.price * abs(t.volume) * self.size * BZJ_DL[t.vtSymbol]
                    occupyLongVolume += abs(t.volume)
                if t.vtSymbol in BZJ_ZZ:
                    occupyMoney += t.price * abs(t.volume) * self.size * BZJ_ZZ[t.vtSymbol]
                    occupyLongVolume += abs(t.volume)
                if t.vtSymbol in BZJ_SQ:
                    occupyMoney += t.price * abs(t.volume) * self.size * BZJ_SQ[t.vtSymbol]
                    occupyLongVolume += abs(t.volume)
        if len(shortTrade) > 0:  # open short positions remain
            for t in shortTrade:
                if t.vtSymbol in BZJ_DL:
                    occupyMoney += t.price * abs(t.volume) * self.size * BZJ_DL[t.vtSymbol]
                    occupyShortVolume += abs(t.volume)
                if t.vtSymbol in BZJ_ZZ:
                    occupyMoney += t.price * abs(t.volume) * self.size * BZJ_ZZ[t.vtSymbol]
                    occupyShortVolume += abs(t.volume)
                if t.vtSymbol in BZJ_SQ:
                    occupyMoney += t.price * abs(t.volume) * self.size * BZJ_SQ[t.vtSymbol]
                    occupyShortVolume += abs(t.volume)
        # TODO: the per-symbol margin data may be inaccurate; the original version was:
        # if len(shortTrade) > 0:
        #     for t in shortTrade:
        #         occupyMoney += t.price * abs(t.volume) * self.size * 0.11
        #         occupyLongVolume += abs(t.volume)
        self.output(u'occupyLongVolume:{0},occupyShortVolume:{1}'.format(occupyLongVolume, occupyShortVolume))
        self.writeCtaLog(u'occupyLongVolume:{0},occupyShortVolume:{1}'.format(occupyLongVolume, occupyShortVolume))
        # Peak position size
        self.maxVolume = max(self.maxVolume, max(occupyLongVolume, occupyShortVolume))
        # Remaining available funds
        self.avaliable = self.capital - occupyMoney
        # Percentage of equity tied up as margin
        self.percent = round(float(occupyMoney * 100 / self.capital), 2)
        # If nothing was closed this round, just report the open position
        if not resultDict:
            if len(longTrade) > 0:
                msg = u'持多仓{0},资金占用:{1},仓位比例:{2}'.format(occupyLongVolume, occupyMoney, self.percent)
                self.output(msg)
                self.writeCtaLog(msg)
            elif len(shortTrade) > 0:
                msg = u'持空仓{0},资金占用:{1},仓位比例:{2}'.format(occupyShortVolume, occupyMoney, self.percent)
                self.output(msg)
                self.writeCtaLog(msg)
            return
        # Aggregate the round's closed-trade results
        for time, result in resultDict.items():
            if result.pnl > 0:
                self.winningResult += 1
                self.totalWinning += result.pnl
            else:
                self.losingResult += 1
                self.totalLosing += result.pnl
            self.capital += result.pnl
            self.maxCapital = max(self.capital, self.maxCapital)
            # self.maxVolume = max(self.maxVolume, result.volume)
            drawdown = self.capital - self.maxCapital
            drawdownRate = round(float(drawdown * 100 / self.maxCapital), 4)
            self.pnlList.append(result.pnl)
            self.timeList.append(time)
            self.capitalList.append(self.capital)
            self.drawdownList.append(drawdown)
            self.drawdownRateList.append(drawdownRate)
            self.totalResult += 1
            self.totalTurnover += result.turnover
            self.totalCommission += result.commission
            self.totalSlippage += result.slippage
            self.output(u'[{5}],{6} Vol:{0},盈亏:{1},回撤:{2}/{3},权益:{4}'.
                        format(abs(result.volume), result.pnl, drawdown,
                               drawdownRate, self.capital, result.groupId, time))
        # Recompute available funds and margin percentage with the new equity
        self.avaliable = self.capital - occupyMoney
        self.percent = round(float(occupyMoney * 100 / self.capital), 2)
# ----------------------------------------------------------------------
def calculateBacktestingResult(self):
"""
计算回测结果
Modified by Incense Lee
增加了支持逐步加仓的计算:
例如,前面共有6次开仓(1手开仓+5次加仓,每次1手),平仓只有1次(六手)。那么,交易次数是6次(开仓+平仓)。
暂不支持每次加仓数目不一致的核对(因为比较复杂)
增加组合的支持。(组合中,仍然按照1手逐步加仓和多手平仓的方法,即使启用了复利模式,也仍然按照这个规则,只是在计算收益时才乘以系数)
增加期初权益,每次交易后的权益,可用资金,仓位比例。
"""
self.output(u'计算回测结果')
# 首先基于回测后的成交记录,计算每笔交易的盈亏
resultDict = OrderedDict() # 交易结果记录
longTrade = [] # 未平仓的多头交易
shortTrade = [] # 未平仓的空头交易
i = 1
tradeUnit = 1
longid = EMPTY_STRING
shortid = EMPTY_STRING
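        # Matching scheme: fills are replayed in order and matched FIFO in
        # fixed units of tradeUnit. A closing fill of N units pops N
        # single-unit opens, and the N single-unit TradingResults are folded
        # into one grouped result keyed by gId.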
for tradeid in self.tradeDict.keys():
trade = self.tradeDict[tradeid]
            # Long fill
            if trade.direction == DIRECTION_LONG:
                # No open shorts yet: this is a new long open
                if not shortTrade:
                    longTrade.append(trade)
                    longid = tradeid
                # Otherwise this long fill covers shorts
                else:
                    gId = i  # trade group (several closes settle as one group)
                    gt = 1  # number of single-unit round trips in the group
                    gr = None  # grouped trading result
                    if trade.volume > tradeUnit:
                        self.writeCtaLog(u'平仓数{0},组合编号:{1}'.format(trade.volume, gId))
                        gt = int(trade.volume / tradeUnit)
for tv in range(gt):
entryTrade = shortTrade.pop(0)
result = TradingResult(entryTrade.price, trade.price, -tradeUnit,
self.rate, self.slippage, self.size,
groupId=gId, fixcommission=self.fixCommission)
if tv == 0:
if gt == 1:
resultDict[entryTrade.dt] = result
else:
gr = copy.deepcopy(result)
else:
gr.turnover = gr.turnover + result.turnover
gr.commission = gr.commission + result.commission
gr.slippage = gr.slippage + result.slippage
gr.pnl = gr.pnl + result.pnl
if tv == gt - 1:
gr.volume = trade.volume
resultDict[entryTrade.dt] = gr
t = {}
t['OpenTime'] = entryTrade.tradeTime.strftime('%Y/%m/%d %H:%M:%S')
t['OpenPrice'] = entryTrade.price
t['Direction'] = u'Short'
t['CloseTime'] = trade.tradeTime.strftime('%Y/%m/%d %H:%M:%S')
t['ClosePrice'] = trade.price
t['Volume'] = tradeUnit
t['Profit'] = result.pnl
self.exportTradeList.append(t)
self.writeCtaLog(u'{9}@{6} [{7}:开空{0},short:{1}]-[{8}:平空{2},cover:{3},vol:{4}],净盈亏:{5}'
.format(entryTrade.tradeTime, entryTrade.price,
trade.tradeTime, trade.price, tradeUnit, result.pnl,
i, shortid, tradeid, gId))
i = i + 1
                    if gr is not None:
self.writeCtaLog(u'组合净盈亏:{0}'.format(gr.pnl))
self.writeCtaLog(u'-------------')
            # Short fill
            else:
                # No open longs yet: this is a new short open
                if not longTrade:
                    shortTrade.append(trade)
                    shortid = tradeid
                # Otherwise this short fill closes longs
                else:
                    gId = i  # trade group (several closes settle as one group)
                    gt = 1  # number of single-unit round trips in the group
                    gr = None  # grouped trading result
                    if trade.volume > tradeUnit:
                        self.writeCtaLog(u'平仓数{0},组合编号:{1}'.format(trade.volume, gId))
                        gt = int(trade.volume / tradeUnit)
for tv in range(gt):
entryTrade = longTrade.pop(0)
result = TradingResult(entryTrade.price, trade.price, tradeUnit,
self.rate, self.slippage, self.size,
groupId=gId, fixcommission=self.fixCommission)
if tv == 0:
if gt == 1:
resultDict[entryTrade.dt] = result
else:
gr = copy.deepcopy(result)
else:
gr.turnover = gr.turnover + result.turnover
gr.commission = gr.commission + result.commission
gr.slippage = gr.slippage + result.slippage
gr.pnl = gr.pnl + result.pnl
if tv == gt - 1:
gr.volume = trade.volume
resultDict[entryTrade.dt] = gr
t = {}
t['OpenTime'] = entryTrade.tradeTime.strftime('%Y/%m/%d %H:%M:%S')
t['OpenPrice'] = entryTrade.price
t['Direction'] = u'Long'
t['CloseTime'] = trade.tradeTime.strftime('%Y/%m/%d %H:%M:%S')
t['ClosePrice'] = trade.price
t['Volume'] = tradeUnit
t['Profit'] = result.pnl
self.exportTradeList.append(t)
self.writeCtaLog(u'{9}@{6} [{7}:开多{0},buy:{1}]-[{8}.平多{2},sell:{3},vol:{4}],净盈亏:{5}'
.format(entryTrade.tradeTime, entryTrade.price,
trade.tradeTime, trade.price, tradeUnit, result.pnl,
i, longid, tradeid, gId))
i = i + 1
                    if gr is not None:
self.writeCtaLog(u'组合净盈亏:{0}'.format(gr.pnl))
self.writeCtaLog(u'-------------')
        # Were there any closed trades at all?
        if not resultDict:
            self.output(u'无交易结果')
            return {}
        # Then, from the per-trade results, derive the equity curve, maximum
        # drawdown and the other statistics.
        """
        initCapital = 40000     # starting equity
        capital = initCapital   # equity
        maxCapital = initCapital  # peak equity
        maxPnl = 0              # largest win
        minPnl = 0              # largest loss
        maxVolume = 1           # largest position size
        wins = 0
        totalResult = 0         # number of round trips
        totalTurnover = 0       # total turnover (contract value)
        totalCommission = 0     # total commission
        totalSlippage = 0       # total slippage
        timeList = []           # trade-time series
        pnlList = []            # per-trade pnl series
        capitalList = []        # cumulative equity series
        drawdownList = []       # drawdown series
        drawdownRateList = []   # max drawdown ratio series
        """
        drawdown = 0  # drawdown
        # Simple compounding factor: scale position size with equity multiples,
        # e.g. 1 lot at 30k, 2 lots at 60k, 4 lots at 120k
        compounding = 1
        for time, result in resultDict.items():
            # Apply simple compounding if enabled
            if self.usageCompounding:
                compounding = int(self.capital / self.initCapital)
            if result.pnl > 0:
                self.winningResult += 1
                self.totalWinning += result.pnl
            else:
                self.losingResult += 1
                self.totalLosing += result.pnl
            self.capital += result.pnl * compounding
            self.maxCapital = max(self.capital, self.maxCapital)
            self.maxVolume = max(self.maxVolume, result.volume * compounding)
            drawdown = self.capital - self.maxCapital
            drawdownRate = round(float(drawdown * 100 / self.maxCapital), 4)
            self.pnlList.append(result.pnl * compounding)
            self.timeList.append(time)
            self.capitalList.append(self.capital)
            self.drawdownList.append(drawdown)
            self.drawdownRateList.append(drawdownRate)
            self.totalResult += 1
            self.totalTurnover += result.turnover * compounding
            self.totalCommission += result.commission * compounding
            self.totalSlippage += result.slippage * compounding
# ---------------------------------------------------------------------
    def exportTradeResult(self):
        """Export the trade list to a csv file."""
        if not self.exportTradeList:
            return
        # Make sure the output directory exists
        logFolder = os.getcwd() + '/TestLogs'
        if not os.path.isdir(logFolder):
            os.mkdir(logFolder)
        csvOutputFile = logFolder + '/Output_{0}.csv'.format(datetime.now().strftime('%Y%m%d_%H%M'))
        import csv
        csvWriteFile = open(csvOutputFile, 'wb')
        fieldnames = ['vtSymbol', 'OpenTime', 'OpenPrice', 'Direction', 'CloseTime', 'ClosePrice', 'Volume', 'Profit']
        writer = csv.DictWriter(f=csvWriteFile, fieldnames=fieldnames, dialect='excel')
        writer.writeheader()
        for row in self.exportTradeList:
            writer.writerow(row)
    def getResult(self):
        """Collect the backtest statistics into a dict."""
        d = {}
        d['initCapital'] = self.initCapital  # starting equity
        d['capital'] = self.capital - self.initCapital  # net profit over the run
        d['maxCapital'] = self.maxCapital  # peak equity
        if len(self.pnlList) == 0:
            return {}
        d['maxPnl'] = max(self.pnlList)  # largest single-trade win
        d['minPnl'] = min(self.pnlList)  # largest single-trade loss
        d['maxVolume'] = self.maxVolume
        d['totalResult'] = self.totalResult
        d['totalTurnover'] = self.totalTurnover
        d['totalCommission'] = self.totalCommission
        d['totalSlippage'] = self.totalSlippage
        d['timeList'] = self.timeList
        d['pnlList'] = self.pnlList
        d['capitalList'] = self.capitalList
        d['drawdownList'] = self.drawdownList
        d['drawdownRateList'] = self.drawdownRateList
        d['winningRate'] = round(100.0 * self.winningResult / len(self.pnlList), 4)
        averageWinning = 0  # initialize the derived statistics
        averageLosing = 0
        profitLossRatio = 0
        if self.winningResult:
            averageWinning = self.totalWinning / self.winningResult  # average win per trade
        if self.losingResult:
            averageLosing = self.totalLosing / self.losingResult  # average loss per trade
        if averageLosing:
            profitLossRatio = -averageWinning / averageLosing  # profit/loss ratio
        d['averageWinning'] = averageWinning
        d['averageLosing'] = averageLosing
        d['profitLossRatio'] = profitLossRatio
        return d
# ----------------------------------------------------------------------
    def showBacktestingResult(self):
        """Print the backtest statistics and plot the results."""
if self.calculateMode != self.REALTIME_MODE:
self.calculateBacktestingResult()
d = self.getResult()
if len(d) == 0:
self.output(u'无交易结果')
return
        # Export the trade list
        self.exportTradeResult()
        # Print the summary
self.output('-' * 30)
self.output(u'第一笔交易:\t%s' % d['timeList'][0])
self.output(u'最后一笔交易:\t%s' % d['timeList'][-1])
self.output(u'总交易次数:\t%s' % formatNumber(d['totalResult']))
self.output(u'期初资金:\t%s' % formatNumber(d['initCapital']))
self.output(u'总盈亏:\t%s' % formatNumber(d['capital']))
self.output(u'资金最高净值:\t%s' % formatNumber(d['maxCapital']))
self.output(u'每笔最大盈利:\t%s' % formatNumber(d['maxPnl']))
self.output(u'每笔最大亏损:\t%s' % formatNumber(d['minPnl']))
self.output(u'净值最大回撤: \t%s' % formatNumber(min(d['drawdownList'])))
self.output(u'净值最大回撤率: \t%s' % formatNumber(min(d['drawdownRateList'])))
self.output(u'胜率:\t%s' % formatNumber(d['winningRate']))
self.output(u'盈利交易平均值\t%s' % formatNumber(d['averageWinning']))
self.output(u'亏损交易平均值\t%s' % formatNumber(d['averageLosing']))
self.output(u'盈亏比:\t%s' % formatNumber(d['profitLossRatio']))
self.output(u'最大持仓:\t%s' % formatNumber(d['maxVolume']))
self.output(u'平均每笔盈利:\t%s' % formatNumber(d['capital'] / d['totalResult']))
self.output(u'盈利总额:\t%s' % formatNumber(d['capital']))
self.output(u'平均每笔滑点成本:\t%s' % formatNumber(d['totalSlippage'] / d['totalResult']))
self.output(u'滑点成本总额:\t%s' % formatNumber(d['totalSlippage']))
self.output(u'平均每笔佣金:\t%s' % formatNumber(d['totalCommission'] / d['totalResult']))
self.output(u'佣金总额:\t%s' % formatNumber(d['totalCommission']))
        # Plot equity curve, drawdown and per-trade pnl histogram
        import matplotlib.pyplot as plt
pCapital = plt.subplot(3, 1, 1)
pCapital.set_ylabel("capital")
pCapital.plot(d['capitalList'])
pDD = plt.subplot(3, 1, 2)
pDD.set_ylabel("DD")
pDD.bar(range(len(d['drawdownList'])), d['drawdownList'])
pPnl = plt.subplot(3, 1, 3)
pPnl.set_ylabel("pnl")
pPnl.hist(d['pnlList'], bins=50)
plt.show()
# ----------------------------------------------------------------------
    def putStrategyEvent(self, name):
        """Strategy update event; ignored during backtesting."""
        pass
    # ----------------------------------------------------------------------
    def setSlippage(self, slippage):
        """Set the slippage in price points."""
        self.slippage = slippage
    # ----------------------------------------------------------------------
    def setSize(self, size):
        """Set the contract size (multiplier)."""
        self.size = size
    # ----------------------------------------------------------------------
    def setRate(self, rate):
        """Set the commission rate."""
        self.rate = float(rate)
# ----------------------------------------------------------------------
    def runOptimization(self, strategyClass, optimizationSetting):
        """Run a brute-force parameter optimization."""
        # Expand the optimization settings
        settingList = optimizationSetting.generateSetting()
        targetName = optimizationSetting.optimizeTarget
        # Validate the settings
        if not settingList or not targetName:
            self.output(u'优化设置有问题,请检查')
            return
        # Iterate over every parameter combination
        resultList = []
        for setting in settingList:
            self.clearBacktestingResult()
            self.output('-' * 30)
            self.output('setting: %s' % str(setting))
            self.initStrategy(strategyClass, setting)
            self.runBacktesting()
            d = self.calculateBacktestingResult()
            try:
                targetValue = d[targetName]
            except KeyError:
                targetValue = 0
            resultList.append(([str(setting)], targetValue))
        # Show the ranked results
        resultList.sort(reverse=True, key=lambda result: result[1])
        self.output('-' * 30)
        self.output(u'优化结果:')
        for result in resultList:
            self.output(u'%s: %s' % (result[0], result[1]))
# ----------------------------------------------------------------------
    def clearBacktestingResult(self):
        """Reset state left over from a previous backtest."""
        # Limit-order state
        self.limitOrderCount = 0
        self.limitOrderDict.clear()
        self.workingLimitOrderDict.clear()
        # Stop-order state
        self.stopOrderCount = 0
        self.stopOrderDict.clear()
        self.workingStopOrderDict.clear()
        # Trade state
        self.tradeCount = 0
        self.tradeDict.clear()
    # add by xy 14 Aug 2017
    # margin required for one lot at the given price
    def moneyPerLot(self, price, symbol):
        oneLotM = None
        if symbol in BZJ_DL:
            oneLotM = price * self.size * BZJ_DL[symbol]
        if symbol in BZJ_ZZ:
            oneLotM = price * self.size * BZJ_ZZ[symbol]
        if symbol in BZJ_SQ:
            oneLotM = price * self.size * BZJ_SQ[symbol]
        return oneLotM
########################################################################
class TradingResult(object):
    """The result of one round-trip trade."""
    # ----------------------------------------------------------------------
    def __init__(self, entry, exit, volume, rate, slippage, size, groupId, fixcommission=EMPTY_FLOAT):
        """Constructor"""
        self.entry = entry  # entry price
        self.exit = exit  # exit price
        self.volume = volume  # traded volume (sign encodes direction)
        self.groupId = groupId  # master trade id (for multi-lot closes)
        self.turnover = (self.entry + self.exit) * size * abs(volume)  # turnover
        if fixcommission:
            self.commission = fixcommission * abs(self.volume)
        else:
            self.commission = round(float(self.turnover * rate), 4)  # commission cost
        self.slippage = slippage * 2 * size * abs(volume)  # slippage cost (both sides)
        self.pnl = ((self.exit - self.entry) * volume * size
                    - self.commission - self.slippage)  # net pnl
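    # Illustrative example (values assumed): entry=100, exit=110, volume=1,
    # rate=0.0003, slippage=0.2, size=300 =>
    #   turnover   = (100 + 110) * 300 * 1 = 63000
    #   commission = 63000 * 0.0003 = 18.9
    #   slippage   = 0.2 * 2 * 300 * 1 = 120
    #   pnl        = (110 - 100) * 1 * 300 - 18.9 - 120 = 2861.1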
########################################################################
class OptimizationSetting(object):
    """Optimization settings."""
    # ----------------------------------------------------------------------
    def __init__(self):
        """Constructor"""
        self.paramDict = OrderedDict()
        self.optimizeTarget = ''  # name of the statistic to optimize
    # ----------------------------------------------------------------------
    def addParameter(self, name, start, end, step):
        """Add a parameter to optimize over [start, end] with the given step."""
        if end <= start:
            print u'参数起始点必须小于终止点'
            return
        if step <= 0:
            print u'参数步进必须大于0'
            return
        l = []
        param = start
        while param <= end:
            l.append(param)
            param += step
        self.paramDict[name] = l
# ----------------------------------------------------------------------
    def generateSetting(self):
        """Generate every parameter combination (Cartesian product)."""
        # Parameter names and their candidate value lists
        nameList = self.paramDict.keys()
        paramList = self.paramDict.values()
        # itertools.product yields every combination of candidate values
        productList = list(product(*paramList))
        # Pack each combination into a {name: value} dict
        settingList = []
        for p in productList:
            d = dict(zip(nameList, p))
            settingList.append(d)
        return settingList
# ----------------------------------------------------------------------
def setOptimizeTarget(self, target):
"""设置优化目标字段"""
self.optimizeTarget = target
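    # A minimal usage sketch (assumes a BacktestingEngine `engine` and a
    # strategy class already exist; the parameter name 'fastWindow' and the
    # target 'capital' are hypothetical):
    #
    #     setting = OptimizationSetting()
    #     setting.addParameter('fastWindow', 5, 20, 5)  # tries 5, 10, 15, 20
    #     setting.setOptimizeTarget('capital')
    #     engine.runOptimization(DoubleEmaDemo, setting)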
# ----------------------------------------------------------------------
def formatNumber(n):
"""格式化数字到字符串"""
n = round(n, 2) # 保留两位小数
return format(n, ',') # 加上千分符
if __name__ == '__main__':
    # The following is a demo backtesting script; adapt it to your own needs.
    # Running it in ipython notebook or spyder is recommended, but it can
    # also be run line by line in an interactive session.
from ctaDemo import *
    # Create the backtesting engine
    engine = BacktestingEngine()
    # Set the engine to bar (candlestick) mode
    engine.setBacktestingMode(engine.BAR_MODE)
    # Set the start date of the backtest data
    engine.setStartDate('20110101')
    # Load historical data into the engine
    engine.setDatabase(MINUTE_DB_NAME, 'IF0000')
    # Set product-related parameters
    engine.setSlippage(0.2)      # one tick of the index future
    engine.setRate(0.3 / 10000)  # commission of 0.3 per 10,000
    engine.setSize(300)          # index futures contract multiplier
    # Create the strategy object inside the engine
    engine.initStrategy(DoubleEmaDemo, {})
    # Run the backtest
    engine.runBacktesting()
    # Show the backtest results
    # In spyder or ipython notebook a P&L curve window pops up;
    # running from the command line only prints the summary figures
engine.showBacktestingResult()
|
kanchenxi04/vnpy-app
|
vn.trader/ctaAlgo/ctaBacktesting.py
|
Python
|
mit
| 105,820
|
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
import json
import time
def keys(filename):
with open(filename) as f:
secret_keys = json.load(f)
return secret_keys
def access_listpage(secret_keys, driver):
driver.get("https://wine.wul.waseda.ac.jp/patroninfo*jpn")
time.sleep(5)
elem = driver.find_element_by_name("extpatid")
elem.send_keys(secret_keys["user"])
elem2 = driver.find_element_by_name("extpatpw")
elem2.send_keys(secret_keys["pw"])
submit = driver.find_element_by_link_text("送信")
submit.click()
time.sleep(5)
list_all = driver.find_element_by_partial_link_text("貸出中です")
list_all.click()
time.sleep(5)
def send_extension(driver):
"""
前提:借りている書籍のリストにいること
"""
driver.find_element_by_name("requestRenewAll").click()
time.sleep(5)
driver.find_element_by_name("renewall").click()
time.sleep(5)
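# A minimal driver sketch (assumptions: a JSON file "keys.json" holding
# {"user": ..., "pw": ...} and a locally installed Firefox/geckodriver;
# both names are hypothetical, not part of the original script):
#
#     if __name__ == '__main__':
#         secret_keys = keys('keys.json')
#         driver = webdriver.Firefox()
#         access_listpage(secret_keys, driver)
#         send_extension(driver)
#         driver.quit()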
|
Accent-/WasedaU_Library
|
wine.py
|
Python
|
mit
| 1,004
|
#!/usr/bin/python
from flask import Flask, session, render_template, url_for, redirect, request, flash, g
from flask.ext import assets
import pyxb
import json
import os
import paypalrestsdk
app = Flask(__name__)
paypal_client_id = "AacMHTvbcCGRzaeuHY6i6zwqGvveuhN4X_2sZ2mZJi76ZGtSZATh7XggfVuVixzyrRuG-bJTLOJIXltg"
paypal_client_secret = "EOLqrOVlYbzBeQIXIu_lQiB2Idh7fpK71hemdmlrfV1UwkW9EfDIuHOYS9lZYcxDKj4BzKO08b-CdDt9"
#Assets
env = assets.Environment(app)
env.load_path = [
os.path.join(os.path.dirname(__file__), 'assets')
]
env.register (
'js_all',
assets.Bundle(
'js/jquery.js',
'js/bootstrap.min.js',
'js/moment-with-locales.min.js',
'js/bootstrap-datetimepicker.min.js',
'js/slider.js',
'js/amounts.js',
'js/landing.js',
output='js_all.js'
)
)
env.register(
'css_all',
assets.Bundle(
'css/bootstrap.min.css',
'css/bootstrap-datetimepicker.min.css',
'css/slider.css',
'css/landing-page.css',
output='css_all.css'
)
)
# Paypal lib
paypalrestsdk.configure(
mode="sandbox", # sandbox or live
client_id=paypal_client_id,
client_secret= paypal_client_secret
)
@app.route('/')
def index():
return render_template('index.html')
@app.route('/payment/donation/create', methods=["POST"])
def paypal_process():
amount = 0
    categories = {
        'amount-homeless': 'homeless people',
        'amount-refugees': 'refugees',
        'amount-orphans': 'orphans',
        'amount-poverished': 'impoverished people'
    }
items = []
    for key, value in categories.iteritems():
        amount += float(request.form[key])
        # Form values arrive as strings, so compare numerically
        if float(request.form[key]) != 0:
items.append({
"name": "Donation to " + value,
"price": "%.2f" % float(request.form[key]),
"currency": "GBP",
"quantity": 1
})
if amount == 0:
raise Exception("Invalid amount")
# Payment
# A Payment Resource; create one using
# the above types and intent as 'sale'
payment = paypalrestsdk.Payment({
"intent": "sale",
# Payer
# A resource representing a Payer that funds a payment
# Payment Method as 'paypal'
"payer": {
"payment_method": "paypal"},
# Redirect URLs
"redirect_urls": {
"return_url": "http://localhost:5000/payment/donation/done",
"cancel_url": "http://localhost:5000/"},
# Transaction
# A transaction defines the contract of a
# payment - what is the payment for and who
# is fulfilling it.
"transactions": [{
# ItemList
"item_list": {
"items": items
},
# Amount
# Let's you specify a payment amount.
"amount": {
"total": "%.2f" % amount,
"currency": "GBP"
},
"description": "Donation to Railaid"
}]
})
print(payment)
# Create Payment and return status
if payment.create():
print("Payment[%s] created successfully" % (payment.id))
# Redirect the user to given approval url
for link in payment.links:
if link.method == "REDIRECT":
# Convert to str to avoid google appengine unicode issue
# https://github.com/paypal/rest-api-sdk-python/pull/58
redirect_url = str(link.href)
return redirect(redirect_url)
else:
print(payment.error)
@app.route('/payment/donation/done')
def paypal_success():
# Don't know what to do with it for now
payment_id = request.args.get('paymentId')
payment = paypalrestsdk.Payment.find(payment_id)
    print(payment.transactions[0].amount.total)
return "Thank you for your donation of " + payment.transactions[0].amount.total + "!"
# @app.route('/search/tickets')
# def search_tickets():
# p1 = Passenger(age=30)
#
# tp1 = TravelPoint(
# origin="GBQQU",
# destination="GBQQM",
# departure=datetime(2015, 11, 23, 8))
#
# fq = FareSearch(
# travel_points = [tp1],
# fare_filter = FARE_FILTER.CHEAPEST,
# passengers = [p1])
#
# fares_result = sc.search_fare(fq)
# fr = fares_result.results
# print(fr)
# return render_template('search-result.html', data=fr)
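# A quick manual test of the donation endpoint (hypothetical amounts; the
# field names come from the `categories` dict above):
#
#     curl -X POST http://localhost:5000/payment/donation/create \
#          -d 'amount-homeless=5' -d 'amount-refugees=0' \
#          -d 'amount-orphans=0' -d 'amount-poverished=0'
#
# On success the response is a 302 redirect to the PayPal approval URL.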
if __name__ == '__main__':
app.run(debug=True)
|
IshavanBaar/railaid
|
app.py
|
Python
|
mit
| 4,184
|
#!/usr/bin/env python3
from .proc_base import ProcBase
class ProcMaps(ProcBase):
'''Object represents the /proc/[pid]/maps file.'''
def __init__(self, pid):
'''
Read file by calling base class constructor
which populates self.content. This file is
already ASCII printable, so no further
parsing is needed.
'''
super().__init__('/proc/{0}/maps'.format(pid))
def dump(self):
'''Print information gathered to stdout.'''
super().dump() # Print file header
if self.content:
print(self.content)
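# A usage sketch (assumes a Linux host and permission to read
# /proc/<pid>/maps, e.g. for your own process):
#
#     import os
#     ProcMaps(os.getpid()).dump()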
|
EwanC/pyProc
|
proc_scraper/proc_maps.py
|
Python
|
mit
| 600
|
#!/usr/bin/env python
"""
Reduce samples that have too high energies by comparing
between the same group of samples.
The group is defined according to the name of directory before the last 5 digits.
For example, the directory `smpl_XX_YYYYYY_#####` where `#####` is the
last 5 digits and the group name would be `smpl_XX_YYYYY`.
Usage:
reduce_high_energy_samples.py [options] DIRS...
Options:
-h,--help Show this message and exit.
-o OUT Output file name. [default: out.high_energy_samples]
--threshold=THRESHOLD
Threshold of energy/atom that determines high energy samples.
[default: 1.0]
"""
from __future__ import print_function
import os,sys
from docopt import docopt
from datetime import datetime
from nappy.napsys import NAPSystem
__author__ = "RYO KOBAYASHI"
__version__ = "160727"
def get_obsolete_dirname():
prefix = "obsolete_"
today = datetime.today()
return prefix+today.strftime("%y%m%d")
def get_groups(smpldirs):
groups = {}
    ns = len(smpldirs)
    # Print a progress dot roughly every 1% of the samples
    if ns < 100:
        ms = 1
    else:
        ms = ns // 100
for i,s in enumerate(smpldirs):
if i%ms == 0:
print('.',end=".")
try:
with open(s+'/erg.ref','r') as f:
erg = float(f.readline())
        except Exception:
print('Failed to read erg.ref, so skip '+s)
continue
        key = s[:-6]  # strip the trailing '_#####' to get the group name
if not key in groups:
groups[key] = []
groups[key].append([s,erg])
print('')
return groups
def get_list_high_energy(gsmpls,threshold):
emin = 1e+30
highsmpls = []
ergs = []
for i,s in enumerate(gsmpls):
smpldir = s[0]
erg = s[1]
#atoms = read(smpldir+'/POSCAR',format='vasp')
atoms = NAPSystem(fname=smpldir+"/pos",format='pmd')
natm = atoms.num_atoms()
erg /= natm
ergs.append(erg)
emin = min(erg,emin)
for i,s in enumerate(gsmpls):
smpldir = s[0]
erg = ergs[i]
if erg-emin > threshold:
highsmpls.append(smpldir)
return highsmpls
if __name__ == "__main__":
args = docopt(__doc__)
smpldirs = args['DIRS']
outfname = args['-o']
threshold = float(args['--threshold'])
print('grouping samples...')
groups = get_groups(smpldirs)
print('looking for high-energy samples...')
highsmpls = []
for g,smpls in groups.items():
print('.',end='')
highsmpls.extend(get_list_high_energy(smpls,threshold))
print('')
with open(outfname,'w') as f:
for s in highsmpls:
f.write(s+'\n')
print('number of samples to be reduced = ',len(highsmpls))
print('check '+outfname+' and run the following commands:')
print('')
# obsdir = get_obsolete_dirname()
# print(' mkdir '+obsdir)
# print(' for d in `cat '+outfname+'`; do mv $d '+obsdir
# +'/; done')
# print('')
|
ryokbys/nap
|
nappy/fitpot/reduce_high_energy_samples.py
|
Python
|
mit
| 2,920
|
from os import makedirs
from os.path import join
from posix import listdir
from django.conf import settings
from django.core.management.base import BaseCommand
from libavwrapper.avconv import Input, Output, AVConv
from libavwrapper.codec import AudioCodec, NO_VIDEO
from 匯入.族語辭典 import 代碼對應
class Command(BaseCommand):
def add_arguments(self, parser):
parser.add_argument(
'語言',
type=str,
            help='The indigenous language to convert'
)
def handle(self, *args, **參數):
        # TODO: check whether avconv is installed
代碼 = 代碼對應[參數['語言']]
語料目錄 = join(settings.BASE_DIR, '語料', '族語辭典', 代碼)
目標目錄 = join(settings.BASE_DIR, '語料', '族語辭典wav', 代碼)
makedirs(目標目錄, exist_ok=True)
for 檔名 in sorted(listdir(語料目錄)):
if 檔名.endswith('.mp3'):
來源 = join(語料目錄, 檔名)
目標 = join(目標目錄, 檔名[:-4] + '.wav')
目標聲音格式 = AudioCodec('pcm_s16le')
目標聲音格式.channels(1)
目標聲音格式.frequence(16000)
原始檔案 = Input(來源)
網頁檔案 = Output(目標).overwrite()
指令 = AVConv('avconv', 原始檔案, 目標聲音格式, NO_VIDEO, 網頁檔案)
程序 = 指令.run()
程序.wait()
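                # The wrapper above builds roughly this command line
                # (paths illustrative, flag mapping assumed from libavwrapper):
                #   avconv -i src.mp3 -acodec pcm_s16le -ac 1 -ar 16000 -vn -y dst.wav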
|
sih4sing5hong5/hue7jip8
|
匯入/management/commands/族語辭典1轉檔.py
|
Python
|
mit
| 1,474
|
import numpy as np
import numpy.random as rng
import theano
import theano.tensor as T
from theano.tensor.nnet import conv2d
minibatch = 3
image_height,image_width = 28,28
filter_height,filter_width = 3,3
n_filters = 1
n_channels = 1
n = 1/(np.sqrt(image_height*image_width))  # scale factor for weight initialization
X = T.tensor4(name='X')
X_shape = (minibatch,n_channels,image_height,image_width)
W_shape = (n_filters,n_channels,filter_height,filter_width)
W = theano.shared(n*rng.randn(*W_shape),name='W')
conv_out = conv2d(X,
W,
input_shape=X_shape,
filter_shape=W_shape,
border_mode='valid')
f = theano.function([X],[conv_out])
# Random integer "image" data, cast to the tensor's float dtype so the
# compiled function accepts it.
X_data = np.array(rng.randint(low=0,high=256,size=X_shape)).astype(theano.config.floatX)
conv_out = f(X_data)
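# Sanity check (a sketch): with 'valid' border mode the output spatial size
# is input - filter + 1, so the shape should be (3, 1, 26, 26).
print(conv_out[0].shape)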
|
nzufelt/theano_nn
|
min_work_ex.py
|
Python
|
mit
| 757
|
from .max import max
from pyramda.private.asserts import assert_equal
def max_test():
assert_equal(max([1, 3, 4, 2]), 4)
|
jackfirth/pyramda
|
pyramda/relation/max_test.py
|
Python
|
mit
| 127
|
import __settings__
from __settings__ import INSTALLED_APPS
assert hasattr(__settings__, 'BASE_DIR'), 'BASE_DIR required'
INSTALLED_APPS += (
'post',
)
|
novafloss/django-compose-settings
|
tests/fixtures/my_app/settings/post.py
|
Python
|
mit
| 161
|
import _plotly_utils.basevalidators
class MetaValidator(_plotly_utils.basevalidators.AnyValidator):
def __init__(self, plotly_name="meta", parent_name="surface", **kwargs):
super(MetaValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
array_ok=kwargs.pop("array_ok", True),
edit_type=kwargs.pop("edit_type", "plot"),
role=kwargs.pop("role", "info"),
**kwargs
)
|
plotly/python-api
|
packages/python/plotly/plotly/validators/surface/_meta.py
|
Python
|
mit
| 480
|
from typing import List
from backend.common.cache_clearing import get_affected_queries
from backend.common.manipulators.manipulator_base import ManipulatorBase
from backend.common.models.cached_model import TAffectedReferences
from backend.common.models.media import Media
class MediaManipulator(ManipulatorBase[Media]):
"""
Handle Media database writes.
"""
@classmethod
def getCacheKeysAndQueries(
cls, affected_refs: TAffectedReferences
) -> List[get_affected_queries.TCacheKeyAndQuery]:
return get_affected_queries.media_updated(affected_refs)
@classmethod
def updateMerge(
cls, new_model: Media, old_model: Media, auto_union: bool = True
) -> Media:
cls._update_attrs(new_model, old_model, auto_union)
return old_model
|
the-blue-alliance/the-blue-alliance
|
src/backend/common/manipulators/media_manipulator.py
|
Python
|
mit
| 805
|
import _plotly_utils.basevalidators
class SeparatethousandsValidator(_plotly_utils.basevalidators.BooleanValidator):
def __init__(
self,
plotly_name="separatethousands",
parent_name="scattermapbox.marker.colorbar",
**kwargs
):
super(SeparatethousandsValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop("edit_type", "calc"),
**kwargs
)
|
plotly/plotly.py
|
packages/python/plotly/plotly/validators/scattermapbox/marker/colorbar/_separatethousands.py
|
Python
|
mit
| 487
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from .health_evaluation import HealthEvaluation
class ApplicationsHealthEvaluation(HealthEvaluation):
"""Represents health evaluation for applications, containing health
evaluations for each unhealthy application that impacted current aggregated
health state.
:param aggregated_health_state: Possible values include: 'Invalid', 'Ok',
'Warning', 'Error', 'Unknown'
:type aggregated_health_state: str or :class:`enum
<azure.servicefabric.models.enum>`
:param description: Description of the health evaluation, which represents
a summary of the evaluation process.
:type description: str
:param kind: Polymorphic Discriminator
:type kind: str
:param max_percent_unhealthy_applications: Maximum allowed percentage of
unhealthy applications from the ClusterHealthPolicy.
:type max_percent_unhealthy_applications: int
:param total_count: Total number of applications from the health store.
:type total_count: long
    :param unhealthy_evaluations: List of unhealthy evaluations that led to
     the aggregated health state.
:type unhealthy_evaluations: list of :class:`HealthEvaluationWrapper
<azure.servicefabric.models.HealthEvaluationWrapper>`
"""
_validation = {
'kind': {'required': True},
}
_attribute_map = {
'aggregated_health_state': {'key': 'AggregatedHealthState', 'type': 'str'},
'description': {'key': 'Description', 'type': 'str'},
'kind': {'key': 'Kind', 'type': 'str'},
'max_percent_unhealthy_applications': {'key': 'MaxPercentUnhealthyApplications', 'type': 'int'},
'total_count': {'key': 'TotalCount', 'type': 'long'},
'unhealthy_evaluations': {'key': 'UnhealthyEvaluations', 'type': '[HealthEvaluationWrapper]'},
}
def __init__(self, aggregated_health_state=None, description=None, max_percent_unhealthy_applications=None, total_count=None, unhealthy_evaluations=None):
super(ApplicationsHealthEvaluation, self).__init__(aggregated_health_state=aggregated_health_state, description=description)
self.max_percent_unhealthy_applications = max_percent_unhealthy_applications
self.total_count = total_count
self.unhealthy_evaluations = unhealthy_evaluations
self.kind = 'Applications'
|
AutorestCI/azure-sdk-for-python
|
azure-servicefabric/azure/servicefabric/models/applications_health_evaluation.py
|
Python
|
mit
| 2,699
|
import chainer
from chainer import cuda
import chainer.utils
class Send(chainer.Function):
"""Send elements to target process."""
def __init__(self, comm, peer_rank, peer_tag):
chainer.utils.experimental('chainermn.functions.Send')
self.comm = comm
self.peer_rank = peer_rank
self.peer_tag = peer_tag
@property
def label(self):
return "{} (peer_rank: {})".format(
self.__class__.__name__,
self.peer_rank)
def forward(self, inputs):
xp = cuda.get_array_module(*inputs)
# The last input is dummy variable, to retain gradient computation
# of this function.
xs = inputs[:-1]
if len(xs) == 1:
xs = xs[0]
self.comm.send(xs, self.peer_rank, self.peer_tag)
# Return an empty variable, which serves as "delegate_variable."
return xp.array([], dtype=xp.float32),
def backward(self, inputs, grad_outputs):
xp = cuda.get_array_module(*inputs)
dummy_grad = xp.array([], dtype=xp.float32)
grad = self.comm.recv(self.peer_rank, self.peer_tag)
if isinstance(grad, tuple):
return tuple([xp.array(gy) for gy in grad] + [dummy_grad])
else:
return xp.array(grad), dummy_grad
class Recv(chainer.Function):
"""Receive elements from target process."""
def __init__(self, comm, peer_rank, peer_tag):
chainer.utils.experimental('chainermn.functions.Recv')
self.comm = comm
self.peer_rank = peer_rank
self.peer_tag = peer_tag
def __call__(self, *inputs):
xp = cuda.get_array_module(*inputs)
if inputs == ():
# Expected to be invoked without any args in usual case.
if chainer.__version__.startswith('1.'):
# For backward compatibility.
dummy_var = chainer.Variable(
xp.array([], dtype=xp.float32),
volatile='auto')
else:
# This variable is necessary to backprop correctly
# in Chainer v2. This trick relies on the fact
# chainer.Variable.requires_grad is True by default
# in Chainer v2.0.0.
dummy_var = chainer.Variable(xp.array([], dtype=xp.float32))
dummy_var.name = 'dummy_var'
return super(Recv, self).__call__(dummy_var)
else:
# Used for retaining computational graph.
return super(Recv, self).__call__(*inputs)
@property
def label(self):
return "{} (peer_rank: {})".format(
self.__class__.__name__,
self.peer_rank)
def forward(self, inputs):
data = self.comm.recv(self.peer_rank, self.peer_tag)
if not isinstance(data, tuple):
data = tuple([data])
return data
def backward(self, inputs, grad_outputs):
xp = cuda.get_array_module(*inputs)
self.comm.send(grad_outputs, self.peer_rank, self.peer_tag)
# dummy_var is needed to maintain Chainer's constraint.
if inputs == ():
dummy_var = tuple([xp.array([], dtype=xp.float32)])
else:
dummy_var = tuple([xp.zeros(x.shape, dtype=xp.float32)
for x in inputs])
return dummy_var
def send(x, communicator, rank, tag=0):
"""Send elements to target process.
This function returns a dummy variable only holding the computational
graph. If ``backward()`` is invoked by this dummy variable, it will
try to receive gradients from the target process and send them back
to the parent nodes.
Args:
x (Variable): Variable holding a matrix which you would like to send.
communicator (chainer.communicators.CommunicatorBase):
ChainerMN communicator.
rank (int): Target process specifier.
tag (int): Optional message ID (MPI feature).
Returns:
~chainer.Variable:
A dummy variable with no actual data, only holding the
            computational graph. Please refer to
            ``chainermn.functions.pseudo_connect`` for details.
"""
chainer.utils.experimental('chainermn.functions.send')
if rank == communicator.rank:
raise ValueError(
'rank must be different from communicator rank, '
'otherwise deadlock occurs')
xp = cuda.get_array_module(*x)
    # Dummy variable to retain gradient computation of send,
    # otherwise the corresponding recv will cause deadlock in backward
    # in the case where none of the inputs to this function requires grad.
dummy_var = chainer.Variable(xp.array([], dtype=xp.float32))
if isinstance(x, list) or isinstance(x, tuple):
inputs = x + type(x)([dummy_var])
delegate_variable = Send(
communicator, peer_rank=rank, peer_tag=tag)(*inputs)
else:
delegate_variable = Send(
communicator, peer_rank=rank, peer_tag=tag)(x, dummy_var)
delegate_variable.name = 'delegate_variable'
return delegate_variable
def recv(communicator, rank, delegate_variable=None, tag=0, force_tuple=False):
"""Receive elements from target process.
This function returns data received from target process. If ``backward()``
is invoked, it will try to send gradients to the target process.
The received array will be on the current CUDA device if the corresponding
``send()`` is invoked with arrays on GPU.
    Please be aware that the current CUDA device is the intended one.
(``https://docs-cupy.chainer.org/en/stable/tutorial/basic.html#current-device``)
.. note::
If you define non-connected computational graph on one process,
you have to use ``delegate_variable`` to specify the output of
previous computational graph component.
Otherwise ``backward()`` does not work well.
        Please refer to ``chainermn.functions.pseudo_connect`` for details.
Args:
communicator (chainer.communicators.CommunicatorBase):
ChainerMN communicator.
rank (int): Target process specifier.
delegate_variable (chainer.Variable):
Pointer to the other non-connected component.
tag (int): Optional message ID (MPI feature).
force_tuple (bool): If ``False`` (the default) a Variable will be
returned when the number of outputs is one. Otherwise, this
method returns a tuple even when the number of outputs is one.
Returns:
~chainer.Variable:
Data received from target process. If ``backward()`` is invoked
by this variable, it will send gradients to the target process.
"""
chainer.utils.experimental('chainermn.functions.recv')
if rank == communicator.rank:
raise ValueError(
'rank must be different from communicator rank, '
'otherwise deadlock occurs')
if delegate_variable is None:
res = Recv(
communicator,
peer_rank=rank,
peer_tag=tag)()
else:
delegate_variable.name = 'delegate_variable'
res = Recv(
communicator,
peer_rank=rank,
peer_tag=tag)(delegate_variable)
if force_tuple and not isinstance(res, tuple):
return tuple([res])
else:
return res
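# A minimal model-parallel sketch (assumes two MPI processes and a
# communicator from chainermn.create_communicator(); `model0`/`model1` are
# hypothetical chainer models living on rank 0 and rank 1):
#
#     if comm.rank == 0:
#         h = model0(x)
#         phi = send(h, comm, rank=1)   # returns the delegate variable
#     else:
#         h = recv(comm, rank=0)
#         loss = model1(h)
#         loss.backward()               # gradients flow back through send/recv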
|
rezoo/chainer
|
chainermn/functions/point_to_point_communication.py
|
Python
|
mit
| 7,375
|
from bottle import Bottle, run
app = Bottle()
@app.route('/')
def index():
return 'PService Running'
#
# Start a server instance
#
run(
app, # Run |app| Bottle() instance
host = '0.0.0.0',
port = 8080,
    reloader = True, # restart the server whenever a module file is edited
    debug = True     # comment this out before deploying
)
|
Kjuly/iPokeMon-Server
|
test.py
|
Python
|
mit
| 417
|
from __future__ import absolute_import, print_function, division
from petl.test.helpers import ieq
from petl.util import expr, empty, coalesce
from petl.transform.basics import cut, cat, addfield, rowslice, head, tail, \
cutout, skipcomments, annex, addrownumbers, addcolumn, \
addfieldusingcontext, movefield, stack
def test_cut():
table = (('foo', 'bar', 'baz'),
('A', 1, 2),
('B', '2', '3.4'),
(u'B', u'3', u'7.8', True),
('D', 'xyz', 9.0),
('E', None))
cut1 = cut(table, 'foo')
expectation = (('foo',),
('A',),
('B',),
(u'B',),
('D',),
('E',))
ieq(expectation, cut1)
cut2 = cut(table, 'foo', 'baz')
expectation = (('foo', 'baz'),
('A', 2),
('B', '3.4'),
(u'B', u'7.8'),
('D', 9.0),
('E', None))
ieq(expectation, cut2)
cut3 = cut(table, 0, 2)
expectation = (('foo', 'baz'),
('A', 2),
('B', '3.4'),
(u'B', u'7.8'),
('D', 9.0),
('E', None))
ieq(expectation, cut3)
cut4 = cut(table, 'bar', 0)
expectation = (('bar', 'foo'),
(1, 'A'),
('2', 'B'),
(u'3', u'B'),
('xyz', 'D'),
(None, 'E'))
ieq(expectation, cut4)
cut5 = cut(table, ('foo', 'baz'))
expectation = (('foo', 'baz'),
('A', 2),
('B', '3.4'),
(u'B', u'7.8'),
('D', 9.0),
('E', None))
ieq(expectation, cut5)
def test_cut_empty():
table = (('foo', 'bar'),)
expect = (('bar',),)
actual = cut(table, 'bar')
ieq(expect, actual)
def test_cutout():
table = (('foo', 'bar', 'baz'),
('A', 1, 2),
('B', '2', '3.4'),
(u'B', u'3', u'7.8', True),
('D', 'xyz', 9.0),
('E', None))
cut1 = cutout(table, 'bar', 'baz')
expectation = (('foo',),
('A',),
('B',),
(u'B',),
('D',),
('E',))
ieq(expectation, cut1)
cut2 = cutout(table, 'bar')
expectation = (('foo', 'baz'),
('A', 2),
('B', '3.4'),
(u'B', u'7.8'),
('D', 9.0),
('E', None))
ieq(expectation, cut2)
cut3 = cutout(table, 1)
expectation = (('foo', 'baz'),
('A', 2),
('B', '3.4'),
(u'B', u'7.8'),
('D', 9.0),
('E', None))
ieq(expectation, cut3)
def test_cat():
table1 = (('foo', 'bar'),
(1, 'A'),
(2, 'B'))
table2 = (('bar', 'baz'),
('C', True),
('D', False))
cat1 = cat(table1, table2, missing=None)
expectation = (('foo', 'bar', 'baz'),
(1, 'A', None),
(2, 'B', None),
(None, 'C', True),
(None, 'D', False))
ieq(expectation, cat1)
# how does cat cope with uneven rows?
table3 = (('foo', 'bar', 'baz'),
('A', 1, 2),
('B', '2', '3.4'),
(u'B', u'3', u'7.8', True),
('D', 'xyz', 9.0),
('E', None))
cat3 = cat(table3, missing=None)
expectation = (('foo', 'bar', 'baz'),
('A', 1, 2),
('B', '2', '3.4'),
(u'B', u'3', u'7.8'),
('D', 'xyz', 9.0),
('E', None, None))
ieq(expectation, cat3)
# cat more than two tables?
cat4 = cat(table1, table2, table3)
expectation = (('foo', 'bar', 'baz'),
(1, 'A', None),
(2, 'B', None),
(None, 'C', True),
(None, 'D', False),
('A', 1, 2),
('B', '2', '3.4'),
(u'B', u'3', u'7.8'),
('D', 'xyz', 9.0),
('E', None, None))
ieq(expectation, cat4)
def test_cat_with_header():
table1 = (('bar', 'foo'),
('A', 1),
('B', 2))
table2 = (('bar', 'baz'),
('C', True),
('D', False))
actual = cat(table1, header=['A', 'foo', 'B', 'bar', 'C'])
expect = (('A', 'foo', 'B', 'bar', 'C'),
(None, 1, None, 'A', None),
(None, 2, None, 'B', None))
ieq(expect, actual)
ieq(expect, actual)
actual = cat(table1, table2, header=['A', 'foo', 'B', 'bar', 'C'])
expect = (('A', 'foo', 'B', 'bar', 'C'),
(None, 1, None, 'A', None),
(None, 2, None, 'B', None),
(None, None, None, 'C', None),
(None, None, None, 'D', None))
ieq(expect, actual)
ieq(expect, actual)
def test_cat_empty():
table1 = (('foo', 'bar'),
(1, 'A'),
(2, 'B'))
table2 = (('bar', 'baz'),)
expect = (('foo', 'bar', 'baz'),
(1, 'A', None),
(2, 'B', None))
actual = cat(table1, table2)
ieq(expect, actual)
def test_cat_dupfields():
table1 = (('foo', 'foo'),
(1, 'A'),
(2,),
(3, 'B', True))
    # these cases are pathological, included only to confirm the expected
    # behaviour; the user needs to rename fields to get something sensible
actual = cat(table1)
expect = (('foo', 'foo'),
(1, 1),
(2, 2),
(3, 3))
ieq(expect, actual)
table2 = (('foo', 'foo', 'bar'),
(4, 'C', True),
(5, 'D', False))
actual = cat(table1, table2)
expect = (('foo', 'foo', 'bar'),
(1, 1, None),
(2, 2, None),
(3, 3, None),
(4, 4, True),
(5, 5, False))
ieq(expect, actual)
def test_stack_dupfields():
table1 = (('foo', 'foo'),
(1, 'A'),
(2,),
(3, 'B', True))
actual = stack(table1)
expect = (('foo', 'foo'),
(1, 'A'),
(2, None),
(3, 'B'))
ieq(expect, actual)
table2 = (('foo', 'foo', 'bar'),
(4, 'C', True),
(5, 'D', False))
actual = stack(table1, table2)
expect = (('foo', 'foo'),
(1, 'A'),
(2, None),
(3, 'B'),
(4, 'C'),
(5, 'D'))
ieq(expect, actual)
def test_addfield():
table = (('foo', 'bar'),
('M', 12),
('F', 34),
('-', 56))
result = addfield(table, 'baz', 42)
expectation = (('foo', 'bar', 'baz'),
('M', 12, 42),
('F', 34, 42),
('-', 56, 42))
ieq(expectation, result)
ieq(expectation, result)
result = addfield(table, 'baz', lambda row: '%s,%s' % (row.foo, row.bar))
expectation = (('foo', 'bar', 'baz'),
('M', 12, 'M,12'),
('F', 34, 'F,34'),
('-', 56, '-,56'))
ieq(expectation, result)
ieq(expectation, result)
result = addfield(table, 'baz', lambda rec: rec['bar'] * 2)
expectation = (('foo', 'bar', 'baz'),
('M', 12, 24),
('F', 34, 68),
('-', 56, 112))
ieq(expectation, result)
ieq(expectation, result)
result = addfield(table, 'baz', expr('{bar} * 2'))
expectation = (('foo', 'bar', 'baz'),
('M', 12, 24),
('F', 34, 68),
('-', 56, 112))
ieq(expectation, result)
ieq(expectation, result)
result = addfield(table, 'baz', 42, index=0)
expectation = (('baz', 'foo', 'bar'),
(42, 'M', 12),
(42, 'F', 34),
(42, '-', 56))
ieq(expectation, result)
ieq(expectation, result)
def test_addfield_empty():
table = (('foo', 'bar'),)
expect = (('foo', 'bar', 'baz'),)
actual = addfield(table, 'baz', 42)
ieq(expect, actual)
ieq(expect, actual)
def test_addfield_coalesce():
table = (('foo', 'bar', 'baz', 'quux'),
('M', 12, 23, 44),
('F', None, 23, 11),
('-', None, None, 42))
result = addfield(table, 'spong', coalesce('bar', 'baz', 'quux'))
expect = (('foo', 'bar', 'baz', 'quux', 'spong'),
('M', 12, 23, 44, 12),
('F', None, 23, 11, 23),
('-', None, None, 42, 42))
ieq(expect, result)
ieq(expect, result)
result = addfield(table, 'spong', coalesce(1, 2, 3))
expect = (('foo', 'bar', 'baz', 'quux', 'spong'),
('M', 12, 23, 44, 12),
('F', None, 23, 11, 23),
('-', None, None, 42, 42))
ieq(expect, result)
ieq(expect, result)
def test_addfield_uneven_rows():
table = (('foo', 'bar'),
('M',),
('F', 34),
('-', 56, 'spong'))
result = addfield(table, 'baz', 42)
expectation = (('foo', 'bar', 'baz'),
('M', None, 42),
('F', 34, 42),
('-', 56, 42))
ieq(expectation, result)
ieq(expectation, result)
def test_addfield_dupfield():
table = (('foo', 'foo'),
('M', 12),
('F', 34),
('-', 56))
result = addfield(table, 'bar', 42)
expectation = (('foo', 'foo', 'bar'),
('M', 12, 42),
('F', 34, 42),
('-', 56, 42))
ieq(expectation, result)
ieq(expectation, result)
def test_rowslice():
table = (('foo', 'bar', 'baz'),
('A', 1, 2),
('B', '2', '3.4'),
(u'B', u'3', u'7.8', True),
('D', 'xyz', 9.0),
('E', None))
result = rowslice(table, 2)
expectation = (('foo', 'bar', 'baz'),
('A', 1, 2),
('B', '2', '3.4'))
ieq(expectation, result)
result = rowslice(table, 1, 2)
expectation = (('foo', 'bar', 'baz'),
('B', '2', '3.4'))
ieq(expectation, result)
result = rowslice(table, 1, 5, 2)
expectation = (('foo', 'bar', 'baz'),
('B', '2', '3.4'),
('D', 'xyz', 9.0))
ieq(expectation, result)
def test_rowslice_empty():
table = (('foo', 'bar'),)
expect = (('foo', 'bar'),)
actual = rowslice(table, 1, 2)
ieq(expect, actual)
def test_head():
table1 = (('foo', 'bar'),
('a', 1),
('b', 2),
('c', 5),
('d', 7),
('f', 42),
('f', 3),
('h', 90),
('k', 12),
('l', 77),
('q', 2))
table2 = head(table1, 4)
expect = (('foo', 'bar'),
('a', 1),
('b', 2),
('c', 5),
('d', 7))
ieq(expect, table2)
def test_tail():
table1 = (('foo', 'bar'),
('a', 1),
('b', 2),
('c', 5),
('d', 7),
('f', 42),
('f', 3),
('h', 90),
('k', 12),
('l', 77),
('q', 2))
table2 = tail(table1, 4)
expect = (('foo', 'bar'),
('h', 90),
('k', 12),
('l', 77),
('q', 2))
ieq(expect, table2)
def test_tail_empty():
table = (('foo', 'bar'),)
expect = (('foo', 'bar'),)
actual = tail(table)
ieq(expect, actual)
def test_skipcomments():
table1 = (('##aaa', 'bbb', 'ccc'),
('##mmm',),
('#foo', 'bar'),
('##nnn', 1),
('a', 1),
('b', 2))
table2 = skipcomments(table1, '##')
expect2 = (('#foo', 'bar'),
('a', 1),
('b', 2))
ieq(expect2, table2)
ieq(expect2, table2) # can iterate twice?
def test_skipcomments_empty():
table1 = (('##aaa', 'bbb', 'ccc'),
('##mmm',),
('#foo', 'bar'),
('##nnn', 1))
table2 = skipcomments(table1, '##')
expect2 = (('#foo', 'bar'),)
ieq(expect2, table2)
def test_annex():
table1 = (('foo', 'bar'),
('A', 9),
('C', 2),
('F', 1))
table2 = (('foo', 'baz'),
('B', 3),
('D', 10))
expect = (('foo', 'bar', 'foo', 'baz'),
('A', 9, 'B', 3),
('C', 2, 'D', 10),
('F', 1, None, None))
actual = annex(table1, table2)
ieq(expect, actual)
ieq(expect, actual)
expect21 = (('foo', 'baz', 'foo', 'bar'),
('B', 3, 'A', 9),
('D', 10, 'C', 2),
(None, None, 'F', 1))
actual21 = annex(table2, table1)
ieq(expect21, actual21)
ieq(expect21, actual21)
def test_annex_uneven_rows():
table1 = (('foo', 'bar'),
('A', 9, True),
('C', 2),
('F',))
table2 = (('foo', 'baz'),
('B', 3),
('D', 10))
expect = (('foo', 'bar', 'foo', 'baz'),
('A', 9, 'B', 3),
('C', 2, 'D', 10),
('F', None, None, None))
actual = annex(table1, table2)
ieq(expect, actual)
ieq(expect, actual)
def test_addrownumbers():
table1 = (('foo', 'bar'),
('A', 9),
('C', 2),
('F', 1))
expect = (('row', 'foo', 'bar'),
(1, 'A', 9),
(2, 'C', 2),
(3, 'F', 1))
actual = addrownumbers(table1)
ieq(expect, actual)
ieq(expect, actual)
def test_addcolumn():
table1 = (('foo', 'bar'),
('A', 1),
('B', 2))
col = [True, False]
expect2 = (('foo', 'bar', 'baz'),
('A', 1, True),
('B', 2, False))
table2 = addcolumn(table1, 'baz', col)
ieq(expect2, table2)
ieq(expect2, table2)
# test short column
table3 = (('foo', 'bar'),
('A', 1),
('B', 2),
('C', 2))
expect4 = (('foo', 'bar', 'baz'),
('A', 1, True),
('B', 2, False),
('C', 2, None))
table4 = addcolumn(table3, 'baz', col)
ieq(expect4, table4)
# test short table
col = [True, False, False]
expect5 = (('foo', 'bar', 'baz'),
('A', 1, True),
('B', 2, False),
(None, None, False))
table5 = addcolumn(table1, 'baz', col)
ieq(expect5, table5)
def test_empty_addcolumn():
table1 = empty()
table2 = addcolumn(table1, 'foo', ['A', 'B'])
table3 = addcolumn(table2, 'bar', [1, 2])
expect = (('foo', 'bar'),
('A', 1),
('B', 2))
ieq(expect, table3)
ieq(expect, table3)
def test_addfieldusingcontext():
table1 = (('foo', 'bar'),
('A', 1),
('B', 4),
('C', 5),
('D', 9))
expect = (('foo', 'bar', 'baz', 'quux'),
('A', 1, None, 3),
('B', 4, 3, 1),
('C', 5, 1, 4),
('D', 9, 4, None))
def upstream(prv, cur, nxt):
if prv is None:
return None
else:
return cur.bar - prv.bar
def downstream(prv, cur, nxt):
if nxt is None:
return None
else:
return nxt.bar - cur.bar
table2 = addfieldusingcontext(table1, 'baz', upstream)
table3 = addfieldusingcontext(table2, 'quux', downstream)
ieq(expect, table3)
ieq(expect, table3)
def test_addfieldusingcontext_stateful():
table1 = (('foo', 'bar'),
('A', 1),
('B', 4),
('C', 5),
('D', 9))
expect = (('foo', 'bar', 'baz', 'quux'),
('A', 1, 1, 5),
('B', 4, 5, 10),
('C', 5, 10, 19),
('D', 9, 19, 19))
def upstream(prv, cur, nxt):
if prv is None:
return cur.bar
else:
return cur.bar + prv.baz
def downstream(prv, cur, nxt):
if nxt is None:
return prv.quux
elif prv is None:
return nxt.bar + cur.bar
else:
return nxt.bar + prv.quux
table2 = addfieldusingcontext(table1, 'baz', upstream)
table3 = addfieldusingcontext(table2, 'quux', downstream)
ieq(expect, table3)
ieq(expect, table3)
def test_movefield():
table1 = (('foo', 'bar', 'baz'),
(1, 'A', True),
(2, 'B', False))
expect = (('bar', 'foo', 'baz'),
('A', 1, True),
('B', 2, False))
actual = movefield(table1, 'bar', 0)
ieq(expect, actual)
ieq(expect, actual)
actual = movefield(table1, 'foo', 1)
ieq(expect, actual)
ieq(expect, actual)
|
thatneat/petl
|
petl/test/transform/test_basics.py
|
Python
|
mit
| 17,148
|
#! /usr/bin/python2.7
import os
import re
import sys
# some static resources
vowels = set(('a','e','i','o','u','A','E','I','O','U'))
consonant_re = re.compile(r'([bcdfghjklmnpqrstvwxyzBCDFGHJKLMNPQRSTVWXYZ]+)([a-zA-Z]*)(.*)?')
# input
original = sys.stdin.read()
# output
piglatin = []
# loop over the words and change them to pig latin
for word in original.split():
# there are different rules if it starts with a vowel
if word[0] in vowels:
piglatin.append(word+'way')
else:
piglatin.append(consonant_re.sub(r'\2\1ay\3', word))
# output the translated text
sys.stdout.write(' '.join(piglatin))
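# Example run (the regex moves leading consonants to the end and appends
# "ay"; vowel-initial words just get "way"):
#
#     $ echo "hello strong apple" | ./piglatin.py
#     ellohay ongstray appleway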
|
mouckatron/Martyr2MegaProjectList
|
text/piglatin.py
|
Python
|
mit
| 638
|
# ----------------------------------------------------------------------------------
# Electrum plugin for the Digital Bitbox hardware wallet by Shift Devices AG
# digitalbitbox.com
#
try:
import electrum_arg as electrum
from electrum_arg.bitcoin import TYPE_ADDRESS, var_int, msg_magic, Hash, verify_message, public_key_to_p2pkh, EncodeAES, DecodeAES
from electrum_arg.i18n import _
from electrum_arg.keystore import Hardware_KeyStore
from ..hw_wallet import HW_PluginBase
from electrum_arg.util import print_error
import time
import hid
import json
import math
import hashlib
from ecdsa.ecdsa import generator_secp256k1
from ecdsa.util import sigencode_der
DIGIBOX = True
except ImportError as e:
DIGIBOX = False
# ----------------------------------------------------------------------------------
# USB HID interface
#
class DigitalBitbox_Client():
def __init__(self, hidDevice):
self.dbb_hid = hidDevice
self.opened = True
self.password = None
self.isInitialized = False
self.setupRunning = False
self.hidBufSize = 4096
def close(self):
if self.opened:
try:
self.dbb_hid.close()
except:
pass
self.opened = False
def timeout(self, cutoff):
pass
def label(self):
return " "
def is_pairable(self):
return True
def is_initialized(self):
return self.dbb_has_password()
def is_paired(self):
return self.password is not None
def get_xpub(self, bip32_path):
if self.check_device_dialog():
msg = '{"xpub":"' + bip32_path + '"}'
reply = self.hid_send_encrypt(msg)
return reply['xpub']
return None
def dbb_has_password(self):
reply = self.hid_send_plain('{"ping":""}')
if 'ping' not in reply:
raise Exception('Device communication error. Please unplug and replug your Digital Bitbox.')
if reply['ping'] == 'password':
return True
return False
def stretch_key(self, key):
import pbkdf2, hmac
return pbkdf2.PBKDF2(key, 'Digital Bitbox', iterations = 20480, macmodule = hmac, digestmodule = hashlib.sha512).read(64).encode('hex')
def backup_password_dialog(self):
msg = _("Enter the password used when the backup was created:")
while True:
password = self.handler.get_passphrase(msg, False)
if password is None:
return None
if len(password) < 4:
msg = _("Password must have at least 4 characters.\r\n\r\nEnter password:")
elif len(password) > 64:
msg = _("Password must have less than 64 characters.\r\n\r\nEnter password:")
else:
return str(password)
def password_dialog(self, msg):
while True:
password = self.handler.get_passphrase(msg, False)
if password is None:
return False
if len(password) < 4:
msg = _("Password must have at least 4 characters.\r\n\r\nEnter password:")
elif len(password) > 64:
msg = _("Password must have less than 64 characters.\r\n\r\nEnter password:")
else:
self.password = str(password)
return True
def check_device_dialog(self):
# Set password if fresh device
if self.password is None and not self.dbb_has_password():
if not self.setupRunning:
return False # A fresh device cannot connect to an existing wallet
msg = _("An uninitialized Digital Bitbox is detected. " \
"Enter a new password below.\r\n\r\n REMEMBER THE PASSWORD!\r\n\r\n" \
"You cannot access your coins or a backup without the password.\r\n" \
"A backup is saved automatically when generating a new wallet.")
if self.password_dialog(msg):
reply = self.hid_send_plain('{"password":"' + self.password + '"}')
else:
return False
# Get password from user if not yet set
msg = _("Enter your Digital Bitbox password:")
while self.password is None:
if not self.password_dialog(msg):
return False
reply = self.hid_send_encrypt('{"led":"blink"}')
if 'error' in reply:
self.password = None
if reply['error']['code'] == 109:
msg = _("Incorrect password entered.\r\n\r\n" \
+ reply['error']['message'] + "\r\n\r\n" \
"Enter your Digital Bitbox password:")
else:
# Should never occur
msg = _("Unexpected error occurred.\r\n\r\n" \
+ reply['error']['message'] + "\r\n\r\n" \
"Enter your Digital Bitbox password:")
# Initialize device if not yet initialized
if not self.setupRunning:
self.isInitialized = True # Wallet exists. Electrum code later checks if the device matches the wallet
elif not self.isInitialized:
reply = self.hid_send_encrypt('{"device":"info"}')
if reply['device']['id'] <> "":
self.recover_or_erase_dialog() # Already seeded
else:
self.seed_device_dialog() # Seed if not initialized
return self.isInitialized
def recover_or_erase_dialog(self):
msg = _("The Digital Bitbox is already seeded. Choose an option:\n")
choices = [
(_("Create a wallet using the current seed")),
(_("Load a wallet from the micro SD card (the current seed is overwritten)")),
(_("Erase the Digital Bitbox"))
]
try:
reply = self.handler.win.query_choice(msg, choices)
except Exception:
return # Back button pushed
if reply == 2:
self.dbb_erase()
elif reply == 1:
if not self.dbb_load_backup():
return
else:
pass # Use existing seed
self.isInitialized = True
def seed_device_dialog(self):
msg = _("Choose how to initialize your Digital Bitbox:\n")
choices = [
(_("Generate a new random wallet")),
(_("Load a wallet from the micro SD card"))
]
try:
reply = self.handler.win.query_choice(msg, choices)
except Exception:
return # Back button pushed
if reply == 0:
self.dbb_generate_wallet()
else:
if not self.dbb_load_backup(show_msg=False):
return
self.isInitialized = True
def dbb_generate_wallet(self):
key = self.stretch_key(self.password)
filename = "Electrum-" + time.strftime("%Y-%m-%d-%H-%M-%S") + ".pdf"
msg = '{"seed":{"source": "create", "key": "%s", "filename": "%s", "entropy": "%s"}}' % (key, filename, 'Digital Bitbox Electrum Plugin')
reply = self.hid_send_encrypt(msg)
if 'error' in reply:
raise Exception(reply['error']['message'])
def dbb_erase(self):
self.handler.show_message(_("Are you sure you want to erase the Digital Bitbox?\r\n\r\n" \
"To continue, touch the Digital Bitbox's light for 3 seconds.\r\n\r\n" \
"To cancel, briefly touch the light or wait for the timeout."))
hid_reply = self.hid_send_encrypt('{"reset":"__ERASE__"}')
self.handler.clear_dialog()
if 'error' in hid_reply:
raise Exception(hid_reply['error']['message'])
else:
self.password = None
raise Exception('Device erased')
def dbb_load_backup(self, show_msg=True):
backups = self.hid_send_encrypt('{"backup":"list"}')
if 'error' in backups:
raise Exception(backups['error']['message'])
try:
f = self.handler.win.query_choice(_("Choose a backup file:"), backups['backup'])
except Exception:
return False # Back button pushed
key = self.backup_password_dialog()
if key is None:
raise Exception('Canceled by user')
key = self.stretch_key(key)
if show_msg:
self.handler.show_message(_("Loading backup...\r\n\r\n" \
"To continue, touch the Digital Bitbox's light for 3 seconds.\r\n\r\n" \
"To cancel, briefly touch the light or wait for the timeout."))
msg = '{"seed":{"source": "backup", "key": "%s", "filename": "%s"}}' % (key, backups['backup'][f])
hid_reply = self.hid_send_encrypt(msg)
self.handler.clear_dialog()
if 'error' in hid_reply:
raise Exception(hid_reply['error']['message'])
return True
def hid_send_plain(self, msg):
reply = ""
try:
self.dbb_hid.write('\0' + bytearray(msg) + '\0' * (self.hidBufSize - len(msg)))
r = []
while len(r) < self.hidBufSize:
r = r + self.dbb_hid.read(self.hidBufSize)
r = str(bytearray(r)).rstrip(' \t\r\n\0')
r = r.replace("\0", '')
reply = json.loads(r)
except Exception as e:
print_error('Exception caught ' + str(e))
return reply
def hid_send_encrypt(self, msg):
reply = ""
try:
secret = Hash(self.password)
msg = EncodeAES(secret, msg)
reply = self.hid_send_plain(msg)
if 'ciphertext' in reply:
reply = DecodeAES(secret, ''.join(reply["ciphertext"]))
reply = json.loads(reply)
if 'error' in reply:
self.password = None
except Exception as e:
print_error('Exception caught ' + str(e))
return reply
# ----------------------------------------------------------------------------------
#
#
class DigitalBitbox_KeyStore(Hardware_KeyStore):
hw_type = 'digitalbitbox'
device = 'DigitalBitbox'
def __init__(self, d):
Hardware_KeyStore.__init__(self, d)
self.force_watching_only = False
self.maxInputs = 14 # maximum inputs per single sign command
def get_derivation(self):
return str(self.derivation)
def give_error(self, message, clear_client = False):
if clear_client:
self.client = None
raise Exception(message)
def decrypt_message(self, pubkey, message, password):
raise RuntimeError(_('Encryption and decryption are currently not supported for %s') % self.device)
def sign_message(self, sequence, message, password):
sig = None
try:
inputPath = self.get_derivation() + "/%d/%d" % sequence
inputHash = Hash(msg_magic(message)).encode('hex')
hasharray = []
hasharray.append({'hash': inputHash, 'keypath': inputPath})
hasharray = json.dumps(hasharray)
msg = '{"sign":{"meta":"sign message", "data":%s}}' % (hasharray)
dbb_client = self.plugin.get_client(self)
if not dbb_client.is_paired():
raise Exception("Could not sign message.")
reply = dbb_client.hid_send_encrypt(msg)
self.handler.show_message(_("Signing message ...\r\n\r\n" \
"To continue, touch the Digital Bitbox's blinking light for 3 seconds.\r\n\r\n" \
"To cancel, briefly touch the blinking light or wait for the timeout."))
reply = dbb_client.hid_send_encrypt(msg) # Send twice, first returns an echo for smart verification (not implemented)
self.handler.clear_dialog()
if 'error' in reply:
raise Exception(reply['error']['message'])
if 'sign' not in reply:
raise Exception("Could not sign message.")
            for i in range(4):
                # Try each recovery id; the header byte is 27 + recid + 4 (compressed key)
                sig = chr(27 + i + 4) + reply['sign'][0]['sig'].decode('hex')
try:
addr = public_key_to_p2pkh(reply['sign'][0]['pubkey'].decode('hex'))
if verify_message(addr, sig, message):
break
except Exception:
continue
else:
raise Exception("Could not sign message")
except BaseException as e:
self.give_error(e)
return sig
def sign_transaction(self, tx, password):
if tx.is_complete():
return
try:
p2shTransaction = False
derivations = self.get_tx_derivations(tx)
hasharray = []
pubkeyarray = []
# Build hasharray from inputs
for i, txin in enumerate(tx.inputs()):
if txin['type'] == 'coinbase':
self.give_error("Coinbase not supported") # should never happen
if txin['type'] in ['p2sh']:
p2shTransaction = True
for x_pubkey in txin['x_pubkeys']:
if x_pubkey in derivations:
index = derivations.get(x_pubkey)
inputPath = "%s/%d/%d" % (self.get_derivation(), index[0], index[1])
inputHash = Hash(tx.serialize_preimage(i).decode('hex')).encode('hex')
hasharray_i = {'hash': inputHash, 'keypath': inputPath}
hasharray.append(hasharray_i)
break
else:
self.give_error("No matching x_key for sign_transaction") # should never happen
# Sanity check
if p2shTransaction:
for txinput in tx.inputs():
if txinput['type'] != 'p2sh':
self.give_error("P2SH / regular input mixed in same transaction not supported") # should never happen
# Build pubkeyarray from outputs (unused because echo for smart verification not implemented)
if not p2shTransaction:
for _type, address, amount in tx.outputs():
assert _type == TYPE_ADDRESS
info = tx.output_info.get(address)
if info is not None:
index, xpubs, m = info
changePath = self.get_derivation() + "/%d/%d" % index
changePubkey = self.derive_pubkey(index[0], index[1])
pubkeyarray_i = {'pubkey': changePubkey, 'keypath': changePath}
pubkeyarray.append(pubkeyarray_i)
# Build sign command
dbb_signatures = []
steps = math.ceil(1.0 * len(hasharray) / self.maxInputs)
for step in range(int(steps)):
hashes = hasharray[step * self.maxInputs : (step + 1) * self.maxInputs]
msg = '{"sign": {"meta":"%s", "data":%s, "checkpub":%s} }' % \
(Hash(tx.serialize()).encode('hex'), json.dumps(hashes), json.dumps(pubkeyarray))
dbb_client = self.plugin.get_client(self)
if not dbb_client.is_paired():
raise Exception("Could not sign transaction.")
reply = dbb_client.hid_send_encrypt(msg)
if 'error' in reply:
raise Exception(reply['error']['message'])
if 'echo' not in reply:
raise Exception("Could not sign transaction.")
if steps > 1:
self.handler.show_message(_("Signing large transaction. Please be patient ...\r\n\r\n" \
"To continue, touch the Digital Bitbox's blinking light for 3 seconds. " \
"(Touch " + str(step + 1) + " of " + str(int(steps)) + ")\r\n\r\n" \
"To cancel, briefly touch the blinking light or wait for the timeout.\r\n\r\n"))
else:
self.handler.show_message(_("Signing transaction ...\r\n\r\n" \
"To continue, touch the Digital Bitbox's blinking light for 3 seconds.\r\n\r\n" \
"To cancel, briefly touch the blinking light or wait for the timeout."))
reply = dbb_client.hid_send_encrypt(msg) # Send twice, first returns an echo for smart verification (not implemented)
self.handler.clear_dialog()
if 'error' in reply:
raise Exception(reply['error']['message'])
if 'sign' not in reply:
raise Exception("Could not sign transaction.")
dbb_signatures.extend(reply['sign'])
# Fill signatures
            if len(dbb_signatures) != len(tx.inputs()):
raise Exception("Incorrect number of transactions signed.") # Should never occur
for i, txin in enumerate(tx.inputs()):
num = txin['num_sig']
for pubkey in txin['pubkeys']:
signatures = filter(None, txin['signatures'])
if len(signatures) == num:
break # txin is complete
ii = txin['pubkeys'].index(pubkey)
signed = dbb_signatures[i]
if signed['pubkey'] != pubkey:
continue
sig_r = int(signed['sig'][:64], 16)
sig_s = int(signed['sig'][64:], 16)
sig = sigencode_der(sig_r, sig_s, generator_secp256k1.order())
txin['signatures'][ii] = sig.encode('hex')
tx._inputs[i] = txin
except BaseException as e:
self.give_error(e, True)
else:
print_error("Transaction is_complete", tx.is_complete())
tx.raw = tx.serialize()
class DigitalBitboxPlugin(HW_PluginBase):
libraries_available = DIGIBOX
keystore_class = DigitalBitbox_KeyStore
client = None
DEVICE_IDS = [
(0x03eb, 0x2402) # Digital Bitbox
]
def __init__(self, parent, config, name):
HW_PluginBase.__init__(self, parent, config, name)
if self.libraries_available:
self.device_manager().register_devices(self.DEVICE_IDS)
def get_dbb_device(self, device):
dev = hid.device()
dev.open_path(device.path)
return dev
def create_client(self, device, handler):
self.handler = handler
client = self.get_dbb_device(device)
        if client is not None:
client = DigitalBitbox_Client(client)
return client
def setup_device(self, device_info, wizard):
devmgr = self.device_manager()
device_id = device_info.device.id_
client = devmgr.client_by_id(device_id)
client.handler = self.create_handler(wizard)
client.setupRunning = True
client.get_xpub("m/44'/0'")
def get_xpub(self, device_id, derivation, wizard):
devmgr = self.device_manager()
client = devmgr.client_by_id(device_id)
client.handler = self.create_handler(wizard)
client.check_device_dialog()
xpub = client.get_xpub(derivation)
return xpub
def get_client(self, keystore, force_pair=True):
devmgr = self.device_manager()
handler = keystore.handler
with devmgr.hid_lock:
client = devmgr.client_for_keystore(self, handler, keystore, force_pair)
        if client is not None:
client.check_device_dialog()
return client
|
argentumproject/electrum-arg
|
plugins/digitalbitbox/digitalbitbox.py
|
Python
|
mit
| 20,399
|
# coding=utf-8
# Distributed under the MIT software license, see the accompanying
# file LICENSE or http://www.opensource.org/licenses/mit-license.php.
import argparse
import os
import simplejson as json
import grpc
from google.protobuf.json_format import MessageToJson
from qrl.core import config
from qrl.core.AddressState import AddressState
from qrl.crypto.xmss import XMSS
from qrl.core.txs.Transaction import Transaction
from qrl.core.txs.TransferTransaction import TransferTransaction
from pyqrllib.pyqrllib import hstr2bin, bin2hstr
from qrl.generated import qrl_pb2_grpc, qrl_pb2, qrlmining_pb2, qrlmining_pb2_grpc
from flask import Flask, Response, request
from jsonrpc.backend.flask import api
app = Flask(__name__)
def read_slaves(slaves_filename):
with open(slaves_filename, 'r') as f:
slave_data = json.load(f)
slave_data[0] = bytes(hstr2bin(slave_data[0]))
return slave_data
def get_addr_state(addr: bytes) -> AddressState:
stub = get_public_stub()
response = stub.GetAddressState(request=qrl_pb2.GetAddressStateReq(address=addr))
return AddressState(response.state)
def set_unused_ots_key(xmss, addr_state, start=0):
for i in range(start, 2 ** xmss.height):
if not addr_state.ots_key_reuse(i):
xmss.set_ots_index(i)
return True
return False
def valid_payment_permission(public_stub, master_address_state, payment_xmss, json_slave_txn):
access_type = master_address_state.get_slave_permission(payment_xmss.pk)
if access_type == -1:
tx = Transaction.from_json(json_slave_txn)
public_stub.PushTransaction(request=qrl_pb2.PushTransactionReq(transaction_signed=tx.pbdata))
return None
if access_type == 0:
return True
return False
def get_unused_payment_xmss(public_stub):
global payment_slaves
global payment_xmss
master_address = payment_slaves[0]
master_address_state = get_addr_state(master_address)
if payment_xmss:
addr_state = get_addr_state(payment_xmss.address)
if set_unused_ots_key(payment_xmss, addr_state, payment_xmss.ots_index):
if valid_payment_permission(public_stub, master_address_state, payment_xmss, payment_slaves[2]):
return payment_xmss
else:
payment_xmss = None
if not payment_xmss:
unused_ots_found = False
for slave_seed in payment_slaves[1]:
xmss = XMSS.from_extended_seed(slave_seed)
addr_state = get_addr_state(xmss.address)
if set_unused_ots_key(xmss, addr_state): # Unused ots_key_found
payment_xmss = xmss
unused_ots_found = True
break
        if not unused_ots_found:  # No unused OTS key found
return None
if not valid_payment_permission(public_stub, master_address_state, payment_xmss, payment_slaves[2]):
return None
return payment_xmss
@app.route('/api/<api_method_name>')
def api_proxy(api_method_name):
"""
Proxy JSON RPC requests to the gRPC server as well as converts back gRPC response
to JSON.
:param api_method_name:
:return:
"""
stub = qrl_pb2_grpc.PublicAPIStub(grpc.insecure_channel('{}:{}'.format(config.user.public_api_host,
config.user.public_api_port)))
public_api = qrl_pb2.DESCRIPTOR.services_by_name['PublicAPI']
api_method = public_api.FindMethodByName(api_method_name)
api_request = getattr(qrl_pb2, api_method.input_type.name)()
for arg in request.args:
if arg not in api_method.input_type.fields_by_name:
            raise Exception('Invalid args %s' % arg)
data_type = type(getattr(api_request, arg))
if data_type == bool and request.args[arg].lower() == 'false':
continue
value = data_type(request.args.get(arg, type=data_type))
setattr(api_request, arg, value)
resp = getattr(stub, api_method_name)(api_request, timeout=10)
return Response(response=MessageToJson(resp, sort_keys=True), status=200, mimetype='application/json')
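# Example (hypothetical host/port; GetStats is assumed to be one of the
# PublicAPI methods exposed via qrl_pb2 in this build):
#
#     curl 'http://127.0.0.1:18090/api/GetStats'
#
# Query-string arguments are mapped onto the fields of the request message.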
def get_mining_stub():
global mining_stub
return mining_stub
def get_public_stub():
global public_stub
return public_stub
@api.dispatcher.add_method
def getlastblockheader(height=0):
stub = get_mining_stub()
request = qrlmining_pb2.GetLastBlockHeaderReq(height=height)
grpc_response = stub.GetLastBlockHeader(request=request, timeout=10)
block_header = {
'difficulty': grpc_response.difficulty,
'height': grpc_response.height,
'timestamp': grpc_response.timestamp,
'reward': grpc_response.reward,
'hash': grpc_response.hash,
'depth': grpc_response.depth
}
resp = {
"block_header": block_header,
"status": "OK"
}
return resp
@api.dispatcher.add_method
def getblockheaderbyheight(height):
return getlastblockheader(height)
@api.dispatcher.add_method
def getblocktemplate(reserve_size, wallet_address):
stub = get_mining_stub()
request = qrlmining_pb2.GetBlockToMineReq(wallet_address=wallet_address.encode())
grpc_response = stub.GetBlockToMine(request=request, timeout=10)
resp = {
'blocktemplate_blob': grpc_response.blocktemplate_blob,
'difficulty': grpc_response.difficulty,
'height': grpc_response.height,
'reserved_offset': grpc_response.reserved_offset,
'seed_hash': grpc_response.seed_hash,
'status': 'OK'
}
return resp
@api.dispatcher.add_method
def submitblock(blob):
stub = get_mining_stub()
request = qrlmining_pb2.SubmitMinedBlockReq(blob=bytes(hstr2bin(blob)))
response = stub.SubmitMinedBlock(request=request, timeout=10)
    if response.error:
        raise Exception  # the mining pool expects an exception when block submission fails
return MessageToJson(response, sort_keys=True)
@api.dispatcher.add_method
def getblockminingcompatible(height):
stub = get_mining_stub()
request = qrlmining_pb2.GetBlockMiningCompatibleReq(height=height)
response = stub.GetBlockMiningCompatible(request=request, timeout=10)
return MessageToJson(response, sort_keys=True)
@api.dispatcher.add_method
def transfer(destinations, fee, mixin, unlock_time):
if len(destinations) > config.dev.transaction_multi_output_limit:
raise Exception('Payment Failed: Amount exceeds the allowed limit')
addrs_to = []
amounts = []
for tx in destinations:
addrs_to.append(bytes(hstr2bin(tx['address'][1:]))) # Skipping 'Q'
amounts.append(tx['amount'])
stub = get_public_stub()
xmss = get_unused_payment_xmss(stub)
if not xmss:
raise Exception('Payment Failed: No Unused Payment XMSS found')
tx = TransferTransaction.create(addrs_to=addrs_to,
amounts=amounts,
message_data=None,
fee=fee,
xmss_pk=xmss.pk,
master_addr=payment_slaves[0])
tx.sign(xmss)
    response = stub.PushTransaction(request=qrl_pb2.PushTransactionReq(transaction_signed=tx.pbdata))
    if response.error_code != 3:  # 3 appears to be the success ('submitted') code
        raise Exception('Transaction Submission Failed, Response Code: %s' % response.error_code)
response = {'tx_hash': bin2hstr(tx.txhash)}
return response
app.add_url_rule('/json_rpc', 'api', api.as_view(), methods=['POST'])
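# The JSON-RPC endpoint can be exercised the same way (hypothetical host/port):
#
#     curl -X POST 'http://127.0.0.1:18090/json_rpc' \
#          -H 'Content-Type: application/json' \
#          -d '{"jsonrpc": "2.0", "id": 0, "method": "getlastblockheader"}'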
def parse_arguments():
parser = argparse.ArgumentParser(description='QRL node')
parser.add_argument('--qrldir', '-d', dest='qrl_dir', default=config.user.qrl_dir,
help="Use a different directory for node data/configuration")
parser.add_argument('--network-type', dest='network_type', choices=['mainnet', 'testnet'],
default='mainnet', required=False, help="Runs QRL Testnet Node")
return parser.parse_args()
def main():
args = parse_arguments()
qrl_dir_post_fix = ''
copy_files = []
if args.network_type == 'testnet':
qrl_dir_post_fix = '-testnet'
package_directory = os.path.dirname(os.path.abspath(__file__))
copy_files.append(os.path.join(package_directory, 'network/testnet/genesis.yml'))
copy_files.append(os.path.join(package_directory, 'network/testnet/config.yml'))
config.user.qrl_dir = os.path.expanduser(os.path.normpath(args.qrl_dir) + qrl_dir_post_fix)
config.create_path(config.user.qrl_dir, copy_files)
config.user.load_yaml(config.user.config_path)
global payment_slaves, payment_xmss
global mining_stub, public_stub
mining_stub = qrlmining_pb2_grpc.MiningAPIStub(grpc.insecure_channel('{0}:{1}'.format(config.user.mining_api_host,
config.user.mining_api_port)))
public_stub = qrl_pb2_grpc.PublicAPIStub(grpc.insecure_channel('{0}:{1}'.format(config.user.public_api_host,
config.user.public_api_port)))
payment_xmss = None
payment_slaves = read_slaves(config.user.mining_pool_payment_wallet_path)
app.run(host=config.user.grpc_proxy_host, port=config.user.grpc_proxy_port)
if __name__ == '__main__':
main()
|
cyyber/QRL
|
src/qrl/grpcProxy.py
|
Python
|
mit
| 9,400
|