| code (string, lengths 3-1.05M) | repo_name (string, lengths 5-104) | path (string, lengths 4-251) | language (string, 1 class) | license (string, 15 classes) | size (int64, 3-1.05M) |
|---|---|---|---|---|---|
# -*- coding: utf-8 -*-
# Copyright (c) 2006-2010, Jesse Liesch
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the author nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL JESSE LIESCH BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import adodb
import sys
import os, os.path
import threading
try:
# Server
from pysqlite2 import dbapi2 as sqlite
except ImportError:
import sqlite3 as sqlite
class Db:
def __init__(self, name, host="", user="", password=""):
self.name = name
self.host = host
self.user = user
self.password = password
self.conns = {}
self.connParams = {}
self.transactionDepth = 0
self.lastQuery = False
def close(self):
self.getConn().close()
def getConnParam(self):
id = threading.currentThread().getName()
# Make sure we have a connection
if not id in self.conns:
self.getConn()
return self.connParams[id]
def getConn(self):
def dict_factory(cursor, row):
d = {}
for idx, col in enumerate(cursor.description):
d[col[0]] = row[idx]
return d
def boolAdapter(b):
if b:
return 'True'
else:
return 'False'
# Return connection for current thread
id = threading.currentThread().getName()
if not id in self.conns:
sqlite.register_adapter(bool, boolAdapter)
conn = sqlite.connect(self.name, timeout=30, isolation_level=None)
conn.row_factory = dict_factory
self.connParams[id] = "?"
self.conns[id] = conn
return self.conns[id]
def getMysqlConn(self):
# Return connection for current thread
id = threading.currentThread().getName()
if not id in self.conns:
conn = adodb.NewADOConnection('mysql')
conn.Connect(self.host, self.user, self.password, self.name)
# Account for differently named functions
if not "execute" in dir(conn) and "Execute" in dir(conn):
conn.execute = conn.Execute
self.connParams[id] = "%s"
self.conns[id] = conn
return self.conns[id]
def checkTable(self, name, fields, index=[], unique=[]):
# First create empty table
try:
createString = "create table if not exists " + name + "("
first = True
for f in fields:
if first:
first = False
else:
createString += ", "
createString += f["name"] + " " + f["type"]
createString += ")"
cursor = self.getConn().execute(createString)
except Exception, e:
print e
pass
meta = self.getConn().execute('select * from ' + name).description
# Check for new fields
for i in range(len(fields)):
f = fields[i]
found = False
for m in meta:
if f["name"] == m[0]:
found = True
break
if not found:
# Create field
alterString = "alter table " + name + " add column " + f["name"] + " " + f["type"]
self.getConn().execute(alterString)
# Build index
for i in index:
s = "create index if not exists " + i["name"] + " on " + name + "("
first = True
for col in i["cols"]:
if first:
first = False
else:
s += ", "
s += col
s += ")"
self.getConn().execute(s)
# Build unique index
for i in unique:
s = "create unique index if not exists " + i["name"] + " on " + name + "("
first = True
for col in i["cols"]:
if first:
first = False
else:
s += ", "
s += col
s += ")"
self.getConn().execute(s)
def query(self, queryStr, tuple=False, reRaiseException=False):
        reRaiseException = True  # debugging override: errors always re-raise; the parameter above is currently ignored
try:
if tuple:
self.lastQuery = "%s %s" % (queryStr, tuple)
return self.getConn().execute(queryStr, tuple)
else:
self.lastQuery = queryStr
return self.getConn().execute(queryStr)
except Exception, e:
if reRaiseException:
raise
# Show Error Here
            # Return an empty result set
return self.getConn().execute("select 0 where 1 = 0")
def delete(self, table, where=False):
deleteStr = "delete from " + table
deleteTuple = []
if where:
deleteStr += " where "
first = True
for key in where:
if first:
first = False
else:
deleteStr += " and "
deleteStr += key + "=" + self.getConnParam()
deleteTuple.append(where[key])
self.query(deleteStr, deleteTuple)
def select(self, table, orderBy=False, where=False, limit=False, what=False):
selectStr = "select "
if what:
selectStr += what
else:
selectStr += "*"
selectStr += " from " + table
selectTuple = []
# TODO: make sure key is not bad
if where:
selectStr += " where "
first = True
for key in where.keys():
if first:
first = False
else:
selectStr += " and "
if where[key] == "is null" or where[key] == "is not null":
selectStr += key + " " + where[key]
else:
selectStr += key
if key.find("=") == -1 and key.find(">") == -1 and key.find("<") == -1:
selectStr += "=" + self.getConnParam()
else:
selectStr += "" + self.getConnParam()
selectTuple.append(where[key])
if orderBy:
selectStr += " order by " + orderBy
if limit:
selectStr += " limit " + str(limit)
return self.query(selectStr, selectTuple)
def insert(self, table, data):
insertStr = "insert into " + table + " ("
insertTuple = []
first = True
for key in data.keys():
if first:
first = False
else:
insertStr += ", "
insertStr += key
insertStr += ") values ("
first = True
for i in data.keys():
if first:
first = False
insertStr += self.getConnParam()
else:
insertStr += ", " + self.getConnParam()
insertTuple.append(data[i])
insertStr += ")"
return self.query(insertStr, insertTuple)
def update(self, table, data, where):
updateStr = "update " + table + " set "
updateTuple = []
# TODO: make sure key is not bad
first = True
for key in data.keys():
if first:
first = False
else:
updateStr += ", "
updateStr += key + "=" + self.getConnParam()
updateTuple.append(data[key])
updateStr += " where "
first = True
for key in where.keys():
if first:
first = False
else:
updateStr += " and "
if where[key] == "is null" or where[key] == "is not null":
updateStr += key + " " + where[key]
else:
updateStr += key + "=" + self.getConnParam()
updateTuple.append(where[key])
return self.query(updateStr, updateTuple)
# Return true on insert, false on update
def insertOrUpdate(self, table, data, on={}):
if not on:
            # If on is empty, insert only if data is not found
result = self.select(table, where=data)
if not result.fetchone():
self.insert(table, data)
return True
else:
# Select by on. If found and different, update.
# If found and the same do nothing
# If not found insert.
result = self.select(table, where=on)
# TODO: handle update
if not result.fetchone():
self.insert(table, data)
return True
else:
self.update(table, data, on)
return False
def inTransaction(self):
return self.transactionDepth > 0
def beginTransaction(self):
self.transactionDepth += 1
if self.transactionDepth == 1:
self.getConn().execute("begin immediate transaction")
#if self.transactionDepth == 1:
# print "DB begin transaction"
def rollbackTransaction(self):
self.transactionDepth = 0
try:
self.getConn().execute("rollback transaction")
except Exception, e:
print e
#print "DB rollback transaction"
def commitTransaction(self):
if self.transactionDepth == 1:
self.transactionDepth = 0
self.getConn().execute("commit transaction")
if self.transactionDepth >= 1:
self.transactionDepth -= 1
#if self.transactionDepth == 0:
# print "DB committed transaction"
#else:
# print "DB reduce transaction depth"
| Sir-Henry-Curtis/XBMC_Remote | XBMC/service.ir.remote/resources/library/db.py | Python | gpl-3.0 | 11,087 |
import os
import pathlib
import urllib.request
from collections import namedtuple
from typing import Optional
from lxml import etree
MATERIALSDBINDEXURL = "http://www.materialsdb.org/download/ProducerIndex.xml"
def get_cache_folder():
cache_dir = pathlib.Path(
os.environ.get("APPDATA")
or os.environ.get("XDG_CACHE_HOME")
or pathlib.Path.home() / ".cache"
).joinpath(
"materialsdb",
)
pathlib.Path(cache_dir).mkdir(parents=True, exist_ok=True)
return cache_dir
def get_cached_index_path() -> pathlib.Path:
return get_cache_folder() / "ProducerIndex.xml"
def parse_cached_index() -> etree._ElementTree:
path = get_cached_index_path()
if path.exists():
return etree.parse(str(path))
root = etree.Element("root")
return etree.ElementTree(root)
def get_by_id(root: etree._Element, id: str) -> Optional[etree._Element]:
for company in root:
if company.get("id") == id:
return company
return None
def require_update(cached_company, company) -> bool:
if cached_company is None:
return True
for attrib in ["LastKnownDate", "KnownVersion"]:
if company.get(attrib) > cached_company.get(attrib):
return True
return False
def get_producers_dir() -> pathlib.Path:
producer_path = get_cache_folder().joinpath("Producers")
pathlib.Path(producer_path).mkdir(parents=True, exist_ok=True)
return producer_path
def update_producers_data():
cached_index = parse_cached_index()
cached_root = cached_index.getroot()
new_index = etree.parse(MATERIALSDBINDEXURL)
new_root = new_index.getroot()
producers_dir = get_producers_dir()
has_index_update = False
existing = []
updated = []
deleted = []
for company in new_root:
cached_producer = get_by_id(cached_root, company.get("id"))
producer_path = producers_dir / pathlib.Path(company.get("href")).name
if not require_update(cached_producer, company) and producer_path.exists():
existing.append(producer_path)
continue
        if cached_producer is not None:
cached_path = producers_dir / pathlib.Path(cached_producer.get("href")).name
deleted.append(cached_path)
cached_path.unlink(True)
has_index_update = True
urllib.request.urlretrieve(company.get("href"), producer_path)
updated.append(producer_path)
if has_index_update:
new_index.write(str(get_cached_index_path()))
Report = namedtuple("Report", ["existing", "updated", "deleted"])
return Report(existing, updated, deleted)
def producers():
for producer in get_producers_dir().iterdir():
if producer.suffix.lower() == ".xml":
yield producer
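# A small driver makes the Report easier to see. This sketch assumes network
# access to materialsdb.org; print_report is a hypothetical helper, not part
# of the original module.
def print_report() -> None:
    report = update_producers_data()
    print(
        f"updated: {len(report.updated)}, "
        f"deleted: {len(report.deleted)}, "
        f"unchanged: {len(report.existing)}"
    )
    for producer in producers():
        print(producer.name)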
def main():
update_producers_data()
if __name__ == "__main__":
main()
| CyrilWaechter/pyRevitMEP | lib/materialsdb/cache.py | Python | gpl-3.0 | 2,862 |
#!/usr/bin/python
#
# This file is part of LibQtTracker project
#
# Copyright (C) 2009, Nokia
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Library General Public
# License as published by the Free Software Foundation; either
# version 2 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Library General Public License for more details.
#
# You should have received a copy of the GNU Library General Public License
# along with this library; see the file COPYING.LIB. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
# Boston, MA 02110-1301, USA.
#
# Author: Iridian Kiiskinen <ext-iridian.kiiskinen at nokia.com>
import ConfigParser, os
import sys
import getopt
from xml.sax import saxutils
from rdf_namespaces import *
# In addition, you might want to add commonly used namespaces to namespaces list
def usage():
print "Usage: python service2rdfxml.py --ontology=ONTOLOGY --metadata=ONTOLOGY.metadata --service=ONTOLOGY.service [--uri=URI] [--namespace=ns=URI ] [--verbose]"
def main():
try:
default_ns = ""
metadatafs = list()
servicefs = list()
verbosity = 1
ontology = ""
opts, args = getopt.getopt(sys.argv[1:], "hvu:vo:va:vm:vs:vn:v", ["help", "verbose", "uri=", "ontology=", "metadata=", "service=", "namespace="])
for o, a in opts:
if o in ("-u", "--uri"):
default_ns = a
elif o in ("-o", "--ontology"):
ontology = a
elif o in ("-m", "--metadata"):
metadatafs.append(a)
elif o in ("-s", "--service"):
servicefs.append(a)
elif o in ("-n", "--namespace"):
namespaces[a.split("=",1)[0]] = a.split("=",1)[1]
elif o in ("-h", "--help"):
usage()
sys.exit()
elif o in ("-v", "--verbose"):
verbosity += 1
if default_ns == "":
default_ns = "http://www.tracker-project.org/ontologies/tracker#"
if not len(metadatafs) and not len(servicefs):
usage()
sys.exit()
print "<rdf:RDF"
for ns, url in namespaces.iteritems():
print " xmlns:" + ns + "=\"" + url + "\""
print ">"
print ""
registered_klasses = {}
for servicef in servicefs:
service = ConfigParser.ConfigParser()
service.readfp(open(servicef))
for klass in service.sections():
try:
klass_uri, prefix, suffix = namespaceItem(klass, default_ns);
except:
sys.stderr.write("Dropping a class with unrecognized namespace: " + klass + "\n")
continue
registered_klasses[klass_uri] = True
print "\t<rdfs:Class rdf:about=\"" + klass_uri + "\">"
print "\t\t<rdfs:label>" + suffix + "</rdfs:label>"
for name, value in service.items(klass):
if name == "superclasses":
superclasses = value.split (";")
for superclass in superclasses:
if len(superclass.strip()):
print "\t\t<rdfs:subClassOf>"
print "\t\t\t<rdfs:Class rdf:about=\"" + namespaceItem(superclass, default_ns)[0] + "\"/>"
print "\t\t</rdfs:subClassOf>"
print "\t</rdfs:Class>"
for metadataf in metadatafs:
metadata = ConfigParser.ConfigParser()
metadata.readfp(open(metadataf))
for mdata in metadata.sections():
try:
property_uri, mns, mfragment = namespaceItem(mdata, default_ns)
except:
if verbosity >= 2:
sys.stderr.write("Dropping a property with unrecognized namespace: " + mdata + "\n")
continue
nodes = []
domain = None
multiplevalues = False
for name, value in metadata.items(mdata):
if name == "displayname":
                        pass  # "displayname" is deliberately skipped when generating RDF
elif name == "multiplevalues" and value == "true":
multiplevalues = True
elif name == "datatype":
if value != "resource": # TODO: remove temp hack, 0.2.4 specific code
nodes.append("\t\t<rdfs:range rdf:resource=\"" + namespaceItem(value, default_ns)[0] + "\"/>")
elif name == "domain":
domain = namespaceItem(value, default_ns)[0]
nodes.append("\t\t<rdfs:domain rdf:resource=\"" + domain + "\"/>")
elif name == "parent":
nodes.append("\t\t<rdfs:subPropertyOf rdf:resource=\"" + namespaceItem(value, default_ns)[0] + "\"/>")
elif name == "multiplevalues" and value == "true":
multiplevalues = True
elif name == "superproperties":
superproperties = value.split (";")
for superproperty in superproperties:
if len(superproperty):
nodes.append("\t\t<rdfs:subPropertyOf rdf:resource=\"" + namespaceItem(superproperty, default_ns)[0] + "\"/>")
else:
nodes.append("\t\t<rdfs:comment>" + name + ": " + saxutils.escape(value) + "</rdfs:comment>")
if domain in registered_klasses or namespaceOfItem(property_uri)[0] == ontology:
print "\t<rdf:Property rdf:about=\"" + property_uri + "\">"
print "\t\t<rdfs:label>" + mfragment + "</rdfs:label>"
for node in nodes:
print node
if not multiplevalues:
print "\t\t<nrl:MaxCardinality>1</nrl:MaxCardinality>"
print "\t</rdf:Property>"
print "</rdf:RDF>"
except getopt.GetoptError, err:
print str(err)
usage()
sys.exit(2)
if __name__ == "__main__":
main()
| dudochkin-victor/libqttracker | tools/build/detail/service2rdfxml.py | Python | lgpl-2.1 | 6,867 |
_base_ = '../faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py'
norm_cfg = dict(type='SyncBN', requires_grad=True)
model = dict(
backbone=dict(
type='ResNeSt',
stem_channels=64,
depth=50,
radix=2,
reduction_factor=4,
avg_down_stride=True,
num_stages=4,
out_indices=(0, 1, 2, 3),
frozen_stages=1,
norm_cfg=norm_cfg,
norm_eval=False,
style='pytorch',
init_cfg=dict(type='Pretrained', checkpoint='open-mmlab://resnest50')),
roi_head=dict(
bbox_head=dict(
type='Shared4Conv1FCBBoxHead',
conv_out_channels=256,
norm_cfg=norm_cfg)))
# use ResNeSt img_norm
img_norm_cfg = dict(
mean=[123.68, 116.779, 103.939], std=[58.393, 57.12, 57.375], to_rgb=True)
train_pipeline = [
dict(type='LoadImageFromFile'),
dict(
type='LoadAnnotations',
with_bbox=True,
with_mask=False,
poly2mask=False),
dict(
type='Resize',
img_scale=[(1333, 640), (1333, 800)],
multiscale_mode='range',
keep_ratio=True),
dict(type='RandomFlip', flip_ratio=0.5),
dict(type='Normalize', **img_norm_cfg),
dict(type='Pad', size_divisor=32),
dict(type='DefaultFormatBundle'),
dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']),
]
test_pipeline = [
dict(type='LoadImageFromFile'),
dict(
type='MultiScaleFlipAug',
img_scale=(1333, 800),
flip=False,
transforms=[
dict(type='Resize', keep_ratio=True),
dict(type='RandomFlip'),
dict(type='Normalize', **img_norm_cfg),
dict(type='Pad', size_divisor=32),
dict(type='ImageToTensor', keys=['img']),
dict(type='Collect', keys=['img']),
])
]
data = dict(
train=dict(pipeline=train_pipeline),
val=dict(pipeline=test_pipeline),
test=dict(pipeline=test_pipeline))
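# A minimal sketch of consuming a config like this one (kept in comments so
# the config itself stays loadable). With mmcv-era MMDetection, the merged
# config (with `_base_` resolved) can be inspected programmatically, and
# training takes the config path on the command line:
#
#   from mmcv import Config
#   cfg = Config.fromfile(
#       'configs/resnest/faster_rcnn_s50_fpn_syncbn-backbone'
#       '+head_mstrain-range_1x_coco.py')
#   print(cfg.model.backbone.type)  # -> 'ResNeSt'
#
#   python tools/train.py <path to this config>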
| open-mmlab/mmdetection | configs/resnest/faster_rcnn_s50_fpn_syncbn-backbone+head_mstrain-range_1x_coco.py | Python | apache-2.0 | 1,947 |
# coding=utf-8
# pylint: disable-msg=E1101,W0612
import numpy as np
import pytest
import pandas._libs.lib as lib
import pandas as pd
import pandas.util.testing as tm
from .common import TestData
class TestSeriesReplace(TestData):
def test_replace(self):
N = 100
ser = pd.Series(np.random.randn(N))
ser[0:4] = np.nan
ser[6:10] = 0
# replace list with a single value
ser.replace([np.nan], -1, inplace=True)
exp = ser.fillna(-1)
tm.assert_series_equal(ser, exp)
rs = ser.replace(0., np.nan)
ser[ser == 0.] = np.nan
tm.assert_series_equal(rs, ser)
ser = pd.Series(np.fabs(np.random.randn(N)), tm.makeDateIndex(N),
dtype=object)
ser[:5] = np.nan
ser[6:10] = 'foo'
ser[20:30] = 'bar'
# replace list with a single value
rs = ser.replace([np.nan, 'foo', 'bar'], -1)
assert (rs[:5] == -1).all()
assert (rs[6:10] == -1).all()
assert (rs[20:30] == -1).all()
assert (pd.isna(ser[:5])).all()
# replace with different values
rs = ser.replace({np.nan: -1, 'foo': -2, 'bar': -3})
assert (rs[:5] == -1).all()
assert (rs[6:10] == -2).all()
assert (rs[20:30] == -3).all()
assert (pd.isna(ser[:5])).all()
# replace with different values with 2 lists
rs2 = ser.replace([np.nan, 'foo', 'bar'], [-1, -2, -3])
tm.assert_series_equal(rs, rs2)
# replace inplace
ser.replace([np.nan, 'foo', 'bar'], -1, inplace=True)
assert (ser[:5] == -1).all()
assert (ser[6:10] == -1).all()
assert (ser[20:30] == -1).all()
ser = pd.Series([np.nan, 0, np.inf])
tm.assert_series_equal(ser.replace(np.nan, 0), ser.fillna(0))
ser = pd.Series([np.nan, 0, 'foo', 'bar', np.inf, None, lib.NaT])
tm.assert_series_equal(ser.replace(np.nan, 0), ser.fillna(0))
filled = ser.copy()
filled[4] = 0
tm.assert_series_equal(ser.replace(np.inf, 0), filled)
ser = pd.Series(self.ts.index)
tm.assert_series_equal(ser.replace(np.nan, 0), ser.fillna(0))
# malformed
pytest.raises(ValueError, ser.replace, [1, 2, 3], [np.nan, 0])
# make sure that we aren't just masking a TypeError because bools don't
# implement indexing
with tm.assert_raises_regex(TypeError, 'Cannot compare types .+'):
ser.replace([1, 2], [np.nan, 0])
ser = pd.Series([0, 1, 2, 3, 4])
result = ser.replace([0, 1, 2, 3, 4], [4, 3, 2, 1, 0])
tm.assert_series_equal(result, pd.Series([4, 3, 2, 1, 0]))
def test_replace_gh5319(self):
# API change from 0.12?
# GH 5319
ser = pd.Series([0, np.nan, 2, 3, 4])
expected = ser.ffill()
result = ser.replace([np.nan])
tm.assert_series_equal(result, expected)
ser = pd.Series([0, np.nan, 2, 3, 4])
expected = ser.ffill()
result = ser.replace(np.nan)
tm.assert_series_equal(result, expected)
# GH 5797
ser = pd.Series(pd.date_range('20130101', periods=5))
expected = ser.copy()
expected.loc[2] = pd.Timestamp('20120101')
result = ser.replace({pd.Timestamp('20130103'):
pd.Timestamp('20120101')})
tm.assert_series_equal(result, expected)
result = ser.replace(pd.Timestamp('20130103'),
pd.Timestamp('20120101'))
tm.assert_series_equal(result, expected)
# GH 11792: Test with replacing NaT in a list with tz data
ts = pd.Timestamp('2015/01/01', tz='UTC')
s = pd.Series([pd.NaT, pd.Timestamp('2015/01/01', tz='UTC')])
result = s.replace([np.nan, pd.NaT], pd.Timestamp.min)
expected = pd.Series([pd.Timestamp.min, ts], dtype=object)
tm.assert_series_equal(expected, result)
def test_replace_with_single_list(self):
ser = pd.Series([0, 1, 2, 3, 4])
result = ser.replace([1, 2, 3])
tm.assert_series_equal(result, pd.Series([0, 0, 0, 0, 4]))
s = ser.copy()
s.replace([1, 2, 3], inplace=True)
tm.assert_series_equal(s, pd.Series([0, 0, 0, 0, 4]))
# make sure things don't get corrupted when fillna call fails
s = ser.copy()
with pytest.raises(ValueError):
s.replace([1, 2, 3], inplace=True, method='crash_cymbal')
tm.assert_series_equal(s, ser)
def test_replace_with_empty_list(self):
# GH 21977
s = pd.Series([[1], [2, 3], [], np.nan, [4]])
expected = s
result = s.replace([], np.nan)
tm.assert_series_equal(result, expected)
# GH 19266
with tm.assert_raises_regex(ValueError, "cannot assign mismatch"):
s.replace({np.nan: []})
with tm.assert_raises_regex(ValueError, "cannot assign mismatch"):
s.replace({np.nan: ['dummy', 'alt']})
def test_replace_mixed_types(self):
s = pd.Series(np.arange(5), dtype='int64')
def check_replace(to_rep, val, expected):
sc = s.copy()
r = s.replace(to_rep, val)
sc.replace(to_rep, val, inplace=True)
tm.assert_series_equal(expected, r)
tm.assert_series_equal(expected, sc)
# MUST upcast to float
e = pd.Series([0., 1., 2., 3., 4.])
tr, v = [3], [3.0]
check_replace(tr, v, e)
# MUST upcast to float
e = pd.Series([0, 1, 2, 3.5, 4])
tr, v = [3], [3.5]
check_replace(tr, v, e)
# casts to object
e = pd.Series([0, 1, 2, 3.5, 'a'])
tr, v = [3, 4], [3.5, 'a']
check_replace(tr, v, e)
# again casts to object
e = pd.Series([0, 1, 2, 3.5, pd.Timestamp('20130101')])
tr, v = [3, 4], [3.5, pd.Timestamp('20130101')]
check_replace(tr, v, e)
# casts to object
e = pd.Series([0, 1, 2, 3.5, True], dtype='object')
tr, v = [3, 4], [3.5, True]
check_replace(tr, v, e)
# test an object with dates + floats + integers + strings
dr = pd.date_range('1/1/2001', '1/10/2001',
freq='D').to_series().reset_index(drop=True)
result = dr.astype(object).replace(
[dr[0], dr[1], dr[2]], [1.0, 2, 'a'])
expected = pd.Series([1.0, 2, 'a'] + dr[3:].tolist(), dtype=object)
tm.assert_series_equal(result, expected)
def test_replace_bool_with_string_no_op(self):
s = pd.Series([True, False, True])
result = s.replace('fun', 'in-the-sun')
tm.assert_series_equal(s, result)
def test_replace_bool_with_string(self):
# nonexistent elements
s = pd.Series([True, False, True])
result = s.replace(True, '2u')
expected = pd.Series(['2u', False, '2u'])
tm.assert_series_equal(expected, result)
def test_replace_bool_with_bool(self):
s = pd.Series([True, False, True])
result = s.replace(True, False)
expected = pd.Series([False] * len(s))
tm.assert_series_equal(expected, result)
def test_replace_with_dict_with_bool_keys(self):
s = pd.Series([True, False, True])
with tm.assert_raises_regex(TypeError, 'Cannot compare types .+'):
s.replace({'asdf': 'asdb', True: 'yes'})
def test_replace2(self):
N = 100
ser = pd.Series(np.fabs(np.random.randn(N)), tm.makeDateIndex(N),
dtype=object)
ser[:5] = np.nan
ser[6:10] = 'foo'
ser[20:30] = 'bar'
# replace list with a single value
rs = ser.replace([np.nan, 'foo', 'bar'], -1)
assert (rs[:5] == -1).all()
assert (rs[6:10] == -1).all()
assert (rs[20:30] == -1).all()
assert (pd.isna(ser[:5])).all()
# replace with different values
rs = ser.replace({np.nan: -1, 'foo': -2, 'bar': -3})
assert (rs[:5] == -1).all()
assert (rs[6:10] == -2).all()
assert (rs[20:30] == -3).all()
assert (pd.isna(ser[:5])).all()
# replace with different values with 2 lists
rs2 = ser.replace([np.nan, 'foo', 'bar'], [-1, -2, -3])
tm.assert_series_equal(rs, rs2)
# replace inplace
ser.replace([np.nan, 'foo', 'bar'], -1, inplace=True)
assert (ser[:5] == -1).all()
assert (ser[6:10] == -1).all()
assert (ser[20:30] == -1).all()
def test_replace_with_empty_dictlike(self):
# GH 15289
s = pd.Series(list('abcd'))
tm.assert_series_equal(s, s.replace(dict()))
tm.assert_series_equal(s, s.replace(pd.Series([])))
def test_replace_string_with_number(self):
# GH 15743
s = pd.Series([1, 2, 3])
result = s.replace('2', np.nan)
expected = pd.Series([1, 2, 3])
tm.assert_series_equal(expected, result)
def test_replace_replacer_equals_replacement(self):
# GH 20656
# make sure all replacers are matching against original values
s = pd.Series(['a', 'b'])
expected = pd.Series(['b', 'a'])
result = s.replace({'a': 'b', 'b': 'a'})
tm.assert_series_equal(expected, result)
def test_replace_unicode_with_number(self):
# GH 15743
s = pd.Series([1, 2, 3])
result = s.replace(u'2', np.nan)
expected = pd.Series([1, 2, 3])
tm.assert_series_equal(expected, result)
def test_replace_mixed_types_with_string(self):
# Testing mixed
s = pd.Series([1, 2, 3, '4', 4, 5])
result = s.replace([2, '4'], np.nan)
expected = pd.Series([1, np.nan, 3, np.nan, 4, 5])
tm.assert_series_equal(expected, result)
| harisbal/pandas | pandas/tests/series/test_replace.py | Python | bsd-3-clause | 9,775 |
#####################################################################
# Example : perform intrinsic calibration of a connected camera
# Author : Toby Breckon, toby.breckon@durham.ac.uk
# Copyright (c) 2018-2021 Department of Computer Science,
# Durham University, UK
# License : LGPL - http://www.gnu.org/licenses/lgpl.html
# Acknowledgements:
# http://opencv-python-tutroals.readthedocs.org/en/latest/ \
# py_tutorials/py_calib3d/py_table_of_contents_calib3d/py_table_of_contents_calib3d.html
# http://docs.ros.org/electric/api/cob_camera_calibration/html/calibrator_8py_source.html
#####################################################################
import cv2
import argparse
import sys
import numpy as np
#####################################################################
keep_processing = True
# parse command line arguments for camera ID or video file
parser = argparse.ArgumentParser(
description='Perform ' +
sys.argv[0] +
' example operation on incoming camera/video image')
parser.add_argument(
"-c",
"--camera_to_use",
type=int,
help="specify camera to use",
default=0)
parser.add_argument(
"-r",
"--rescale",
type=float,
help="rescale image by this factor",
default=1.0)
parser.add_argument(
"-cbx",
"--chessboardx",
type=int,
help="specify number of internal chessboard squares \
(corners) in x-direction",
default=6)
parser.add_argument(
"-cby",
"--chessboardy",
type=int,
help="specify number of internal chessboard squares \
(corners) in y-direction",
default=9)
parser.add_argument(
"-cbw",
"--chessboardw",
type=float,
help="specify width/height of chessboard squares in mm",
default=40.0)
parser.add_argument(
"-i",
"--iterations",
type=int,
help="specify number of iterations for each stage of optimisation",
default=100)
parser.add_argument(
"-e",
"--minimum_error",
type=float,
help="specify lower error threshold upon which to stop \
optimisation stages",
default=0.001)
args = parser.parse_args()
#####################################################################
# define video capture object
try:
# to use a non-buffered camera stream (via a separate thread)
import camera_stream
cap = camera_stream.CameraVideoStream()
except BaseException:
# if not then just use OpenCV default
print("INFO: camera_stream class not found - camera input may be buffered")
cap = cv2.VideoCapture()
# define display window names
window_name = "Camera Input" # window name
window_nameU = "Undistorted (calibrated) Camera"  # window name
#####################################################################
# perform intrinsic calibration (removal of image distortion in image)
do_calibration = False
termination_criteria_subpix = (
cv2.TERM_CRITERIA_EPS +
cv2.TERM_CRITERIA_MAX_ITER,
args.iterations,
args.minimum_error)
# set up a set of real-world "object points" for the chessboard pattern
patternX = args.chessboardx
patternY = args.chessboardy
square_size_in_mm = args.chessboardw
# prepare object points, like (0,0,0), (1,0,0), (2,0,0) ....,(6,5,0)
objp = np.zeros((patternX * patternY, 3), np.float32)
objp[:, :2] = np.mgrid[0:patternX, 0:patternY].T.reshape(-1, 2)
objp = objp * square_size_in_mm
# create arrays to store object points and image points from all the images.
objpoints = [] # 3d point in real world space
imgpoints = [] # 2d points in image plane.
#####################################################################
# count number of chessboard detections
chessboard_pattern_detections = 0
print()
print("--> hold up chessboard (grabbing images at 2 fps)")
print("press c : to continue to calibration")
#####################################################################
# open connected camera
if cap.open(args.camera_to_use):
while (not(do_calibration)):
# grab frames from camera
ret, frame = cap.read()
# rescale if specified
if (args.rescale != 1.0):
frame = cv2.resize(frame, (0, 0), fx=args.rescale, fy=args.rescale)
# convert to grayscale
gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
# Find the chess board corners in the image
# (change flags to perhaps improve detection ?)
ret, corners = cv2.findChessboardCorners(
gray, (patternX, patternY), None, cv2.CALIB_CB_ADAPTIVE_THRESH |
cv2.CALIB_CB_FAST_CHECK | cv2.CALIB_CB_NORMALIZE_IMAGE)
# If found, add object points, image points (after refining them)
if (ret):
chessboard_pattern_detections += 1
# add object points to global list
objpoints.append(objp)
            # refine corner locations to sub-pixel accuracy and then store them
corners_sp = cv2.cornerSubPix(
gray, corners, (11, 11), (-1, -1), termination_criteria_subpix)
imgpoints.append(corners_sp)
# Draw and display the corners
drawboard = cv2.drawChessboardCorners(
frame, (patternX, patternY), corners_sp, ret)
text = 'detected: ' + str(chessboard_pattern_detections)
cv2.putText(drawboard, text, (10, 25),
cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 0), 2, 8)
cv2.imshow(window_name, drawboard)
else:
text = 'detected: ' + str(chessboard_pattern_detections)
cv2.putText(frame, text, (10, 25),
cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 0), 2, 8)
cv2.imshow(window_name, frame)
# start the event loop
key = cv2.waitKey(500) & 0xFF # wait 500 ms. between frames
if (key == ord('c')):
do_calibration = True
else:
print("Cannot open connected camera.")
exit()
#####################################################################
# check we detected some patterns within the first loop
if (chessboard_pattern_detections == 0):
print("No calibration patterns detected - exiting.")
exit()
#####################################################################
# perform calibration - uses [Zhang, 2000]
print("START - intrinsic calibration ...")
ret, K, D, rvecs, tvecs = cv2.calibrateCamera(
objpoints, imgpoints, gray.shape[::-1], None, None)
print("FINISHED - intrinsic calibration")
# print output in readable format
print()
print("Intrinsic Camera Calibration Matrix, K - from intrinsic calibration:")
print("(format as follows: fx, fy - focal lengths / cx, cy - optical centers)")
print("[fx, 0, cx]\n[0, fy, cy]\n[0, 0, 1]")
np.set_printoptions(formatter={'float': lambda x: "{0:0.2f}".format(x)})
print(K)
print()
print("Intrinsic Distortion Co-effients, D - from intrinsic calibration:")
print("(k1, k2, k3 - radial p1, p2 - tangential - distortion coefficients)")
print("[k1, k2, p1, p2, k3]")
np.set_printoptions(formatter={'float': lambda x: "{0:0.5f}".format(x)})
print(D)
print()
print("Image resolution used (width, height): ", frame.shape[:2])
#####################################################################
# perform undistortion (i.e. calibration) of the images
keep_processing = True
print()
print("-> performing undistortion")
print("press x : to exit")
while (keep_processing):
# grab frames from camera
ret, frame = cap.read()
# undistort image using camera matrix K and distortion coefficients D
undistorted = cv2.undistort(frame, K, D, None, None)
# display both images
cv2.imshow(window_name, frame)
cv2.imshow(window_nameU, undistorted)
# start the event loop - essential
key = cv2.waitKey(40) & 0xFF # wait 40ms (i.e. 1000ms / 25 fps = 40 ms)
if (key == ord('x')):
keep_processing = False
#####################################################################
# close all windows and cams.
cv2.destroyAllWindows()
#####################################################################
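#####################################################################
# example invocation (a sketch; match -cbx / -cby / -cbw to the
# chessboard target actually in hand):
#
#   python calibrate_camera.py -c 0 -cbx 6 -cby 9 -cbw 40.0
#####################################################################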
| tobybreckon/python-examples-cv | calibrate_camera.py | Python | lgpl-3.0 | 8,076 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('mainapp', '0011_widget_is_raw'),
]
operations = [
migrations.AddField(
model_name='widget',
name='blacklist_ip',
field=models.CharField(default=b'', max_length=3000),
preserve_default=True,
),
migrations.AddField(
model_name='widget',
name='blacklist_phones',
field=models.CharField(default=b'', max_length=3000),
preserve_default=True,
),
migrations.AddField(
model_name='widget',
name='delay_before_callback_from_a_to_b',
field=models.IntegerField(default=0),
preserve_default=True,
),
migrations.AddField(
model_name='widget',
name='delay_before_callback_to_additional_number',
field=models.IntegerField(default=0),
preserve_default=True,
),
migrations.AddField(
model_name='widget',
name='disable_on_mobiles',
field=models.BooleanField(default=False),
preserve_default=True,
),
migrations.AddField(
model_name='widget',
name='geo_filter',
field=models.CharField(default=b'all', max_length=20, choices=[(b'all', b'All')]),
preserve_default=True,
),
migrations.AddField(
model_name='widget',
name='operator_incoming_number',
field=models.CharField(default=b'callfeed', max_length=8, choices=[(b'callfeed', b'Callfeed'), (b'client', b'Client')]),
preserve_default=True,
),
migrations.AddField(
model_name='widget',
name='speak_site_name',
field=models.BooleanField(default=False),
preserve_default=True,
),
migrations.AddField(
model_name='widget',
name='time_before_callback_sec',
field=models.IntegerField(default=0),
preserve_default=True,
),
]
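# A minimal sketch of applying this migration with Django's standard
# management command (app label and migration number taken from the
# dependency above):
#
#   python manage.py migrate mainapp 0012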
| vesellov/callfeed.net | mainapp/migrations/0012_auto_20150525_1959.py | Python | mit | 2,217 |
import os
import threading
from typing import List
import time
import logging
import requests
from .School import School
from .DispatcherManager import DispatcherManager
class StatusMonitor(threading.Thread):
def __init__(self, dispatcher_manager: DispatcherManager, schools: List[dict]):
super(StatusMonitor, self).__init__()
self.logger = logging.getLogger("StatusMonitor")
self.dispatcher_manager = dispatcher_manager
self.schools = [] # type: List[School]
for school in schools:
self.schools.append(School(self, **school))
self.running = False
self.first_check = not os.getenv("SCHOOLTRACKER_DEBUG", False)
def run(self):
self.running = True
while self.running:
for school in self.schools:
self.check_status(school)
if self.first_check:
self.first_check = False
time.sleep(60)
def check_status(self, school: School):
        try:
            page_response = requests.get("https://www.nlesd.ca/schools/statusreport/")
            page_response.raise_for_status()
        except requests.RequestException:
            # covers connection failures as well as HTTP error statuses
            return
resp_text = page_response.text.replace("\r", "").replace("\n", "")
if resp_text.count(school.name) != 0:
# Brace for bootleg text parsing
after_school = resp_text.split(school.name + "</a>")[1]
span = after_school.split("<br/>")[3]
status_list = span.split(";\">")[1].split("<span style=\"color: grey;\"></span>")[0].split(
"</span><br>")
status = ". ".join(
[i.replace("<br/>", "").replace("<br>", "").split("<span")[0].strip().rstrip(".").capitalize() for i in
status_list])
else:
status = "School open"
if status != school.last_status:
self.logger.info("Status for {} updated. New status: {}".format(school.name, status))
self.dispatch_notification(school, status)
school.last_status = status
def dispatch_notification(self, school: School, new_status: str):
if self.first_check:
return
else:
self.dispatcher_manager.dispatch_notification(school, new_status)
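# A minimal sketch of wiring the monitor up, assuming DispatcherManager can
# be constructed without arguments and School accepts a `name` keyword (both
# are defined elsewhere in this package, so the lines below are illustrative):
#
#   manager = DispatcherManager()
#   monitor = StatusMonitor(manager, [{"name": "Example High School"}])
#   monitor.start()  # run() polls the status page roughly once a minute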
| nint8835/SchoolTracker | SchoolTracker/StatusMonitor.py | Python | mit | 2,247 |
#
# (c) Copyright 2015 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""API over the neutron service.
"""
from django.utils.translation import ugettext_lazy as _
from django.views import generic
from openstack_dashboard import api
from openstack_dashboard.api.rest import urls
from openstack_dashboard.api.rest import utils as rest_utils
from openstack_dashboard.usage import quotas
@urls.register
class Networks(generic.View):
"""API for Neutron Networks
http://developer.openstack.org/api-ref-networking-v2.html
"""
url_regex = r'neutron/networks/$'
@rest_utils.ajax()
def get(self, request):
"""Get a list of networks for a project
The listing result is an object with property "items". Each item is
a network.
"""
tenant_id = request.user.tenant_id
result = api.neutron.network_list_for_tenant(request, tenant_id)
        return {'items': [n.to_dict() for n in result]}
@rest_utils.ajax(data_required=True)
def post(self, request):
"""Create a network
:param admin_state_up (optional): The administrative state of the
network, which is up (true) or down (false).
:param name (optional): The network name. A request body is optional:
If you include it, it can specify this optional attribute.
:param net_profile_id (optional): network profile id
:param shared (optional): Indicates whether this network is shared
across all tenants. By default, only administrative users can
change this value.
:param tenant_id (optional): Admin-only. The UUID of the tenant that
will own the network. This tenant can be different from the
tenant that makes the create network request. However, only
administrative users can specify a tenant ID other than their
own. You cannot change this value through authorization
policies.
:return: JSON representation of a Network
"""
if not api.neutron.is_port_profiles_supported():
request.DATA.pop("net_profile_id", None)
new_network = api.neutron.network_create(request, **request.DATA)
return rest_utils.CreatedResponse(
'/api/neutron/networks/%s' % new_network.id,
new_network.to_dict()
)
@urls.register
class Subnets(generic.View):
"""API for Neutron SubNets
http://developer.openstack.org/api-ref-networking-v2.html#subnets
"""
url_regex = r'neutron/subnets/$'
@rest_utils.ajax()
def get(self, request):
"""Get a list of subnets for a project
The listing result is an object with property "items". Each item is
a subnet.
"""
result = api.neutron.subnet_list(request, **request.GET)
        return {'items': [n.to_dict() for n in result]}
@rest_utils.ajax(data_required=True)
def post(self, request):
"""Create a Subnet for a given Network
:param name (optional): The subnet name.
:param network_id: The ID of the attached network.
:param tenant_id (optional): The ID of the tenant who owns the network.
Only administrative users can specify a tenant ID other than
their own.
:param allocation_pools (optional): The start and end addresses for the
allocation pools.
:param gateway_ip (optional): The gateway IP address.
:param ip_version: The IP version, which is 4 or 6.
:param cidr: The CIDR.
:param id (optional): The ID of the subnet.
:param enable_dhcp (optional): Set to true if DHCP is enabled and false
if DHCP is disabled.
:return: JSON representation of a Subnet
"""
new_subnet = api.neutron.subnet_create(request, **request.DATA)
return rest_utils.CreatedResponse(
'/api/neutron/subnets/%s' % new_subnet.id,
new_subnet.to_dict()
)
@urls.register
class Ports(generic.View):
"""API for Neutron Ports
http://developer.openstack.org/api-ref-networking-v2.html#ports
"""
url_regex = r'neutron/ports/$'
@rest_utils.ajax()
def get(self, request):
"""Get a list of ports for a network
The listing result is an object with property "items". Each item is
a subnet.
"""
# see
# https://github.com/openstack/neutron/blob/master/neutron/api/v2/attributes.py
result = api.neutron.port_list(request, **request.GET)
        return {'items': [n.to_dict() for n in result]}
@urls.register
class Services(generic.View):
"""API for Neutron agents
"""
url_regex = r'neutron/agents/$'
@rest_utils.ajax()
def get(self, request):
"""Get a list of agents
"""
if api.base.is_service_enabled(request, 'network') and \
api.neutron.is_extension_supported(request, 'agent'):
result = api.neutron.agent_list(request, **request.GET)
return {'items': [n.to_dict() for n in result]}
else:
raise rest_utils.AjaxError(501, '')
@urls.register
class Extensions(generic.View):
"""API for neutron extensions.
"""
url_regex = r'neutron/extensions/$'
@rest_utils.ajax()
def get(self, request):
"""Get a list of extensions.
The listing result is an object with property "items". Each item is
an extension.
Example:
http://localhost/api/neutron/extensions
"""
result = api.neutron.list_extensions(request)
return {'items': [e for e in result]}
@urls.register
class DefaultQuotaSets(generic.View):
"""API for getting default quotas for neutron
"""
url_regex = r'neutron/quota-sets/defaults/$'
@rest_utils.ajax()
def get(self, request):
if api.base.is_service_enabled(request, 'network'):
quota_set = api.neutron.tenant_quota_get(
request, request.user.tenant_id)
result = [{
'display_name': quotas.QUOTA_NAMES.get(
quota.name,
quota.name.replace('_', ' ').title()
) + '',
'name': quota.name,
'limit': quota.limit
} for quota in quota_set]
return {'items': result}
else:
raise rest_utils.AjaxError(501, _('Service Neutron is disabled.'))
@urls.register
class QuotasSets(generic.View):
"""API for setting quotas of a given project.
"""
url_regex = r'neutron/quotas-sets/(?P<project_id>[0-9a-f]+)$'
@rest_utils.ajax(data_required=True)
def patch(self, request, project_id):
"""Update a single project quota data.
The PATCH data should be an application/json object with the
attributes to set to new quota values.
This method returns HTTP 204 (no content) on success.
"""
# Filters only neutron quota fields
disabled_quotas = quotas.get_disabled_quotas(request)
if api.base.is_service_enabled(request, 'network') and \
api.neutron.is_extension_supported(request, 'quotas'):
neutron_data = {
key: request.DATA[key] for key in quotas.NEUTRON_QUOTA_FIELDS
if key not in disabled_quotas
}
api.neutron.tenant_quota_update(request,
project_id,
**neutron_data)
else:
message = _('Service Neutron is disabled or quotas extension not '
'available.')
raise rest_utils.AjaxError(501, message)
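# A minimal sketch of exercising the endpoints above from a Django test
# client; the '/api/' prefix matches the CreatedResponse paths used in this
# module, and an authenticated client is assumed:
#
#   resp = client.get('/api/neutron/networks/')
#   networks = resp.json()['items']
#
#   resp = client.post('/api/neutron/networks/',
#                      data='{"name": "demo-net"}',
#                      content_type='application/json')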
| coreycb/horizon | openstack_dashboard/api/rest/neutron.py | Python | apache-2.0 | 8,279 |
# coding: utf-8
"""
Talon.One API
The Talon.One API is used to manage applications and campaigns, as well as to integrate with your application. The operations in the _Integration API_ section are used to integrate with our platform, while the other operations are used to manage applications and campaigns. ### Where is the API? The API is available at the same hostname as these docs. For example, if you are reading this page at `https://mycompany.talon.one/docs/api/`, the URL for the [updateCustomerProfile][] operation is `https://mycompany.talon.one/v1/customer_profiles/id` [updateCustomerProfile]: #operation--v1-customer_profiles--integrationId--put # noqa: E501
The version of the OpenAPI document: 1.0.0
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from talon_one.configuration import Configuration
class RollbackDiscountEffectProps(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
'name': 'str',
'value': 'float'
}
attribute_map = {
'name': 'name',
'value': 'value'
}
def __init__(self, name=None, value=None, local_vars_configuration=None): # noqa: E501
"""RollbackDiscountEffectProps - a model defined in OpenAPI""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration()
self.local_vars_configuration = local_vars_configuration
self._name = None
self._value = None
self.discriminator = None
self.name = name
self.value = value
@property
def name(self):
"""Gets the name of this RollbackDiscountEffectProps. # noqa: E501
The name of the \"setDiscount\" effect that was rolled back # noqa: E501
:return: The name of this RollbackDiscountEffectProps. # noqa: E501
:rtype: str
"""
return self._name
@name.setter
def name(self, name):
"""Sets the name of this RollbackDiscountEffectProps.
The name of the \"setDiscount\" effect that was rolled back # noqa: E501
:param name: The name of this RollbackDiscountEffectProps. # noqa: E501
:type: str
"""
if self.local_vars_configuration.client_side_validation and name is None: # noqa: E501
raise ValueError("Invalid value for `name`, must not be `None`") # noqa: E501
self._name = name
@property
def value(self):
"""Gets the value of this RollbackDiscountEffectProps. # noqa: E501
The value of the discount that was rolled back # noqa: E501
:return: The value of this RollbackDiscountEffectProps. # noqa: E501
:rtype: float
"""
return self._value
@value.setter
def value(self, value):
"""Sets the value of this RollbackDiscountEffectProps.
The value of the discount that was rolled back # noqa: E501
:param value: The value of this RollbackDiscountEffectProps. # noqa: E501
:type: float
"""
if self.local_vars_configuration.client_side_validation and value is None: # noqa: E501
raise ValueError("Invalid value for `value`, must not be `None`") # noqa: E501
self._value = value
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, RollbackDiscountEffectProps):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, RollbackDiscountEffectProps):
return True
return self.to_dict() != other.to_dict()
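# A minimal usage sketch with illustrative values: construct the model and
# round-trip it through the serialization helpers defined above.
if __name__ == "__main__":
    props = RollbackDiscountEffectProps(name="setDiscount", value=10.0)
    print(props.to_dict())  # {'name': 'setDiscount', 'value': 10.0}
    print(props == RollbackDiscountEffectProps(name="setDiscount", value=10.0))  # True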
| talon-one/talon_one.py | talon_one/models/rollback_discount_effect_props.py | Python | mit | 5,173 |
#!/usr/bin/python
import unittest
import os
import random
import numpy as np
from pymatgen.core.structure import Structure
from pymatgen.core.lattice import Lattice
from pymatgen.core.surface import Slab, SlabGenerator, generate_all_slabs, \
get_symmetrically_distinct_miller_indices
from pymatgen.symmetry.groups import SpaceGroup
from pymatgen.util.testing import PymatgenTest
def get_path(path_str):
cwd = os.path.abspath(os.path.dirname(__file__))
path = os.path.join(cwd, "..", "..", "..", "test_files", "surface_tests",
path_str)
return path
class SlabTest(PymatgenTest):
def setUp(self):
zno1 = Structure.from_file(get_path("ZnO-wz.cif"), primitive=False)
zno55 = SlabGenerator(zno1, [1, 0, 0], 5, 5, lll_reduce=False,
center_slab=False).get_slab()
self.zno1 = zno1
self.zno55 = zno55
self.h = Structure(Lattice.cubic(3), ["H"],
[[0, 0, 0]])
self.libcc = Structure(Lattice.cubic(3.51004), ["Li", "Li"],
[[0, 0, 0], [0.5, 0.5, 0.5]])
def test_init(self):
zno_slab = Slab(self.zno55.lattice, self.zno55.species,
self.zno55.frac_coords,
self.zno55.miller_index,
self.zno55.oriented_unit_cell,
0, self.zno55.scale_factor)
        m = self.zno55.lattice.matrix
area = np.linalg.norm(np.cross(m[0], m[1]))
self.assertAlmostEqual(zno_slab.surface_area, area)
self.assertEqual(zno_slab.lattice.lengths_and_angles,
self.zno55.lattice.lengths_and_angles)
self.assertEqual(zno_slab.oriented_unit_cell.composition,
self.zno1.composition)
self.assertEqual(len(zno_slab), 8)
def test_add_adsorbate_atom(self):
zno_slab = Slab(self.zno55.lattice, self.zno55.species,
self.zno55.frac_coords,
self.zno55.miller_index,
self.zno55.oriented_unit_cell,
0, self.zno55.scale_factor)
zno_slab.add_adsorbate_atom([1], 'H', 1)
self.assertEqual(len(zno_slab), 9)
self.assertEqual(str(zno_slab[8].specie), 'H')
self.assertAlmostEqual(zno_slab.get_distance(1, 8), 1.0)
self.assertTrue(zno_slab[8].c > zno_slab[0].c)
m = self.zno55.lattice.matrix
area = np.linalg.norm(np.cross(m[0], m[1]))
self.assertAlmostEqual(zno_slab.surface_area, area)
self.assertEqual(zno_slab.lattice.lengths_and_angles,
self.zno55.lattice.lengths_and_angles)
def test_get_sorted_structure(self):
species = [str(site.specie) for site in
self.zno55.get_sorted_structure()]
self.assertEqual(species, ["Zn2+"] * 4 + ["O2-"] * 4)
def test_methods(self):
#Test various structure methods
self.zno55.get_primitive_structure()
def test_as_from_dict(self):
d = self.zno55.as_dict()
obj = Slab.from_dict(d)
self.assertEqual(obj.miller_index, (1, 0, 0))
def test_dipole_and_is_polar(self):
self.assertArrayAlmostEqual(self.zno55.dipole, [0, 0, 0])
self.assertFalse(self.zno55.is_polar())
cscl = self.get_structure("CsCl")
cscl.add_oxidation_state_by_element({"Cs": 1, "Cl": -1})
slab = SlabGenerator(cscl, [1, 0, 0], 5, 5,
lll_reduce=False, center_slab=False).get_slab()
self.assertArrayAlmostEqual(slab.dipole, [-4.209, 0, 0])
self.assertTrue(slab.is_polar())
class SlabGeneratorTest(PymatgenTest):
def test_get_slab(self):
s = self.get_structure("LiFePO4")
gen = SlabGenerator(s, [0, 0, 1], 10, 10)
s = gen.get_slab(0.25)
self.assertAlmostEqual(s.lattice.abc[2], 20.820740000000001)
fcc = Structure.from_spacegroup("Fm-3m", Lattice.cubic(3), ["Fe"],
[[0, 0, 0]])
gen = SlabGenerator(fcc, [1, 1, 1], 10, 10)
slab = gen.get_slab()
gen = SlabGenerator(fcc, [1, 1, 1], 10, 10, primitive=False)
slab_non_prim = gen.get_slab()
self.assertEqual(len(slab), 6)
self.assertEqual(len(slab_non_prim), len(slab) * 4)
#Some randomized testing of cell vectors
for i in range(1, 231):
i = random.randint(1, 230)
sg = SpaceGroup.from_int_number(i)
if sg.crystal_system == "hexagonal" or (sg.crystal_system == \
"trigonal" and sg.symbol.endswith("H")):
latt = Lattice.hexagonal(5, 10)
else:
#Cubic lattice is compatible with all other space groups.
latt = Lattice.cubic(5)
s = Structure.from_spacegroup(i, latt, ["H"], [[0, 0, 0]])
miller = (0, 0, 0)
while miller == (0, 0, 0):
miller = (random.randint(0, 6), random.randint(0, 6),
random.randint(0, 6))
gen = SlabGenerator(s, miller, 10, 10)
a, b, c = gen.oriented_unit_cell.lattice.matrix
self.assertAlmostEqual(np.dot(a, gen._normal), 0)
self.assertAlmostEqual(np.dot(b, gen._normal), 0)
def test_normal_search(self):
fcc = Structure.from_spacegroup("Fm-3m", Lattice.cubic(3), ["Fe"],
[[0, 0, 0]])
for miller in [(1, 0, 0), (1, 1, 0), (1, 1, 1), (2, 1, 1)]:
gen = SlabGenerator(fcc, miller, 10, 10)
gen_normal = SlabGenerator(fcc, miller, 10, 10,
max_normal_search=max(miller))
slab = gen_normal.get_slab()
self.assertAlmostEqual(slab.lattice.alpha, 90)
self.assertAlmostEqual(slab.lattice.beta, 90)
self.assertGreaterEqual(len(gen_normal.oriented_unit_cell),
len(gen.oriented_unit_cell))
graphite = self.get_structure("Graphite")
for miller in [(1, 0, 0), (1, 1, 0), (0, 0, 1), (2, 1, 1)]:
gen = SlabGenerator(graphite, miller, 10, 10)
gen_normal = SlabGenerator(graphite, miller, 10, 10,
max_normal_search=max(miller))
self.assertGreaterEqual(len(gen_normal.oriented_unit_cell),
len(gen.oriented_unit_cell))
sc = Structure(Lattice.hexagonal(3.32, 5.15), ["Sc", "Sc"],
[[1/3, 2/3, 0.25], [2/3, 1/3, 0.75]])
gen = SlabGenerator(sc, (1, 1, 1), 10, 10, max_normal_search=1)
self.assertAlmostEqual(gen.oriented_unit_cell.lattice.angles[1], 90)
def test_get_slabs(self):
gen = SlabGenerator(self.get_structure("CsCl"), [0, 0, 1], 10, 10)
#Test orthogonality of some internal variables.
a, b, c = gen.oriented_unit_cell.lattice.matrix
self.assertAlmostEqual(np.dot(a, gen._normal), 0)
self.assertAlmostEqual(np.dot(b, gen._normal), 0)
self.assertEqual(len(gen.get_slabs()), 1)
s = self.get_structure("LiFePO4")
gen = SlabGenerator(s, [0, 0, 1], 10, 10)
self.assertEqual(len(gen.get_slabs()), 5)
self.assertEqual(len(gen.get_slabs(bonds={("P", "O"): 3})), 2)
# There are no slabs in LFP that does not break either P-O or Fe-O
# bonds for a miller index of [0, 0, 1].
self.assertEqual(len(gen.get_slabs(
bonds={("P", "O"): 3, ("Fe", "O"): 3})), 0)
#If we allow some broken bonds, there are a few slabs.
self.assertEqual(len(gen.get_slabs(
bonds={("P", "O"): 3, ("Fe", "O"): 3},
max_broken_bonds=2)), 2)
# At this threshold, only the origin and center Li results in
        # clustering. All other sites are non-clustered. So the number of
        # slabs is the number of sites in the LiFePO4 unit cell - 2 + 1.
self.assertEqual(len(gen.get_slabs(tol=1e-4)), 15)
LiCoO2 = Structure.from_file(get_path("icsd_LiCoO2.cif"),
primitive=False)
gen = SlabGenerator(LiCoO2, [0, 0, 1], 10, 10)
lco = gen.get_slabs(bonds={("Co", "O"): 3})
self.assertEqual(len(lco), 1)
a, b, c = gen.oriented_unit_cell.lattice.matrix
self.assertAlmostEqual(np.dot(a, gen._normal), 0)
self.assertAlmostEqual(np.dot(b, gen._normal), 0)
scc = Structure.from_spacegroup("Pm-3m", Lattice.cubic(3), ["Fe"],
[[0, 0, 0]])
gen = SlabGenerator(scc, [0, 0, 1], 10, 10)
slabs = gen.get_slabs()
self.assertEqual(len(slabs), 1)
gen = SlabGenerator(scc, [1, 1, 1], 10, 10, max_normal_search=1)
slabs = gen.get_slabs()
self.assertEqual(len(slabs), 1)
def test_triclinic_TeI(self):
# Test case for a triclinic structure of TeI. Only these three
# Miller indices are used because it is easier to identify which
# atoms should be in a surface together. The closeness of the sites
# in other Miller indices can cause some ambiguity when choosing a
# higher tolerance.
numb_slabs = {(0, 0, 1): 5, (0, 1, 0): 3, (1, 0, 0): 7}
TeI = Structure.from_file(get_path("icsd_TeI.cif"),
primitive=False)
for k, v in numb_slabs.items():
trclnc_TeI = SlabGenerator(TeI, k, 10, 10)
TeI_slabs = trclnc_TeI.get_slabs()
self.assertEqual(v, len(TeI_slabs))
def test_get_orthogonal_c_slab(self):
TeI = Structure.from_file(get_path("icsd_TeI.cif"),
primitive=False)
trclnc_TeI = SlabGenerator(TeI, (0, 0, 1), 10, 10)
TeI_slabs = trclnc_TeI.get_slabs()
slab = TeI_slabs[0]
norm_slab = slab.get_orthogonal_c_slab()
self.assertAlmostEqual(norm_slab.lattice.angles[0], 90)
self.assertAlmostEqual(norm_slab.lattice.angles[1], 90)
class FuncTest(PymatgenTest):
def setUp(self):
self.cscl = self.get_structure("CsCl")
self.lifepo4 = self.get_structure("LiFePO4")
self.tei = Structure.from_file(get_path("icsd_TeI.cif"),
primitive=False)
self.LiCoO2 = Structure.from_file(get_path("icsd_LiCoO2.cif"),
primitive=False)
self.p1 = Structure(Lattice.from_parameters(3, 4, 5, 31, 43, 50),
["H", "He"], [[0, 0, 0], [0.1, 0.2, 0.3]])
self.graphite = self.get_structure("Graphite")
def test_get_symmetrically_distinct_miller_indices(self):
indices = get_symmetrically_distinct_miller_indices(self.cscl, 1)
self.assertEqual(len(indices), 3)
indices = get_symmetrically_distinct_miller_indices(self.cscl, 2)
self.assertEqual(len(indices), 6)
self.assertEqual(len(get_symmetrically_distinct_miller_indices(
self.lifepo4, 1)), 7)
# The TeI P-1 structure should have 13 unique millers (only inversion
# symmetry eliminates pairs)
indices = get_symmetrically_distinct_miller_indices(self.tei, 1)
self.assertEqual(len(indices), 13)
# P1 and P-1 should have the same # of miller indices since surfaces
# always have inversion symmetry.
indices = get_symmetrically_distinct_miller_indices(self.p1, 1)
self.assertEqual(len(indices), 13)
indices = get_symmetrically_distinct_miller_indices(self.graphite, 2)
self.assertEqual(len(indices), 12)
def test_generate_all_slabs(self):
slabs = generate_all_slabs(self.cscl, 1, 10, 10)
# Only three possible slabs, one each in (100), (110) and (111).
self.assertEqual(len(slabs), 3)
slabs = generate_all_slabs(self.cscl, 1, 10, 10,
bonds={("Cs", "Cl"): 4})
# No slabs if we don't allow broken Cs-Cl bonds
self.assertEqual(len(slabs), 0)
slabs = generate_all_slabs(self.cscl, 1, 10, 10,
bonds={("Cs", "Cl"): 4},
max_broken_bonds=100)
self.assertEqual(len(slabs), 3)
slabs1 = generate_all_slabs(self.lifepo4, 1, 10, 10, tol=0.1,
bonds={("P", "O"): 3})
self.assertEqual(len(slabs1), 4)
slabs2 = generate_all_slabs(self.lifepo4, 1, 10, 10,
bonds={("P", "O"): 3, ("Fe", "O"): 3})
self.assertEqual(len(slabs2), 0)
# There should be only one possible stable surface, which is
# in the (001) oriented unit cell
slabs3 = generate_all_slabs(self.LiCoO2, 1, 10, 10,
bonds={("Co", "O"): 3})
self.assertEqual(len(slabs3), 1)
mill = (0, 0, 1)
for s in slabs3:
self.assertEqual(s.miller_index, mill)
if __name__ == "__main__":
unittest.main()
|
sonium0/pymatgen
|
pymatgen/core/tests/test_surface.py
|
Python
|
mit
| 13,101
|
#!/usr/bin/python2.7
# Run this script as user: www-data
import os
import server_path
import squeakspace.server.db_sqlite3 as db
import config
try:
os.remove(config.db_path)
except OSError:
pass
conn = db.connect(config.db_path)
c = db.cursor(conn)
db.make_db(c, config.total_quota)
db.commit(conn)
db.close(conn)
|
eek6/squeakspace
|
admin/init_server_db.py
|
Python
|
gpl-3.0
| 325
|
import sys
from zephyrus.components import ComponentManager
import zephyrus.script as sc
class LogSection(sc.ConfigSection):
parameters = [
sc.Parameter('main_log', 'Main log filename(str)', str),
sc.Parameter('population_log', 'Population log filename (str)', str),
sc.Parameter('final_population_log', 'Final population log (str)', str)
]
class StrategySection(sc.ConfigSection):
parameters = [
sc.Parameter('n_generations', 'Number of generations (int)', int),
sc.Parameter('population_size', 'Population size (int)', int),
sc.Parameter('crossover_rate', 'Crossover rate (float)', float),
sc.Parameter('mutation_rate', 'Mutation rate (float)', float)
]
class StandardScenarioParameter(sc.AutoParameter):
def parser(self, parameters, _globals):
resolution = parameters['resolution']
enum = ComponentManager.get_component_enum(_globals['components_filename'])
scenario = []
# Top row: NW corner, north walls, NE corner.
scenario.append((enum.WALLN + enum.WALLW).value)
scenario.extend(enum.WALLN.value for _ in range(resolution - 2))
scenario.append((enum.WALLN + enum.WALLE).value)
# Middle rows: west wall, empty interior cells, east wall.
for _ in range(resolution - 2):
scenario.append(enum.WALLW.value)
scenario.extend([0] * (resolution - 2))
scenario.append(enum.WALLE.value)
# Bottom row: SW corner, south walls, SE corner.
scenario.append((enum.WALLS + enum.WALLW).value)
scenario.extend(enum.WALLS.value for _ in range(resolution - 2))
scenario.append((enum.WALLS + enum.WALLE).value)
return scenario
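# For illustration (not part of the original module): with resolution=3 the
# flattened scenario above describes this 3x3 grid, where 0 is a free cell
# and the border entries carry the wall-component values:
#
#   NW N NE
#   W  0  E
#   SW S SE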
class EnvironmentSection(sc.ConfigSection):
parameters = [
sc.ConstantParameter('n_agent', 1),
sc.Parameter('n_trash', 'Quantity of trash (int)', int),
sc.Parameter('n_bin', 'Quantity of trash bin (int)', int),
sc.Parameter('n_recharge', 'Quantity of recharge points (int)', int),
sc.Parameter('resolution', 'Environment resolution (n x n blocks) (int)', int),
StandardScenarioParameter('standard_scenario')
]
class VaccumConfigBuilder(sc.ConfigBuilder):
sections = [
LogSection('log'),
EnvironmentSection('environment'),
StrategySection('strategy'),
sc.DefaultSimulationSection('simulation')
]
def __init__(self, components_filename):
self.globals = {'components_filename': components_filename}
if __name__ == '__main__':
if len(sys.argv) != 3:
print("Usage config_builder.py component_config output_file")
sys.exit(1)
VaccumConfigBuilder(sys.argv[1]).generate_config_file(sys.argv[2])
|
wairton/zephyrus-mas
|
zephyrus/examples/vacuum/config_builder.py
|
Python
|
mit
| 2,589
|
# encoding: utf-8
# module dbm
# from /usr/lib/python2.7/lib-dynload/dbm.x86_64-linux-gnu.so
# by generator 1.135
# no doc
# no imports
# Variables with simple values
library = 'Berkeley DB'
# functions
def open(path, flag=None, mode=None): # real signature unknown; restored from __doc__
"""
open(path[, flag[, mode]]) -> mapping
Return a database object.
"""
pass
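# Illustrative usage sketch (hypothetical path; flag 'c' creates the database
# if it does not exist yet):
#
#   import dbm
#   db = dbm.open('/tmp/example.db', 'c')
#   db['key'] = 'value'
#   print db['key']
#   db.close()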
# classes
from Exception import Exception
class error(Exception):
# no doc
def __init__(self, *args, **kwargs): # real signature unknown
pass
__weakref__ = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""list of weak references to the object (if defined)"""
|
ProfessorX/Config
|
.PyCharm30/system/python_stubs/-1247972723/dbm.py
|
Python
|
gpl-2.0
| 718
|
from new.fanfoucli.config import cfg
from new.fanfoucli.fan import Fan
import sys
import logging
logging.basicConfig(level=logging.DEBUG)
def test_auth():
f = Fan(cfg)
f.view()
def test_main():
sys.argv[1:] = ['-V']
from new.fanfoucli.cli import main
main()
def test_switch():
fan = Fan(cfg)
fan.switch_account()
def test_login():
fan = Fan(cfg)
fan.login()
def test_config():
cfg.configure()
if __name__ == '__main__':
test_auth()
# test_config()
# test_main()
# test_switch()
# test_login()
# test_config()
|
j178/fanfou-cli
|
test/__init__.py
|
Python
|
mit
| 586
|
"""
**************
Graph Matching
**************
Given a graph G = (V,E), a matching M in G is a set of pairwise non-adjacent
edges; that is, no two edges share a common vertex.
`Wikipedia: Matching <https://en.wikipedia.org/wiki/Matching_(graph_theory)>`_
"""
import networkx as nx
__all__ = ["min_maximal_matching"]
def min_maximal_matching(G):
r"""Returns the minimum maximal matching of G. That is, out of all maximal
matchings of the graph G, the smallest is returned.
Parameters
----------
G : NetworkX graph
Undirected graph
Returns
-------
min_maximal_matching : set
Returns a set of edges such that no two edges share a common endpoint
and every edge not in the set shares some common endpoint in the set.
Cardinality will be 2*OPT in the worst case.
Notes
-----
The algorithm computes an approximate solution to the minimum maximal
cardinality matching problem. The solution is no more than 2 * OPT in size.
Runtime is $O(|E|)$.
References
----------
.. [1] Vazirani, Vijay Approximation Algorithms (2001)
"""
return nx.maximal_matching(G)
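# A minimal usage sketch (illustrative, not part of the original module); the
# exact edges returned depend on iteration order, but on the path 1-2-3-4 any
# maximal matching has one or two edges:
#
#   >>> G = nx.Graph([(1, 2), (2, 3), (3, 4)])
#   >>> m = min_maximal_matching(G)
#   >>> len(m) in (1, 2)
#   True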
|
SpaceGroupUCL/qgisSpaceSyntaxToolkit
|
esstoolkit/external/networkx/algorithms/approximation/matching.py
|
Python
|
gpl-3.0
| 1,155
|
# Exercise 3
#
# Improve the Who's Your Daddy program by adding a choice that lets the user enter a name and get back a grandfather.
# Your program should still only use one dictionary of son-father pairs. Make sure to include several generations in
# your dictionary so that a match can be found.
#
# We used dictionaries for the previous exercise; here the family data is kept as a list of [son, dad, grandfather] triples instead.
generations = [["John", "John's dad", "John's grandfather"],
["Chris", "Chris's dad", "Chris's grandfather"],
["Matthew", "Matthew's dad", "Matthew's grandfather"]]
print("This is the 'Who's your daddy?' game.")
while True:
print("These are the following possibilities: ")
print("\t\t 1. Check someone's family")
print("\t\t 2. Add family")
print("\t\t 3. Replace family")
print("\t\t 4. Delete family")
print("\t\t 10. Exit the game.")
j = int(input("So, what is your option? "))
if j == 1:
print("You want to check someone's family.")
elif j == 2:
print("You want to add someone's family.")
elif j == 3:
print("You want to replace someone's family.")
elif j == 4:
print("You want to delete someone's family.")
elif j == 10:
print("So you want to exit the game, goodbye!")
break
else:
print("Not a valid option. Please try again.")
continue
son_name = input("What is the name of the son? ")
dad_name = ""
grandfather_name = ""
for i in range(0, len(generations)):
if son_name == generations[i][0]:
dad_name = generations[i][1]
grandfather_name = generations[i][2]
break
if j == 1:
if dad_name:
print("The dad of " + son_name + " is called " + dad_name + " and the grandfather is called " + grandfather_name
+ ".")
else:
print("The name of the son is not registered.")
elif j == 2:
if dad_name:
print("The son you entered is already registered.")
else:
dad_name = input("Enter the name of " + son_name + "'s dad: ")
grandfather_name = input("Enter the name of " + son_name + "'s grandfather: ")
generations.append([son_name, dad_name, grandfather_name])
elif j == 3:
if dad_name:
dad_name = input("Enter the name of the new dad of " + son_name + ": ")
grandfather_name = input("Enter the name of the new grandfather of " + son_name + ": ")
generations[i][1] = dad_name
generations[i][2] = grandfather_name
else:
print("The son you entered is not registered.")
elif j == 4:
if dad_name:
print("Deleted son " + son_name + " and his dad " + dad_name + " and his grandfather " + grandfather_name + ".")
del generations[i]
else:
print("The son you entered is not registered.")
|
dmartinezgarcia/Python-Programming
|
Chapter 5 - Lists and dictionaries/exercise_4.py
|
Python
|
gpl-2.0
| 2,754
|
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'EntrezTerm'
db.create_table(u'entrez_entrezterm', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('name', self.gf('django.db.models.fields.CharField')(max_length=100)),
('slug', self.gf('django.db.models.fields.SlugField')(unique=True, max_length=50)),
('term', self.gf('django.db.models.fields.CharField')(max_length=512)),
('owner', self.gf('django.db.models.fields.related.ForeignKey')(related_name='term_owner', to=orm['auth.User'])),
('period', self.gf('django.db.models.fields.PositiveSmallIntegerField')(default=7)),
('db', self.gf('django.db.models.fields.CharField')(default='pubmed', max_length=30)),
('creation_date', self.gf('django.db.models.fields.DateField')(blank=True)),
('lastedit_date', self.gf('django.db.models.fields.DateField')(blank=True)),
))
db.send_create_signal(u'entrez', ['EntrezTerm'])
# Adding model 'EntrezEntry'
db.create_table(u'entrez_entrezentry', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('eid', self.gf('django.db.models.fields.CharField')(default='', max_length=20)),
('raw', self.gf('django.db.models.fields.TextField')(default='')),
('term', self.gf('django.db.models.fields.related.ForeignKey')(related_name='entry_term', to=orm['entrez.EntrezTerm'])),
('content', self.gf('django.db.models.fields.TextField')(default='')),
('title', self.gf('django.db.models.fields.CharField')(max_length=512)),
('magzine', self.gf('django.db.models.fields.CharField')(max_length=512)),
('authors', self.gf('django.db.models.fields.CharField')(max_length=512)),
('abstract', self.gf('django.db.models.fields.TextField')(blank=True)),
('read', self.gf('django.db.models.fields.BooleanField')(default=False)),
('creation_time', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)),
('lastedit_time', self.gf('django.db.models.fields.DateTimeField')(auto_now=True, blank=True)),
))
db.send_create_signal(u'entrez', ['EntrezEntry'])
def backwards(self, orm):
# Deleting model 'EntrezTerm'
db.delete_table(u'entrez_entrezterm')
# Deleting model 'EntrezEntry'
db.delete_table(u'entrez_entrezentry')
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'entrez.entrezentry': {
'Meta': {'ordering': "['-creation_time']", 'object_name': 'EntrezEntry'},
'abstract': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'authors': ('django.db.models.fields.CharField', [], {'max_length': '512'}),
'content': ('django.db.models.fields.TextField', [], {'default': "''"}),
'creation_time': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'eid': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '20'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'lastedit_time': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'magzine': ('django.db.models.fields.CharField', [], {'max_length': '512'}),
'raw': ('django.db.models.fields.TextField', [], {'default': "''"}),
'read': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'term': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'entry_term'", 'to': u"orm['entrez.EntrezTerm']"}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '512'})
},
u'entrez.entrezterm': {
'Meta': {'ordering': "['-creation_date']", 'object_name': 'EntrezTerm'},
'creation_date': ('django.db.models.fields.DateField', [], {'blank': 'True'}),
'db': ('django.db.models.fields.CharField', [], {'default': "'pubmed'", 'max_length': '30'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'lastedit_date': ('django.db.models.fields.DateField', [], {'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'owner': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'term_owner'", 'to': u"orm['auth.User']"}),
'period': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '7'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50'}),
'term': ('django.db.models.fields.CharField', [], {'max_length': '512'})
}
}
complete_apps = ['entrez']
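# To apply this South migration (assuming a standard Django + South setup):
#
#   python manage.py migrate entrez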
|
indexofire/gork
|
src/gork/application/entrez/migrations/0001_initial.py
|
Python
|
mit
| 8,146
|
# vim: set fileencoding=utf-8:
# GNU Solfege - free ear training software
# Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2011 Tom Cato Amundsen
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from __future__ import absolute_import
import errno
import locale
import logging
import os
import shutil
import sqlite3
import subprocess
import sys
import tempfile
from urlparse import urlparse
import webbrowser
from solfege import mpd
from solfege import soundcard
from solfege import abstract
from solfege import cfg
from solfege import dataparser
from solfege import filesystem
from solfege import gu
from solfege import i18n
from solfege import lessonfile
from solfege import osutils
from solfege import parsetree
from solfege import reportlib
from solfege import utils
import solfege
try:
from pyalsa import alsaseq
except ImportError:
alsaseq = None
solfege_copyright = u"Copyright © 1999-2008 Tom Cato Amundsen <tca@gnu.org>, and others."
warranty = """
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
def check_rcfile():
"""See default.config for rcfileversion values, meanings and
a description of how to add config variables.
"""
rcfileversion = 21
if cfg.get_int("app/rcfileversion") > rcfileversion:
cfg.drop_user_config()
return
if cfg.get_int("app/rcfileversion") <= 1:
if not "example-files" in cfg.get_string('config/lessoncollections'):
cfg.set_string('config/lessoncollections',
"%s example-files" % cfg.get_string('config/lessoncollections'))
if cfg.get_int("app/rcfileversion") <= 5:
# This is more complicated than necessary, to fix an old
# error.
if cfg.get_string("sound/commandline"):
cfg.del_key("sound/commandline")
if cfg.get_int("app/rcfileversion") <= 3:
cfg.set_list("config/lessoncollections",
cfg.get_string("config/lessoncollections").split())
if cfg.get_int("app/rcfileversion") <= 4:
cfg.del_key("config/web_browser")
if sys.platform == 'win32':
if cfg.get_string('sound/wav_player'):
cfg.del_key('sound/wav_player')
if cfg.get_int("app/rcfileversion") <= 5:
cfg.set_string("mainwin/history_back_ak", "<alt>Left")
cfg.set_string("mainwin/history_forward_ak", "<alt>Right")
cfg.set_string("mainwin/history_reload_ak", "<ctrl>r")
if cfg.get_int("app/rcfileversion") <= 6:
cfg.set_list("config/lessoncollections", ['solfege', 'user'])
if cfg.get_int("app/rcfileversion") <= 7:
cfg.set_int("rhythm/countin_perc", 80)
if cfg.get_int("app/rcfileversion") <= 8:
cfg.del_key("singinterval/highest_tone")
cfg.del_key("singinterval/lowest_tone")
cfg.del_key("melodicinterval/highest_tone")
cfg.del_key("melodicinterval/lowest_tone")
cfg.del_key("harmonicinterval/highest_tone")
cfg.del_key("harmonicinterval/lowest_tone")
if cfg.get_int("app/rcfileversion") <= 9:
cfg.del_section("mainwin")
if cfg.get_int("app/rcfileversion") <= 10:
cfg.del_section("lessoncollections")
cfg.del_key("config/lessoncollections")
for n in cfg.iterate_sections():
cfg.del_key("%s/lessoncollection" % n)
cfg.del_key("%s/lessonfile" % n)
if cfg.get_int("app/rcfileversion") <= 11:
for s in ('rhythm', 'rhythmtapping2'):
cfg.del_key("%s/countin_perc" % s)
cfg.del_key("%s/rhythm_perc" % s)
if cfg.get_int("app/rcfileversion") <= 12:
cfg.del_key("sound/card_info")
if cfg.get_int("app/rcfileversion") <= 13:
cfg.del_key("config/lowest_instrument_velocity")
cfg.del_key("config/middle_instrument_velocity")
cfg.del_key("config/highest_instrument_velocity")
cfg.del_key("config/preferred_instrument_velocity")
if cfg.get_int("app/rcfileversion") <= 14:
# We have to split the midi_to_wav_cmd into two strings, moving
# the options to *_options, so that midi_to_wav_cmd only holds
# the name of the binary. This is to allow binaries in dirs
# with spaces.
for k in ("midi_to_wav", "wav_to_mp3", "wav_to_ogg"):
v = cfg.get_string("app/%s_cmd" % k).split(" ")
cfg.set_string("app/%s_cmd" % k, v[0])
cfg.set_string("app/%s_cmd_options" % k, " ".join(v[1:]))
if cfg.get_int("app/rcfileversion") <= 15:
for k in ("midi", "wav", "mp3", "ogg"):
v = cfg.get_string("sound/%s_player" % k).split(" ")
cfg.set_string("sound/%s_player" % k, v[0])
cfg.set_string("sound/%s_player_options" % k,
" ".join(v[1:]))
if cfg.get_int("app/rcfileversion") < 17:
v = cfg.get_string("app/frontpage").split("/")
if v[0] == u"exercises" and v[1] != u"standard":
cfg.set_string("app/frontpage",
u"/".join([v[0], u"standard"] + v[1:]))
if cfg.get_int("app/rcfileversion") < 18:
cfg.del_key("gui/web_browser_as_help_browser")
if cfg.get_int("app/rcfileversion") < 19:
for ex in ('singinterval', 'melodicinterval'):
if cfg.get_int("%s/maximum_number_of_intervals" % ex) == 10:
cfg.set_int("%s/maximum_number_of_intervals" % ex, 12)
if cfg.get_int("app/rcfileversion") < 20:
cfg.del_key("gui/reserved_vspace")
if cfg.get_int("app/rcfileversion") < 21:
for ex in ("melodicinterval", "harmonicinterval"):
i = cfg.get_int("%s/inputwidget" % ex)
if i > 0:
cfg.set_int("%s/inputwidget" % ex, i + 1)
cfg.set_int("app/rcfileversion", rcfileversion)
try:
a = mpd.notename_to_int(cfg.get_string("user/lowest_pitch"))
b = mpd.notename_to_int(cfg.get_string("user/highest_pitch"))
except mpd.InvalidNotenameException:
if cfg.get_string("user/sex") == "male":
cfg.set_string("user/highest_pitch", "e'")
cfg.set_string("user/lowest_pitch", "c")
else:
cfg.set_string("user/highest_pitch", "e''")
cfg.set_string("user/lowest_pitch", "c'")
class SolfegeApp(cfg.ConfigUtils):
def __init__(self, options):
"""
options -- command line options parsed by optparse
"""
cfg.ConfigUtils.__init__(self, 'solfege-app')
lessonfile.MusicBaseClass.temp_dir = tempfile.mkdtemp(prefix="solfege-")
os.environ['SOLFEGETEMPDIR'] = lessonfile.MusicBaseClass.temp_dir
# test_mode is when we are running a test from the Tests menu
self.m_test_mode = False
self.m_options = options
self.m_teachers = {}
self.m_running_exercise = None
self.m_sound_init_exception = None
#
self.m_userman_language = "C"
for lang in i18n.langs():
if os.path.isdir(os.path.join('help', lang)):
self.m_userman_language = lang
break
def setup_sound(self):
if sys.platform == 'win32' and \
cfg.get_string("sound/type") == "sequencer-device":
# just in case c:\home\.solfegerc is wrong
cfg.set_string("sound/type", "winsynth")
if self.m_options.no_sound \
or cfg.get_string("sound/type") == "fake-synth":
soundcard.initialise_using_fake_synth(self.m_options.verbose_sound_init)
elif cfg.get_string("sound/type") == "alsa-sequencer":
if alsaseq:
try:
clientid, portid = self.get_list("sound/alsa-client-port")
except ValueError:
clientid, portid = (None, None)
try:
soundcard.initialise_alsa_sequencer((clientid, portid),
self.m_options.verbose_sound_init)
except alsaseq.SequencerError, e:
logging.debug("initialise_alsa_sequencer failed. Using fake synth.")
self.display_sound_init_error_message(e)
soundcard.initialise_using_fake_synth(True)
return
else:
if solfege.splash_win:
solfege.splash_win.hide()
gu.dialog_ok(_("The pyalsa Python module is missing"),
solfege.win,
_("Solfege was configured to use the Python modules from www.alsa-project.org, but the modules were not found. You must reconfigure sound in the preferences window (Ctrl-F12) or restart Solfege in a way that it finds the modules."))
soundcard.initialise_using_fake_synth(True)
if solfege.splash_win:
solfege.splash_win.show()
elif cfg.get_string("sound/type") == "winsynth":
try:
soundcard.initialise_winsynth(cfg.get_int("sound/synth_number"),
verbose_init=self.m_options.verbose_sound_init)
except ImportError, e:
self.display_sound_init_error_message(e)
cfg.set_string("sound/type", "fake-synth")
soundcard.initialise_using_fake_synth(True)
return
except RuntimeError, e:
# We can get here if winmidi.output_devices() in winsynth
# __init__ returns no devices. Don't know when, but it could
# happen.
gu.display_exception_message(e)
cfg.set_string("sound/type", "fake-synth")
soundcard.initialise_using_fake_synth(True)
return
if cfg.get_int("sound/synth_number") != soundcard.synth.m_devnum:
solfege.win.display_error_message2(_("MIDI setup"), _("MIDI Device %(olddev)i not available. Will use device %(newdev)i.") % {'olddev': cfg.get_int("sound/synth_number"), 'newdev': soundcard.synth.m_devnum})
cfg.set_int("sound/synth_number", soundcard.synth.m_devnum)
elif cfg.get_string("sound/type") == "external-midiplayer":
soundcard.initialise_external_midiplayer(
verbose_init=self.m_options.verbose_sound_init)
soundcard.synth.error_report_cb = solfege.win.display_error_message
elif cfg.get_string("sound/type") == '':
solfege.win.display_error_message(
_("You should configure sound from the 'Sound' page "
"of the preferences window."))
elif cfg.get_string("sound/type") == "sequencer-device":
try:
soundcard.initialise_devicefile(
cfg.get_string("sound/device_file"),
cfg.get_int("sound/synth_number"),
verbose_init=self.m_options.verbose_sound_init)
except (soundcard.SoundInitException, OSError, ImportError), e:
self.m_sound_init_exception = e
soundcard.initialise_using_fake_synth(True)
if cfg.get_string("programs/csound") == "AUTODETECT":
for p in osutils.find_csound_executables():
cfg.set_string("programs/csound", p)
break
else:
# If no csound binary was found, then we set the string empty.
# This means that autodetection will only happen the first time
# you run the program, but newly installed binaries will still
# be shown in the combo box of the preferences window.
cfg.set_string("programs/csound", "")
if cfg.get_string("programs/mma") == "AUTODETECT":
for p in osutils.find_mma_executables(cfg.get_list("app/win32_ignore_drives")):
cfg.set_string("programs/mma", p)
break
else:
cfg.set_string("programs/mma", "")
def display_sound_init_error_message(self, e):
if isinstance(e, soundcard.SoundInitException):
solfege.win.display_error_message(
"""%s""" % str(e).decode(locale.getpreferredencoding(), 'replace'))
elif isinstance(e, ImportError):
solfege.win.display_error_message2(str(e), _("You should configure sound from the preferences window, and try to use an external midi player. Or try to recompile the program and check for error messages to see why the module is not built."))
elif getattr(e, 'errno', None) == errno.EACCES:
solfege.win.display_error_message(
"The sound init failed: %s\n"
"The errno EACCES indicates that you don't have write "
"permission to the device."
% str(e).decode(locale.getpreferredencoding(), 'replace'))
elif getattr(e, 'errno', None) == errno.EBUSY:
solfege.win.display_error_message(
"The sound init failed: %s\n"
"It seems like some other program is using the device. You "
"should try to quit that other program and restart Solfege."
% str(e).decode(locale.getpreferredencoding(), 'replace'))
else:
solfege.win.display_error_message(
"The sound init failed: %s\n"
"You should configure sound from the 'Sound' page of "
"the preferences window.\n\n"
"It is also possible that the OS sound setup is incorrect."
% str(e).decode(locale.getpreferredencoding(), 'replace'))
def please_help_me(self):
if isinstance(solfege.win.get_view(), abstract.Gui):
# If the visible view is an exercise, we check whether the
# lesson file header defines a specific help page.
if self.m_teachers[self.m_running_exercise].m_P.header.help:
self.handle_href('%s.html' % self.m_teachers[self.m_running_exercise].m_P.header.help)
else:
# if not, we display the help page named after the
# exercise module
self.handle_href('%s.html' % solfege.win.m_viewer)
def show_exercise_theory(self):
if self.m_teachers[self.m_running_exercise].m_P.header.theory:
solfege.win.display_docfile("%s.html" % self.m_teachers[self.m_running_exercise].m_P.header.theory)
def _practise_lessonfile(self, filename, urlobj=None):
"""
return the module name.
"""
module = lessonfile.infocache.get(filename, 'module')
if self.m_running_exercise:
solfege.win.box_dict[self.m_running_exercise].on_end_practise()
if not lessonfile.is_uri(filename):
# Since the file is in ~/.solfege/exercises we must check
# if the user has written his own exercise module
if os.path.exists(os.path.normpath(os.path.join(
os.path.dirname(filename),
"..", "modules", u"%s.py" % module))):
module = u"user:%s/%s" % (
os.path.dirname(filename).split(os.sep)[-2],
module)
if module not in self.m_teachers:
self.create_teacher(module)
if module not in solfege.win.box_dict:
solfege.win.initialise_exercise(self.m_teachers[module])
self.m_teachers[module].set_lessonfile(filename)
if self.m_teachers[module].m_P:
solfege.win.activate_exercise(module, urlobj)
self.m_running_exercise = module
self.m_teachers[module].g_view = solfege.win.box_dict[module]
solfege.win.show_help_on_current()
return module
def practise_lessonfile(self, filename):
def cleanup():
module = lessonfile.infocache.get(filename, 'module')
self.m_teachers[module].m_P = None
solfege.win.box_dict[module].practise_box.set_sensitive(False)
solfege.win.box_dict[module].config_box.set_sensitive(False)
solfege.win.box_dict[module].action_area.set_sensitive(False)
solfege.win.box_dict[module].std_buttons_end_practise()
try:
module = self._practise_lessonfile(filename)
except (lessonfile.LessonfileParseException,
dataparser.DataparserException,
parsetree.ParseTreeException,
IOError), e:
cleanup()
gu.display_exception_message(e, lessonfile=filename)
return
if 'm_discards' in dir(self.m_teachers[module].m_P):
for msg in self.m_teachers[module].m_P.m_discards:
print >> sys.stderr, msg
solfege.win.box_dict[module].practise_box.set_sensitive(True)
solfege.win.box_dict[module].config_box.set_sensitive(True)
solfege.win.box_dict[module].action_area.set_sensitive(True)
solfege.win.box_dict[module].on_start_practise()
w = solfege.win.g_ui_manager.get_widget("/Menubar/HelpMenu/PerExerciseHelp/HelpTheory")
if w:
w.set_sensitive(bool(self.m_teachers[module].m_P.header.theory))
return module
def test_lessonfile(self, filename):
self.m_test_mode = True
module = self.practise_lessonfile(filename)
solfege.win.enter_test_mode()
def handle_href(self, href):
u = urlparse(href)
if u.scheme:
try:
webbrowser.open_new(href)
except Exception, e:
solfege.win.display_error_message2(_("Error opening web browser"), str(e))
else:
solfege.win.display_docfile(u.path)
def create_teacher(self, modulename):
"""
Create the teacher in 'modulename' and add it to self.m_teachers.
"""
m = self.import_module(modulename)
self.m_teachers[modulename] = m.Teacher(modulename)
def import_module(self, modulename):
"""
If prefixed with "user:":
user:collection/modulename
collection is the directory name in
~/.solfege/exercises/collection/modulename
and "user:" is just a prefix to show that the module name
is in the users directory.
If it is a plain string with no prefix, it is one of the standard modules
included with Solfege.
Return the imported module
"""
if modulename.startswith("user:"):
collection = modulename[len("user:"):].split(os.sep)[0]
module_dir = os.path.join(filesystem.user_data(),
"exercises", collection, "modules")
sys.path.insert(0, module_dir)
m = __import__(modulename.split("/")[1])
reload(m)
del sys.path[0]
else:
m = __import__("solfege.exercises.%s" % modulename, fromlist=("solfege.exercises.%s" % modulename,), level=0)
return m
def reset_exercise(self, w=None):
"""
Call on_end_practise, and then on_start_practise in
the currently active exercise, if we have an exercise.
"""
if isinstance(solfege.win.get_view(), abstract.Gui):
solfege.win.get_view().on_end_practise()
solfege.win.get_view().on_start_practise()
def quit_program(self):
if isinstance(solfege.win.get_view(), abstract.Gui):
g = solfege.win.get_view()
# Check that the lesson file has a header, because if the
# user tries to quit the program after parsing a bad lesson
# file, we cannot call end_practise() without risking more
# exceptions.
if g.m_t.m_P and hasattr(g.m_t.m_P, 'header'):
g.on_end_practise()
try:
cfg.sync()
except IOError, e:
gu.display_exception_message(e)
try:
solfege.db.conn.commit()
except sqlite3.ProgrammingError, e:
gu.display_exception_message(e)
try:
solfege.db.conn.close()
except sqlite3.ProgrammingError, e:
gu.display_exception_message(e)
if soundcard.synth:
soundcard.synth.close()
shutil.rmtree(lessonfile.MusicBaseClass.temp_dir, True)
def export_training_set(self, export_data, export_dir, output_format,
name_track_by_question):
"""
This function requires a program that can create WAV files
from MIDI files and MP3 files from WAV.
"""
def delay(n, tempo):
"""
tempo is a tuple of two integers (it is unpacked into set_bpm)
"""
track = mpd.Track()
track.set_bpm(*tempo)  # previously: self.get_int('config/default_bpm')
track.note(mpd.Rat(n, 4), 80, 0)
soundcard.synth.play_track(track)
track_idx = 0
num = sum([x['count'] for x in export_data])
# MainWin will set this to True if the user wants to cancel
# the export.
self.m_abort_export = False
report = reportlib.Report()
report.append(reportlib.Heading(1, "Exported exercises"))
table = reportlib.Table()
report.append(table)
for lesson_info in export_data:
filename = lesson_info['filename']
module = lessonfile.infocache.get(filename, 'module')
if module not in self.m_teachers:
self.create_teacher(module)
p = self.m_teachers[module].lessonfileclass()
p.parse_file(lessonfile.uri_expand(filename))
for c in range(lesson_info['count']):
trackname = "track-%i"
if module == 'idbyname':
p.select_random_question()
if p.header.lesson_heading:
s = p.header.lesson_heading
else:
s = p.header.title
table.append_row("%i" % track_idx,
p.get_question().name,
s)
if name_track_by_question:
trackname = "%s-%%i" % p.get_name()
soundcard.start_export(os.path.join(
export_dir, "%s.mid" % trackname % track_idx))
for n in range(lesson_info.get('repeat', 1)):
p.play_question()
if n != lesson_info.get('repeat', 1) - 1:
if 'delay' in lesson_info:
delay(lesson_info['delay'], p.get_tempo())
soundcard.end_export()
elif module in ('melodicinterval', 'harmonicinterval'):
t = self.m_teachers[module]
t.set_lessonfile(filename)
t.start_practise()
t.new_question("c", "c''")
t.q_status = t.QSTATUS_SOLVED
try:
table.append_row("%i" % track_idx, "%s" % utils.int_to_intervalname(t.m_interval))
if name_track_by_question:
trackname = "%%i-%s.mid" % utils.int_to_intervalname(t.m_interval)
except AttributeError:
table.append_row("%i" % track_idx, "%s" % (" + ".join([utils.int_to_intervalname(q, False, True) for q in t.m_question])))
if name_track_by_question:
trackname = "%%i-%s.mid" % ("+".join([utils.int_to_intervalname(q, False, True) for q in t.m_question]))
soundcard.start_export(os.path.join(
export_dir, "%s.mid" % trackname % track_idx))
for n in range(lesson_info.get('repeat', 1)):
t.play_question()
if n != lesson_info.get('repeat', 1) - 1:
if 'delay' in lesson_info:
delay(lesson_info['delay'],
(self.get_int('config/default_bpm'), 4))
soundcard.end_export()
else:
logging.warning("export_training_set:ignoring exercise with module='%s'", module)
#####
def do_convert(from_format, to_format):
"""
Return False if we think the conversion failed.
"""
app_cfg_name = "app/%s_to_%s_cmd" % (from_format, to_format)
if from_format == 'midi':
from_ext = 'mid'
else:
from_ext = from_format
to_ext = to_format
if not cfg.get_string(app_cfg_name):
solfege.win.display_error_message2("Config variable not defined", "The missing or empty variable was '%s'" % app_cfg_name)
return False
try:
inout = {
'in': os.path.join(export_dir,
"%s.%s" % (trackname % track_idx, from_ext)),
'out': os.path.join(export_dir,
"%s.%s" % (trackname % track_idx, to_ext))}
opts = cfg.get_string(app_cfg_name + '_options').split(" ")
opts = [x % inout for x in opts]
# For some reason setting the executable arg does
# not work for Python 2.5.2
try:
subprocess.call(
[cfg.get_string(app_cfg_name)] + opts)
except OSError, e:
raise osutils.BinaryForMediaConvertorException(app_cfg_name,
cfg.get_string(app_cfg_name), e)
if os.path.exists(os.path.join(export_dir, "%s.%s" % (trackname % track_idx, to_ext))):
os.remove(os.path.join(export_dir, "%s.%s" % (trackname % track_idx, from_ext)))
else:
# This means that the program failed to generate
# the WAV file. We set output_format to 'midi'
# because we don't want to display this error for
# every single file.
output_format = 'midi'
solfege.win.display_error_message2("External program must have failed", "The file in %(from)s format was not generated from the %(to)s file as expected. Please check your setup in the preferences window (CTRL-F12)." % {'to':to_format.upper(), 'from': from_format.upper()})
except (TypeError, KeyError):
solfege.win.display_error_message2("%(from)s to %(to)s config error", "There was a format string error. Will not generate WAV files. Please check the app/midi_to_wav_cmd config variable." % {'from': from_format, 'to': to_format})
output_format = 'midi'
return True
#####
if output_format in ('mp3', 'wav', 'ogg'):
do_convert('midi', 'wav')
if output_format in ('mp3', 'ogg'):
if not do_convert('wav', output_format):
output_format = 'wav'
track_idx += 1
yield 1.0 * track_idx / num
if self.m_abort_export:
del self.m_abort_export
return
reportlib.HtmlReport(report, os.path.join(export_dir, "toc.html"))
def sheet_gen_questions(self, count, sdict):
"""
count -- how many questions should we generate. We use this value
and not sdict['count'] because sometimes the app has some
questions, and just needs a few more.
"""
module = lessonfile.infocache.get(sdict['filename'], 'module')
if module not in self.m_teachers:
self.create_teacher(module)
p = self.m_teachers[module].lessonfileclass()
p.parse_file(lessonfile.uri_expand(sdict['filename']))
if module == 'idbyname':
for x in self._sheet_gen_question_idbyname(p, count, sdict):
yield x
else:
assert module in ('harmonicinterval', 'melodicinterval')
for x in self._sheet_gen_question_interval(module, p, count, sdict):
yield x
def _sheet_gen_question_idbyname(self, p, count, sdict):
"""
yield count dicts, where each dict contains the data needed to
print both the teacher's and the student's question.
"""
counts = {}.fromkeys(range(len(p.m_questions)), 0)
for x in range(count):
while 1:
p.select_random_question()
if counts[p._idx] >= 1.0 * sdict['count'] / len(p.m_questions):
continue
counts[p._idx] += 1
break
ret = {'question': {}, 'answer': {}}
if sdict['qtype'] == 0:
ret['question']['name'] = "...."
ret['answer']['name'] = p.get_question().name
ret['question']['music'] = p.get_lilypond_code()
ret['answer']['music'] = p.get_lilypond_code()
yield ret
else:
assert sdict['qtype'] == 1
ret['question']['name'] = p.get_question().name
ret['answer']['name'] = p.get_question().name
ret['answer']['music'] = p.get_lilypond_code()
ret['question']['music'] = p.get_lilypond_code_first_note()
yield ret
def _sheet_gen_question_interval(self, module, p, count, sdict):
# FIXME in the idbyname we count how many times each question
# has been selected, to get an even selection. We don't do it
# here at the moment, because we need to decide what we really want.
teacher = self.m_teachers[module]
teacher.set_lessonfile(sdict['filename'])
teacher.start_practise()
for x in range(count):
teacher.new_question("c'", "c''")
# quick hack to use this for both melodic and harmonic intervals
if module == 'melodicinterval':
teacher.m_interval = teacher.m_question[0]
teacher.q_status = teacher.QSTATUS_SOLVED
ret = {'question': {}, 'answer': {}}
if sdict['qtype'] == 0:
ret['question']['name'] = "...."
ret['answer']['name'] = mpd.Interval.new_from_int(abs(teacher.m_interval)).get_name()
ret['question']['music'] = r"\score{" \
r" { %s %s }" \
r"\layout { "\
r" ragged-last = ##t " \
r" \context { \Staff " \
r'\remove "Time_signature_engraver" } }' \
r"}" % (
teacher.m_tonika.get_octave_notename(),
(teacher.m_tonika + mpd.Interval.new_from_int(teacher.m_interval)).get_octave_notename())
ret['answer']['music'] = ret['question']['music']
yield ret
else:
assert sdict['qtype'] == 1
ret['question']['name'] = mpd.Interval.new_from_int(abs(teacher.m_interval)).get_name()
ret['answer']['name'] = mpd.Interval.new_from_int(abs(teacher.m_interval)).get_name()
ret['question']['music'] = r"\score{ { %s s4 s4} "\
r"\layout{ "\
r" ragged-last = ##t "\
r" \context { \Staff "\
r' \remove "Time_signature_engraver" } }'\
r"}" % teacher.m_tonika.get_octave_notename()
ret['answer']['music'] = r"\score{ { %s %s } "\
r"\layout{ "\
r" ragged-last = ##t "\
r" \context { \Staff "\
r' \remove "Time_signature_engraver" } }'\
r"}" % (
teacher.m_tonika.get_octave_notename(),
(teacher.m_tonika + teacher.m_interval).get_octave_notename())
yield ret
|
allancarlos123/Solfege
|
solfege/application.py
|
Python
|
gpl-3.0
| 33,473
|
"""
Functions to study r2 landscapes (equivalent of dispersion spectra) and similar stuff.
"""
import sys, os
import pycs.gen.lc
import pycs.gen.spl
import pycs.gen.util
import pycs.spl.multiopt
import numpy as np
import scipy.optimize as spopt
def explore(lcs, sourcespline, tss):
"""
We explore a volume of time shifts, and calculate the r2 at each point, by
optimizing the source spline coeffs only.
tss (timeshifts) is a list of arrays of timeshifts to try.
>>> tss = np.mgrid[0:0:1j, -10:10:5j, -10:10:5j, -10:10:5j]
todo : see if optimizing the ML coeffs changes something
"""
def calc(x, y, z):
# We make a local copy of the lcs and the spline :
mys = sourcespline.copy()
mylcs = [l.copy() for l in lcs]
# Set the shifts :
for (l, ts) in zip(mylcs[1:], [x, y, z]):
l.shifttime(ts) # So this is relative, starting from the beginning.
# Optimize what we want :
r2 = pycs.spl.multiopt.opt_source(mylcs, mys, verbose=False)
#print shifts, r2
#return np.array([l.timeshift for l in mylcs] + [r2])
return r2
veccalc = np.vectorize(calc, otypes=[np.ndarray])
return veccalc(*tss).astype(float)
#r2s = np.zeros(tss[-1].shape)
#print tss.shape
"""
vectors = [shifts.flatten() for shifts in tss]
results = []
for shifts in zip(*vectors):
results.append(calc(shifts))
return np.vstack(results)
"""
# Some stuff to play with the output (under construction) ...
"""
from enthought.mayavi import mlab
biga = pycs.gen.util.readpickle("biga.pkl").astype(float)
x,y,z = np.mgrid[-10:10:5j, -10:10:5j, -10:10:5j]
src = mlab.pipeline.scalar_field(biga)
mlab.pipeline.iso_surface(src)
#mlab.pipeline.iso_surface(src, contours=[s.max()-0.1*s.ptp(), ],)
mlab.pipeline.image_plane_widget(src,
plane_orientation='z_axes',
slice_index=1,
)
#mlab.contour3d(x,y,z,biga)
#mlab.show()
#print biga
sys.exit()
"""
"""
x,y,z = np.mgrid[-10:10:5j, -10:10:5j, -10:10:5j]
#mlab.contour3d(x, y, z, biga)
src = mlab.pipeline.scalar_field(biga)
mlab.pipeline.iso_surface(src)
#mlab.pipeline.iso_surface(src, contours=[s.max()-0.1*s.ptp(), ],)
mlab.pipeline.image_plane_widget(src,
plane_orientation='z_axes',
slice_index=1,
)
mlab.show()
sys.exit()
"""
"""
lcs = pycs.gen.util.readpickle("lcsopt.pkl")
s = pycs.gen.util.readpickle("source.pkl")
tss = np.mgrid[-10:10:5j, -10:10:5j, -10:10:5j]
#print tss.shape
biga = pycs.spl.multispec.explore(lcs, s, tss=tss)
pycs.gen.util.writepickle(biga, "biga.pkl")
#print biga.shape
sys.exit()
"""
"""
print biga
pycs.gen.util.writepickle(biga, "biga.pkl")
sys.exit()
biga = pycs.gen.util.readpickle("biga.pkl").transpose()
mlab.surf(biga[1], biga[2], biga[4])
"""
"""
mlab.clf()
src = mlab.pipeline.scalar_scatter(biga[1], biga[2], biga[3], biga[4])
mlab.pipeline.scalar_cut_plane(src)
#mlab.colorbar(title='r2', orientation='vertical')
"""
|
COSMOGRAIL/PyCS
|
pycs/spl/old/multispec.py
|
Python
|
gpl-3.0
| 3,019
|
# coding=utf-8
class ImporterUtils():
def __init__(self):
pass
@staticmethod
def program_categories_for_prijepolje():
programs = {
"СКУПШТИНА ОПШТИНЕ- ПРОГРАМ 15-ЛОК.САМОУПРАВА":[
"Програмска активност 0001-Функционисање локалне самоуправе",
],
"ПРЕДСЕДНИК ОПШ.ПРОГРАМ 15-ЛОКАЛНА САМОУПРАВА":[
"Прогрмска активн.0001-Функционисање локалне самоуправе",
"Програмска актив.0007-Канцеларија за младе"
],
"ОПШТИНСКО ВЕЋЕ ПРОГРАМ 15-ЛОКАЛНА САМОУПРАВА": [
"Програмска активност 0001-функционисање локалне самоуправе"
],
"ОПШТИНСКА УПРАВА ПРОГРАМ 15-ЛОКАЛНА САМОУПРАВА":[
"Програмска активност 0001-Функционисање локалне самоуправе",
],
"ОСНОВНО ОБРАЗОВАЊЕ ПРОГРАМ 9":[
"Програмска активност-Функционисање основних школа",
],
"СРЕДЊЕ ОБРАЗОВАЊЕ ПРОГРАМ 10":[
"Програмска активност-Функц.средњих школа",
],
"ДОМ КУЛТУРЕ ПРОГРАМ 13-РАЗВОЈ КУЛТУРЕ":[
"Програмска активност 0001-Функционисање локалних установа културе",
],
"МАТИЧНА БИБЛИОТЕКА ПРОГРАМ 13-РАЗВОЈ КУЛТУРЕ":[
"Програмска активност 0001-Функционисање локалних установа културе",
],
"М У З Е Ј ПРОГРАМ 13-РАЗВОЈ КУЛТУРЕ":[
"Програмска активност 0001-Функционисање локалних установа културе",
],
"ИСТОРИЈСКИ АРХИВ ПРОГРАМ 13-РАЗВОЈ КУЛТУРЕ":[
"Функционисање локалних установа културе",
],
"ДЕЧЈИ ВРТИЋ ПРОГРАМ 8-ПРЕДШК.ВАСП.":[
"Програмска активност 0001-Функц.предш.установа",
],
"ЦЕНТАР ЗА СОЦ.РАД ПРОГРАМ 11-СОЦ.И ДЕЧ.ЗАШТИТА":[
"Програмска активност-Социјалне помоћи",
],
"РАЗВОЈ ЗАЈЕДНИЦЕ ПРОГРАМ 3-ЛОКАЛНИ ЕКОНОМСКИ РАЗВОЈ":[
"Програмска активност 0002-Унапређење привредног амбијента",
"Програмска активност 0002-Месне Заједнице ",
],
"ЈАВНИ РЕД И БЕЗБЕДНОСТ ПРОГРАМ 15-ФУНКЦИОНИСАЊЕ ЛОК. САМОУПРАВЕ":[
"Програмска активност 0001-Функ.ЛС",
],
"ДИРЕКЦИЈА ЗА ИЗГРАДЊУ ПРОГРАМ 1-ЛОКАЛНИ РАЗВОЈ И ПРОСТОРНО ПЛАНИРАЊЕ":[
"Програмска активност 0002-Уређивање грађ.земљишта",
],
"ДИРЕКЦИЈА ЗА ИЗГРАДЊУ ПРОГРАМ 7-ПУТНА ИНФРАСТРУКТУРА":[
"Програмска активност 0002-Одржавање путева",
],
"KOMУНАЛНА ДЕЛАТНОСТ ПРОГРАМ 2- КОМУНАЛНА ДЕЛАТНОСТ":[
"Програмска активност 0008-Јавна хигијена",
"Програмска активност 0004-Даљинско грејање",
"Програмска активност 0003 -Одржавање депонија",
"Програмска активност 0009-Уређење и одржавање зеленила",
],
"ЗАШТИТА ЖИВОТНЕ СРЕДИНЕ ПРОГРАМ 6":[
"Програмска активност 0004-Заштита природних вредности и унапређење подручја са природним свост",
],
"JAВНА РАСВЕТА ПРОГРАМ 2-КОМУНАЛНА ДЕЛАТНОСТ":[
"Програмска активност 0010-Јавна расвета",
],
"ЛОКАЛНИ ПРЕВОЗ ПРОГРАМ 2-КОМУНАЛНА ДЕЛАТНОСТ":[
"Програмска активност 0005-Јавни превоз",
],
"ИНФОРМИСАЊЕ ПРОГРАМ 15-ЛОКАЛНАСАМОУПРАВА":[
"Програмска активност 0006-Информисање",
],
"ТУРИСТИЧКА ОРГАНИЗАЦИЈА ПРОГРАМ 4-РАЗВОЈ РАЗВОЈ ТУРИЗМА":[
"Програмска активност 0002-Туристичка промоција",
],
"ЗАШТИТА ЖИВОТНЕ СРЕДИНЕ ПРОГРАМ 6":[
"Програмска активност 0001-Управљање заштитом животне сред. И природних вред.",
],
"РАЗВОЈ ПОЉОПРИВРЕДЕ ПРОГРАМ 5":[
"Програмска активност-0002- Подстицаји пољ.произв.",
],
"РАЗВОЈ СПОРТА И ОМЛАДИНЕ ПРОГРАМ 14":[
"ПА-Подршка л.с.о.у.и с."
],
"ДОМ ЗДРАВЉА ПРОГРАМ 12-ПРИМАРНА ЗДРАВСТВЕНА ЗАШТИТА":[
"ПА 0001-Функцион.установа примарне здрав.заштите",
],
"СОЦ.И ДЕЧИЈА ЗАШТИТА ПРОГРАМ 11":[
"Програмска активност Дечија заштита",
],
"ЦРВЕНИ КРСТ ПРОГРАМ 11-СОЦИЈАЛНА И ДЕЧИЈА ЗАШТИТА":[
"ПА 0005-Активности Црвеног крста",
]
}
return programs
@staticmethod
def parent_categories_for_vranje():
valjevo_parents = {
"41": "РАСХОДИ ЗА ЗАПОСЛЕНЕ",
"42": "КОРИШЋЕЊЕ РОБА И УСЛУГА",
"44": "НЕГ. КУРС.РАЗЛИКЕ",
"45": "СУБВЕНЦИЈЕ",
"46": "ДОТАЦИЈЕ ИЗ БУЏЕТА",
"47": "СОЦИЈАЛНЕ ПОМОЋИ",
"48": "ОСТАЛИ РАСХОДИ",
"49": "РЕЗЕРВЕ",
"51": "ОСНОВНА СРЕДСТВА У ИЗГРАДЊИ"
}
return valjevo_parents
@staticmethod
def program_categories_for_vranje():
vranje_programs = {
"ПРОГРАМ 15: ЛОКАЛНА САМОУПРАВА": [
"Програмска активност: Функционисање локалне самоуправе и градских општина",
"Пројекат: Прослава Дана особођења Града и државних празника",
"Програмска активност: Управљање јавним дугом",
"Програмска активност: Информисање",
"Програмска активност: Програми националних мањина",
"Програмска активност: Заштитник грађана",
],
"ПРОГРАМ 1: ЛОКАЛНИ РАЗВОЈ И ПРОСТОРНО ПЛАНИРАЊЕ": [
"Пројекат: Експропријација земљишта за потребе Фабрике за прераду отпадних вода и заобилазнице до индустријске зоне Бунушевац",
"Програмска активност: Стратешко, просторно и урбанистичко планирање",
"Партерно уређење платоа у Врањској Бањи",
"Програмска активност: Уређивање грађевинског земљишта",
"Пројекат : Реконструкција шеталишта у улици Краља Стефана Првовенчаног од Робне куће до зграде ЈП Дирекције",
],
"ПРОГРАМ 3: ЛОКАЛНИ ЕКОНОМСКИ РАЗВОЈ": [
"Програмска активност: Унапређење привредног амбијента",
"Програмска активност: Подстицаји за развој предузетништва",
"Програмска активност: Одржавање економске инфраструктуре",
"Програмска активност: Финансијска подршка локалном економском развоју",
"Пројекат: Стручна пракса 2015",
],
"ПРОГРАМ 7 - ПУТНА ИНФРАСТРУКТУРА": [
"Пројекат: Увођење видео надзора у центру Града",
"Програмска активност: Управљање саобраћајном инфраструктуром",
"Програмска активност: Одржавање путева",
"Пројекат: Периодично одржавање путева Златокоп -Ћуковац-Врањска Бања и Бунушевац-Содерце, Миланово-Буштрање",
],
"ПРОГРАМ 11: СОЦИЈАЛНА И ДЕЧЈА ЗАШТИТА": [
"Програмска активност: Социјалне помоћи",
"Програмска активност: Подршка социо-хуманитарним организацијама",
"Програмска активност: Активности Црвеног крста",
"Пројектат: Смањење сиромаштва и унапређење могућности запошљавања маргинализованих и угрожених група становништва са фокусом на ресоцијализацију осуђеника",
"Пројекат: Смањење сиромаштва и унапређење могућности запошљавања маргинализованих и угрожених група становништва са фокусом на Ромкиње у Србији",
"Пројекат: Изградња монтажних објеката за трајно решавање смештаја избелих и расељених лица",
"Програмска активност: Прихватилишта, прихватне станице и друге врсте смештаја"
],
"ПРОГРАМ 12: ПРИМАРНА ЗДРАВСТВЕНА ЗАШТИТА": [
"Програмска активност: Функционисање установа примарне здравствене заштите",
"Пројекат: Суфинансирање вантелесне оплодње"
],
"ПРОГРАМ 14 - РАЗВОЈ СПОРТА И ОМЛАДИНЕ": [
"Програмска активност: Подршка локалним спортским организацијама, удружењима и савезима",
"Програмска активност: Подршка предшколском, школском и рекреативном спорту и масовној физичкој култури",
"Програмска активност: Одржавање спортске инфраструктуре"
],
"ПРОГРАМ 15 - ЛОКАЛНА САМОУПРАВА":[
"Функционисање локалне самоуправе и градских општина",
"Пројекат: Градска слава - Света Тројица",
"Програмска активност: Општинско јавно правобранилаштво",
"Програмска активност: Функционисање локалне самоуправе и градских општина",
"СКУПШТИНА ОПШТИНЕ",
"ПРЕДСЕДНИК ОПШТИНЕ И ОПШТИНСКО ВЕЋЕ",
"ОПШТИНСКА УПРАВНА ЈЕДИНИЦА",
"УПРАВА БАЊЕ",
"Друмски саобраћај",
"Изградња Балон сале - завршетак I и II фаза",
"Уређивање и одржавање зеленила",
"Уређење водотокова",
"Екпропријација и припремање грађевинског земљишта",
"Изградња канализационе мреже",
"Улична расвета",
"Програмска активност: Месне заједнице",
"Програмска активност: Канцеларија за младе"
],
"ПРОГРАМ 2: КОМУНАЛНЕ ДЕЛАТНОСТИ":[
"Програмска активност: Јавна расвета",
"Програмска активност: Водоснабдевање",
"Програмска активност: Управљање отпадним водама",
'Пројекат: "ESCO" пројекат побољшања енергетског учинка јавне расвете',
],
"ПРОГРАМ 7: ПУТНА ИНФРАСТРУКТУРА":[
"Програмска активност: Одржавање путева",
"Пројекат: Асфалтирање путева у сеоским МЗ",
"Програмска активност: Управљање саобраћајном инфраструктуром",
],
"ПРОГРАМ 5: РАЗВОЈ ПОЉОПРИВРЕДЕ": [
"Програмска активност: Унапређење услова за пољопривредну делатност",
],
"ПРОГРАМ 6: ЗАШТИТА ЖИВОТНЕ СРЕДИНЕ": [
"Програмска активност: Управљање заштитом животне средине и природних вредности",
"Програмска активност: Праћење квалитета елемената животне средине",
"Пројекат: Набавка контејнера за изношење смећа",
'Пројекат: Изградња санитарног контејнера и биолошког пречишћивача отпадних вода у насељу "Цигански рид" у Врању',
"Пројекат: Набавка уличних канти за отпатке и бетонских мобилијера",
"Пројекат: Озелењавање јавних површина",
"Пројекат: Набавка камиона аутосмећара",
"Пројекат: Очување животне средине уређењем отпадних вода",
"Пројекат: Компостно поље",
],
"ПРОГРАМ 4 - РАЗВОЈ ТУРИЗМА":[
"Програмска активност: Управљањем развојем туризма",
"Дани Врања и Дани Врања у Београду",
"Прослава Дана Града",
"Програмска активност: Туристичка промоција",
"Пројекат: Доградња планинарског дома",
"Пројекат: Уградња соларних панела",
"Пројекат: Изградња платоа испред планинарског дома",
"Пројекат: Постављање жичаре Дубока 2"
],
"ПРОГРАМ 2 - КОМУНАЛНА ДЕЛАТНОСТ": [
"Програмска активност: Водоснабдевање",
"Програмска активност: Управљање отпадним водама",
"Програмска активност: Паркинг сервис",
"Програмска активност: Уређење, одржавање и коришћење пијаца",
"Програмска активност: Уређење и одржавање зеленила",
"Програмска активност: Јавна расвета",
"Програмска активност: Одржавање гробаља, и погребне услуге",
],
"ПРОГРАМ 13 - РАЗВОЈ КУЛТУРЕ": [
"Програмска активност: Подстицаји културном и уметничком стваралаштву",
"Програмска активност: Функционисање локалних установа културе",
"Пројекат: '35. Борини позоришни дани'",
"Пројекат: Изградња и опремање зграде Позоришта",
"Пројекат: Светосавска недеља 2016",
"Програм социјалне укључености лица са инвалидитетом, посебним потребама и радно способних лица",
"Пројекат: Еколошки кутак и еколошка едукација",
"Пројекат: Набавка архивских кутија",
'Манифестација "Златни пуж 2015."'
],
"ПРОГРАМ 8 - ПРЕДШКОЛСКО ОБРАЗОВАЊЕ": [
"Програмска активност: Функционисање предшколских установа",
'Пројекат: Санација отворене терасе на вртићу "Чаролија"',
],
"ПРОГРАМ 9 - ОСНОВНО ОБРАЗОВАЊЕ": [
"Програмска активност: Функционисање основних школа",
"Пројекат: Поправка инсталације грејања, котла и димњака у ОШ 20. октобар Власе",
"Пројекат: Санирање и опремање школске кухиње ЈЈ Змај",
"Пројекат: Реконструкција санитарног чвора у ОШ 20. октобар Власе и ОШ Предраг Девеџић Врањска Бања",
"Програмска активност: Функционисање средњих школа",
"Пројекат: Санирање школских спортских терена и сала",
'Пројекат: Изградња система за наводњавање локалним квашењем земљишта школског имања "Златокоп" Пољопривредно-ветеринарске школе',
],
"ПРОГРАМ 14: РАЗВОЈ СПОРТА И ОМЛАДИНЕ": [
"Пројекат: Изградња спортских терена на Бесној Кобили",
"Програмска активност: Додатно образовање и усавршавање омладине",
"Пројекат: Летња школа за најбоље полазнике РЦТ на Бесној Кобили",
"Пројекат: Организовање Регионалне смотре талената",
],
"ПРОГРАМ 10 - СРЕДЊЕ ОБРАЗОВАЊЕ": [
"Програмска активност: Функционисање средњих школа",
"Пројекат: Санирање школских спортских терена и сала",
'Пројекат: Изградња система за наводњавање локалним квашењем земљишта школског имања "Златокоп" Пољопривредно-ветеринарске школе',
]
}
return vranje_programs
@staticmethod
def sombor_programs():
descriptions = {
"ПРОГРАМ 15 - ЛОКАЛНА САМОУПРАВА": [
"Функционисање локалне самоуправе и градских општина",
"Информисање",
"Градско (општинско) јавно правобранилаштво",
"Програми националних мањина",
"Канцеларија за младе",
"Управљање јавним дугом",
"Месне заједнице",
"МЗ ''АЛЕКСА ШАНТИЋ''",
"МЗ ''БАЧКИ БРЕГ''",
"МЗ ''БАЧКИ МОНОШТОР''",
"МЗ 'БЕЗДАН''",
"МЗ 'ДОРОСЛОВО''",
"МЗ ''ГАКОВО''",
"МЗ ''КЉАЈИЋЕВО''",
"МЗ ''КОЛУТ''",
"МЗ ''РАСТИНА''",
"МЗ ''РИЂИЦА''",
"МЗ ''СВЕТОЗАР МИЛЕТИЋ''",
"МЗ ''СТАНИШИЋ''",
"МЗ ''СТАПАР''",
"МЗ ''ТЕЛЕЧКА''",
"МЗ ''ЧОНОПЉА''",
"МЗ ''ВЕНАЦ''",
"МЗ ''СЕЛЕНЧА''",
"МЗ ''ГОРЊА ВАРОШ''",
"МЗ ''СТАРА СЕЛЕНЧА''",
"МЗ ''МЛАКЕ''",
"МЗ ''ЦРВЕНКА''",
"МЗ ''НОВА СЕЛЕНЧА ''"
],
"ПРОГРАМ 2 - КОМУНАЛНА ДЕЛАТНОСТ": [
"Остале комуналне услуге", "Водоснабдевање"
],
"ПРОГРАМ 6 - ЗАШТИТА ЖИВОТНЕ СРЕДИНЕ": [
"Управљање заштитом животне средине и природних вредности",
"Праћење квалитета елемената животне средине",
"Остале комуналне услуге",
"Паркинг сервис",
"Даљинско грејање",
"Остале комуналне услуге",
"Одржавање депонија",
"Јавна хигијена",
"Уређење и одржавање зеленила",
"Јавна расвета",
"Одржавање гробаља",
"Остале комуналне услуге",
""
],
"ПРОГРАМ 7 - ПУТНА ИНФРАСТРУКТУРА": [
"Одржавање путева",
""
],
"ПРОГРАМ 1 - ЛОКАЛНИ РАЗВОЈ И ПРОСТОРНО ПЛАНИРАЊЕ": [
"Стратешко, просторно и урбанистичко планирање",
""
],
"ПРОГРАМ 11 - СОЦИЈАЛНА И ДЕЧИЈА ЗАШТИТА": [
"Социјалне помоћи",
"Прихватилишта, прихватне станице и др.врсте смештаја",
"Саветодавно-терапијске и социјално-едукативне услуге",
"Социјалне помоћи",
"Прихватилипта, прихватне станице и др.врсте смештаја",
"Подршка социјално хуманитарним организацијама",
"Саветодавно-терапијске и социјално-едукативне услуге",
"Активности Црвеног крста",
"Дечија заштита"
],
"ПРОГРАМ 13 - РАЗВОЈ КУЛТУРЕ": [
"Функционисање локалних установа културе",
"Библиотека ''КАРЛО БИЈЕЛИЦКИ'' Сомбор",
"НАРОДНО ПОЗОРИШТЕ Сомбор",
"ГРАДСКИ МУЗЕЈ Сомбор",
"КУЛТУРНИ ЦЕНТАР ''ЛАЗА КОСТИЋ'' Сомбор",
"ИСТОРИЈСКИ АРХИВ Сомбор",
"Галерија ''МИЛАН КОЊОВИЋ'' Сомбор",
"Подстицаји културном и уметничком стваралаштву",
],
"ПРОГРАМ 14 - РАЗВОЈ СПОРТА И ОМЛАДИНЕ": [
"",
"Спортски центар ''СОКО'' Сомбор",
'Средства за спортске активности'
],
"ПРОГРАМ 8 - ПРЕДШКОЛСКО ОБРАЗОВАЊЕ": [
"Функционисање предшколских установа"
],
"ПРОГРАМ 9 - ОСНОВНО ОБРАЗОВАЊЕ": [
"Функционисање основних школа",
"ОШ ''АВРАМ МРАЗОВИЋ'' СОМБОР",
"ОШ ''БРАТСТВО-ЈЕДИНСТВО'' СОМБОР",
"ОШ ''ДОСИТЕЈ ОБРАДОВИЋ'' СОМБОР",
"ОШ ''ИВО ЛОЛА РИБАР'' СОМБОР",
"ОШ ''НИКОЛА ВУКИЋЕВИЋ'' СОМБОР",
"ОШ ''АЛЕКСА ШАНТИЋ'' АЛЕКСА ШАНТИЋ",
"ОШ ''22.ОКТОБАР'' БАЧКИ МОНОШТОР",
"ОШ ''МОША ПИЈАДЕ'' БАЧКИ БРЕГ",
"ОШ ''ЛАЗА КОСТИЋ'' ГАКОВО",
"ОШ ''ПЕТЕФИ ШАНДОР'' ДОРОСЛОВО",
"ОШ ''БРАТСТВО-ЈЕДИНСТВО'' БЕЗДАН",
"ОШ ''НИКОЛА ТЕСЛА'' КЉАЈИЋЕВО",
"ОШ ''ОГЊЕН ПРИЦА'' КОЛУТ",
"ОШ ''НИКОЛА ТЕСЛА'' КЉАЈИЋЕВО",
"ОШ ''ОГЊЕН ПРИЦА'' КОЛУТ",
"ОШ ''ПЕТАР КОЧИЋ'' РИЂИЦА",
"ОШ ''БРАТСТВО-ЈЕДИНСТВО'' СВЕТОЗАР МИЛЕТИЋ",
"ОШ ''ИВАН ГОРАН КОВАЧИЋ'' СТАНИШИЋ",
"ОШ ''БРАНКО РАДИЧЕВИЋ'' СТАПАР",
"ОШ ''КИШ ФЕРЕНЦ'' ТЕЛЕЧКА",
"ОШ ''МИРОСЛАВ АНТИЋ'' ЧОНОПЉА",
"СОШ '''ВУК КАРАЏИЋ'' СОМБОР",
"ОСНОВНА МУЗИЧКА ШКОЛА ''ПЕТАР КОЊОВИЋ'' СОМБОР",
'ШК.ЗА ОСНОВНО ОБРАЗ.ОДРАСЛИХ СОМБОР',
"НАКНАДЕ ЗА ПРЕВОЗ, СМЕШТАЈ И АНГАЖОВАЊЕ ЛИЧНИХ ПРАТИЛАЦА ДЕЦЕ И УЧЕНИКА"
],
"ПРОГРАМ 10 - СРЕДЊЕ ОБРАЗОВАЊЕ": [
"Функционисање средњих школа",
"СМШ ''ДР РУЖИЦА РИП'' СОМБОР",
"ГИМНАЗИЈА ''ВЕЉКО ПЕТРОВИЋ'' СОМБОР",
"СРЕДЊА ПОЉ.ПРЕХ.ШКОЛА СОМБОР",
"СРЕДЊА ШКОЛА ''СВЕТИ САВА'' СОМБОР",
"СРЕДЊА ЕКОНОМСКА ШКОЛА",
"СРЕДЊА ТЕХНИЧКА ШКОЛА СОМБОР",
"СОШ ''ВУК КАРАЏИЋ'' Сомбор",
"СРЕДЊА МУЗИЧКА ШКОЛА СОМБОР",
"Функционисање средњих школа",
"Функционисање средњих школа"
],
"ПРОГРАМ 12 - ПРИМАРНА ЗДРАВСТВЕНА ЗАШТИТА":[
"Функционисање установа прим.здравствене заштите"
],
"ПРОГРАМ 4 - РАЗВОЈ ТУРИЗМА": [
"Управљање развојем туризма",
"Туристичка промоција"
],
"ПРОГРАМ 3- ЛОКАЛНИ ЕКОНОМСКИ РАЗВОЈ": [
"Унапређење привредног амбијента",
"Подстицаји за развој предузетништва",
"Финансијска подршка локалном економском развоју"
],
"ПРОГРАМ 5 - РАЗВОЈ ПОЉОПРИВРЕДЕ": [
"Унапређење услова за пољопривредну делатност"
]
}
return descriptions
@staticmethod
def prihodi_parent_categories_for_valjevo():
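# Keys appear to be economic-classification (konto) codes from the Serbian
# chart of accounts; values are their display names. This reading is
# inferred from the data itself, not documented in the repo.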
valjevo_parents = {
"791110": "ПРИХОДИ ИЗ БУЏЕТА",
"810000": "ПРИМАЊА ОД ПРОДАЈЕ ОСНОВНИХ СРЕДСТАВА",
"821000": "ПРИМАЊА ОД ПРОДАЈЕ РОБНИХ РЕЗЕРВИ",
"840000": "ПРИМАЊА ОД ПРОДАЈЕ ПРИРОДНЕ ИМОВИНЕ",
"910000": "ПРИМАЊА ОД ЗАДУЖИВАЊА ",
"920000": "ПРИМАЊА ОД ПРОДАЈЕ ФИН. ИМОВИНЕ"
}
return valjevo_parents
@staticmethod
def prihodi_parent_categories_for_kraljevo():
kraljevo_parents = {
"711000": "Порез на доходак, добит и капиталне добитке",
"713000": "Порез на имовину",
"714000": "Порез на добра и услуге",
"716000": "Други порези",
"731000": "Донације од иностраних држава",
"732000": "Донације од међународних органзација",
"733000": "Трансфери од других нивоа власти",
"741000": "Приходи од имовине",
"742000": "Приходи од продаје добара и услуга",
"743000": "Новчане казне и одузета имовинска корист",
"744000": "Добровољни трансфери од физичких и правних лица",
"745000": "Мешовити и неодређени приходи",
"771000": "Меморандумске ставке за рефундацију расхода",
"772000": "Меморандумске ставке за рефундацију расхода из претходне године",
"811000": "Примања од продаје непокретности",
"812000": "Примања од продаје покретне имовине",
"813000": "Примања од продаје осталих основних средстава",
"841000": "Примања од продаје земљишта",
"911000": "Примања од домаћих задужења",
"921000": "Примања од продаје домаће финансијске имовине"
}
return kraljevo_parents
@staticmethod
def cacak_parent_catecories():
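# Note: values are heterogeneous (a single string or a list of strings), and
# keys mix plain konto codes, composites like "731+732", short prefixes like
# "41", and one empty key (apparently for uncoded rows), so callers must
# handle both value shapes.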
categories = {
"710000": [
"1. Порески приходи",
"1.1. Порез на доходак, добит и капиталне добитке (осим самодоприноса)",
"1.2. Самодопринос",
"1.3. Порез на фонд зарада",
"1.4. Порез на имовину",
"1.5. Порез на добра и услуге у чему:",
"-накнаде које се користе преко Буџетског фонда за заштиту и унапређење животне средине",
"1.6. Остали порески приходи"
],
"": [
"2. Непорески приходи у чему:",
"- поједине врсте прихода са одређеном наменом (наменски приходи)",
"6. Меморандумске ставке за рефундацију расхода",
"4. Издаци за набавку финансијске имовине (осим 6211)",
"2.1.Задуживање код домаћих кредитора"
],
"731+732": "3. Примања од продаје нефинансијске имовине",
"733000": "4. Донације",
"770000": "5. Трансфери",
"41": [
"1.1. Расходи за запослене",
"1.2. Коришћење роба и услуга",
"1.3. Употреба основних средстава",
"1.4. Отплата камата",
"1.5. Субвенције",
"1.6. Социјална заштита из буџета",
"1.7. Остали расходи, у чему:",
],
"5": "2. Трансфери",
"6200000": "3. Издаци за набавку нефинансијске имовине",
"920000": "ПРИМАЊА ОД ПРОДАЈЕ ФИНАНСИЈСКЕ ИМОВИНЕ И ЗАДУЖИВАЊА ",
"910000": "1. Примања по основу отплате кредита и продаје финансијске имовине",
"611000": [
"3.1. Отплата дуга домаћим кредиторима",
"3.2. Отплата дуга страним кредиторима",
"3.3. Отплата дуга по гаранцијама"
]
}
return categories
@staticmethod
def total_of_economic_classification_of_budzets():
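# Each tuple appears to follow the Serbian budget layout:
# (razdeo/section, glava/chapter, program code, functional classification,
# programska aktivnost or projekat code, amount in RSD). The field meanings
# are inferred from the data, not documented in this repo.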
totals = [
("1", "1.0", "2101", "110", "0001", 611428000),
("1", "1.1", "2101", "130", "0002", 26047000),
("1", "1.1", "2101", "130", "0003", 1144311000),
("2", "", "2101", "110", "0004", 215545000),
("3", "", "", "", "", 7154950000),
("3", "3.1", "2102", "110", "0001", 20804000),
("3", "3.2", "2102", "110", "0002", 21260000),
("3", "3.3", "2102", "110", "0003", 23731000),
("3", "3.4", "2102", "110", "0005", 19830000),
("3", "3.5", "2102", "110", "0004", 19987000),
("3", "3.6", "2102", "110", "0006", 23483000),
("3", "3.7", "2102", "110", "0007", 24837000),
("3", "3.8", "2102", "110", "0008", 261606000),
("3", "3.9", "2102", "110", "9", 37765000),
("3", "3.10", "0601", "110", "0001", 61535000),
("3", "3.10", "0601", "110", "0002", 24338000),
("3", "3.10", "0601", "110", "0003", 19049000),
("3", "3.10", "0601", "110", "0004", 8354000),
("3", "3.10", "0601", "110", "0005", 38777000),
("3", "3.10", "0601", "110", "0006", 44739000),
("3", "3.10", "0602", "110", "0001", 31134000),
("3", "3.10", "0602", "110", "0002", 11725000),
("3", "3.10", "0602", "110", "0003", 3701000),
("3", "3.10", "0602", "110", "0004", 30475000),
("3", "3.10", "0602", "110", "0005", 4785000),
("3", "3.10", "0602", "110", "4001", 139185000),
("3", "3.10", "0602", "110", "4002", 16104000),
("3", "3.10", "0602", "110", "4003", 32758000),
("3", "3.10", "0602", "110", "4003", 32758000),
("3", "3.10", "0602", "110", "4004", 16080000),
("3", "3.10", "0602", "110", "4005", 12984000),
("3", "3.10", "0602", "110", "4006", 15714000),
("3", "3.10", "0602", "110", "4007", 8760000),
("3", "3.10", "0602", "110", "4008", 20511000),
("3", "3.10", "0602", "110", "4009", 10541000),
("3", "3.11", "1601", "360", "0001", 20204000),
("3", "3.12", "0606", "110", "0001", 13462000),
("3", "3.12", "0606", "110", "0002", 52942000),
("3", "3.12", "0606", "110", "0003", 20777000),
("3", "3.13", "1501", "110", "0001", 361760000),
("3", "3.13", "1501", "110", "0002", 52472000),
("3", "3.14", "2102", "450", "0011", 76066000),
("3", "3.14", "2102", "450", "0012", 86726000),
("3", "3.14", "2102", "450", "0013", 104130000),
("3", "3.14", "2102", "450", "4001", 1000),
("3", "3.15", "1404", "110", "0001", 28585000),
("3", "3.15", "1404", "110", "4001", 1921000),
("3", "3.16", "", "", "", 1921000),
("3", "3.16", "1002", "110", "0001", 27974000),
("3", "3.16", "1002", "110", "0002", 13450000),
("3", "3.16", "1002", "110", "7010", 12032000),
("3", "3.17", "2301", "110", "0001", 25642000),
("3", "3.18", "", "", "", 4742285000),
("3", "3.18", "0603", "110", "0001", 3253704000),
("3", "3.18", "0603", "110", "0002", 182744000),
("3", "3.18", "0603", "110", "0003", 19000000),
("3", "3.18", "0603", "110", "0004", 203686000),
("3", "3.18", "0604", "110", "0001", 165327000),
("3", "3.18", "0604", "110", "0002", 253047000),
("3", "3.18", "0604", "110", "0003", 8600000),
("3", "3.18", "0604", "110", "0004", 235476000),
("3", "3.18", "0604", "110", "0005", 32130000),
("3", "3.18", "0604", "110", "0006", 272740000),
("3", "3.18", "0604", "110", "4001", 1000000),
("3", "3.18", "0604", "110", "7007", 97551000),
("3", "3.18", "0604", "110", "7008", 17280000),
("3", "3.19", "", "", "", 469289000),
("3", "3.19", "1001", "110", "0006", 35213000),
("3", "3.19", "1001", "110", "0008", 47513000),
("3", "3.19", "1001", "110", "7002", 100368000),
("3", "3.19", "1001", "160", "0007", 286195000),
("3", "3.20", "1406", "110", "0001", 28155000),
("3", "3.21", "1802", "110", "4008", 10310000),
("4", "", "1605", "330", "0001", 310395000),
("5", "", "1602", "330", "0001", 75636000),
("5", "", "1602", "330", "0002", 12342000),
("5", "", "1602", "330", "7002", 125460000),
("6", "", "", "", "", 19384272000),
("6", "6.0", "", "", "", 1095204000),
("6", "6.0", "1603", "330", "0002", 1095204000),
("6", "6.1", "", "", "", 444307000),
("6", "6.1", "1603", "330", "0003", 242205000),
("6", "6.1", "1603", "330", "0004", 202102000),
("6", "6.2", "", "", "", 292888000),
("6", "6.2", "1603", "330", "0005", 145783000),
("6", "6.2", "1603", "330", "0006", 147105000),
("6", "6.3", "", "", "", 192369000),
("6", "6.3", "1603", "330", "0007", 120517000),
("6", "6.3", "1603", "330", "0008", 71852000),
("6", "6.4", "1603", "330", "0009", 184118000),
("6", "6.4", "1603", "330", "0010", 165568000),
("6", "6.5", "", "", "", 1418801000),
("6", "6.5", "1603", "330", "0011", 868801000),
("6", "6.5", "1603", "330", "0012", 550000000),
("6", "6.6", "", "", "", 3267601000),
("6", "6.6", "1603", "330", "0013", 2077601000),
("6", "6.6", "1603", "330", "0014", 1190000000),
("6", "6.7", "", "", "", 8645422000),
("6", "6.7", "1603", "330", "0015", 4655422000),
("6", "6.7", "1603", "330", "0016", 3990000000),
("6", "6.8", "", "", "", 1119201000),
("6", "6.8", "1603", "330", "0017", 589201000),
("6", "6.8", "1603", "330", "0018", 530000000),
("6", "6.9", "", "", "", 2558793000),
("6", "6.9", "1603", "330", "0019", 1318801000),
("6", "6.9", "1603", "330", "0020", 1239992000),
("7", "", "1602", "330", "0003", 5000000),
("7", "", "1602", "330", "0004", 56137000),
("8", "8.0", "1604", "330", "0002", 90193000),
("8", "8.1", "1604", "330", "0003", 11140000),
("8", "8.1", "1604", "330", "0004", 74698000),
("8", "8.2", "1604", "330", "0005", 26439000),
("8", "8.2", "1604", "330", "0006", 98800000),
("8", "8.3", "1604", "330", "0007", 38891000),
("8", "8.3", "1604", "330", "0008", 199602000),
("8", "8.4", "1604", "330", "0009", 225606000),
("8", "8.4", "1604", "330", "0010", 55880000),
("8", "8.5", "1603", "330", "0010", 768217000),
("8", "8.5", "1603", "330", "0011", 259380000),
("8", "8.6", "1604", "330", "0013", 1316349000),
("8", "8.6", "1604", "330", "0014", 594195000),
("9", "", "0606", "330", "0004", 22500000),
("9", "", "0606", "330", "0005", 27700000),
("9", "", "0606", "330", "0006", 224586000),
("9", "", "1001", "330", "0039", 395112000),
("10", "", "1001", "133", "0009", 168417000),
("11", "", "1001", "160", "0011", 168224000),
("12", "", "1001", "160", "0012", 72634000),
("13", "", "2304", "110", "0001", 577613000),
("14", "", "2305", "110", "0001", 30583000),
("15", "", "", "", "", 63777145000),
("15", "15.0", "", "", "", 63219614000),
("15", "15.0", "1401", "310", "0001", 2300366000),
("15", "15.0", "1401", "310", "0002", 24747713000),
("15", "15.0", "1401", "310", "4001", 15840000),
("15", "15.0", "1401", "310", "4002", 37301000),
("15", "15.0", "1401", "310", "4003", 37857000),
("15", "15.0", "1401", "310", "4004", 71117000),
("15", "15.0", "1401", "310", "4005", 72535000),
("15", "15.0", "1401", "310", "4006", 300000),
("15", "15.0", "1401", "310", "5001", 30000000),
("15", "15.0", "1401", "310", "5002", 20000000),
("15", "15.0", "1401", "310", "7007", 117688000),
("15", "15.0", "1401", "310", "7011", 6336000),
("15", "15.0", "1401", "310", "7014", 22871000),
("15", "15.0", "1402", "310", "0001", 34941800000),
("15", "15.0", "1403", "310", "0001", 75825000),
("15", "15.1", "1401", "310", "0003", 307531000),
("15", "15.1", "1401", "310", "5004", 250000000),
("16", "", "", "", "", 970708127000),
("16", "16.0", "", "", "", 283361964000),
("16", "16.0", "0608", "180", "0001", 33327366000),
("16", "16.0", "0702", "110", "5001", 4500000000),
("16", "16.0", "0801", "090", "0001", 8900000000),
("16", "16.0", "0901", "090", "0001", 194220000000),
("16", "16.0", "0901", "090", "0001", 194220000000),
("16", "16.0", "0902", "090", "0001", 30699999000),
("16", "16.0", "2101", "160", "0005", 802410000),
("16", "16.0", "2301", "110", "0002", 4360796000),
("16", "16.0", "2301", "110", "0003", 2947599000),
("16", "16.0", "2301", "110", "0004", 814163000),
("16", "16.0", "2301", "110", "0005", 161614000),
("16", "16.0", "2301", "110", "0006", 1158158000),
("16", "16.0", "2301", "110", "0007", 78318000),
("16", "16.0", "2301", "110", "4001", 147600000),
("16", "16.0", "2302", "110", "0003", 42802000),
("16", "16.0", "2303", "110", "0003", 23652000),
("16", "16.0", "2402", "160", "0001", 1175487000),
("16", "16.1", "", "", "", 3970400000),
("16", "16.1", "2303", "110", "0001", 3329700000),
("16", "16.1", "2303", "110", "0002", 545400000),
("16", "16.1", "2303", "110", "4002", 20400000),
("16", "16.1", "2303", "110", "5001", 23000000),
("16", "16.1", "2303", "110", "5002", 9000000),
("16", "16.1", "2303", "110", "5003", 3000000),
("16", "16.1", "2303", "110", "5004", 17000000),
("16", "16.1", "2303", "110", "5005", 3500000),
("16", "16.1", "2303", "110", "5006", 4000000),
("16", "16.1", "2303", "110", "5008", 2500000),
("16", "16.1", "2303", "110", "7010", 12900000),
("16", "16.2", "2302", "110", "0001", 6950287000),
("16", "16.2", "2302", "110", "0002", 1392056000),
("16", "16.2", "2302", "110", "5001", 930011000),
("16", "16.2", "2302", "110", "7010", 16000000),
("16", "16.3", "", "", "", 2374676000),
("16", "16.3", "2301", "110", "0008", 1804975000),
("16", "16.3", "2301", "110", "0009", 262950000),
("16", "16.3", "2301", "110", "4002", 23600000),
("16", "16.3", "2301", "110", "5001", 27000000),
("16", "16.3", "2301", "110", "5002", 184500000),
("16", "16.3", "2301", "110", "5004", 1651000),
("16", "16.3", "2301", "110", "5005", 70000000),
("16", "16.4", "", "", "", 24987000),
("16", "16.4", "2301", "420", "0010", 24987000),
("16", "16.5", "2301", "133", "0011", 55008000),
("16", "16.6", "1502", "410", "0001", 13527000),
("16", "16.7", "2201", "110", "0005", 169990000),
("16", "16.7", "2201", "170", "0001", 516861938000),
("16", "16.7", "2201", "170", "0002", 97812602000),
("16", "16.7", "2201", "170", "0003", 56577893000),
("16", "16.7", "2201", "170", "0004", 125000000),
("16", "16.8", "0902", "410", "0002", 71788000),
("17", "", "", "", "", 7060371000),
("17", "17.0", "", "", "", 2181165000),
("17", "17.0", "0301", "110", "0001", 1394922000),
("17", "17.0", "0301", "110", "0002", 378750000),
("17", "17.0", "0301", "110", "7017", 407493000),
("17", "17.1", "0302", "113", "0001", 3892956000),
("17", "17.1", "0302", "113", "0002", 730250000),
("17", "17.1", "0302", "113", "0004", 28000000),
("17", "17.1", "0302", "113", "7017", 78000000),
("17", "17.2", "1902", "410", "0001", 65018000),
("17", "17.2", "1902", "410", "0002", 84982000),
("18", "", "", "", "", 56029250000),
("18", "18.0", "", "", "", 55129250000),
("18", "18.0", "", "", "", 55129250000),
("18", "18.0", "1701", "210", "0001", 110219000),
("18", "18.0", "1701", "210", "0002", 717880000),
("18", "18.0", "1701", "210", "0003", 1733665000),
("18", "18.0", "1701", "210", "0004", 13888000),
("18", "18.0", "1701", "210", "0005", 118660000),
("18", "18.0", "1701", "210", "0006", 9354000),
("18", "18.0", "1701", "210", "0007", 2678905000),
("18", "18.0", "1701", "210", "0008", 1710838000),
("18", "18.0", "1701", "210", "0009", 330000000),
("18", "18.0", "1701", "210", "0010", 869596000),
("18", "18.0", "1701", "210", "0011", 8597328000),
("18", "18.0", "1701", "210", "5001", 200000000),
("18", "18.0", "1701", "210", "5002", 103000000),
("18", "18.0", "1702", "210", "0001", 4007452000),
("18", "18.0", "1702", "210", "0002", 1871924000),
("18", "18.0", "1702", "210", "0003", 1090367000),
("18", "18.0", "1702", "210", "0004", 572732000),
("18", "18.0", "1702", "210", "0005", 825998000),
("18", "18.0", "1702", "210", "0006", 3765868000),
("18", "18.0", "1702", "210", "0007", 25467676000),
("18", "18.0", "1702", "210", "4002", 300000000),
("18", "18.0", "1702", "210", "5004", 23800000),
("18", "18.0", "1702", "220", "0008", 10100000),
("18", "18.1", "", "", "", 900000000),
("18", "18.1", "1701", "250", "0012", 153091000),
("18", "18.1", "1701", "250", "4001", 300000000),
("18", "18.1", "1701", "250", "5003", 446909000),
("19", "", "", "", "", 6236148000),
("19", "19.0", "", "", "", 6151005000),
("19", "19.0", "0607", "111", "0001", 49702000),
("19", "19.0", "0607", "111", "0002", 5506940000),
("19", "19.0", "0607", "111", "0003", 2000000),
("19", "19.0", "0607", "111", "0004", 17556000),
("19", "19.0", "0607", "111", "0005", 10281000),
("19", "19.0", "0607", "111", "0006", 13194000),
("19", "19.0", "0607", "111", "0007", 34698000),
("19", "19.0", "0607", "111", "0008", 6500000),
("19", "19.0", "0607", "411", "0009", 422719000),
("19", "19.0", "0608", "110", "0002", 39319000),
("19", "19.0", "0608", "110", "0003", 1440000),
("19", "19.0", "0609", "111", "0001", 35523000),
("19", "19.0", "1001", "111", "0001", 11133000),
("19", "19.1", "", "", "", 72712000),
("19", "19.1", "0609", "140", "0002", 51711000),
("19", "19.1", "0609", "140", "4001", 1000),
("19", "19.1", "0609", "140", "4002", 21000000),
("19", "19.2", "", "", "", 1800000),
("19", "19.2", "1001", "160", "0002", 1800000),
("19", "19.3", "0608", "180", "0004", 10631000),
("20", "", "", "", "", 19665777000),
("20", "20.0", "", "", "", 19170392000),
("20", "20.0", "0702", "411", "5002", 820790000),
("20", "20.0", "1502", "410", "0002", 21262000),
("20", "20.0", "1502", "410", "0003", 400000000),
("20", "20.0", "1502", "410", "0004", 13697000),
("20", "20.0", "1502", "410", "0005", 11543000),
("20", "20.0", "1502", "410", "0007", 185670000),
("20", "20.0", "1502", "410", "4001", 500000000),
("20", "20.0", "1502", "474", "0006", 11161000),
("20", "20.0", "1502", "410", "0001", 25560000),
("20", "20.0", "1503", "410", "7015", 72644000),
("20", "20.0", "1504", "410", "0001", 250000000),
("20", "20.0", "1504", "410", "0002", 13937000),
("20", "20.0", "1504", "410", "0003", 6500000000),
("20", "20.0", "1504", "410", "0004", 13581000),
("20", "20.0", "1504", "410", "4001", 3352000000),
("20", "20.0", "1504", "410", "4002", 3620000000),
("20", "20.0", "1504", "410", "4003", 300000000),
("20", "20.0", "1504", "410", "4004", 200000000),
("20", "20.0", "1504", "410", "4005", 5126000),
("20", "20.0", "1504", "410", "4006", 37500000),
("20", "20.0", "1505", "411", "0003", 23935000),
("20", "20.0", "1505", "411", "4001", 1411949000),
("20", "20.0", "1505", "411", "4002", 547780000),
("20", "20.0", "1505", "411", "4003", 35950000),
("20", "20.0", "1505", "411", "7012", 120514000),
("20", "20.0", "1505", "474", "0001", 151793000),
("20", "20.0", "1505", "474", "0002", 340000000),
("20", "20.1", "", "", "", 205452000),
("20", "20.1", "1503", "130", "0004", 191638000),
("20", "20.1", "1503", "130", "0005", 13814000),
("20", "20.2", "", "", "", 111459000),
("20", "20.2", "1503", "130", "0003", 111459000),
("20", "20.3", "", "", "", 158579000),
("20", "20.3", "1503", "130", "0002", 158579000),
("20", "20.4", "", "", "", 19895000),
("20", "20.4", "1504", "110", "0005", 19895000),
("21", "", "", "", "", 23954689000),
("21", "21.0", "", "", "", 23637000000),
("21", "21.0", "0701", "450", "0001", 7069214000),
("21", "21.0", "0701", "450", "0002", 11818736000),
("21", "21.0", "0701", "450", "0003", 76190000),
("21", "21.0", "0701", "450", "0004", 16230000),
("21", "21.0", "0701", "450", "0005", 524744000),
("21", "21.0", "0701", "450", "7006", 109778000),
("21", "21.0", "0702", "450", "0001", 65588000),
("21", "21.0", "0702", "450", "5003", 240000000),
("21", "21.0", "0702", "450", "5004", 200000000),
("21", "21.0", "0702", "450", "5006", 980000000),
("21", "21.0", "0702", "450", "5007", 932000000),
("21", "21.0", "0702", "450", "5008", 100000000),
("21", "21.0", "0702", "450", "5009", 388800000),
("21", "21.0", "0702", "450", "5010", 194400000),
("21", "21.0", "0702", "450", "5011", 142560000),
("21", "21.0", "0702", "450", "5012", 38880000),
("21", "21.0", "0702", "450", "5013", 29160000),
("21", "21.0", "0702", "450", "5014", 10000000),
("21", "21.0", "1101", "620", "0001", 272301000),
("21", "21.0", "1101", "620", "0002", 94072000),
("21", "21.0", "1101", "620", "0003", 58091000),
("21", "21.0", "1101", "620", "0003", 58091000),
("21", "21.0", "1101", "620", "0004", 25854000),
("21", "21.0", "1101", "620", "0005", 130402000),
("21", "21.0", "1101", "620", "4001", 50000000),
("21", "21.0", "1101", "620", "4002", 70000000),
("21", "21.1", "", "", "", 76406000),
("21", "21.1", "0701", "450", "0006", 76406000),
("21", "21.2", "0701", "450", "0007", 24350000),
("21", "21.2", "0701", "450", "0008", 166257000),
("21", "21.2", "0701", "450", "7006", 50676000),
("22", "", "", "", "", 14030881000),
("22", "22.0", "", "", "", 4492617000),
("22", "22.0", "1001", "110", "0003", 460000000),
("22", "22.0", "1001", "110", "0003", 460000000),
("22", "22.0", "1001", "110", "0004", 32000000),
("22", "22.0", "1601", "360", "7004", 101328000),
("22", "22.0", "1602", "330", "0005", 2505168000),
("22", "22.0", "1602", "330", "5001", 2505168000),
("22", "22.0", "1602", "330", "5002", 25000000),
("22", "22.0", "1602", "330", "5003", 80000000),
("22", "22.0", "1602", "330", "5005", 15000000),
("22", "22.0", "1602", "330", "5006", 330000000),
("22", "22.0", "1602", "330", "5008", 250000000),
("22", "22.0", "1602", "330", "5009", 5000000),
("22", "22.0", "1602", "330", "5010", 100000000),
("22", "22.0", "1602", "330", "5011", 10000000),
("22", "22.0", "1602", "330", "5012", 10000000),
("22", "22.0", "1602", "330", "5013", 100000000),
("22", "22.0", "1602", "330", "0006", 409121000),
("22", "22.1", "1602", "340", "0007", 7668954000),
("22", "22.1", "1602", "340", "5011", 27000000),
("22", "22.1", "1602", "340", "5013", 30000000),
("22", "22.1", "1602", "340", "5014", 36720000),
("22", "22.1", "1602", "340", "5015", 250000000),
("22", "22.1", "1602", "340", "5016", 51720000),
("22", "22.1", "1602", "340", "5017", 3000000),
("22", "22.1", "1602", "340", "5019", 13500000),
("22", "22.1", "1602", "340", "5020", 10000000),
("22", "22.1", "1602", "340", "5021", 10500000),
("22", "22.1", "1602", "340", "5022", 50000000),
("22", "22.1", "1602", "340", "7002", 83720000),
("22", "22.2", "1602", "360", "0008", 52654000),
("22", "22.3", "", "", "", 227127000),
("22", "22.3", "1602", "360", "0009", 173917000),
("22", "22.3", "1602", "360", "7002", 53210000),
("22", "22.4", "", "", "", 1023369000),
("22", "22.4", "", "", "", 1023369000),
("22", "22.4", "1901", "", "", 1023369000),
("22", "22.4", "1901", "840", "0001", 68955000),
("22", "22.4", "1901", "840", "0002", 63000000),
("22", "22.4", "1901", "840", "0003", 122000000),
("22", "22.4", "1901", "840", "0004", 40000000),
("22", "22.4", "1901", "840", "0005", 170000000),
("22", "22.4", "1901", "840", "0006", 180000000),
("22", "22.4", "1901", "840", "0007", 63213000),
("22", "22.4", "1901", "840", "0008", 280000000),
("22", "22.4", "1901", "840", "0009", 36201000),
("23", "", "", "", "", 41433438000),
("23", "23.0", "", "", "", 6801836000),
("23", "23.0", "0101", "420", "0001", 727707000),
("23", "23.0", "0101", "420", "0002", 164770000),
("23", "23.0", "0101", "420", "0003", 15281000),
("23", "23.0", "0103", "420", "0004", 530000000),
("23", "23.0", "0402", "560", "0001", 232280000),
("23", "23.0", "0402", "560", "0001", 232280000),
("23", "23.0", "0402", "560", "0002", 58677000),
("23", "23.0", "0402", "560", "0003", 100000000),
("23", "23.0", "0402", "560", "0004", 15070000),
("23", "23.0", "0402", "560", "0005", 14650000),
("23", "23.0", "0402", "560", "0006", 2755305000),
("23", "23.0", "0402", "560", "0007", 115326000),
("23", "23.0", "0402", "560", "0008", 631947000),
("23", "23.0", "0402", "560", "4001", 789512000),
("23", "23.0", "0402", "560", "7005", 303811000),
("23", "23.0", "0402", "560", "7012", 347500000),
("23", "23.1", "", "", "", 800000000),
("23", "23.1", "0103", "420", "0005", 800000000),
("23", "23.2", "", "", "", 2904124000),
("23", "23.2", "0105", "760", "0001", 2028777000),
("23", "23.2", "0105", "760", "0002", 70836000),
("23", "23.2", "0105", "760", "0003", 537994000),
("23", "23.2", "0105", "760", "4002", 266517000),
("23", "23.3", "", "", "", 807148000),
("23", "23.3", "0104", "420", "0001", 382470000),
("23", "23.3", "0104", "420", "0002", 173000000),
("23", "23.3", "0104", "420", "0003", 250328000),
("23", "23.3", "0104", "420", "4001", 1350000),
("23", "23.4", "", "", "", 516779000),
("23", "23.4", "0401", "630", "0001", 66779000),
("23", "23.4", "0401", "630", "5001", 450000000),
("23", "23.5", "", "", "", 3120134000),
("23", "23.5", "0401", "630", "0002", 188200000),
("23", "23.5", "0401", "630", "0003", 70000000),
("23", "23.5", "0401", "630", "0004", 2033805000),
("23", "23.5", "0401", "630", "0005", 157329000),
("23", "23.5", "0401", "630", "5002", 450000000),
("23", "23.5", "0401", "630", "5003", 200000000),
("23", "23.5", "0401", "630", "5004", 16000000),
("23", "23.5", "0401", "630", "5005", 4800000),
("23", "23.6", "0106", "420", "0001", 233624000),
("23", "23.6", "0106", "420", "7013", 6512000),
("23", "23.7", "0106", "420", "0002", 490000000),
("23", "23.8", "0106", "420", "0003", 110870000),
("23", "23.9", "0107", "420", "0001", 65656000),
("23", "23.9", "0107", "420", "4001", 35293000),
("23", "23.9", "0107", "420", "5001", 316138000),
("23", "23.10", "", "", "", 23486645000),
("23", "23.10", "0103", "420", "0001", 21470000000),
("23", "23.10", "0103", "420", "0002", 1300000000),
("23", "23.10", "0103", "420", "0003", 256645000),
("23", "23.10", "0103", "420", "4002", 400000000),
("23", "23.10", "0103", "420", "4003", 60000000),
("23", "23.11", "", "", "", 1533673000),
("23", "23.11", "0102", "420", "0001", 979536000),
("23", "23.11", "0102", "420", "0002", 255000000),
("23", "23.11", "0102", "420", "0003", 117137000),
("23", "23.11", "0102", "420", "5001", 182000000),
("23", "23.12", "", "", "", 205006000),
("23", "23.12", "0402", "560", "0009", 31834000),
("23", "23.12", "0402", "560", "0010", 34202000),
("23", "23.12", "0402", "560", "0011", 84112000),
("23", "23.12", "0402", "560", "4002", 54858000),
("24", "", "", "", "", 156951589000),
("24", "24.0", "", "", "", 17043427000),
("24", "24.0", "0201", "140", "0001", 11844247000),
("24", "24.0", "0201", "140", "0002", 50002000),
("24", "24.0", "0201", "140", "0003", 150000000),
("24", "24.0", "0201", "140", "0004", 110000000),
("24", "24.0", "0201", "140", "0005", 65000000),
("24", "24.0", "0201", "140", "0006", 90000000),
("24", "24.0", "0201", "140", "0007", 105417000),
("24", "24.0", "0201", "140", "4002", 2286079000),
("24", "24.0", "0201", "140", "7010", 1404001000),
("24", "24.0", "0201", "140", "7015", 148992000),
("24", "24.0", "0201", "140", "7015", 148992000),
("24", "24.0", "2001", "980", "0001", 275105000),
("24", "24.0", "2001", "980", "0002", 235041000),
("24", "24.0", "2001", "980", "0003", 39254000),
("24", "24.0", "2001", "980", "0004", 157969000),
("24", "24.0", "2001", "980", "4001", 26977000),
("24", "24.0", "2001", "980", "4002", 37663000),
("24", "24.0", "2001", "980", "4003", 5500000),
("24", "24.0", "2001", "980", "4004", 3570000),
("24", "24.0", "2001", "980", "4005", 5610000),
("24", "24.0", "2001", "980", "4006", 3000000),
("24", "24.1", "", "", "", 66026889000),
("24", "24.1", "2002", "910", "0001", 2170000000),
("24", "24.1", "2002", "910", "0002", 400000),
("24", "24.1", "2003", "910", "0001", 60936591000),
("24", "24.1", "2003", "910", "0002", 20000000),
("24", "24.1", "2003", "910", "0003", 123250000),
("24", "24.1", "2003", "910", "0004", 8500000),
("24", "24.1", "2003", "910", "0005", 21511000),
("24", "24.1", "2003", "910", "0006", 1979754000),
("24", "24.1", "2003", "910", "4001", 689350000),
("24", "24.1", "2003", "910", "7010", 26730000),
("24", "24.1", "2003", "960", "0007", 50803000),
("24", "24.2", "", "", "", 30806817000),
("24", "24.2", "2004", "920", "0001", 29131148000),
("24", "24.2", "2004", "920", "0002", 3020000),
("24", "24.2", "2004", "920", "0003", 425226000),
("24", "24.2", "2004", "920", "0004", 12750000),
("24", "24.2", "2004", "920", "0005", 13438000),
("24", "24.2", "2004", "920", "0006", 1120228000),
("24", "24.2", "2004", "920", "0007", 20640000),
("24", "24.2", "2004", "960", "0008", 80367000),
("24", "24.3", "", "", "", 4085248000),
("24", "24.3", "2007", "960", "0001", 2989338000),
("24", "24.3", "2007", "960", "0002", 406910000),
("24", "24.3", "2007", "960", "0003", 689000000),
("24", "24.4", "", "", "", 30909194000),
("24", "24.4", "2005", "940", "0001", 30128178000),
("24", "24.4", "2005", "940", "0002", 51495000),
("24", "24.4", "2005", "940", "0003", 519384000),
("24", "24.4", "2005", "940", "4001", 5057000),
("24", "24.4", "2005", "940", "4002", 160480000),
("24", "24.4", "2005", "940", "4003", 44600000),
("24", "24.5", "", "", "", 7852759000),
("24", "24.5", "2007", "960", "0004", 5014529000),
("24", "24.5", "2007", "960", "0005", 898259000),
("24", "24.5", "2007", "960", "0006", 1768671000),
("24", "24.5", "2007", "960", "0007", 171300000),
("24", "24.6", "", "", "", 139866000),
("24", "24.6", "2001", "980", "0005", 121516000),
("24", "24.6", "2001", "980", "0006", 6000000),
("24", "24.6", "2001", "980", "0007", 12350000),
("24", "24.7", "", "", "", 87389000),
("24", "24.7", "2001", "980", "0008", 74074000),
("24", "24.7", "2001", "980", "0009", 10215000),
("24", "24.7", "2001", "980", "0010", 550000),
("24", "24.7", "2001", "980", "4007", 2550000),
("25", "", "", "", "", 12597214000),
("25", "25.0", "", "", "", 11376892000),
("25", "25.0", "1801", "760", "0001", 208363000),
("25", "25.0", "1801", "760", "0002", 91455000),
("25", "25.0", "1801", "760", "0003", 252308000),
("25", "25.0", "1801", "760", "0004", 26406000),
("25", "25.0", "1801", "760", "0006", 2300000000),
("25", "25.0", "1802", "760", "0001", 153292000),
("25", "25.0", "1802", "760", "0002", 727192000),
("25", "25.0", "1802", "760", "0003", 30000000),
("25", "25.0", "1802", "760", "0004", 10000000),
("25", "25.0", "1802", "760", "0005", 1500000),
("25", "25.0", "1802", "760", "0006", 25000000),
("25", "25.0", "1802", "760", "0007", 2000000),
("25", "25.0", "1802", "760", "0008", 14783000),
("25", "25.0", "1802", "760", "0009", 10000000),
("25", "25.0", "1802", "760", "0010", 33631000),
("25", "25.0", "1802", "760", "4001", 3000000),
("25", "25.0", "1802", "760", "4002", 2000000),
("25", "25.0", "1802", "760", "4003", 1000000),
("25", "25.0", "1802", "760", "4004", 4000000),
("25", "25.0", "1802", "760", "4005", 1000000),
("25", "25.0", "1802", "760", "4006", 5000000),
("25", "25.0", "1803", "760", "0001", 1380000000),
("25", "25.0", "1803", "760", "0002", 745809000),
("25", "25.0", "1803", "760", "0003", 20000000),
("25", "25.0", "1803", "760", "0004", 80000000),
("25", "25.0", "1803", "760", "4001", 2060884000),
("25", "25.0", "1803", "760", "4002", 335322000),
("25", "25.0", "1803", "760", "4003", 5000000),
("25", "25.0", "1803", "760", "4004", 1450000),
("25", "25.0", "1803", "760", "4005", 5000000),
("25", "25.0", "1803", "760", "4006", 25000000),
("25", "25.0", "1803", "760", "4007", 10000000),
("25", "25.0", "1803", "760", "4008", 2491000),
("25", "25.0", "1803", "760", "4009", 322570000),
("25", "25.0", "1803", "760", "4010", 36639000),
("25", "25.0", "1803", "760", "4011", 10827000),
("25", "25.0", "1803", "760", "4012", 450000),
("25", "25.0", "1803", "760", "4013", 1139820000),
("25", "25.0", "1803", "760", "4014", 17940000),
("25", "25.0", "1803", "760", "4015", 720000000),
("25", "25.0", "1804", "760", "0001", 537760000),
("25", "25.0", "1806", "760", "0001", 2000000),
("25", "25.0", "1806", "760", "0002", 15000000),
("25", "25.0", "1806", "760", "4001", 1000000),
("25", "25.1", "", "", "", 44572000),
("25", "25.1", "1801", "760", "0005", 44572000),
("25", "25.2", "", "", "", 300000000),
("25", "25.2", "1803", "760", "0004", 300000000),
("25", "25.3", "", "", "", 875750000),
("25", "25.3", "1803", "760", "0005", 875750000),
("26", "", "", "", "", 13865350000),
("26", "26.0", "", "", "", 13685350000),
("26", "26.0", "0501", "430", "0001", 40188000),
("26", "26.0", "0501", "430", "0002", 136331000),
("26", "26.0", "0501", "430", "0003", 32329000),
("26", "26.0", "0501", "430", "0004", 682000000),
("26", "26.0", "0501", "430", "0005", 107186000),
("26", "26.0", "0501", "430", "4001", 3600000),
("26", "26.0", "0501", "430", "4002", 76014000),
("26", "26.0", "0501", "430", "4003", 18563000),
("26", "26.0", "0502", "430", "4001", 456545000),
("26", "26.0", "0502", "430", "4002", 9651000),
("26", "26.0", "0502", "430", "4003", 2400000),
("26", "26.0", "0502", "430", "4004", 16338000),
("26", "26.0", "0503", "430", "0001", 4535854000),
("26", "26.0", "0503", "430", "4001", 400000000),
("26", "26.0", "0503", "430", "7005", 111487000),
("26", "26.0", "2401", "430", "0001", 7056864000),
("26", "26.1", "0502", "430", "0001", 180000000),
("27", "", "", "", "", 16678247000),
("27", "27.0", "", "", "", 10858509000),
("27", "27.0", "1201", "820", "0001", 47797000),
("27", "27.0", "1201", "820", "0002", 17588000),
("27", "27.0", "1201", "820", "0003", 121325000),
("27", "27.0", "1201", "820", "0004", 36000000),
("27", "27.0", "1202", "820", "0001", 152000000),
("27", "27.0", "1202", "820", "0002", 87000000),
("27", "27.0", "1202", "820", "0003", 29000000),
("27", "27.0", "1202", "820", "0004", 14000000),
("27", "27.0", "1202", "820", "0005", 60000000),
("27", "27.0", "1202", "820", "0006", 34000000),
("27", "27.0", "1202", "820", "0007", 12000000),
("27", "27.0", "1202", "820", "0008", 35000000),
("27", "27.0", "1202", "820", "0009", 150000000),
("27", "27.0", "1202", "820", "7001", 9000000),
("27", "27.0", "1203", "820", "0001", 95000000),
("27", "27.0", "1203", "820", "0002", 69986000),
("27", "27.0", "1203", "820", "0004", 30000000),
("27", "27.0", "1203", "820", "0005", 112000000),
("27", "27.0", "1203", "820", "0006", 30000000),
("27", "27.0", "1204", "820", "0001", 210520000),
("27", "27.0", "1204", "820", "0002", 38520000),
("27", "27.0", "1204", "820", "0002", 38520000),
("27", "27.0", "1204", "820", "0003", 5200000),
("27", "27.0", "1204", "820", "0004", 8893750000),
("27", "27.0", "1205", "820", "0001", 33001000),
("27", "27.0", "1205", "820", "0002", 49438000),
("27", "27.0", "1205", "820", "0003", 41900000),
("27", "27.0", "1205", "820", "0004", 10250000),
("27", "27.0", "1205", "820", "7010", 20948000),
("27", "27.0", "1206", "820", "820", 253286000),
("27", "27.1", "", "", "", 5819738000),
("27", "27.1", "1202", "820", "0010", 2245661000),
("27", "27.1", "1202", "820", "5001", 975806000),
("27", "27.1", "1202", "820", "5002", 640442000),
("27", "27.1", "1202", "820", "7001", 7100000),
("27", "27.1", "1203", "820", "0007", 1950729000),
("28", "", "", "", "", 160785474000),
("28", "28.0", "", "", "", 157891315000),
("28", "28.0", "0801", "410", "0004", 490718000),
("28", "28.0", "0801", "410", "7010", 25000000),
("28", "28.0", "0801", "410", "7011", 316725000),
("28", "28.0", "0801", "412", "0002", 2800000000),
("28", "28.0", "0801", "412", "0003", 20000000000),
("28", "28.0", "0901", "090", "0002", 25800000000),
("28", "28.0", "0902", "070", "0003", 29499687000),
("28", "28.0", "0902", "070", "0004", 617000000),
("28", "28.0", "0902", "070", "0005", 12556209000),
("28", "28.0", "0902", "070", "7011", 50676000),
("28", "28.0", "0903", "040", "0001", 50323600000),
("28", "28.0", "0903", "040", "0002", 15000000),
("28", "28.0", "0904", "010", "0001", 15269800000),
("28", "28.0", "0904", "010", "0002", 80000000),
("28", "28.0", "0904", "010", "0003", 43900000),
("28", "28.0", "1001", "410", "0005", 3000000),
("28", "28.1", "0801", "410", "0005", 347702000),
("28", "28.2", "0902", "090", "0006", 394582000),
("28", "28.3", "0902", "070", "0007", 309700000),
("28", "28.4", "0801", "410", "0006", 20424000),
("28", "28.5", "", "", "", 571751000),
("28", "28.5", "0801", "410", "0007", 552308000),
("28", "28.5", "0801", "410", "0008", 19443000),
("28", "28.6", "0801", "412", "0009", 1250000000),
("29", "", "", "", "", 4862496000),
("29", "29.0", "", "", "", 3467408000),
("29", "29.0", "1301", "810", "0001", 1796765000),
("29", "29.0", "1301", "810", "0002", 1130000000),
("29", "29.0", "1301", "810", "0003", 155500000),
("29", "29.0", "1301", "810", "0004", 130472000),
("29", "29.0", "1301", "810", "4001", 13583000),
("29", "29.0", "1302", "810", "0001", 199922000),
("29", "29.0", "1302", "810", "0002", 41166000),
("29", "29.1", "", "", "", 198000000),
("29", "29.1", "1301", "810", "0002", 198000000),
("29", "29.2", "", "", "", 39465000),
("29", "29.2", "1301", "810", "0006", 39465000),
("29", "29.3", "", "", "", 370053000),
("29", "29.3", "1301", "810", "0007", 314927000),
("29", "29.3", "1301", "810", "4003", 52126000),
("29", "29.3", "1301", "810", "4004", 3000000),
("29", "29.4", "", "", "", 787570000),
("29", "29.4", "1302", "980", "0003", 787570000),
("30", "", "", "", "", 4984332000),
("30", "30.0", "", "", "", 4453238000),
("30", "30.0", "0301", "490", "0004", 227505000),
("30", "30.0", "0301", "490", "4001", 100000000),
("30", "30.0", "0703", "460", "0001", 117690000),
("30", "30.0", "0703", "460", "0002", 192056000),
("30", "30.0", "0703", "460", "0003", 123500000),
("30", "30.0", "0703", "460", "0004", 159118000),
("30", "30.0", "0703", "460", "4001", 80182000),
("30", "30.0", "0703", "460", "4002", 194429000),
("30", "30.0", "1506", "410", "0001", 46070000),
("30", "30.0", "1506", "410", "0002", 537251000),
("30", "30.0", "1506", "410", "0003", 39590000),
("30", "30.0", "1506", "410", "4001", 23441000),
("30", "30.0", "1506", "410", "4002", 71254000),
("30", "30.0", "1506", "410", "4003", 24853000),
("30", "30.0", "1506", "410", "4004", 2180000),
("30", "30.0", "1507", "473", "0001", 561316000),
("30", "30.0", "1507", "473", "0002", 558678000),
("30", "30.0", "1507", "473", "0003", 6650000),
("30", "30.0", "1507", "473", "0004", 117279000),
("30", "30.0", "1507", "473", "4001", 73289000),
("30", "30.0", "1507", "473", "4002", 570000),
("30", "30.0", "1507", "473", "5001", 1196337000),
("30", "30.1", "", "", "", 531094000),
("30", "30.1", "0703", "460", "0005", 531094000),
("32", "", "", "", "", 78529000),
("32", "", "1605", "110", "0001", 78529000),
("33", "", "0610", "110", "0001", 16170000),
("33", "", "0610", "110", "0002", 13564000),
("33", "", "0610", "110", "0003", 15734000),
("33", "", "0610", "110", "0004", 21742000),
("34", "", "", "", "", 945846000),
("34", "", "0610", "130", "0001", 7459000),
("34", "", "0610", "130", "0002", 24155000),
("34", "", "0610", "130", "0003", 86544000),
("34", "", "0610", "130", "0004", 610505000),
("34", "", "0610", "130", "4001", 217183000),
("35", "", "", "", "", 717114000),
("35", "", "0403", "410", "0001", 280444000),
("35", "", "0403", "410", "0002", 116320000),
("35", "", "0403", "410", "0003", 58545000),
("35", "", "0403", "410", "0004", 51463000),
("35", "", "0403", "410", "0005", 202229000),
("35", "", "0403", "410", "4001", 700000),
("35", "", "0403", "410", "4002", 5160000),
("35", "", "0403", "410", "4003", 1627000),
("35", "", "0403", "410", "4004", 626000),
("36", "", "", "", "", 3656897000),
("36", "", "1102", "410", "0001", 3017500000),
("36", "", "1102", "410", "0002", 87435000),
("36", "", "1102", "410", "0003", 70939000),
("36", "", "1102", "410", "0004", 92926000),
("36", "", "1102", "410", "0005", 96944000),
("36", "", "1102", "410", "0006", 291153000),
("37", "", "", "", "", 27573000),
("37", "", "0402", "410", "0012", 27573000),
("38", "", "", "", "", 345667000),
("38", "", "0605", "130", "0001", 1320000),
("38", "", "0605", "130", "0002", 222062000),
("38", "", "0605", "130", "0003", 118285000),
("38", "", "0605", "130", "4001", 4000000),
("39", "", "", "", "", 30523000),
("39", "", "1401", "250", "0004", 30473000),
("39", "", "1401", "250", "7014", 50000),
("40", "", "", "", "", 147102000),
("40", "", "0202", "130", "0001", 77002000),
("40", "", "0202", "130", "0002", 16098000),
("40", "", "0202", "130", "0003", 54002000),
("41", "", "", "", "", 23097000),
("41", "", "0902", "410", "0008", 14455000),
("41", "", "0902", "410", "0009", 1250000),
("41", "", "0902", "410", "0010", 7392000),
("42", "", "", "", "", 469491000),
("42", "", "0201", "140", "0009", 213736000),
("42", "", "0201", "140", "0010", 255755000),
("43", "", "", "", "", 50830000),
("43", "", "0612", "410", "0001", 50830000),
("44", "", "", "", "", 128852000),
("44", "", "0612", "410", "0002", 102801000),
("44", "", "0612", "410", "0003", 16300000),
("44", "", "0612", "410", "0004", 9750000),
("44", "", "0612", "410", "4001", 1000),
("45", "", "", "", "", 262112000),
("45", "", "0503", "440", "0002", 262112000),
("45", "", "0503", "440", "0002", 262112000),
("46", "", "", "", "", 3159387000),
("46", "", "1001", "070", "0013", 3037464000),
("46", "", "1001", "070", "7007", 62730000),
("46", "", "1001", "070", "7008", 30000000),
("46", "", "1001", "110", "0014", 29193000),
("47", "", "", "", "", 333367000),
("47", "", "1601", "360", "0002", 26312000),
("47", "", "1601", "360", "0003", 37904000),
("47", "", "1601", "360", "0004", 24261000),
("47", "", "1601", "360", "0005", 78130000),
("47", "", "1601", "360", "4001", 23257000),
("47", "", "1601", "360", "4002", 3780000),
("47", "", "1601", "360", "4003", 4515000),
("47", "", "1601", "360", "4004", 5940000),
("47", "", "1601", "360", "4005", 3808000),
("47", "", "1601", "360", "7004", 125460000),
("48", "", "", "", "", 53455000),
("48", "", "0701", "450", "0009", 35661000),
("48", "", "0701", "450", "0010", 17794000),
("49", "", "", "", "", 20220000),
("49", "", "0801", "410", "0010", 20220000),
("50", "", "", "", "", 3991419000),
("50", "", "0606", "130", "0007", 930672000),
("50", "", "0606", "130", "0008", 360083000),
("50", "", "0606", "130", "0009", 1342044000),
("50", "", "0606", "130", "5001", 1358620000),
("51", "", "", "", "", 432445000),
("51", "51.1", "", "", "", 18725000),
("51", "51.1", "0606", "130", "0010", 18725000),
("51", "51.2", "", "", "", 14090000),
("51", "51.2", "0606", "130", "0011", 14090000),
("51", "51.3", "0606", "130", "0012", 14427000),
("51", "51.4", "", "", "", 15406000),
("51", "51.4", "0606", "130", "0013", 15406000),
("51", "51.5", "", "", "", 14393000),
("51", "51.5", "0606", "130", "0014", 14393000),
("51", "51.6", "0606", "130", "0015", 10815000),
("51", "51.7", "", "", "", 18537000),
("51", "51.7", "0606", "130", "0016", 18537000),
("51", "51.8", "", "", "", 10516000),
("51", "51.8", "0606", "130", "0017", 10516000),
("51", "51.9", "", "", "", 17697000),
("51", "51.9", "0606", "130", "0018", 17697000),
("51", "51.10", "", "", "", 15152000),
("51", "51.10", "0606", "130", "0019", 15152000),
("51", "51.11", "", "", "", 12031000),
("51", "51.11", "0606", "130", "0020", 12031000),
("51", "51.12", "", "", "", 16578000),
("51", "51.12", "0606", "130", "0021", 16578000),
("51", "51.13", "", "", "", 19804000),
("51", "51.13", "0606", "130", "0022", 19804000),
("51", "51.14", "", "", "", 13466000),
("51", "51.14", "0606", "130", "0023", 13466000),
("51", "51.15", "", "", "", 16217000),
("51", "51.15", "0606", "130", "0024", 16217000),
("51", "51.16", "", "", "", 31386000),
("51", "51.16", "0606", "130", "0025", 31386000),
("51", "51.17", "", "", "", 15069000),
("51", "51.17", "0606", "130", "0026", 15069000),
("51", "51.18", "", "", "", 22504000),
("51", "51.18", "0606", "130", "0027", 22504000),
("51", "51.19", "", "", "", 12678000),
("51", "51.19", "0606", "130", "0028", 12678000),
("51", "51.20", "", "", "", 29535000),
("51", "51.20", "0606", "130", "0029", 29535000),
("51", "51.21", "", "", "", 10299000),
("51", "51.21", "0606", "130", "0030", 10299000),
("51", "51.22", "", "", "", 12852000),
("51", "51.22", "0606", "130", "0031", 12852000),
("51", "51.23", "", "", "", 13063000),
("51", "51.23", "0606", "130", "0032", 13063000),
("51", "51.24", "", "", "", 11780000),
("51", "51.24", "0606", "130", "0033", 11780000),
("51", "51.25", "", "", "", 14952000),
("51", "51.25", "0606", "130", "0034", 14952000),
("51", "51.26", "", "", "", 4796000),
("51", "51.26", "0606", "130", "0035", 4796000),
("51", "51.27", "", "", "", 5088000),
("51", "51.27", "0606", "130", "0036", 5088000),
("51", "51.28", "", "", "", 11143000),
("51", "51.28", "0606", "130", "0037", 11143000),
("51", "51.29", "", "", "", 9446000),
("51", "51.29", "0606", "130", "0038", 9446000),
("52", "", "", "", "", 3213003000),
("52", "", "2401", "490", "0002", 2706049000),
("52", "", "2401", "490", "0003", 250000000),
("52", "", "2401", "490", "4001", 68000000),
("52", "", "2401", "490", "4002", 188954000),
("53", "", "", "", "", 33016000),
("53", "", "0701", "160", "0011", 33016000)
]
return totals
|
opendatakosovo/data-centar
|
importer/utils.py
|
Python
|
gpl-2.0
| 88188
|
from django.conf.urls.defaults import *
from encampment.models import Room, TimeSlot, Presentation, Attendee, Sponsor
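# Legacy URLconf: django.conf.urls.defaults, patterns() and string view
# references only work on old Django releases (defaults was removed in
# Django 1.6; patterns() and string views in 1.10).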
urlpatterns = patterns('',
url(r'^schedule/$', 'encampment.views.schedule', {}, 'schedule'),
url(r'^schedule/room-(?P<object_id>[0-9]+)/$', 'django.views.generic.list_detail.object_detail', {'queryset': Room.objects.all()}, 'room'),
url(r'^schedule/timeslot-(?P<object_id>[0-9]+)/$', 'django.views.generic.list_detail.object_detail', {'queryset': TimeSlot.objects.all()}, 'timeslot'),
url(r'^presentation-(?P<object_id>[0-9]+)/$', 'django.views.generic.list_detail.object_detail', {'queryset': Presentation.objects.all()}, 'presentation'),
url(r'^register/$', 'encampment.views.register', {}, 'register'),
url(r'^attendees/$', 'django.views.generic.list_detail.object_list', {'queryset': Attendee.objects.all()}, 'attendees'),
url(r'^sponsors/$', 'django.views.generic.list_detail.object_list', {'queryset': Sponsor.objects.all()}, 'sponsors'),
)
|
pombredanne/encampment
|
urls.py
|
Python
|
bsd-3-clause
| 994
|
#! /usr/bin/python
import sys
from PyQt4.QtGui import *
from PyQt4.QtCore import *
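# Minimal PyQt4 QPainter demo. Note that "python.jpg" must exist in the
# working directory: QPixmap silently yields a null pixmap for a missing
# file, and drawPixmap then draws nothing.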
class Example(QWidget):
def __init__(self):
super(Example, self).__init__()
self.initUI()
def initUI(self):
self.text = "hello world"
self.setGeometry(100,100, 600,600)
self.setWindowTitle('Draw Demo')
self.show()
def paintEvent(self, event):
qp = QPainter()
qp.begin(self)
# Red pen and 20pt font for the text; the pixmap is drawn as-is.
qp.setPen(QColor(Qt.red))
qp.setFont(QFont('Arial', 20))
qp.drawPixmap(10,10,QPixmap("python.jpg"))
qp.drawText(400,50, "hello Python")
# Blue pen: a diagonal line across the widget and an outlined rectangle.
qp.setPen(QColor(Qt.blue))
qp.drawLine(0,0,600,600)
qp.drawRect(350,350,400,400)
# Yellow pen for the ellipse outline; fillRect ignores the pen and fills
# with the brush (solid black here).
qp.setPen(QColor(Qt.yellow))
qp.drawEllipse(300,300,100,50)
qp.fillRect(500,500,550,500,QBrush(Qt.SolidPattern))
qp.end()
def main():
app = QApplication(sys.argv)
ex = Example()
sys.exit(app.exec_())
if __name__ == '__main__':
main()
|
mkhuthir/learnPython
|
Book_pythonlearn_com/24_pyqt/draw.py
|
Python
|
mit
| 965
|
#!/usr/bin/python2.4
# Copyright (c) 2011 The Native Client Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Software construction toolkit builders for SCons."""
import SCons
import library_deps
__component_list = {}
def _InitializeComponentBuilders(env):
"""Re-initializes component builders module.
Args:
env: Environment context
"""
env = env # Silence gpylint
__component_list.clear()
def _RetrieveComponents(component_name, filter_components=None):
"""Get the list of all components required by the specified component.
Args:
component_name: Name of the base component.
filter_components: List of components NOT to include.
Returns:
A list of the transitive closure of all components required by the base
component, including the base component itself (components always require
themselves). That is, if A requires B and B requires C, this returns
[A, B, C].
"""
if filter_components:
filter_components = set(filter_components)
else:
filter_components = set()
components = set([component_name]) # Components always require themselves
new_components = set(components)
while new_components:
# Take next new component and add it to the list we've already scanned.
c = new_components.pop()
components.add(c)
# Add to the list of new components any of c's components that we haven't
# seen before.
new_components.update(__component_list.get(c, set())
- components - filter_components)
return list(components)
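# Illustrative example (hypothetical data): with __component_list equal to
# {'app': set(['libA']), 'libA': set(['libB'])}, _RetrieveComponents('app')
# returns ['app', 'libA', 'libB'], in arbitrary set order.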
def _StoreComponents(self, component_name):
"""Stores the list of child components for the specified component.
Args:
self: Environment containing component.
component_name: Name of the component.
Adds component references based on the LIBS and COMPONENTS variables in the
current environment. Should be called at primary SConscript execution time;
use _RetrieveComponents() to get the final components lists in a Defer()'d
function.
"""
components = set()
for clist in ('LIBS', 'COMPONENTS'):
components.update(map(self.subst, self.Flatten(self[clist])))
if component_name not in __component_list:
__component_list[component_name] = set()
__component_list[component_name].update(components)
def _ComponentPlatformSetup(env, builder_name, **kwargs):
"""Modify an environment to work with a component builder.
Args:
env: Environment to clone.
builder_name: Name of the builder.
kwargs: Keyword arguments.
Returns:
A modified clone of the environment.
"""
# Clone environment so we can modify it
env = env.Clone()
# Add all keyword arguments to the environment
for k, v in kwargs.items():
env[k] = v
# Add compiler flags for included headers, if any
env['INCLUDES'] = env.Flatten(env.subst_list(['$INCLUDES']))
for h in env['INCLUDES']:
env.Append(CCFLAGS=['${CCFLAG_INCLUDE}%s' % h])
# This supports a NaCl convention that was previously supported with a
# modification to SCons. Previously, EXTRA_LIBS was interpolated into LIBS
# using the ${EXTRA_LIBS} syntax. It appears, however, that SCons naturally
# computes library dependencies before interpolation, so EXTRA_LIBS will not
# be correctly depended upon if interpolated. In the past, SCons was modified
# to force interpolation before library dependencies were computed. This new
# approach allows us to use an unmodified version of SCons.
# In general, the use of EXTRA_LIBS is discouraged.
if 'EXTRA_LIBS' in env:
# The SubstList2 method expands and flattens so that scons will
# correctly know about the library dependencies in cases like
# EXTRA_LIBS=['${FOO_LIBS}', 'bar'].
env['LIBS'] = (library_deps.AddLibDeps(env,
env['TARGET_FULLARCH'],
env.SubstList2('${EXTRA_LIBS}')) +
env.SubstList2('${LIBS}'))
# Call platform-specific component setup function, if any
if env.get('COMPONENT_PLATFORM_SETUP'):
env['COMPONENT_PLATFORM_SETUP'](env, builder_name)
# Return the modified environment
return env
#------------------------------------------------------------------------------
# TODO: Should be possible to refactor programs, test programs, libs to all
# publish as packages, for simplicity and code reuse.
def ComponentPackageDeferred(env):
"""Deferred build steps for component package.
Args:
env: Environment from ComponentPackage().
Sets up the aliases to build the package.
"""
package_name = env['PACKAGE_NAME']
# Install program and resources
all_outputs = []
package_filter = env.Flatten(env.subst_list('$COMPONENT_PACKAGE_FILTER'))
components = _RetrieveComponents(package_name, package_filter)
for resource, dest_dir in env.get('COMPONENT_PACKAGE_RESOURCES').items():
all_outputs += env.ReplicatePublished(dest_dir, components, resource)
# Add installed program and resources to the alias
env.Alias(package_name, all_outputs)
def ComponentPackage(self, package_name, dest_dir, **kwargs):
"""Pseudo-builder for package containing other components.
Args:
self: Environment in which we were called.
package_name: Name of package.
dest_dir: Destination directory for package.
kwargs: Keyword arguments.
Returns:
The alias node for the package.
"""
# Clone and modify environment
env = _ComponentPlatformSetup(self, 'ComponentPackage', **kwargs)
env.Replace(
PACKAGE_NAME=package_name,
PACKAGE_DIR=dest_dir,
)
# Add an empty alias for the package and add it to the right groups
a = env.Alias(package_name, [])
for group in env['COMPONENT_PACKAGE_GROUPS']:
SCons.Script.Alias(group, a)
# Store list of components for this program
env._StoreComponents(package_name)
# Let component_targets know this target is available in the current mode
env.SetTargetProperty(package_name, TARGET_PATH=dest_dir)
# Set up deferred call to replicate resources
env.Defer(ComponentPackageDeferred)
# Return the alias, since it's the only node we have
return a
#------------------------------------------------------------------------------
def ComponentObject(self, *args, **kwargs):
"""Pseudo-builder for object to handle platform-dependent type.
Args:
self: Environment in which we were called.
args: Positional arguments.
kwargs: Keyword arguments.
Returns:
Passthrough return code from env.StaticLibrary() or env.SharedLibrary().
TODO: Perhaps this should be a generator builder, so it can take a list of
inputs and return a list of outputs?
"""
# Clone and modify environment
env = _ComponentPlatformSetup(self, 'ComponentObject', **kwargs)
# Make appropriate object type
if env.get('COMPONENT_STATIC'):
o = env.StaticObject(*args, **kwargs)
else:
o = env.SharedObject(*args, **kwargs)
# Add dependencies on includes
env.Depends(o, env['INCLUDES'])
return o
#------------------------------------------------------------------------------
def ComponentLibrary(self, lib_name, *args, **kwargs):
"""Pseudo-builder for library to handle platform-dependent type.
Args:
self: Environment in which we were called.
lib_name: Library name.
args: Positional arguments.
kwargs: Keyword arguments.
Returns:
Passthrough return code from env.StaticLibrary() or env.SharedLibrary().
"""
# Clone and modify environment
env = _ComponentPlatformSetup(self, 'ComponentLibrary', **kwargs)
# Make appropriate library type
if env.get('COMPONENT_STATIC'):
lib_outputs = env.StaticLibrary(lib_name, *args, **kwargs)
else:
lib_outputs = env.SharedLibrary(lib_name, *args, **kwargs)
# TODO(robertm): arm hack, figure out a better way to do this
# we should not be modifying the env as a side-effect
# BUG: http://code.google.com/p/nativeclient/issues/detail?id=2424
env.FilterOut(LINKFLAGS=['-static'])
# Add dependencies on includes
env.Depends(lib_outputs, env['INCLUDES'])
# Scan library outputs for files we need to link against this library, and
# files we need to run executables linked against this library.
need_for_link = []
need_for_debug = []
need_for_run = []
for o in lib_outputs:
if o.suffix in env['COMPONENT_LIBRARY_LINK_SUFFIXES']:
need_for_link.append(o)
if o.suffix in env['COMPONENT_LIBRARY_DEBUG_SUFFIXES']:
need_for_debug.append(o)
if o.suffix == env['SHLIBSUFFIX']:
need_for_run.append(o)
all_outputs = lib_outputs
# Install library in intermediate directory, so other libs and programs can
# link against it
all_outputs += env.Replicate('$LIB_DIR', need_for_link)
# Publish output
env.Publish(lib_name, 'link', need_for_link)
env.Publish(lib_name, 'run', need_for_run)
env.Publish(lib_name, 'debug', need_for_debug)
# Add an alias to build and copy the library, and add it to the right groups
a = self.Alias(lib_name, all_outputs)
for group in env['COMPONENT_LIBRARY_GROUPS']:
SCons.Script.Alias(group, a)
# Store list of components for this library
env._StoreComponents(lib_name)
# Let component_targets know this target is available in the current mode.
env.SetTargetProperty(lib_name, TARGET_PATH=lib_outputs[0])
# If library should publish itself, publish as if it was a program
if env.get('COMPONENT_LIBRARY_PUBLISH'):
env['PROGRAM_BASENAME'] = lib_name
env.Defer(ComponentProgramDeferred)
# Return the library
return lib_outputs[0]
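#------------------------------------------------------------------------------
# Illustrative only (not part of this tool): a minimal SConscript sketch of how
# ComponentLibrary is typically invoked once the tool is loaded; the target and
# source names below are hypothetical.
#
#   Import('env')
#   # Shared library, published to $LIB_DIR so other targets can link against it
#   env.ComponentLibrary('foo', ['foo.c'], COMPONENT_STATIC=False)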
#------------------------------------------------------------------------------
def ComponentTestProgramDeferred(env):
"""Deferred build steps for test program.
Args:
env: Environment from ComponentTestProgram().
Sets up the aliases to compile and run the test program.
"""
prog_name = env['PROGRAM_BASENAME']
# Install program and resources
all_outputs = []
components = _RetrieveComponents(prog_name)
for resource, dest_dir in env.get('COMPONENT_TEST_RESOURCES').items():
all_outputs += env.ReplicatePublished(dest_dir, components, resource)
# Add installed program and resources to the alias
env.Alias(prog_name, all_outputs)
# Add target properties
env.SetTargetProperty(
prog_name,
# The copy of the program we care about is the one in the tests dir
EXE='$TESTS_DIR/$PROGRAM_NAME',
RUN_CMDLINE='$COMPONENT_TEST_CMDLINE',
RUN_DIR='$TESTS_DIR',
TARGET_PATH='$TESTS_DIR/$PROGRAM_NAME',
)
# Add an alias for running the test in the test directory, if the test is
# runnable and has a test command line.
if env.get('COMPONENT_TEST_RUNNABLE') and env.get('COMPONENT_TEST_CMDLINE'):
env.Replace(
COMMAND_OUTPUT_CMDLINE=env['COMPONENT_TEST_CMDLINE'],
COMMAND_OUTPUT_RUN_DIR='$TESTS_DIR',
)
test_out_name = '$TEST_OUTPUT_DIR/${PROGRAM_BASENAME}.out.txt'
if (env.GetOption('component_test_retest')
and env.File(test_out_name).exists()):
# Delete old test results, so test will rerun.
env.Execute(SCons.Script.Delete(test_out_name))
# Set timeout based on test size
timeout = env.get('COMPONENT_TEST_TIMEOUT')
    if isinstance(timeout, dict):
timeout = timeout.get(env.get('COMPONENT_TEST_SIZE'))
if timeout:
env['COMMAND_OUTPUT_TIMEOUT'] = timeout
# Test program is the first run resource we replicated. (Duplicate
# replicate is not harmful, and is a handy way to pick out the correct
# file from all those we replicated above.)
test_program = env.ReplicatePublished('$TESTS_DIR', prog_name, 'run')
# Run the test. Note that we need to refer to the file by name, so that
# SCons will recreate the file node after we've deleted it; if we used the
# env.File() we created in the if statement above, SCons would still think
# it exists and not rerun the test.
test_out = env.CommandOutput(test_out_name, test_program)
# Running the test requires the test and its libs copied to the tests dir
env.Depends(test_out, all_outputs)
env.ComponentTestOutput('run_' + prog_name, test_out)
# Add target properties
env.SetTargetProperty(prog_name, RUN_TARGET='run_' + prog_name)
def ComponentTestProgram(self, prog_name, *args, **kwargs):
"""Pseudo-builder for test program to handle platform-dependent type.
Args:
self: Environment in which we were called.
prog_name: Test program name.
args: Positional arguments.
kwargs: Keyword arguments.
Returns:
Output node list from env.Program().
"""
# Clone and modify environment
env = _ComponentPlatformSetup(self, 'ComponentTestProgram', **kwargs)
env['PROGRAM_BASENAME'] = prog_name
env['PROGRAM_NAME'] = '$PROGPREFIX$PROGRAM_BASENAME$PROGSUFFIX'
# Call env.Program()
out_nodes = env.Program(prog_name, *args, **kwargs)
# Add dependencies on includes
env.Depends(out_nodes, env['INCLUDES'])
# Publish output
env.Publish(prog_name, 'run', out_nodes[0])
env.Publish(prog_name, 'debug', out_nodes[1:])
# Add an alias to build the program to the right groups
a = env.Alias(prog_name, out_nodes)
for group in env['COMPONENT_TEST_PROGRAM_GROUPS']:
SCons.Script.Alias(group, a)
# Store list of components for this program
env._StoreComponents(prog_name)
# Let component_targets know this target is available in the current mode
env.SetTargetProperty(prog_name, TARGET_PATH=out_nodes[0])
# Set up deferred call to replicate resources and run test
env.Defer(ComponentTestProgramDeferred)
# Return the output node
return out_nodes
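#------------------------------------------------------------------------------
# Illustrative only (not part of this tool): a hypothetical SConscript call
# showing the test-specific keywords this pseudo-builder and its deferred step
# consume; the target, source, and command-line flag are made up.
#
#   env.ComponentTestProgram(
#       'foo_unittest', ['foo_test.c'],
#       COMPONENT_TEST_SIZE='small',
#       COMPONENT_TEST_CMDLINE='${PROGRAM_NAME} --verbose',
#   )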
#------------------------------------------------------------------------------
def ComponentProgramDeferred(env):
"""Deferred build steps for program.
Args:
env: Environment from ComponentProgram().
Sets up the aliases to compile the program.
"""
prog_name = env['PROGRAM_BASENAME']
# Install program and resources
all_outputs = []
components = _RetrieveComponents(prog_name)
for resource, dest_dir in env.get('COMPONENT_PROGRAM_RESOURCES').items():
all_outputs += env.ReplicatePublished(dest_dir, components, resource)
# Add installed program and resources to the alias
env.Alias(prog_name, all_outputs)
def ComponentProgram(self, prog_name, *args, **kwargs):
"""Pseudo-builder for program to handle platform-dependent type.
Args:
self: Environment in which we were called.
    prog_name: Program name.
args: Positional arguments.
kwargs: Keyword arguments.
Returns:
Output node list from env.Program().
"""
# Clone and modify environment
env = _ComponentPlatformSetup(self, 'ComponentProgram', **kwargs)
env['PROGRAM_BASENAME'] = prog_name
if env['PROGSUFFIX'] and env.subst(prog_name).endswith(env['PROGSUFFIX']):
    # Temporary hack: if there's already an extension, remove it, because
    # PPAPI is revision-locked and expects to be able to use .nexe.
# TODO: When PPAPI deps is rolled, replace with this:
# raise Exception("Program name shouldn't have a suffix")
prog_name = env.subst(prog_name)
prog_name = prog_name[:-len(env['PROGSUFFIX'])]
# Call env.Program()
out_nodes = env.Program(prog_name, *args, **kwargs)
# Add dependencies on includes
env.Depends(out_nodes, env['INCLUDES'])
# Add dependencies on libraries marked as implicitly included in the link.
# These are libraries that are not passed on the command line, but are
# always linked in by the toolchain, i.e. startup files and -lc and such.
if 'IMPLICIT_LIBS' in env:
env.Depends(out_nodes, env['IMPLICIT_LIBS'])
# Publish output
env.Publish(prog_name, 'run', out_nodes[0])
env.Publish(prog_name, 'debug', out_nodes[1:])
# Add an alias to build the program to the right groups
a = env.Alias(prog_name, out_nodes)
env.ComponentProgramAlias(a)
# Store list of components for this program
env._StoreComponents(prog_name)
# Let component_targets know this target is available in the current mode
env.SetTargetProperty(prog_name, TARGET_PATH=out_nodes[0])
# Set up deferred call to replicate resources
env.Defer(ComponentProgramDeferred)
# Return the executable
return out_nodes[0]
def ComponentProgramAlias(self, program):
for group in self['COMPONENT_PROGRAM_GROUPS']:
SCons.Script.Alias(group, program)
#------------------------------------------------------------------------------
def ComponentTestOutput(self, test_name, nodes, **kwargs):
"""Pseudo-builder for test output.
Args:
self: Environment in which we were called.
test_name: Test name.
nodes: List of files/Nodes output by the test.
kwargs: Keyword arguments.
Returns:
Passthrough return code from env.Alias().
"""
# Clone and modify environment
  env = _ComponentPlatformSetup(self, 'ComponentTestOutput', **kwargs)
# Add an alias for the test output
a = env.Alias(test_name, nodes)
# Determine groups test belongs in
if env.get('COMPONENT_TEST_ENABLED'):
groups = env.SubstList2('$COMPONENT_TEST_OUTPUT_GROUPS')
if env.get('COMPONENT_TEST_SIZE'):
groups.append(env.subst('run_${COMPONENT_TEST_SIZE}_tests'))
else:
# Disabled tests only go in the explicit disabled tests group
groups = ['run_disabled_tests']
for group in groups:
SCons.Script.Alias(group, a)
# Let component_targets know this target is available in the current mode
env.SetTargetProperty(test_name, TARGET_PATH=nodes[0])
# Return the output node
return a
#------------------------------------------------------------------------------
def generate(env):
# NOTE: SCons requires the use of this name, which fails gpylint.
"""SCons entry point for this tool."""
env.Replace(
LIB_DIR='$TARGET_ROOT/lib',
# TODO: Remove legacy COMPONENT_LIBRARY_DIR, once all users have
# transitioned to LIB_DIR
COMPONENT_LIBRARY_DIR='$LIB_DIR',
STAGING_DIR='$TARGET_ROOT/staging',
TESTS_DIR='$TARGET_ROOT/tests',
TEST_OUTPUT_DIR='$TARGET_ROOT/test_output',
# Default command line for a test is just the name of the file.
# TODO: Why doesn't the following work:
# COMPONENT_TEST_CMDLINE='${SOURCE.abspath}',
# (it generates a SCons error)
COMPONENT_TEST_CMDLINE='${PROGRAM_NAME}',
# Component tests are runnable by default.
COMPONENT_TEST_RUNNABLE=True,
# Default test size is large
COMPONENT_TEST_SIZE='large',
# Default timeouts for component tests
COMPONENT_TEST_TIMEOUT={'large': 900, 'medium': 450, 'small': 180},
# Tests are enabled by default
COMPONENT_TEST_ENABLED=True,
# Static linking is a sensible default
COMPONENT_STATIC=True,
# Don't publish libraries to the staging dir by themselves by default.
COMPONENT_LIBRARY_PUBLISH=False,
)
env.Append(
LIBPATH=['$LIB_DIR'],
RPATH=['$LIB_DIR'],
# Default alias groups for component builders
COMPONENT_PACKAGE_GROUPS=['all_packages'],
COMPONENT_LIBRARY_GROUPS=['all_libraries'],
COMPONENT_PROGRAM_GROUPS=['all_programs'],
COMPONENT_TEST_PROGRAM_GROUPS=['all_test_programs'],
COMPONENT_TEST_OUTPUT_GROUPS=['run_all_tests'],
# Additional components whose resources should be copied into program
# directories, in addition to those from LIBS and the program itself.
LIBS=[],
COMPONENTS=[],
# Dicts of what resources should go in each destination directory for
# programs and test programs.
COMPONENT_PACKAGE_RESOURCES={
'run': '$PACKAGE_DIR',
'debug': '$PACKAGE_DIR',
},
COMPONENT_PROGRAM_RESOURCES={
'run': '$STAGING_DIR',
'debug': '$STAGING_DIR',
},
COMPONENT_TEST_RESOURCES={
'run': '$TESTS_DIR',
'debug': '$TESTS_DIR',
'test_input': '$TESTS_DIR',
},
)
# Add command line option for retest
SCons.Script.AddOption(
'--retest',
dest='component_test_retest',
action='store_true',
help='force all tests to rerun')
SCons.Script.Help(' --retest '
'Rerun specified tests, ignoring cached results.\n')
# Defer per-environment initialization, but do before building SConscripts
env.Defer(_InitializeComponentBuilders)
env.Defer('BuildEnvironmentSConscripts', after=_InitializeComponentBuilders)
# Add our pseudo-builder methods
env.AddMethod(_StoreComponents)
env.AddMethod(ComponentPackage)
env.AddMethod(ComponentObject)
env.AddMethod(ComponentLibrary)
env.AddMethod(ComponentProgram)
env.AddMethod(ComponentProgramAlias)
env.AddMethod(ComponentTestProgram)
env.AddMethod(ComponentTestOutput)
# Add our target groups
AddTargetGroup('all_libraries', 'libraries can be built')
AddTargetGroup('all_programs', 'programs can be built')
AddTargetGroup('all_test_programs', 'tests can be built')
AddTargetGroup('all_packages', 'packages can be built')
AddTargetGroup('run_all_tests', 'tests can be run')
AddTargetGroup('run_disabled_tests', 'tests are disabled')
AddTargetGroup('run_small_tests', 'small tests can be run')
AddTargetGroup('run_medium_tests', 'medium tests can be run')
AddTargetGroup('run_large_tests', 'large tests can be run')
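#------------------------------------------------------------------------------
# Illustrative only: a hedged sketch of loading this tool from a SConstruct.
# Because the file lives under site_scons/site_tools/, SCons should find it by
# name; the explicit toolpath shown is only needed for other layouts.
#
#   env = Environment(tools=['component_builders'],
#                     toolpath=['site_scons/site_tools'])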
|
yantrabuddhi/nativeclient
|
site_scons/site_tools/component_builders.py
|
Python
|
bsd-3-clause
| 22,789
|
###
# Copyright (c) 2013, spline
# All rights reserved.
#
#
###
"""
Add a description of the plugin (to be presented to the user inside the wizard)
here. This should describe *what* the plugin does.
"""
import supybot
import supybot.world as world
# Use this for the version of this plugin. You may wish to put a CVS keyword
# in here if you're keeping the plugin in CVS or some similar system.
__version__ = ""
# XXX Replace this with an appropriate author or supybot.Author instance.
__author__ = supybot.authors.unknown
# This is a dictionary mapping supybot.Author instances to lists of
# contributions.
__contributors__ = {}
# This is a url where the most recent plugin package can be downloaded.
__url__ = 'http://supybot.com/reticulatingspline/Supybot-Titler/'
import config
import plugin
reload(config)
reload(plugin) # In case we're being reloaded.
# Add more reloads here if you add third-party modules and want them to be
# reloaded when this plugin is reloaded. Don't forget to import them as well!
if world.testing:
import test
Class = plugin.Class
configure = config.configure
# vim:set shiftwidth=4 tabstop=4 expandtab textwidth=79:
|
avinson/Supybot-Titler
|
__init__.py
|
Python
|
mit
| 1,166
|
#structly_with_inspect.py
class Structure:
_fields = []
def __init__(self, *args):
for name, val in zip(self._fields, args):
setattr(self, name, val)
class Stock(Structure):
_fields = ['name', 'shares', 'price']
class Point(Structure):
_fields = ['x', 'y']
class Address(Structure):
_fields = ['hostname', 'port']
from inspect import Signature, signature
print('(signature(Stock)) -->', end=" ")
print(signature(Stock))
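# A hedged extension (not in the original snippet): the classes above accept
# *args, so signature(Stock) only reports (*args). The inspect.Signature
# machinery imported above can advertise the real field names; everything
# below (make_sig, StockSig, the sample values) is illustrative.
from inspect import Parameter

def make_sig(*names):
    # Build a Signature whose parameters are the given field names
    parms = [Parameter(name, Parameter.POSITIONAL_OR_KEYWORD) for name in names]
    return Signature(parms)

class StockSig(Structure):
    _fields = ['name', 'shares', 'price']
    __signature__ = make_sig(*_fields)

print('(signature(StockSig)) -->', end=" ")
print(signature(StockSig))  # (name, shares, price) instead of (*args)
s = StockSig('ACME', 50, 91.1)
print(s.name, s.shares, s.price)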
|
kmad1729/python_notes
|
metaprogramming/start.py
|
Python
|
unlicense
| 471
|
from project.models.fields.Field import Field
from project.models.fields.exceptions import FieldValidException
THEME_MIN_LENGTH = 0
THEME_MAX_LENGTH = 128
class ThemeField(Field):
def __init__(self, theme):
self.set(theme)
def set(self, theme):
        if not isinstance(theme, str):
            raise FieldValidException("theme must be string")
        if not (THEME_MIN_LENGTH <= len(theme) <= THEME_MAX_LENGTH):
            raise FieldValidException(
                "theme length should be from " + str(THEME_MIN_LENGTH) + " to " + str(THEME_MAX_LENGTH))
self._theme = theme
def get(self):
return self._theme
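if __name__ == "__main__":
    # Illustrative only (not in the original module); assumes the project
    # package is importable so Field and FieldValidException resolve as above.
    field = ThemeField("release notes")
    print(field.get())          # release notes
    try:
        ThemeField(42)          # not a string
    except FieldValidException as exc:
        print(exc)              # theme must be string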
|
AbramovVitaliy/Abramov-RIS-13
|
lab4_5_6/project/models/fields/ThemeField.py
|
Python
|
mit
| 734
|
# -*- coding: utf-8 -*-
import os
import sys
sys.path.append(os.path.join(os.getcwd(), os.path.pardir))
import unittest
from digraph import digraph
from graph import graph
from graph_algorithms import *
class test_graph(unittest.TestCase):
def setUp(self):
self.gr = graph()
self.gr.add_nodes(["s", "a", "b", "c", "d", "e",
"f", "g", "h", "j", "k", "l"])
self.gr.add_edges([("s", "a"), ("s", "b"), ("a", "c"), ("c", "e")])
self.gr.add_edges([("e", "d"), ("d", "b"), ("a", "b"), ("c", "d")])
self.gr.add_edges([("g", "h"), ("f", "g")])
self.gr.add_edges([("j", "k"), ("j", "l")])
self.digr = digraph()
self.digr.add_nodes(['s', 'a', 'b', 'c', 'd', 'e', 'f'])
self.digr.add_edges([("s", "a"), ("a", "b"), ("b", "a"), ("c", "b")])
self.digr.add_edges([("b", "s"), ("s", "d"), ("d", "e"), ("e", "d")])
self.digr.add_edges([("b", "f"), ("e", "f")])
def test_bfs_undirected_graph(self):
self.assertEqual(len(BFS(self.gr, "s")), 6)
self.assertEqual(len(BFS(self.gr, "j")), 3)
self.assertEqual(len(BFS(self.gr, "g")), 3)
def test_bfs_directed_graph(self):
self.assertEqual(len(BFS(self.digr, "s")), 6)
self.assertEqual(len(BFS(self.digr, "c")), 7)
self.assertEqual(len(BFS(self.digr, "f")), 1)
def test_dfs_undirected_graph(self):
self.assertEqual(len(DFS(self.gr, "s")), 6)
self.assertEqual(len(DFS(self.gr, "j")), 3)
self.assertEqual(len(DFS(self.gr, "g")), 3)
def test_dfs_directed_graph(self):
self.assertEqual(len(DFS(self.digr, "s")), 6)
self.assertEqual(len(DFS(self.digr, "c")), 7)
self.assertEqual(len(DFS(self.digr, "f")), 1)
def test_shortest_hops_undirected_graph(self):
self.assertEqual(shortest_hops(self.gr, "s")["c"], 2)
self.assertEqual(shortest_hops(self.gr, "c")["s"], 2)
self.assertEqual(shortest_hops(self.gr, "s")["s"], 0)
self.assertEqual(shortest_hops(self.gr, "c")["j"], float('inf'))
def test_shortest_hops_directed_graph(self):
self.assertEqual(shortest_hops(self.digr, "s")["f"], 3)
self.assertEqual(shortest_hops(self.digr, "f")["s"], float('inf'))
self.assertEqual(shortest_hops(self.digr, "s")["s"], 0)
self.assertEqual(shortest_hops(self.digr, "s")["c"], float('inf'))
def test_undirected_connected_component(self):
self.assertEqual(len(undirected_connected_components(self.gr)), 3)
self.assertRaises(
Exception, undirected_connected_components, self.digr)
def test_topological_ordering(self):
dag = digraph() # directed acyclic graph
dag.add_nodes(["a", "b", "c", "d", "e", "f", "g", "h"])
dag.add_edges([("a", "b"), ("a", "c"), ("a", "e"), ("d", "a")])
dag.add_edges(
[("g", "b"), ("g", "f"), ("f", "e"), ("h", "f"), ("h", "a")])
order = {o[0]: o[1] for o in topological_ordering(dag)}
self.assertEqual(sum([order[u] < order[v] for (u, v) in
dag.edges()]), len(dag.edges())) # all comparisons are True
def test_directed_connected_components(self):
digr = digraph()
digr.add_nodes(["a", "b", "c", "d", "e", "f", "g", "h", "i"])
digr.add_edges([("b", "a"), ("a", "c"), ("c", "b"), ("d", "b")])
digr.add_edges([("d", "f"), ("f", "e"), ("e", "d"), ("g", "e")])
digr.add_edges([("g", "h"), ("h", "i"), ("i", "g")])
self.assertEqual(len(directed_connected_components(digr)), 3)
digr2 = digraph()
digr2.add_nodes(
["a", "b", "c", "d", "e", "f", "g", "h", "i", "j", "k"])
digr2.add_edges(
[("a", "b"), ("b", "c"), ("c", "a"), ("b", "d"), ("d", "e")])
digr2.add_edges(
[("e", "f"), ("f", "g"), ("g", "e"), ("d", "g"), ("i", "f")])
digr2.add_edges(
[("h", "g"), ("c", "h"), ("c", "k"), ("h", "i"), ("i", "j")])
digr2.add_edges([("h", "j"), ("j", "k"), ("k", "h")])
self.assertEqual(len(directed_connected_components(digr2)), 4)
def test_shortest_path_in_directed_graph(self):
digr = digraph()
digr.add_nodes(["a", "b", "c", "d", "e", "f"])
digr.add_edge(("a", "b"), 7)
digr.add_edge(("a", "c"), 9)
digr.add_edge(("a", "f"), 14)
digr.add_edge(("f", "e"), 9)
digr.add_edge(("c", "f"), 2)
digr.add_edge(("c", "d"), 11)
digr.add_edge(("b", "c"), 10)
digr.add_edge(("b", "d"), 15)
digr.add_edge(("d", "e"), 6)
self.assertEqual(shortest_path(digr, "a")["a"], 0)
self.assertEqual(shortest_path(digr, "a")["b"], 7)
self.assertEqual(shortest_path(digr, "a")["c"], 9)
self.assertEqual(shortest_path(digr, "a")["d"], 20)
self.assertEqual(shortest_path(digr, "a")["e"], 20)
self.assertEqual(shortest_path(digr, "a")["f"], 11)
def test_prims_minimum_spanning_tree(self):
gr = graph()
gr.add_nodes(["a", "b", "c", "d"])
gr.add_edge(("a", "b"), 4)
gr.add_edge(("b", "c"), 3)
gr.add_edge(("a", "c"), 1)
gr.add_edge(("c", "d"), 2)
min_cost = minimum_spanning_tree(gr)
self.assertEqual(min_cost, 6)
def test_kruskals_minimum_spanning_tree(self):
gr = graph()
gr.add_nodes(["a", "b", "c", "d"])
gr.add_edge(("a", "b"), 4)
gr.add_edge(("b", "c"), 3)
gr.add_edge(("a", "c"), 1)
gr.add_edge(("c", "d"), 2)
min_cost = kruskal_MST(gr)
self.assertEqual(min_cost, 6)
if __name__ == "__main__":
    # exit=False so the pause below still runs (unittest.main() would otherwise sys.exit)
    unittest.main(exit=False)
    os.system("pause")
|
NicovincX2/Python-3.5
|
Algorithmique/Algorithme/Algorithme de la théorie des graphes/graph_algorithms_test.py
|
Python
|
gpl-3.0
| 5,705
|
# -*- coding: utf-8 -*-
##
## Copyright © 2007, Matthias Urlichs <matthias@urlichs.de>
##
## This program is free software: you can redistribute it and/or modify
## it under the terms of the GNU General Public License as published by
## the Free Software Foundation, either version 3 of the License, or
## (at your option) any later version.
##
## This program is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU General Public License (included; see the file LICENSE)
## for more details.
##
"""\
This code implements primitive "if true" and "if false" checks.
"""
from homevent.check import Check,register_condition,unregister_condition
from homevent.module import Module
class TrueCheck(Check):
name="true"
doc="always true."
def check(self,*args):
assert not args,"Truth doesn't have arguments"
return True
class FalseCheck(Check):
name="false"
doc="always false."
def check(self,*args):
assert not args,"Falsehood doesn't have arguments"
return False
class NoneCheck(Check):
name="null"
doc="check if the argument has a value."
def check(self,*args):
assert len(args)==1,u"The ‹null› check requires one argument"
return args[0] is None
class EqualCheck(Check):
name="equal"
doc="check if the arguments are the same."
def check(self,*args):
assert len(args)==2,u"The ‹equal› check requires two arguments"
a,b = args
if a is None: return b is None
try:
return float(a) == float(b)
except (ValueError,TypeError):
return str(a) == str(b)
class LessCheck(Check):
name="less"
doc="check if the first argument is smaller."
def check(self,*args):
assert len(args)==2,u"The ‹less› check requires two arguments"
a,b = args
if a is None or b is None: return False
try:
return float(a) < float(b)
except (ValueError,TypeError):
return str(a) < str(b)
class GreaterCheck(Check):
name="greater"
doc="check if the first argument is larger."
def check(self,*args):
assert len(args)==2,u"The ‹greater› check requires two arguments"
a,b = args
if a is None or b is None: return False
try:
return float(a) > float(b)
except (ValueError,TypeError):
return str(a) > str(b)
class BoolModule(Module):
"""\
This module implements basic boolean conditions
"""
info = u"Boolean conditions. There can be only … two."
def load(self):
register_condition(TrueCheck)
register_condition(FalseCheck)
register_condition(NoneCheck)
register_condition(EqualCheck)
register_condition(LessCheck)
register_condition(GreaterCheck)
def unload(self):
unregister_condition(TrueCheck)
unregister_condition(FalseCheck)
unregister_condition(NoneCheck)
unregister_condition(EqualCheck)
unregister_condition(LessCheck)
unregister_condition(GreaterCheck)
init = BoolModule
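if __name__ == "__main__":
    # Illustrative only (not part of the module), assuming Check subclasses
    # can be instantiated without arguments: the comparisons fall back from
    # float to string semantics when an argument does not parse as a number.
    print(EqualCheck().check("1", 1.0))    # True  (numeric comparison)
    print(LessCheck().check("abc", "b"))   # True  (string comparison)
    print(NoneCheck().check(None))         # True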
|
smurfix/HomEvenT
|
modules/bool.py
|
Python
|
gpl-3.0
| 2,922
|
from nose.tools import *
from exercises import ex10
def test_overlapping():
'''
Check that we return true when something overlaps
'''
test_overlapping_number = ex10.overlapping([1, 2, 3], [1, 3, 4])
assert_true(test_overlapping_number)
def test_no_overlapping():
'''
Check that we return false when nothing overlaps
'''
test_overlapping_number = ex10.overlapping([1, 2, 3], [4, 5, 6])
assert_false(test_overlapping_number)
|
gravyboat/python-exercises
|
tests/ex10_tests.py
|
Python
|
mit
| 467
|
#!/usr/bin/env python
# -*- mode: python; coding: utf-8; -*-
# ---------------------------------------------------------------------------#
#
# Copyright (C) 1998-2003 Markus Franz Xaver Johannes Oberhumer
# Copyright (C) 2003 Mt. Hood Playing Card Co.
# Copyright (C) 2005-2009 Skomoroh
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# ---------------------------------------------------------------------------#
# imports
# import os
# Toolkit imports
from pysollib.pysoltk import MfxScrolledCanvas
# ************************************************************************
# *
# ************************************************************************
class MfxTreeBaseNode:
def __init__(self, tree, parent_node, text, key):
self.tree = tree
self.parent_node = parent_node
self.text = text
self.key = key
# state
self.selected = 0
self.subnodes = None
# canvas item ids
self.symbol_id = None
self.text_id = None
self.textrect_id = None
def registerKey(self):
if self.key is not None:
lk = self.tree.keys.get(self.key, [])
lk.append(self)
self.tree.keys[self.key] = lk
def whoami(self):
if self.parent_node is None:
return (self.text, )
else:
return self.parent_node.whoami() + (self.text, )
# drawing functions not used with kivy.
'''
def draw(self, x, y, lastx=None, lasty=None):
canvas, style = self.tree.canvas, self.tree.style
topleftx = x + style.distx
toplefty = y - style.height / 2 # +++
# draw the horizontal line
if lastx is not None:
canvas.create_line(x, y, topleftx, y,
stipple=style.linestyle, fill=style.linecolor)
# draw myself - ugly, ugly...
self.selected = 0
self.symbol_id = -1
self.drawSymbol(topleftx, toplefty)
linestart = style.distx + style.width + 5
self.text_id = -1
self.drawText(x + linestart, y)
return x, y, x, y + style.disty
#
#
#
def drawText(self, x, y):
canvas, style = self.tree.canvas, self.tree.style
if self.selected:
fg, bg = style.text_selected_fg, style.text_selected_bg
else:
fg, bg = style.text_normal_fg, style.text_normal_bg
#
if self.tree.nodes.get(self.text_id) is self:
canvas.itemconfig(self.text_id, fill=fg)
else:
# note: I don't use Label + canvas.create_window here
# because it doesn't propagate events to the canvas
# and has some other re-display annoyances
# print 'style.font:', style.font
self.text_id = canvas.create_text(x + 1, y, text=self.text,
anchor="w", justify="left",
font=style.font,
fill=fg)
self.tree.nodes[self.text_id] = self
#
if self.tree.nodes.get(self.textrect_id) is self:
try:
# _tkinter.TclError: unknown option "-fill" ???
canvas.itemconfig(self.textrect_id, fill=bg)
except Tkinter.TclError:
pass
elif self.selected:
b = canvas.bbox(self.text_id)
self.textrect_id = canvas.create_rectangle(
b[0] - 1, b[1] - 1, b[2] + 1, b[3] + 1, fill=bg, outline="")
canvas.tag_lower(self.textrect_id, self.text_id)
self.tree.nodes[self.textrect_id] = self
def updateText(self):
if self.tree.nodes.get(self.text_id) is self:
self.drawText(-1, -1)
#
#
#
def drawSymbol(self, x, y, **kw):
canvas, style = self.tree.canvas, self.tree.style
color = kw.get("color")
if color is None:
if self.selected:
color = "darkgreen"
else:
color = "green"
# note: rectangle outline is one pixel
if self.tree.nodes.get(self.symbol_id) is self:
canvas.itemconfig(self.symbol_id, fill=color)
else:
self.symbol_id = canvas.create_rectangle(
x + 1, y + 1, x + style.width, y + style.height, fill=color)
self.tree.nodes[self.symbol_id] = self
def updateSymbol(self):
if self.tree.nodes.get(self.symbol_id) is self:
self.drawSymbol(-1, -1)
'''
# ************************************************************************
# * Terminal and non-terminal nodes
# ************************************************************************
class MfxTreeLeaf(MfxTreeBaseNode):
def drawText(self, x, y):
if self.text_id < 0:
self.registerKey()
MfxTreeBaseNode.drawText(self, x, y)
class MfxTreeNode(MfxTreeBaseNode):
def __init__(self, tree, parent_node, text, key, expanded=0):
MfxTreeBaseNode.__init__(self, tree, parent_node, text, key)
self.expanded = expanded
def drawChildren(self, x, y, lastx, lasty):
# get subnodes
self.subnodes = self.tree.getContents(self)
# draw subnodes
lx, ly = lastx, lasty
nx, ny = x, y
for node in self.subnodes:
# update tree
node.tree = self.tree
# draw node
lx, ly, nx, ny = node.draw(nx, ny, lx, ly)
# draw the vertical line
if self.subnodes:
style = self.tree.style
dy = (style.disty - style.width) / 2
y = y - style.disty / 2 - dy
self.tree.canvas.create_line(x, y, nx, ly,
stipple=style.linestyle,
fill=style.linecolor)
return ny
def draw(self, x, y, ilastx=None, ilasty=None):
# draw myself
lx, ly, nx, ny = MfxTreeBaseNode.draw(self, x, y, ilastx, ilasty)
if self.expanded:
style = self.tree.style
childx = nx + style.distx + style.width / 2
childy = ny
clastx = nx + style.distx + style.width / 2
clasty = ly + style.height / 2
ny = self.drawChildren(childx, childy, clastx, clasty)
return lx, ly, x, ny
#
#
#
def drawSymbol(self, x, y, **kw):
color = kw.get("color")
if color is None:
if self.expanded:
color = "red"
else:
color = "pink"
MfxTreeBaseNode.drawSymbol(self, x, y, color=color)
# ************************************************************************
# *
# ************************************************************************
class MfxTreeInCanvas(MfxScrolledCanvas):
pass
'''
class Style:
def __init__(self):
self.distx = 16
self.disty = 18
self.width = 16 # width of symbol
self.height = 16 # height of symbol
self.originx = 0
self.originy = 0
self.text_normal_fg = "black"
self.text_normal_bg = "white"
self.text_selected_fg = "white"
self.text_selected_bg = "#00008b" # "darkblue"
self.font = None
self.linestyle = "gray50"
self.linecolor = "black"
def __init__(self, parent, rootnodes, **kw):
# LB bg = kw["bg"] = kw.get("bg") or parent.cget("bg")
kw['bd'] = 0
MfxScrolledCanvas.__init__(self, parent, **kw)
#
self.rootnodes = rootnodes
self.updateNodesWithTree(self.rootnodes, self)
self.selection_key = None
self.nodes = {}
self.keys = {}
#
self.style = self.Style()
# self.style.text_normal_fg = self.canvas.cget("insertbackground")
self.style.text_normal_fg = self.canvas.option_get(
'foreground', '') or self.canvas.cget("insertbackground")
self.style.text_normal_bg = bg
#
bind(self.canvas, "<ButtonPress-1>", self.singleClick)
bind(self.canvas, "<Double-Button-1>", self.doubleClick)
# bind(self.canvas, "<ButtonRelease-1>", xxx)
self.pack(fill='both', expand=True)
def destroy(self):
for node in self.keys.get(self.selection_key, []):
node.selected = 0
MfxScrolledCanvas.destroy(self)
def findNode(self, event=None):
id = self.canvas.find_withtag('current')
if id:
return self.nodes.get(id[0])
return None
#
# draw nodes
#
def draw(self):
nx, ny = self.style.originx, self.style.originy
# Account for initial offsets, see topleft[xy] in BaseNode.draw().
# We do this so that our bounding box always starts at (0, 0)
# and the yscrollincrement works nicely.
nx = nx - self.style.distx
ny = ny + self.style.height / 2
for node in self.rootnodes:
# update tree
node.tree = self
# draw
try:
lx, ly, nx, ny = node.draw(nx, ny, None, None)
except Tkinter.TclError:
# FIXME: Tk bug ???
raise
# set scroll region
bbox = self.canvas.bbox("all")
# self.canvas.config(scrollregion=bbox)
self.canvas.config(scrollregion=(0, 0, bbox[2], bbox[3]))
self.canvas.config(yscrollincrement=self.style.disty)
def clear(self):
self.nodes = {}
self.keys = {}
self.canvas.delete("all")
def redraw(self):
oldcur = self.canvas["cursor"]
self.canvas["cursor"] = "watch"
self.canvas.update_idletasks()
self.clear()
self.draw()
self.updateSelection(self.selection_key)
self.canvas["cursor"] = oldcur
#
#
#
def getContents(self, node):
# Overload this, supposed to return a list of subnodes of node.
pass
def singleClick(self, event=None):
# Overload this if you want to know when a node is clicked on.
pass
def doubleClick(self, event=None):
# Overload this if you want to know when a node is d-clicked on.
self.singleClick(event)
#
#
#
def updateSelection(self, key):
l1 = self.keys.get(self.selection_key, [])
l2 = self.keys.get(key, [])
for node in l1:
if node.selected and node not in l2:
node.selected = 0
node.updateSymbol()
node.updateText()
for node in l2:
if not node.selected:
node.selected = 1
node.updateSymbol()
node.updateText()
self.selection_key = key
def updateNodesWithTree(self, nodes, tree):
for node in nodes:
node.tree = tree
if node.subnodes:
self.updateNodesWithTree(node.subnodes, tree)
'''
# ************************************************************************
# *
# ************************************************************************
'''
class DirectoryBrowser(MfxTreeInCanvas):
def __init__(self, parent, dirs):
nodes = []
if isinstance(dirs, str):
dirs = (dirs, )
for dir in dirs:
self.addNode(nodes, None, dir, dir)
# note: best results if height is a multiple of style.disty
MfxTreeInCanvas.__init__(self, parent, nodes, height=25 * 18)
self.draw()
def addNode(self, list, node, filename, text):
try:
if os.path.isdir(filename):
list.append(MfxTreeNode(self, node, text, key=filename))
else:
list.append(MfxTreeLeaf(self, node, text, key=filename))
except EnvironmentError:
pass
def getContents(self, node):
# use cached values
if node.subnodes is not None:
return node.subnodes
#
dir = node.key
print "Getting %s" % dir
try:
filenames = os.listdir(dir)
filenames.sort()
except EnvironmentError:
return ()
contents = []
for filename in filenames:
self.addNode(contents, node, os.path.join(
dir, filename), filename)
# print "gotten"
return contents
def singleClick(self, event=None):
node = self.findNode(event)
if not node:
return
print "Clicked node %s %s" % (node.text, node.key)
if isinstance(node, MfxTreeLeaf):
self.updateSelection(key=node.key)
elif isinstance(node, MfxTreeNode):
node.expanded = not node.expanded
self.redraw()
return "break"
'''
if __name__ == "__main__":
'''
tk = Tkinter.Tk()
if os.name == "nt":
app = DirectoryBrowser(tk, ("c:\\", "c:\\windows"))
else:
app = DirectoryBrowser(tk, ("/", "/home"))
tk.mainloop()
'''
pass
|
shlomif/PySolFC
|
pysollib/kivy/tktree.py
|
Python
|
gpl-3.0
| 13,687
|
"""
Views related to operations on course objects
"""
import json
import random
import string # pylint: disable=W0402
import logging
from django.utils.translation import ugettext as _
import django.utils
from django.contrib.auth.decorators import login_required
from django.conf import settings
from django.views.decorators.http import require_http_methods
from django.core.exceptions import PermissionDenied
from django.core.urlresolvers import reverse
from django.http import HttpResponseBadRequest, HttpResponseNotFound, HttpResponse, Http404
from util.json_request import JsonResponse, JsonResponseBadRequest
from util.date_utils import get_default_time_display
from edxmako.shortcuts import render_to_response
from xmodule.course_module import DEFAULT_START_DATE
from xmodule.error_module import ErrorDescriptor
from xmodule.modulestore.django import modulestore
from xmodule.contentstore.content import StaticContent
from xmodule.tabs import PDFTextbookTabs
from xmodule.partitions.partitions import UserPartition, Group
from xmodule.modulestore import EdxJSONEncoder
from xmodule.modulestore.exceptions import ItemNotFoundError, DuplicateCourseError
from opaque_keys import InvalidKeyError
from opaque_keys.edx.locations import Location
from opaque_keys.edx.keys import CourseKey
from django_future.csrf import ensure_csrf_cookie
from contentstore.course_info_model import get_course_updates, update_course_updates, delete_course_update
from contentstore.utils import (
add_instructor,
initialize_permissions,
get_lms_link_for_item,
add_extra_panel_tab,
remove_extra_panel_tab,
reverse_course_url,
reverse_usage_url,
reverse_url,
remove_all_instructors,
)
from models.settings.course_details import CourseDetails, CourseSettingsEncoder
from models.settings.course_grading import CourseGradingModel
from models.settings.course_metadata import CourseMetadata
from util.json_request import expect_json
from util.string_utils import _has_non_ascii_characters
from .access import has_course_access
from .component import (
OPEN_ENDED_COMPONENT_TYPES,
NOTE_COMPONENT_TYPES,
ADVANCED_COMPONENT_POLICY_KEY,
SPLIT_TEST_COMPONENT_TYPE,
ADVANCED_COMPONENT_TYPES,
)
from contentstore.tasks import rerun_course
from .item import create_xblock_info
from course_creators.views import get_course_creator_status, add_user_with_status_unrequested
from contentstore import utils
from student.roles import (
CourseInstructorRole, CourseStaffRole, CourseCreatorRole, GlobalStaff, UserBasedRole
)
from student import auth
from course_action_state.models import CourseRerunState, CourseRerunUIStateManager
from course_action_state.managers import CourseActionStateItemNotFoundError
from microsite_configuration import microsite
from xmodule.course_module import CourseFields
__all__ = ['course_info_handler', 'course_handler', 'course_info_update_handler',
'course_rerun_handler',
'settings_handler',
'grading_handler',
'advanced_settings_handler',
'course_notifications_handler',
'textbooks_list_handler', 'textbooks_detail_handler',
'group_configurations_list_handler', 'group_configurations_detail_handler']
log = logging.getLogger(__name__)
class AccessListFallback(Exception):
"""
An exception that is raised whenever we need to `fall back` to fetching *all* courses
available to a user, rather than using a shorter method (i.e. fetching by group)
"""
pass
def _get_course_module(course_key, user, depth=0):
"""
Internal method used to calculate and return the locator and course module
for the view functions in this file.
"""
if not has_course_access(user, course_key):
raise PermissionDenied()
course_module = modulestore().get_course(course_key, depth=depth)
return course_module
@login_required
def course_notifications_handler(request, course_key_string=None, action_state_id=None):
"""
Handle incoming requests for notifications in a RESTful way.
    course_key_string and action_state_id must both be set; otherwise an HttpResponseBadRequest is returned.
For each of these operations, the requesting user must have access to the course;
else a PermissionDenied error is returned.
GET
json: return json representing information about the notification (action, state, etc)
DELETE
        json: return json representing success or failure of dismissal/deletion of the notification
PUT
Raises a NotImplementedError.
POST
Raises a NotImplementedError.
"""
# ensure that we have a course and an action state
if not course_key_string or not action_state_id:
return HttpResponseBadRequest()
response_format = request.REQUEST.get('format', 'html')
course_key = CourseKey.from_string(course_key_string)
if response_format == 'json' or 'application/json' in request.META.get('HTTP_ACCEPT', 'application/json'):
if not has_course_access(request.user, course_key):
raise PermissionDenied()
if request.method == 'GET':
return _course_notifications_json_get(action_state_id)
elif request.method == 'DELETE':
# we assume any delete requests dismiss actions from the UI
return _dismiss_notification(request, action_state_id)
elif request.method == 'PUT':
raise NotImplementedError()
elif request.method == 'POST':
raise NotImplementedError()
else:
return HttpResponseBadRequest()
else:
return HttpResponseNotFound()
def _course_notifications_json_get(course_action_state_id):
"""
Return the action and the action state for the given id
"""
try:
action_state = CourseRerunState.objects.find_first(id=course_action_state_id)
except CourseActionStateItemNotFoundError:
return HttpResponseBadRequest()
action_state_info = {
'action': action_state.action,
'state': action_state.state,
'should_display': action_state.should_display
}
return JsonResponse(action_state_info)
def _dismiss_notification(request, course_action_state_id): # pylint: disable=unused-argument
"""
Update the display of the course notification
"""
try:
action_state = CourseRerunState.objects.find_first(id=course_action_state_id)
except CourseActionStateItemNotFoundError:
# Can't dismiss a notification that doesn't exist in the first place
return HttpResponseBadRequest()
if action_state.state == CourseRerunUIStateManager.State.FAILED:
# We remove all permissions for this course key at this time, since
# no further access is required to a course that failed to be created.
remove_all_instructors(action_state.course_key)
# The CourseRerunState is no longer needed by the UI; delete
action_state.delete()
return JsonResponse({'success': True})
# pylint: disable=unused-argument
@login_required
def course_handler(request, course_key_string=None):
"""
The restful handler for course specific requests.
It provides the course tree with the necessary information for identifying and labeling the parts. The root
    will typically be a 'course' object, but may not be, especially as we support modules.
GET
html: return course listing page if not given a course id
html: return html page overview for the given course if given a course id
json: return json representing the course branch's index entry as well as dag w/ all of the children
replaced w/ json docs where each doc has {'_id': , 'display_name': , 'children': }
POST
json: create a course, return resulting json
descriptor (same as in GET course/...). Leaving off /branch/draft would imply create the course w/ default
branches. Cannot change the structure contents ('_id', 'display_name', 'children') but can change the
index entry.
PUT
json: update this course (index entry not xblock) such as repointing head, changing display name, org,
course, run. Return same json as above.
DELETE
json: delete this branch from this course (leaving off /branch/draft would imply delete the course)
"""
try:
response_format = request.REQUEST.get('format', 'html')
if response_format == 'json' or 'application/json' in request.META.get('HTTP_ACCEPT', 'application/json'):
if request.method == 'GET':
course_key = CourseKey.from_string(course_key_string)
with modulestore().bulk_operations(course_key):
course_module = _get_course_module(course_key, request.user, depth=None)
return JsonResponse(_course_outline_json(request, course_module))
elif request.method == 'POST': # not sure if this is only post. If one will have ids, it goes after access
return _create_or_rerun_course(request)
elif not has_course_access(request.user, CourseKey.from_string(course_key_string)):
raise PermissionDenied()
elif request.method == 'PUT':
raise NotImplementedError()
elif request.method == 'DELETE':
raise NotImplementedError()
else:
return HttpResponseBadRequest()
elif request.method == 'GET': # assume html
if course_key_string is None:
return course_listing(request)
else:
return course_index(request, CourseKey.from_string(course_key_string))
else:
return HttpResponseNotFound()
except InvalidKeyError:
raise Http404
@login_required
@ensure_csrf_cookie
@require_http_methods(["GET"])
def course_rerun_handler(request, course_key_string):
"""
The restful handler for course reruns.
GET
html: return html page with form to rerun a course for the given course id
"""
# Only global staff (PMs) are able to rerun courses during the soft launch
if not GlobalStaff().has_user(request.user):
raise PermissionDenied()
course_key = CourseKey.from_string(course_key_string)
with modulestore().bulk_operations(course_key):
course_module = _get_course_module(course_key, request.user, depth=3)
if request.method == 'GET':
return render_to_response('course-create-rerun.html', {
'source_course_key': course_key,
'display_name': course_module.display_name,
'user': request.user,
'course_creator_status': _get_course_creator_status(request.user),
'allow_unicode_course_id': settings.FEATURES.get('ALLOW_UNICODE_COURSE_ID', False)
})
def _course_outline_json(request, course_module):
"""
Returns a JSON representation of the course module and recursively all of its children.
"""
return create_xblock_info(
course_module,
include_child_info=True,
course_outline=True,
include_children_predicate=lambda xblock: not xblock.category == 'vertical'
)
def _accessible_courses_list(request):
"""
List all courses available to the logged in user by iterating through all the courses
"""
def course_filter(course):
"""
Filter out unusable and inaccessible courses
"""
if isinstance(course, ErrorDescriptor):
return False
# pylint: disable=fixme
# TODO remove this condition when templates purged from db
if course.location.course == 'templates':
return False
return has_course_access(request.user, course.id)
courses = filter(course_filter, modulestore().get_courses())
in_process_course_actions = [
course for course in
CourseRerunState.objects.find_all(
exclude_args={'state': CourseRerunUIStateManager.State.SUCCEEDED}, should_display=True
)
if has_course_access(request.user, course.course_key)
]
return courses, in_process_course_actions
def _accessible_courses_list_from_groups(request):
"""
List all courses available to the logged in user by reversing access group names
"""
courses_list = {}
in_process_course_actions = []
instructor_courses = UserBasedRole(request.user, CourseInstructorRole.ROLE).courses_with_role()
staff_courses = UserBasedRole(request.user, CourseStaffRole.ROLE).courses_with_role()
all_courses = instructor_courses | staff_courses
for course_access in all_courses:
course_key = course_access.course_id
if course_key is None:
# If the course_access does not have a course_id, it's an org-based role, so we fall back
raise AccessListFallback
if course_key not in courses_list:
# check for any course action state for this course
in_process_course_actions.extend(
CourseRerunState.objects.find_all(
exclude_args={'state': CourseRerunUIStateManager.State.SUCCEEDED},
should_display=True,
course_key=course_key,
)
)
# check for the course itself
try:
course = modulestore().get_course(course_key)
            except ItemNotFoundError:
                # If a user has access to a course that doesn't exist, skip it
                course = None
if course is not None and not isinstance(course, ErrorDescriptor):
# ignore deleted or errored courses
courses_list[course_key] = course
return courses_list.values(), in_process_course_actions
@login_required
@ensure_csrf_cookie
def course_listing(request):
"""
List all courses available to the logged in user
Try to get all courses by first reversing django groups and fallback to old method if it fails
Note: overhead of pymongo reads will increase if getting courses from django groups fails
"""
if GlobalStaff().has_user(request.user):
# user has global access so no need to get courses from django groups
courses, in_process_course_actions = _accessible_courses_list(request)
else:
try:
courses, in_process_course_actions = _accessible_courses_list_from_groups(request)
except AccessListFallback:
            # user has some old groups or there was some error getting courses from django groups
# so fallback to iterating through all courses
courses, in_process_course_actions = _accessible_courses_list(request)
def format_course_for_view(course):
"""
Return a dict of the data which the view requires for each course
"""
return {
'display_name': course.display_name,
'course_key': unicode(course.location.course_key),
'url': reverse_course_url('course_handler', course.id),
'lms_link': get_lms_link_for_item(course.location),
'rerun_link': _get_rerun_link_for_item(course.id),
'org': course.display_org_with_default,
'number': course.display_number_with_default,
'run': course.location.run
}
def format_in_process_course_view(uca):
"""
        Return a dict of the data which the view requires for each course action that has not yet succeeded
"""
return {
'display_name': uca.display_name,
'course_key': unicode(uca.course_key),
'org': uca.course_key.org,
'number': uca.course_key.course,
'run': uca.course_key.run,
            'is_failed': uca.state == CourseRerunUIStateManager.State.FAILED,
            'is_in_progress': uca.state == CourseRerunUIStateManager.State.IN_PROGRESS,
'dismiss_link':
reverse_course_url('course_notifications_handler', uca.course_key, kwargs={
'action_state_id': uca.id,
}) if uca.state == CourseRerunUIStateManager.State.FAILED else ''
}
# remove any courses in courses that are also in the in_process_course_actions list
in_process_action_course_keys = [uca.course_key for uca in in_process_course_actions]
courses = [
format_course_for_view(c)
for c in courses
if not isinstance(c, ErrorDescriptor) and (c.id not in in_process_action_course_keys)
]
in_process_course_actions = [format_in_process_course_view(uca) for uca in in_process_course_actions]
return render_to_response('index.html', {
'courses': courses,
'in_process_course_actions': in_process_course_actions,
'user': request.user,
'request_course_creator_url': reverse('contentstore.views.request_course_creator'),
'course_creator_status': _get_course_creator_status(request.user),
'rerun_creator_status': GlobalStaff().has_user(request.user),
'allow_unicode_course_id': settings.FEATURES.get('ALLOW_UNICODE_COURSE_ID', False),
'allow_course_reruns': settings.FEATURES.get('ALLOW_COURSE_RERUNS', False)
})
def _get_rerun_link_for_item(course_key):
""" Returns the rerun link for the given course key. """
return reverse_course_url('course_rerun_handler', course_key)
@login_required
@ensure_csrf_cookie
def course_index(request, course_key):
"""
Display an editable course overview.
org, course, name: Attributes of the Location for the item to edit
"""
# A depth of None implies the whole course. The course outline needs this in order to compute has_changes.
# A unit may not have a draft version, but one of its components could, and hence the unit itself has changes.
with modulestore().bulk_operations(course_key):
course_module = _get_course_module(course_key, request.user, depth=None)
lms_link = get_lms_link_for_item(course_module.location)
sections = course_module.get_children()
course_structure = _course_outline_json(request, course_module)
locator_to_show = request.REQUEST.get('show', None)
course_release_date = get_default_time_display(course_module.start) if course_module.start != DEFAULT_START_DATE else _("Unscheduled")
settings_url = reverse_course_url('settings_handler', course_key)
try:
current_action = CourseRerunState.objects.find_first(course_key=course_key, should_display=True)
except (ItemNotFoundError, CourseActionStateItemNotFoundError):
current_action = None
return render_to_response('course_outline.html', {
'context_course': course_module,
'lms_link': lms_link,
'sections': sections,
'course_structure': course_structure,
'initial_state': course_outline_initial_state(locator_to_show, course_structure) if locator_to_show else None,
'course_graders': json.dumps(
CourseGradingModel.fetch(course_key).graders
),
'rerun_notification_id': current_action.id if current_action else None,
'course_release_date': course_release_date,
'settings_url': settings_url,
'notification_dismiss_url':
reverse_course_url('course_notifications_handler', current_action.course_key, kwargs={
'action_state_id': current_action.id,
}) if current_action else None,
})
def course_outline_initial_state(locator_to_show, course_structure):
"""
Returns the desired initial state for the course outline view. If the 'show' request parameter
was provided, then the view's initial state will be to have the desired item fully expanded
and to scroll to see the new item.
"""
def find_xblock_info(xblock_info, locator):
"""
Finds the xblock info for the specified locator.
"""
if xblock_info['id'] == locator:
return xblock_info
children = xblock_info['child_info']['children'] if xblock_info.get('child_info', None) else None
if children:
for child_xblock_info in children:
result = find_xblock_info(child_xblock_info, locator)
if result:
return result
return None
def collect_all_locators(locators, xblock_info):
"""
Collect all the locators for an xblock and its children.
"""
locators.append(xblock_info['id'])
children = xblock_info['child_info']['children'] if xblock_info.get('child_info', None) else None
if children:
for child_xblock_info in children:
collect_all_locators(locators, child_xblock_info)
selected_xblock_info = find_xblock_info(course_structure, locator_to_show)
if not selected_xblock_info:
return None
expanded_locators = []
collect_all_locators(expanded_locators, selected_xblock_info)
return {
'locator_to_show': locator_to_show,
'expanded_locators': expanded_locators
}
@expect_json
def _create_or_rerun_course(request):
"""
To be called by requests that create a new destination course (i.e., create_new_course and rerun_course)
Returns the destination course_key and overriding fields for the new course.
Raises DuplicateCourseError and InvalidKeyError
"""
if not auth.has_access(request.user, CourseCreatorRole()):
raise PermissionDenied()
try:
org = request.json.get('org')
course = request.json.get('number', request.json.get('course'))
display_name = request.json.get('display_name')
# force the start date for reruns and allow us to override start via the client
start = request.json.get('start', CourseFields.start.default)
run = request.json.get('run')
# allow/disable unicode characters in course_id according to settings
if not settings.FEATURES.get('ALLOW_UNICODE_COURSE_ID'):
if _has_non_ascii_characters(org) or _has_non_ascii_characters(course) or _has_non_ascii_characters(run):
return JsonResponse(
{'error': _('Special characters not allowed in organization, course number, and course run.')},
status=400
)
fields = {'start': start}
if display_name is not None:
fields['display_name'] = display_name
if 'source_course_key' in request.json:
return _rerun_course(request, org, course, run, fields)
else:
return _create_new_course(request, org, course, run, fields)
except DuplicateCourseError:
return JsonResponse({
'ErrMsg': _(
'There is already a course defined with the same '
'organization, course number, and course run. Please '
'change either organization or course number to be unique.'
),
'OrgErrMsg': _(
'Please change either the organization or '
'course number so that it is unique.'),
'CourseErrMsg': _(
'Please change either the organization or '
'course number so that it is unique.'),
})
except InvalidKeyError as error:
return JsonResponse({
"ErrMsg": _("Unable to create course '{name}'.\n\n{err}").format(name=display_name, err=error.message)}
)
def _create_new_course(request, org, number, run, fields):
"""
Create a new course.
Returns the URL for the course overview page.
Raises DuplicateCourseError if the course already exists
"""
store_for_new_course = (
settings.FEATURES.get('DEFAULT_STORE_FOR_NEW_COURSE') or
modulestore().default_modulestore.get_modulestore_type()
)
new_course = create_new_course_in_store(store_for_new_course, request.user, org, number, run, fields)
return JsonResponse({
'url': reverse_course_url('course_handler', new_course.id),
'course_key': unicode(new_course.id),
})
def create_new_course_in_store(store, user, org, number, run, fields):
"""
Create course in store w/ handling instructor enrollment, permissions, and defaulting the wiki slug.
Separated out b/c command line course creation uses this as well as the web interface.
"""
# Set a unique wiki_slug for newly created courses. To maintain active wiki_slugs for
# existing xml courses this cannot be changed in CourseDescriptor.
    # TODO get rid of defining wiki slug in this org/course/run specific way and reconcile
# w/ xmodule.course_module.CourseDescriptor.__init__
wiki_slug = u"{0}.{1}.{2}".format(org, number, run)
definition_data = {'wiki_slug': wiki_slug}
fields.update(definition_data)
with modulestore().default_store(store):
# Creating the course raises DuplicateCourseError if an existing course with this org/name is found
new_course = modulestore().create_course(
org,
number,
run,
user.id,
fields=fields,
)
# Make sure user has instructor and staff access to the new course
add_instructor(new_course.id, user, user)
# Initialize permissions for user in the new course
initialize_permissions(new_course.id, user)
return new_course
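# Illustrative only: a hedged sketch of the command-line path mentioned in the
# docstring above. ModuleStoreEnum (from xmodule.modulestore) is not imported
# in this file, and all argument values here are made up.
#
#   new_course = create_new_course_in_store(
#       ModuleStoreEnum.Type.split, user,
#       'edX', 'DemoX', '2014_T1', {'display_name': 'Demo Course'},
#   )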
def _rerun_course(request, org, number, run, fields):
"""
Reruns an existing course.
Returns the URL for the course listing page.
"""
source_course_key = CourseKey.from_string(request.json.get('source_course_key'))
# verify user has access to the original course
if not has_course_access(request.user, source_course_key):
raise PermissionDenied()
# create destination course key
store = modulestore()
with store.default_store('split'):
destination_course_key = store.make_course_key(org, number, run)
# verify org course and run don't already exist
if store.has_course(destination_course_key, ignore_case=True):
raise DuplicateCourseError(source_course_key, destination_course_key)
# Make sure user has instructor and staff access to the destination course
# so the user can see the updated status for that course
add_instructor(destination_course_key, request.user, request.user)
# Mark the action as initiated
CourseRerunState.objects.initiated(source_course_key, destination_course_key, request.user, fields['display_name'])
# Rerun the course as a new celery task
json_fields = json.dumps(fields, cls=EdxJSONEncoder)
rerun_course.delay(unicode(source_course_key), unicode(destination_course_key), request.user.id, json_fields)
# Return course listing page
return JsonResponse({
'url': reverse_url('course_handler'),
'destination_course_key': unicode(destination_course_key)
})
# pylint: disable=unused-argument
@login_required
@ensure_csrf_cookie
@require_http_methods(["GET"])
def course_info_handler(request, course_key_string):
"""
GET
html: return html for editing the course info handouts and updates.
"""
course_key = CourseKey.from_string(course_key_string)
with modulestore().bulk_operations(course_key):
course_module = _get_course_module(course_key, request.user)
if 'text/html' in request.META.get('HTTP_ACCEPT', 'text/html'):
return render_to_response(
'course_info.html',
{
'context_course': course_module,
'updates_url': reverse_course_url('course_info_update_handler', course_key),
'handouts_locator': course_key.make_usage_key('course_info', 'handouts'),
'base_asset_url': StaticContent.get_base_url_path_for_course_assets(course_module.id)
}
)
else:
return HttpResponseBadRequest("Only supports html requests")
# pylint: disable=unused-argument
@login_required
@ensure_csrf_cookie
@require_http_methods(("GET", "POST", "PUT", "DELETE"))
@expect_json
def course_info_update_handler(request, course_key_string, provided_id=None):
"""
restful CRUD operations on course_info updates.
    provided_id should be None for a new update (create) and the index otherwise.
GET
json: return the course info update models
POST
json: create an update
PUT or DELETE
json: change an existing update
"""
if 'application/json' not in request.META.get('HTTP_ACCEPT', 'application/json'):
return HttpResponseBadRequest("Only supports json requests")
course_key = CourseKey.from_string(course_key_string)
usage_key = course_key.make_usage_key('course_info', 'updates')
if provided_id == '':
provided_id = None
# check that logged in user has permissions to this item (GET shouldn't require this level?)
if not has_course_access(request.user, usage_key.course_key):
raise PermissionDenied()
if request.method == 'GET':
course_updates = get_course_updates(usage_key, provided_id, request.user.id)
if isinstance(course_updates, dict) and course_updates.get('error'):
return JsonResponse(course_updates, course_updates.get('status', 400))
else:
return JsonResponse(course_updates)
elif request.method == 'DELETE':
try:
return JsonResponse(delete_course_update(usage_key, request.json, provided_id, request.user))
except Exception:
return HttpResponseBadRequest(
"Failed to delete",
content_type="text/plain"
)
# POST and PUT are handled identically; Django sometimes rewrites one to the other:
elif request.method in ('POST', 'PUT'):
try:
return JsonResponse(update_course_updates(usage_key, request.json, provided_id, request.user))
except Exception:
return HttpResponseBadRequest(
"Failed to save",
content_type="text/plain"
)
@login_required
@ensure_csrf_cookie
@require_http_methods(("GET", "PUT", "POST"))
@expect_json
def settings_handler(request, course_key_string):
"""
Course settings for dates and about pages
GET
html: get the page
json: get the CourseDetails model
PUT
json: update the Course and About xblocks through the CourseDetails model
"""
course_key = CourseKey.from_string(course_key_string)
with modulestore().bulk_operations(course_key):
course_module = _get_course_module(course_key, request.user)
if 'text/html' in request.META.get('HTTP_ACCEPT', '') and request.method == 'GET':
upload_asset_url = reverse_course_url('assets_handler', course_key)
# see if the ORG of this course can be attributed to a 'Microsite'. In that case, the
# course about page should be editable in Studio
about_page_editable = not microsite.get_value_for_org(
course_module.location.org,
'ENABLE_MKTG_SITE',
settings.FEATURES.get('ENABLE_MKTG_SITE', False)
)
short_description_editable = settings.FEATURES.get('EDITABLE_SHORT_DESCRIPTION', True)
return render_to_response('settings.html', {
'context_course': course_module,
'course_locator': course_key,
'lms_link_for_about_page': utils.get_lms_link_for_about_page(course_key),
'course_image_url': utils.course_image_url(course_module),
'details_url': reverse_course_url('settings_handler', course_key),
'about_page_editable': about_page_editable,
'short_description_editable': short_description_editable,
'upload_asset_url': upload_asset_url
})
elif 'application/json' in request.META.get('HTTP_ACCEPT', ''):
if request.method == 'GET':
return JsonResponse(
CourseDetails.fetch(course_key),
# encoder serializes dates, old locations, and instances
encoder=CourseSettingsEncoder
)
else: # post or put, doesn't matter.
return JsonResponse(
CourseDetails.update_from_json(course_key, request.json, request.user),
encoder=CourseSettingsEncoder
)
@login_required
@ensure_csrf_cookie
@require_http_methods(("GET", "POST", "PUT", "DELETE"))
@expect_json
def grading_handler(request, course_key_string, grader_index=None):
"""
Course Grading policy configuration
GET
html: get the page
json no grader_index: get the CourseGrading model (graceperiod, cutoffs, and graders)
json w/ grader_index: get the specific grader
PUT
json no grader_index: update the Course through the CourseGrading model
json w/ grader_index: create or update the specific grader (create if index out of range)
"""
course_key = CourseKey.from_string(course_key_string)
with modulestore().bulk_operations(course_key):
course_module = _get_course_module(course_key, request.user)
if 'text/html' in request.META.get('HTTP_ACCEPT', '') and request.method == 'GET':
course_details = CourseGradingModel.fetch(course_key)
return render_to_response('settings_graders.html', {
'context_course': course_module,
'course_locator': course_key,
'course_details': json.dumps(course_details, cls=CourseSettingsEncoder),
'grading_url': reverse_course_url('grading_handler', course_key),
})
elif 'application/json' in request.META.get('HTTP_ACCEPT', ''):
if request.method == 'GET':
if grader_index is None:
return JsonResponse(
CourseGradingModel.fetch(course_key),
# encoder serializes dates, old locations, and instances
encoder=CourseSettingsEncoder
)
else:
return JsonResponse(CourseGradingModel.fetch_grader(course_key, grader_index))
elif request.method in ('POST', 'PUT'): # post or put, doesn't matter.
# None implies update the whole model (cutoffs, graceperiod, and graders) not a specific grader
if grader_index is None:
return JsonResponse(
CourseGradingModel.update_from_json(course_key, request.json, request.user),
encoder=CourseSettingsEncoder
)
else:
return JsonResponse(
CourseGradingModel.update_grader_from_json(course_key, request.json, request.user)
)
elif request.method == "DELETE" and grader_index is not None:
CourseGradingModel.delete_grader(course_key, grader_index, request.user)
return JsonResponse()
# pylint: disable=invalid-name
def _config_course_advanced_components(request, course_module):
"""
Check to see if the user instantiated any advanced components. This
is a hack that does the following :
1) adds/removes the open ended panel tab to a course automatically
if the user has indicated that they want to edit the
combinedopendended or peergrading module
2) adds/removes the notes panel tab to a course automatically if
the user has indicated that they want the notes module enabled in
their course
"""
# TODO refactor the above into distinct advanced policy settings
filter_tabs = True # Exceptional conditions will pull this to False
if ADVANCED_COMPONENT_POLICY_KEY in request.json: # Maps tab types to components
tab_component_map = {
'open_ended': OPEN_ENDED_COMPONENT_TYPES,
'notes': NOTE_COMPONENT_TYPES,
}
# Check to see if the user instantiated any notes or open ended components
for tab_type in tab_component_map.keys():
component_types = tab_component_map.get(tab_type)
found_ac_type = False
for ac_type in component_types:
# Check if the user has incorrectly failed to put the value in an iterable.
new_advanced_component_list = request.json[ADVANCED_COMPONENT_POLICY_KEY]['value']
if hasattr(new_advanced_component_list, '__iter__'):
if ac_type in new_advanced_component_list and ac_type in ADVANCED_COMPONENT_TYPES:
# Add tab to the course if needed
changed, new_tabs = add_extra_panel_tab(tab_type, course_module)
# If a tab has been added to the course, then send the
# metadata along to CourseMetadata.update_from_json
if changed:
course_module.tabs = new_tabs
request.json.update({'tabs': {'value': new_tabs}})
# Indicate that tabs should not be filtered out of
# the metadata
filter_tabs = False # Set this flag to avoid the tab removal code below.
found_ac_type = True # break
else:
# If not iterable, return immediately and let validation handle.
return
# If we did not find a module type in the advanced settings,
# we may need to remove the tab from the course.
if not found_ac_type: # Remove tab from the course if needed
changed, new_tabs = remove_extra_panel_tab(tab_type, course_module)
if changed:
course_module.tabs = new_tabs
request.json.update({'tabs': {'value': new_tabs}})
# Indicate that tabs should *not* be filtered out of
# the metadata
filter_tabs = False
return filter_tabs
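# A sketch of the payload this function inspects (hypothetical values):
# request.json == {ADVANCED_COMPONENT_POLICY_KEY: {'value': ['notes']}}
# would add the notes panel tab, while a later save whose 'value' list no
# longer contains 'notes' would remove that tab again.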
@login_required
@ensure_csrf_cookie
@require_http_methods(("GET", "POST", "PUT"))
@expect_json
def advanced_settings_handler(request, course_key_string):
"""
Course settings configuration
GET
html: get the page
json: get the model
PUT, POST
json: update the Course's settings. The payload is a json rep of the
metadata dicts.
"""
course_key = CourseKey.from_string(course_key_string)
with modulestore().bulk_operations(course_key):
course_module = _get_course_module(course_key, request.user)
if 'text/html' in request.META.get('HTTP_ACCEPT', '') and request.method == 'GET':
return render_to_response('settings_advanced.html', {
'context_course': course_module,
'advanced_dict': json.dumps(CourseMetadata.fetch(course_module)),
'advanced_settings_url': reverse_course_url('advanced_settings_handler', course_key)
})
elif 'application/json' in request.META.get('HTTP_ACCEPT', ''):
if request.method == 'GET':
return JsonResponse(CourseMetadata.fetch(course_module))
else:
try:
# Whether or not to filter the tabs key out of the settings metadata
filter_tabs = _config_course_advanced_components(request, course_module)
# validate data formats and update
is_valid, errors, updated_data = CourseMetadata.validate_and_update_from_json(
course_module,
request.json,
filter_tabs=filter_tabs,
user=request.user,
)
if is_valid:
return JsonResponse(updated_data)
else:
return JsonResponseBadRequest(errors)
# Handle all errors that validation doesn't catch
except (TypeError, ValueError) as err:
return HttpResponseBadRequest(
django.utils.html.escape(err.message),
content_type="text/plain"
)
class TextbookValidationError(Exception):
"An error thrown when a textbook input is invalid"
pass
def validate_textbooks_json(text):
"""
Validate the given text as representing a list of PDF textbooks
"""
try:
textbooks = json.loads(text)
except ValueError:
raise TextbookValidationError("invalid JSON")
if not isinstance(textbooks, (list, tuple)):
raise TextbookValidationError("must be JSON list")
for textbook in textbooks:
validate_textbook_json(textbook)
# check specified IDs for uniqueness
all_ids = [textbook["id"] for textbook in textbooks if "id" in textbook]
unique_ids = set(all_ids)
if len(all_ids) > len(unique_ids):
raise TextbookValidationError("IDs must be unique")
return textbooks
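# Example (hypothetical input): '[{"tab_title": "Calculus"}]' parses to a
# one-element list, while '{"tab_title": "Calculus"}' (a bare object, not a
# list) raises TextbookValidationError("must be JSON list").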
def validate_textbook_json(textbook):
"""
Validate the given dict or JSON string as representing a single PDF textbook
"""
if isinstance(textbook, basestring):
try:
textbook = json.loads(textbook)
except ValueError:
raise TextbookValidationError("invalid JSON")
if not isinstance(textbook, dict):
raise TextbookValidationError("must be JSON object")
if not textbook.get("tab_title"):
raise TextbookValidationError("must have tab_title")
tid = unicode(textbook.get("id", ""))
if tid and not tid[0].isdigit():
raise TextbookValidationError("textbook ID must start with a digit")
return textbook
def assign_textbook_id(textbook, used_ids=()):
"""
Return an ID that can be assigned to a textbook
and doesn't match the used_ids
"""
tid = Location.clean(textbook["tab_title"])
if not tid[0].isdigit():
# stick a random digit in front
tid = random.choice(string.digits) + tid
while tid in used_ids:
# add a random ASCII character to the end
tid = tid + random.choice(string.ascii_lowercase)
return tid
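# Example (hypothetical values): for tab_title "Algebra", Location.clean
# might produce "Algebra"; since that does not start with a digit, a random
# digit is prefixed (e.g. "3Algebra"), and random lowercase letters are then
# appended until the result is absent from used_ids.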
@require_http_methods(("GET", "POST", "PUT"))
@login_required
@ensure_csrf_cookie
def textbooks_list_handler(request, course_key_string):
"""
A RESTful handler for textbook collections.
GET
html: return textbook list page (Backbone application)
json: return JSON representation of all textbooks in this course
POST
json: create a new textbook for this course
PUT
json: overwrite all textbooks in the course with the given list
"""
course_key = CourseKey.from_string(course_key_string)
store = modulestore()
with store.bulk_operations(course_key):
course = _get_course_module(course_key, request.user)
if not "application/json" in request.META.get('HTTP_ACCEPT', 'text/html'):
# return HTML page
upload_asset_url = reverse_course_url('assets_handler', course_key)
textbook_url = reverse_course_url('textbooks_list_handler', course_key)
return render_to_response('textbooks.html', {
'context_course': course,
'textbooks': course.pdf_textbooks,
'upload_asset_url': upload_asset_url,
'textbook_url': textbook_url,
})
# from here on down, we know the client has requested JSON
if request.method == 'GET':
return JsonResponse(course.pdf_textbooks)
elif request.method == 'PUT':
try:
textbooks = validate_textbooks_json(request.body)
except TextbookValidationError as err:
return JsonResponse({"error": err.message}, status=400)
tids = set(t["id"] for t in textbooks if "id" in t)
for textbook in textbooks:
if not "id" in textbook:
tid = assign_textbook_id(textbook, tids)
textbook["id"] = tid
tids.add(tid)
if not any(tab['type'] == PDFTextbookTabs.type for tab in course.tabs):
course.tabs.append(PDFTextbookTabs())
course.pdf_textbooks = textbooks
store.update_item(course, request.user.id)
return JsonResponse(course.pdf_textbooks)
elif request.method == 'POST':
# create a new textbook for the course
try:
textbook = validate_textbook_json(request.body)
except TextbookValidationError as err:
return JsonResponse({"error": err.message}, status=400)
if not textbook.get("id"):
tids = set(t["id"] for t in course.pdf_textbooks if "id" in t)
textbook["id"] = assign_textbook_id(textbook, tids)
existing = course.pdf_textbooks
existing.append(textbook)
course.pdf_textbooks = existing
if not any(tab['type'] == PDFTextbookTabs.type for tab in course.tabs):
course.tabs.append(PDFTextbookTabs())
store.update_item(course, request.user.id)
resp = JsonResponse(textbook, status=201)
resp["Location"] = reverse_course_url(
'textbooks_detail_handler',
course.id,
kwargs={'textbook_id': textbook["id"]}
)
return resp
@login_required
@ensure_csrf_cookie
@require_http_methods(("GET", "POST", "PUT", "DELETE"))
def textbooks_detail_handler(request, course_key_string, textbook_id):
"""
JSON API endpoint for manipulating a textbook via its internal ID.
Used by the Backbone application.
GET
json: return JSON representation of textbook
POST or PUT
json: update textbook based on provided information
DELETE
json: remove textbook
"""
course_key = CourseKey.from_string(course_key_string)
store = modulestore()
with store.bulk_operations(course_key):
course_module = _get_course_module(course_key, request.user)
matching_id = [tb for tb in course_module.pdf_textbooks
if unicode(tb.get("id")) == unicode(textbook_id)]
if matching_id:
textbook = matching_id[0]
else:
textbook = None
if request.method == 'GET':
if not textbook:
return JsonResponse(status=404)
return JsonResponse(textbook)
elif request.method in ('POST', 'PUT'):  # POST and PUT are handled
# identically; Django sometimes rewrites one to the other
try:
new_textbook = validate_textbook_json(request.body)
except TextbookValidationError as err:
return JsonResponse({"error": err.message}, status=400)
new_textbook["id"] = textbook_id
if textbook:
i = course_module.pdf_textbooks.index(textbook)
new_textbooks = course_module.pdf_textbooks[0:i]
new_textbooks.append(new_textbook)
new_textbooks.extend(course_module.pdf_textbooks[i + 1:])
course_module.pdf_textbooks = new_textbooks
else:
course_module.pdf_textbooks.append(new_textbook)
store.update_item(course_module, request.user.id)
return JsonResponse(new_textbook, status=201)
elif request.method == 'DELETE':
if not textbook:
return JsonResponse(status=404)
i = course_module.pdf_textbooks.index(textbook)
remaining_textbooks = course_module.pdf_textbooks[0:i]
remaining_textbooks.extend(course_module.pdf_textbooks[i + 1:])
course_module.pdf_textbooks = remaining_textbooks
store.update_item(course_module, request.user.id)
return JsonResponse()
class GroupConfigurationsValidationError(Exception):
"""
An error thrown when a group configurations input is invalid.
"""
pass
class GroupConfiguration(object):
"""
Prepare Group Configuration for the course.
"""
def __init__(self, json_string, course, configuration_id=None):
"""
Receive group configuration as a json (`json_string`), deserialize it
and validate.
"""
self.configuration = GroupConfiguration.parse(json_string)
self.course = course
self.assign_id(configuration_id)
self.assign_group_ids()
self.validate()
@staticmethod
def parse(json_string):
"""
Deserialize the given JSON string representing a group configuration.
"""
try:
configuration = json.loads(json_string)
except ValueError:
raise GroupConfigurationsValidationError(_("invalid JSON"))
return configuration
def validate(self):
"""
Validate group configuration representation.
"""
if not self.configuration.get("name"):
raise GroupConfigurationsValidationError(_("must have name of the configuration"))
if len(self.configuration.get('groups', [])) < 1:
raise GroupConfigurationsValidationError(_("must have at least one group"))
def generate_id(self, used_ids):
"""
Generate a unique id for the group configuration.
If the id is already in use, generate a new one.
"""
cid = random.randint(100, 10 ** 12)
while cid in used_ids:
cid = random.randint(100, 10 ** 12)
return cid
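# Note: ids are drawn from [100, 10**12], so collisions with used_ids are
# vanishingly rare and the retry loop almost never runs more than once.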
def assign_id(self, configuration_id=None):
"""
Assign an id to the JSON representation of the group configuration.
"""
self.configuration['id'] = int(configuration_id) if configuration_id else self.generate_id(self.get_used_ids())
def assign_group_ids(self):
"""
Assign ids for the group_configuration's groups.
"""
used_ids = [g.id for p in self.course.user_partitions for g in p.groups]
# Assign ids to every group in configuration.
for group in self.configuration.get('groups', []):
if group.get('id') is None:
group["id"] = self.generate_id(used_ids)
used_ids.append(group["id"])
def get_used_ids(self):
"""
Return the set of IDs that are already in use.
"""
return set([p.id for p in self.course.user_partitions])
def get_user_partition(self):
"""
Get user partition for saving in course.
"""
groups = [Group(g["id"], g["name"]) for g in self.configuration["groups"]]
return UserPartition(
self.configuration["id"],
self.configuration["name"],
self.configuration["description"],
groups
)
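# Example (hypothetical): a configuration such as
# {'id': 123, 'name': 'Experiment', 'description': 'AB test',
#  'groups': [{'id': 0, 'name': 'A'}, {'id': 1, 'name': 'B'}]}
# maps to UserPartition(123, 'Experiment', 'AB test',
#                       [Group(0, 'A'), Group(1, 'B')]).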
@staticmethod
def get_usage_info(course, store):
"""
Get usage information for all Group Configurations.
"""
split_tests = store.get_items(course.id, qualifiers={'category': 'split_test'})
return GroupConfiguration._get_usage_info(store, course, split_tests)
@staticmethod
def add_usage_info(course, store):
"""
Add usage information to the group configuration JSON objects in the course.
Returns the list of group configurations updated with usage information.
"""
usage_info = GroupConfiguration.get_usage_info(course, store)
configurations = []
for partition in course.user_partitions:
configuration = partition.to_json()
configuration['usage'] = usage_info.get(partition.id, [])
configurations.append(configuration)
return configurations
@staticmethod
def _get_usage_info(store, course, split_tests):
"""
Returns all unit names, their URLs, and validation messages.
Returns:
{'user_partition_id':
[
{
'label': 'Unit 1 / Experiment 1',
'url': 'url_to_unit_1',
'validation': {'message': 'a validation message', 'type': 'warning'}
},
{
'label': 'Unit 2 / Experiment 2',
'url': 'url_to_unit_2',
'validation': {'message': 'another validation message', 'type': 'error'}
}
],
}
"""
usage_info = {}
for split_test in split_tests:
if split_test.user_partition_id not in usage_info:
usage_info[split_test.user_partition_id] = []
unit_location = store.get_parent_location(split_test.location)
if not unit_location:
log.warning("Parent location of split_test module not found: %s", split_test.location)
continue
try:
unit = store.get_item(unit_location)
except ItemNotFoundError:
log.warning("Unit not found: %s", unit_location)
continue
unit_url = reverse_usage_url(
'container_handler',
course.location.course_key.make_usage_key(unit.location.block_type, unit.location.name)
)
usage_info[split_test.user_partition_id].append({
'label': '{} / {}'.format(unit.display_name, split_test.display_name),
'url': unit_url,
'validation': split_test.general_validation_message,
})
return usage_info
@staticmethod
def update_usage_info(store, course, configuration):
"""
Update usage information for a particular Group Configuration.
Returns the JSON of that group configuration updated with usage information.
"""
# Get all Experiments that use particular Group Configuration in course.
split_tests = store.get_items(
course.id,
category='split_test',
content={'user_partition_id': configuration.id}
)
configuration_json = configuration.to_json()
usage_information = GroupConfiguration._get_usage_info(store, course, split_tests)
configuration_json['usage'] = usage_information.get(configuration.id, [])
return configuration_json
@require_http_methods(("GET", "POST"))
@login_required
@ensure_csrf_cookie
def group_configurations_list_handler(request, course_key_string):
"""
A RESTful handler for Group Configurations
GET
html: return Group Configurations list page (Backbone application)
POST
json: create new group configuration
"""
course_key = CourseKey.from_string(course_key_string)
store = modulestore()
with store.bulk_operations(course_key):
course = _get_course_module(course_key, request.user)
if 'text/html' in request.META.get('HTTP_ACCEPT', 'text/html'):
group_configuration_url = reverse_course_url('group_configurations_list_handler', course_key)
course_outline_url = reverse_course_url('course_handler', course_key)
split_test_enabled = SPLIT_TEST_COMPONENT_TYPE in ADVANCED_COMPONENT_TYPES and SPLIT_TEST_COMPONENT_TYPE in course.advanced_modules
configurations = GroupConfiguration.add_usage_info(course, store)
return render_to_response('group_configurations.html', {
'context_course': course,
'group_configuration_url': group_configuration_url,
'course_outline_url': course_outline_url,
'configurations': configurations if split_test_enabled else None,
})
elif "application/json" in request.META.get('HTTP_ACCEPT'):
if request.method == 'POST':
# create a new group configuration for the course
try:
new_configuration = GroupConfiguration(request.body, course).get_user_partition()
except GroupConfigurationsValidationError as err:
return JsonResponse({"error": err.message}, status=400)
course.user_partitions.append(new_configuration)
response = JsonResponse(new_configuration.to_json(), status=201)
response["Location"] = reverse_course_url(
'group_configurations_detail_handler',
course.id,
kwargs={'group_configuration_id': new_configuration.id} # pylint: disable=no-member
)
store.update_item(course, request.user.id)
return response
else:
return HttpResponse(status=406)
@login_required
@ensure_csrf_cookie
@require_http_methods(("POST", "PUT", "DELETE"))
def group_configurations_detail_handler(request, course_key_string, group_configuration_id):
"""
JSON API endpoint for manipulating a group configuration via its internal ID.
Used by the Backbone application.
POST or PUT
json: update group configuration based on provided information
"""
course_key = CourseKey.from_string(course_key_string)
store = modulestore()
with store.bulk_operations(course_key):
course = _get_course_module(course_key, request.user)
matching_id = [p for p in course.user_partitions
if unicode(p.id) == unicode(group_configuration_id)]
if matching_id:
configuration = matching_id[0]
else:
configuration = None
if request.method in ('POST', 'PUT'): # can be either and sometimes
# django is rewriting one to the other
try:
new_configuration = GroupConfiguration(request.body, course, group_configuration_id).get_user_partition()
except GroupConfigurationsValidationError as err:
return JsonResponse({"error": err.message}, status=400)
if configuration:
index = course.user_partitions.index(configuration)
course.user_partitions[index] = new_configuration
else:
course.user_partitions.append(new_configuration)
store.update_item(course, request.user.id)
configuration = GroupConfiguration.update_usage_info(store, course, new_configuration)
return JsonResponse(configuration, status=201)
elif request.method == "DELETE":
if not configuration:
return JsonResponse(status=404)
# Verify that group configuration is not already in use.
usages = GroupConfiguration.get_usage_info(course, store)
if usages.get(int(group_configuration_id)):
return JsonResponse(
{"error": _("This Group Configuration is already in use and cannot be removed.")},
status=400
)
index = course.user_partitions.index(configuration)
course.user_partitions.pop(index)
store.update_item(course, request.user.id)
return JsonResponse(status=204)
def _get_course_creator_status(user):
"""
Helper method for returning the course creator status for a particular user,
taking into account the values of DISABLE_COURSE_CREATION and ENABLE_CREATOR_GROUP.
If the user passed in has not previously visited the index page, it will be
added with status 'unrequested' if the course creator group is in use.
"""
if user.is_staff:
course_creator_status = 'granted'
elif settings.FEATURES.get('DISABLE_COURSE_CREATION', False):
course_creator_status = 'disallowed_for_this_site'
elif settings.FEATURES.get('ENABLE_CREATOR_GROUP', False):
course_creator_status = get_course_creator_status(user)
if course_creator_status is None:
# User not grandfathered in as an existing user, has not previously visited the dashboard page.
# Add the user to the course creator admin table with status 'unrequested'.
add_user_with_status_unrequested(user)
course_creator_status = get_course_creator_status(user)
else:
course_creator_status = 'granted'
return course_creator_status
|
c0710204/edx-platform
|
cms/djangoapps/contentstore/views/course.py
|
Python
|
agpl-3.0
| 60,590
|
# This file is part of beets.
# Copyright 2015, Fabrice Laporte.
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
"""Provides the %bucket{} function for path formatting.
"""
from datetime import datetime
import re
import string
from itertools import tee, izip
from beets import plugins, ui
class BucketError(Exception):
pass
def pairwise(iterable):
"s -> (s0,s1), (s1,s2), (s2, s3), ..."
a, b = tee(iterable)
next(b, None)
return izip(a, b)
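# e.g. list(pairwise([1, 2, 3, 4])) == [(1, 2), (2, 3), (3, 4)]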
def span_from_str(span_str):
"""Build a span dict from the span string representation.
"""
def normalize_year(d, yearfrom):
"""Convert string to a 4 digits year
"""
if yearfrom < 100:
raise BucketError("%d must be expressed with 4 digits" % yearfrom)
# if two digits only, pick closest year that ends by these two
# digits starting from yearfrom
if d < 100:
if (d % 100) < (yearfrom % 100):
d = (yearfrom - yearfrom % 100) + 100 + d
else:
d = (yearfrom - yearfrom % 100) + d
return d
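# e.g. normalize_year(90, 1975) -> 1990 (90 >= 75: same century), while
# normalize_year(10, 1975) -> 2010 (10 < 75: next century).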
years = [int(x) for x in re.findall('\d+', span_str)]
if not years:
raise ui.UserError("invalid range defined for year bucket '%s': no "
"year found" % span_str)
try:
years = [normalize_year(x, years[0]) for x in years]
except BucketError as exc:
raise ui.UserError("invalid range defined for year bucket '%s': %s" %
(span_str, exc))
res = {'from': years[0], 'str': span_str}
if len(years) > 1:
res['to'] = years[-1]
return res
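# Example: span_from_str("1990-99") -> {'from': 1990, 'to': 1999, 'str': '1990-99'}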
def complete_year_spans(spans):
"""Set the `to` value of spans if empty and sort them chronologically.
"""
spans.sort(key=lambda x: x['from'])
for (x, y) in pairwise(spans):
if 'to' not in x:
x['to'] = y['from'] - 1
if spans and 'to' not in spans[-1]:
spans[-1]['to'] = datetime.now().year
def extend_year_spans(spans, spanlen, start=1900, end=2014):
"""Add new spans to given spans list so that every year of [start,end]
belongs to a span.
"""
extended_spans = spans[:]
for (x, y) in pairwise(spans):
# if a gap between two spans, fill the gap with as much spans of
# spanlen length as necessary
for span_from in range(x['to'] + 1, y['from'], spanlen):
extended_spans.append({'from': span_from})
# Create spans prior to declared ones
for span_from in range(spans[0]['from'] - spanlen, start, -spanlen):
extended_spans.append({'from': span_from})
# Create spans after the declared ones
for span_from in range(spans[-1]['to'] + 1, end, spanlen):
extended_spans.append({'from': span_from})
complete_year_spans(extended_spans)
return extended_spans
def build_year_spans(year_spans_str):
"""Build a chronologically ordered list of spans dict from unordered spans
stringlist.
"""
spans = []
for elem in year_spans_str:
spans.append(span_from_str(elem))
complete_year_spans(spans)
return spans
def str2fmt(s):
"""Deduces formatting syntax from a span string.
"""
regex = re.compile("(?P<bef>\D*)(?P<fromyear>\d+)(?P<sep>\D*)"
"(?P<toyear>\d*)(?P<after>\D*)")
m = re.match(regex, s)
res = {'fromnchars': len(m.group('fromyear')),
'tonchars': len(m.group('toyear'))}
res['fmt'] = "%s%%s%s%s%s" % (m.group('bef'),
m.group('sep'),
'%s' if res['tonchars'] else '',
m.group('after'))
return res
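# Example: str2fmt("1990-99") -> {'fromnchars': 4, 'tonchars': 2, 'fmt': '%s-%s'}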
def format_span(fmt, yearfrom, yearto, fromnchars, tonchars):
"""Return a span string representation.
"""
args = (str(yearfrom)[-fromnchars:],)
if tonchars:
args = (str(yearfrom)[-fromnchars:], str(yearto)[-tonchars:])
return fmt % args
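# Example: format_span('%s-%s', 1990, 1999, 4, 2) -> '1990-99'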
def extract_modes(spans):
"""Extract the most common spans lengths and representation formats
"""
rangelen = sorted([x['to'] - x['from'] + 1 for x in spans])
deflen = sorted(rangelen, key=rangelen.count)[-1]
reprs = [str2fmt(x['str']) for x in spans]
deffmt = sorted(reprs, key=reprs.count)[-1]
return deflen, deffmt
def build_alpha_spans(alpha_spans_str, alpha_regexs):
"""Extract alphanumerics from string and return sorted list of chars
[from...to]
"""
spans = []
ASCII_DIGITS = string.digits + string.ascii_lowercase
for elem in alpha_spans_str:
if elem in alpha_regexs:
spans.append(re.compile(alpha_regexs[elem]))
else:
bucket = sorted([x for x in elem.lower() if x.isalnum()])
if bucket:
beginIdx = ASCII_DIGITS.index(bucket[0])
endIdx = ASCII_DIGITS.index(bucket[-1])
else:
raise ui.UserError("invalid range defined for alpha bucket "
"'%s': no alphanumeric character found" %
elem)
spans.append(
re.compile(
"^[" + ASCII_DIGITS[beginIdx:endIdx + 1] +
ASCII_DIGITS[beginIdx:endIdx + 1].upper() + "]"
)
)
return spans
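# Example: build_alpha_spans(['A-D'], {}) compiles the single regex
# '^[abcdABCD]', matching any string whose first character is in a-d/A-D
# ('-' is skipped because it is not alphanumeric).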
class BucketPlugin(plugins.BeetsPlugin):
def __init__(self):
super(BucketPlugin, self).__init__()
self.template_funcs['bucket'] = self._tmpl_bucket
self.config.add({
'bucket_year': [],
'bucket_alpha': [],
'bucket_alpha_regex': {},
'extrapolate': False
})
self.setup()
def setup(self):
"""Setup plugin from config options
"""
self.year_spans = build_year_spans(self.config['bucket_year'].get())
if self.year_spans and self.config['extrapolate']:
[self.ys_len_mode,
self.ys_repr_mode] = extract_modes(self.year_spans)
self.year_spans = extend_year_spans(self.year_spans,
self.ys_len_mode)
self.alpha_spans = build_alpha_spans(
self.config['bucket_alpha'].get(),
self.config['bucket_alpha_regex'].get()
)
def find_bucket_year(self, year):
"""Return bucket that matches given year or return the year
if no matching bucket.
"""
for ys in self.year_spans:
if ys['from'] <= int(year) <= ys['to']:
if 'str' in ys:
return ys['str']
else:
return format_span(self.ys_repr_mode['fmt'],
ys['from'], ys['to'],
self.ys_repr_mode['fromnchars'],
self.ys_repr_mode['tonchars'])
return year
def find_bucket_alpha(self, s):
"""Return alpha-range bucket that matches given string or return the
string initial if no matching bucket.
"""
for (i, span) in enumerate(self.alpha_spans):
if span.match(s):
return self.config['bucket_alpha'].get()[i]
return s[0].upper()
def _tmpl_bucket(self, text, field=None):
if not field and len(text) == 4 and text.isdigit():
field = 'year'
if field == 'year':
func = self.find_bucket_year
else:
func = self.find_bucket_alpha
return func(text)
|
andremiller/beets
|
beetsplug/bucket.py
|
Python
|
mit
| 8,043
|
import webapp2
class Root(webapp2.RequestHandler):
def get(self):
self.response.headers['Content-Type'] = 'text/plain'
self.response.write('Hello, World!\n')
class Test(webapp2.RequestHandler):
def get(self):
self.response.write('foo was set to %s' % self.request.get("foo"))
app = webapp2.WSGIApplication([
('/', Root),
('/test', Test),
], debug=True)
|
step15/gae-examples
|
pytest/test.py
|
Python
|
mit
| 396
|
#!/usr/bin/env python
'''
This script cleans and processes log data (read from stdout) so it can be
imported into a MATLAB variable, saving each attribute to a separate file
(death.txt, fitness.txt, etc.).
It finds the lowest number of columns among the run attributes (death,
fitness, population) and clips the other attributes' columns to that length.
'''
import sys
import os
import log_to_files
from collections import defaultdict
attrs1 = ['fitness', 'population']
attrs2 = ['fight', 'reproduction', 'death', 'migration']
def parse_dir(directory, old_format=False):
logfiles = [os.path.join(directory,name) for name in os.listdir(directory) if name.startswith(proj) and not name.endswith('_run')]
stats = defaultdict(list)
for logfile in logfiles:
lines = log_to_files.read_zeus_lines(logfile)
old_format = log_to_files.is_format_old(lines)
# print logfile, len(lines)
if old_format:
attrs = attrs1 + attrs2
for attr in attrs:
data = [line.split(':')[1].strip() for line in lines if line.startswith(attr)]
if data:
# print attr, len(data)
stats[attr].append(data)
else:
for attr in attrs1:
isl_dict = defaultdict(list)
data = [line.split(' ') for line in lines if line.startswith(attr)]
for line in data:
isl_dict[line[1]].append(line[2])
vals = zip(*isl_dict.values())
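# zip(*isl_dict.values()) transposes per-island series into per-step tuples,
# e.g. {'1': [a1, a2], '2': [b1, b2]} -> [(a1, b1), (a2, b2)].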
if attr == 'fitness':
data2 = [max(tup) for tup in vals]
elif attr == 'population':
data2 = [sum(tup) for tup in vals]
if data2:
# write_stats_to_file(out_dir, attr, data2)
stats[attr].append(data2)
for attr in attrs2:
# data = [float(line.split(' ')[-1].strip()) for line in lines if line.startswith(attr)]
data = [line.split(' ')[-1].strip() for line in lines if line.startswith(attr)]
# print attr, data
if data:
# write_stats_to_file(out_dir, attr, data)
stats[attr].append(data)
# find minimal number of columns in a run
mincols = min(min([len(elem) for elem in columns]) for columns in stats.values())
print mincols
for key, lists in stats.items():
print [len(elem) for elem in lists]
minlists = [elem[:mincols] for elem in lists]
# keylists2 =
print key, 'raw:', [len(elem) for elem in lists]
# print 'mincols:', mincols
print key, 'cleaned:', [len(elem) for elem in minlists]
stats[key] = [' '.join(elem) for elem in minlists]
log_to_files.write_stats_to_file(directory, key, stats[key])
print 'merged', len(logfiles), 'logfiles (rows) - ', mincols, 'columns'
if __name__ == '__main__':
proj = 'emas'
if len(sys.argv) < 2:
print 'Usage:'
print '\tpython log_to_matlab.py <directory_with_logfiles> ...'
else:
directories = sys.argv[1:]
for directory in directories:
print 'merging logs from:', directory #,' <- ', os.listdir(directory)
parse_dir(directory)
|
ParaPhraseAGH/erlang-emas
|
scripts/logs_to_matlab.py
|
Python
|
mit
| 3,200
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (C) 2011 ~ 2013 Deepin, Inc.
# 2011 ~ 2013 Hou ShaoHui
#
# Author: Hou ShaoHui <houshao55@gmail.com>
# Maintainer: Hou ShaoHui <houshao55@gmail.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import os
import gtk
import cairo
import webkit
import webbrowser
import javascriptcore as jscore
from sina import Sina
from utils import parse_sina_datetime, get_parent_dir
from constant import MAIN_WINDOW_WIDTH, MAIN_WINDOW_HEIGHT
static_dir = os.path.join(get_parent_dir(__file__, 2), "static", "html")
class PopupWindow(gtk.Window):
def __init__(self):
gtk.Window.__init__(self)
self.set_position(gtk.WIN_POS_CENTER)
self.set_colormap(gtk.gdk.Screen().get_rgba_colormap())
self.set_size_request(MAIN_WINDOW_WIDTH, MAIN_WINDOW_HEIGHT)
self.connect("destroy", gtk.main_quit)
# self.connect("expose-event", self.on_expose_event)
self.sina = Sina()
self.webview = webkit.WebView()
self.webview.set_transparent(True)
print "file://%s" % os.path.join(static_dir, "timeline.html")
self.webview.open("file://%s" % os.path.join(static_dir, "timeline.html"))
self.webview.connect('new-window-policy-decision-requested', self.navigation_request_cb)
self.webview.connect('navigation-policy-decision-requested', self.navigation_request_cb)
self.webview.connect("load-finished", lambda w, e: self.on_button_clicked(w))
scrolled_window = gtk.ScrolledWindow()
scrolled_window.add(self.webview)
vbox = gtk.VBox()
button = gtk.Button("更多")  # "更多" means "More": loads more statuses
button.connect("clicked", self.on_button_clicked)
vbox.pack_start(scrolled_window, True, True)
vbox.pack_start(button, False, False)
self.count = 20
self.js_context = jscore.JSContext(self.webview.get_main_frame().get_global_context()).globalObject
self.add(vbox)
self.show_all()
gtk.main()
def navigation_request_cb(self, view, frame, request, action, decision):
""" Handle clicks on links, etc. """
uri = request.get_uri()
webbrowser.open(uri)
return True
def on_button_clicked(self, widget):
timeline = self.sina.GET_statuses__home_timeline(count=self.count, page=1)
statuses = timeline.get("statuses", [])
if len(statuses) > 0:
for message in statuses:
message["created_at"] = parse_sina_datetime((message.get("created_at")))
self.js_context.hello(statuses)
self.count += 20
def on_expose_event(self, widget, event):
cr = widget.window.cairo_create()
rect = widget.allocation
# Clear color to transparent window.
if self.is_composited():
cr.rectangle(*rect)
cr.set_source_rgba(1, 1, 1, 0.0)
cr.set_operator(cairo.OPERATOR_SOURCE)
cr.paint()
else:
cr.rectangle(rect.x, rect.y, rect.width, rect.height)
cr.set_operator(cairo.OPERATOR_SOURCE)
cr.set_source_rgb(0.9, 0.9, 0.9)
cr.fill()
PopupWindow()
|
lovesnow/weido
|
src/weido.py
|
Python
|
gpl-3.0
| 3,943
|
"""
mbus for python
"""
|
Cougar/python-mbus
|
mbus/__init__.py
|
Python
|
bsd-3-clause
| 24
|
__author__ = 'Denis Mikhalkin'
from engine.handlers import SQSHandler
from engine import Engine, ResourceCondition, Resource, EventCondition
import logging
from boto import sqs
import threading
from time import sleep
import unittest
class TestSQSRepository(unittest.TestCase):
def test(self):
logging.basicConfig()
logging.root.setLevel(logging.INFO)
# engine = Engine({"aws_properties": {"profile_name":"pam"}, "repositoryPath":"/home/denismo/Documents/WS/DevOpsGears/repositories/sqsRepository"})
engine = Engine({"aws_properties": {"profile_name":"pam"}, "repositoryPath":"E:\\WS\\Python\\DevOpsGears\\repositories\\sqsRepository"})
engine.start()
condition = threading.Condition()
conn = sqs.connect_to_region("ap-southeast-2")
queue = conn.lookup("testqueue")
queue.write(queue.new_message("test"))
with condition:
condition.wait(120)
print "Finished"
if __name__ == '__main__':
unittest.main()
|
denismo/DevOpsGears
|
tests/testSQSRepository.py
|
Python
|
gpl-3.0
| 1,014
|
# -*-coding:utf-8 -*
# Copyright (c) 2011-2015, Intel Corporation
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation and/or
# other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
# ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
Adding and Removing elements from domain testcases
List of tested functions :
--------------------------
- [listDomainElements] function
- [addElement] function
- [removeElement] function
Test cases :
------------
- Testing nominal case
- Testing addElement errors
- Testing removeElement errors
"""
import os
from Util.PfwUnitTestLib import PfwTestCase
from Util import ACTLogging
log=ACTLogging.Logger()
class TestCases(PfwTestCase):
def setUp(self):
self.pfw.sendCmd("setTuningMode", "on")
self.domain_name = "Domain_0"
self.elem_0_path = "/Test/Test/TEST_DIR"
self.elem_1_path = "/Test/Test/TEST_DOMAIN_0"
self.elem_2_path = "/Test/Test/TEST_DOMAIN_1"
def tearDown(self):
self.pfw.sendCmd("setTuningMode", "off")
def test_Nominal_Case(self):
"""
Testing nominal case
--------------------
Test case description :
~~~~~~~~~~~~~~~~~~~~~~~
- list and backup initial domain elements
- add a domain element
- remove a domain element
- list and check domains elements
Tested commands :
~~~~~~~~~~~~~~~~~
- [listDomainElements] function
- [addElement] function
- [removeElement] function
Expected result :
~~~~~~~~~~~~~~~~~
- all operations succeed
"""
log.D(self.test_Nominal_Case.__doc__)
# List and backup initial domain elements
log.I("Listing initial domain %s elements" % (self.domain_name))
out, err = self.pfw.sendCmd("listDomainElements",str(self.domain_name))
assert err == None, "ERROR : command [listDomainElements] - Error while listing domain elements"
f_DomainElements_Backup = open("f_DomainElements_Backup", "w")
f_DomainElements_Backup.write(out)
f_DomainElements_Backup.close()
log.I("command [listDomainElements] correctly executed")
f_DomainElements_Backup = open("f_DomainElements_Backup", "r")
element_nbr_init = 0
line=f_DomainElements_Backup.readline()
while line!="":
line=f_DomainElements_Backup.readline()
element_nbr_init+=1
f_DomainElements_Backup.close()
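# The readline loop above counts the output lines, i.e. the number of
# elements initially attached to the domain.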
log.I("Actual domain %s elements number is %s" % (self.domain_name,element_nbr_init))
# Adding a new domain element
log.I("Adding a new domain element to domain %s" % (self.domain_name))
out, err = self.pfw.sendCmd("addElement", str(self.domain_name), str(self.elem_1_path))
assert err == None, "ERROR : command [addElement] - Error while adding new domain element %s" % (self.elem_1_path)
assert out == "Done", "ERROR : command [addElement] - Error while adding new domain element %s" % (self.elem_1_path)
log.I("Adding a new domain element to domain %s" % (self.domain_name))
out, err = self.pfw.sendCmd("addElement", str(self.domain_name), str(self.elem_2_path))
assert err == None, "ERROR : command [addElement] - Error while adding new domain element %s" % (self.elem_2_path)
assert out == "Done", "ERROR : command [addElement] - Error while adding new domain element %s" % (self.elem_2_path)
log.I("New domain elements %s and %s added to domain %s" % (self.elem_1_path, self.elem_2_path, self.domain_name))
# Removing a domain element
log.I("Removing domain element %s from domain %s" % (self.elem_1_path,self.domain_name))
out, err = self.pfw.sendCmd("removeElement", str(self.domain_name), str(self.elem_1_path))
assert err == None, "ERROR : command [removeElement] - Error while removing domain element %s" % (self.elem_1_path)
assert out == "Done", "ERROR : command [removeElement] - Error while removing domain element %s" % (self.elem_1_path)
# Checking final domain elements
log.I("Listing final domain %s elements" % (self.domain_name))
out, err = self.pfw.sendCmd("listDomainElements",str(self.domain_name))
assert err == None, "ERROR : command [listDomainElements] - Error while listing domain elements"
f_DomainElements = open("f_DomainElements", "w")
f_DomainElements.write(out)
f_DomainElements.close()
log.I("command [listDomainElements] correctly executed")
f_DomainElements = open("f_DomainElements", "r")
element_nbr = 0
line=f_DomainElements.readline()
while line!="":
line=f_DomainElements.readline()
element_nbr+=1
f_DomainElements.close()
log.I("Actual domain %s elements number is %s" % (self.domain_name,element_nbr))
log.I("Checking domain %s elements names conformity" % (self.domain_name))
f_DomainElements = open("f_DomainElements", "r")
f_DomainElements_Backup = open("f_DomainElements_Backup", "r")
for line in range(element_nbr):
# initial domain elements shall not have been impacted by current test
if (line < element_nbr_init):
element_name = f_DomainElements.readline().strip('\n')
element_name_backup = f_DomainElements_Backup.readline().strip('\n')
assert element_name==element_name_backup, "ERROR : Error while modifying domain elements on domain %s" % (self.domain_name)
# last listed element shall be equal to the only one element added previously
else:
element_name = f_DomainElements.readline().strip('\n')
assert element_name==str(self.elem_2_path), "ERROR : Error while modifying domain elements on domain %s" % (self.domain_name)
log.I("Actual domain %s elements names conform to expected values" % (self.domain_name))
# Temporary files deletion
f_DomainElements.close()
f_DomainElements_Backup.close()
os.remove("f_DomainElements_Backup")
os.remove("f_DomainElements")
# Removing created domain element
out, err = self.pfw.sendCmd("removeElement", str(self.domain_name), str(self.elem_2_path))
assert err == None, "ERROR : command [removeElement] - Error while removing domain element %s" % (self.elem_2_path)
assert out == "Done", "ERROR : command [removeElement] - Error while removing domain element %s" % (self.elem_2_path)
def test_addElement_Error(self):
"""
Testing addElement error
------------------------
Test case description :
~~~~~~~~~~~~~~~~~~~~~~~
- add an already existing domain element
- add a non defined domain element
Tested commands :
~~~~~~~~~~~~~~~~~
- [addElement] function
- [listDomainElements] function
Expected result :
~~~~~~~~~~~~~~~~~
- Errors correctly detected
- No side effect
"""
log.D(self.test_addElement_Error.__doc__)
# List and backup initial domain elements
log.I("Listing initial domain %s elements" % (self.domain_name))
out, err = self.pfw.sendCmd("listDomainElements",str(self.domain_name))
assert err == None, "ERROR : command [listDomainElements] - Error while listing domain elements"
f_DomainElements_Backup = open("f_DomainElements_Backup", "w")
f_DomainElements_Backup.write(out)
f_DomainElements_Backup.close()
log.I("command [listDomainElements] correctly executed")
f_DomainElements_Backup = open("f_DomainElements_Backup", "r")
element_nbr_init = 0
line=f_DomainElements_Backup.readline()
while line!="":
line=f_DomainElements_Backup.readline()
element_nbr_init+=1
f_DomainElements_Backup.close()
log.I("Actual domain %s elements number is %s" % (self.domain_name,element_nbr_init))
# Adding a new domain element errors
log.I("Adding an already existing domain element to domain %s" % (self.domain_name))
out, err = self.pfw.sendCmd("addElement", str(self.domain_name), str(self.elem_0_path))
assert err == None, "ERROR : command [addElement] - Error while adding new domain element %s" % (self.elem_0_path)
assert out != "Done", "ERROR : command [addElement] - Error not detected while adding an already existing domain element to domain %s" % (self.domain_name)
log.I("Adding a non defined domain element to domain %s" % (self.domain_name))
out, err = self.pfw.sendCmd("addElement", str(self.domain_name), "Non_Defined_Element")
assert err == None, "ERROR : command [addElement] - Error while adding new domain element %s" % (self.elem_2_path)
assert out != "Done", "ERROR : command [addElement] - Error not detected while adding a non defined domain element to domain %s" % (self.domain_name)
log.I("Error when adding elements correctly detected")
# Checking final domain elements
log.I("Listing final domain %s elements" % (self.domain_name))
out, err = self.pfw.sendCmd("listDomainElements",str(self.domain_name))
assert err == None, "ERROR : command [listDomainElements] - Error while listing domain elements"
f_DomainElements = open("f_DomainElements", "w")
f_DomainElements.write(out)
f_DomainElements.close()
log.I("command [listDomainElements] correctly executed")
f_DomainElements = open("f_DomainElements", "r")
element_nbr = 0
line=f_DomainElements.readline()
while line!="":
line=f_DomainElements.readline()
element_nbr+=1
f_DomainElements.close()
log.I("Actual domain %s elements number is %s" % (self.domain_name,element_nbr))
log.I("Checking domain %s elements names conformity" % (self.domain_name))
f_DomainElements = open("f_DomainElements", "r")
f_DomainElements_Backup = open("f_DomainElements_Backup", "r")
for line in range(element_nbr):
# initial domain elements shall not have been impacted by current test
element_name = f_DomainElements.readline().strip('\n')
element_name_backup = f_DomainElements_Backup.readline().strip('\n')
assert element_name==element_name_backup, "ERROR : domain %s elements affected by addElement errors" % (self.domain_name)
log.I("Actual domain %s elements names conform to expected values" % (self.domain_name))
# Temporary files deletion
f_DomainElements.close()
f_DomainElements_Backup.close()
os.remove("f_DomainElements_Backup")
os.remove("f_DomainElements")
def test_removeElement_Error(self):
"""
Testing removeElement error
---------------------------
Test case description :
~~~~~~~~~~~~~~~~~~~~~~~
- remove a non defined domain element
- remove a domain element on a wrong domain name
Tested commands :
~~~~~~~~~~~~~~~~~
- [removeElement] function
- [listDomainElements] function
Expected result :
~~~~~~~~~~~~~~~~~
- Errors correctly detected
- No side effect
"""
log.D(self.test_removeElement_Error.__doc__)
# List and backup initial domain elements
log.I("Listing initial domain %s elements" % (self.domain_name))
out, err = self.pfw.sendCmd("listDomainElements",str(self.domain_name))
assert err == None, "ERROR : command [listDomainElements] - Error while listing domain elements"
f_DomainElements_Backup = open("f_DomainElements_Backup", "w")
f_DomainElements_Backup.write(out)
f_DomainElements_Backup.close()
log.I("command [listDomainElements] correctly executed")
f_DomainElements_Backup = open("f_DomainElements_Backup", "r")
element_nbr_init = 0
line=f_DomainElements_Backup.readline()
while line!="":
line=f_DomainElements_Backup.readline()
element_nbr_init+=1
f_DomainElements_Backup.close()
log.I("Actual domain %s elements number is %s" % (self.domain_name,element_nbr_init))
# Error when removing domain elements
log.I("Removing a domain element from a non defined domain")
out, err = self.pfw.sendCmd("removeElement", "Wrong_Domain_Name", str(self.elem_0_path))
assert err == None, "ERROR : command [removeElement] - Error when removing domain element %s" % (self.elem_0_path)
assert out != "Done", "ERROR : command [removeElement] - Error not detected when removing domain element %s from an undefined domain"% (self.elem_0_path)
log.I("Removing a non existent domain element from domain %s" % (self.domain_name))
out, err = self.pfw.sendCmd("removeElement", str(self.domain_name), "Wrong_Element_Name")
assert err == None, "ERROR : command [removeElement] - Error when removing domain element %s" % (self.elem_0_path)
assert out != "Done", "ERROR : command [removeElement] - Error not detected when removing a non existent domain element from domain %s" % (self.domain_name)
log.I("Error when removing elements correctly detected")
# Checking final domain elements
log.I("Listing final domain %s elements" % (self.domain_name))
out, err = self.pfw.sendCmd("listDomainElements",str(self.domain_name))
assert err == None, "ERROR : command [listDomainElements] - Error while listing domain elements"
f_DomainElements = open("f_DomainElements", "w")
f_DomainElements.write(out)
f_DomainElements.close()
log.I("command [listDomainElements] correctly executed")
f_DomainElements = open("f_DomainElements", "r")
element_nbr = 0
line=f_DomainElements.readline()
while line!="":
line=f_DomainElements.readline()
element_nbr+=1
f_DomainElements.close()
log.I("Actual domain %s elements number is %s" % (self.domain_name,element_nbr))
log.I("Checking domain %s elements names conformity" % (self.domain_name))
f_DomainElements = open("f_DomainElements", "r")
f_DomainElements_Backup = open("f_DomainElements_Backup", "r")
for line in range(element_nbr):
# initial domain elements shall not have been impacted by current test
element_name = f_DomainElements.readline().strip('\n')
element_name_backup = f_DomainElements_Backup.readline().strip('\n')
assert element_name==element_name_backup, "ERROR : domain %s elements affected by addElement errors" % (self.domain_name)
log.I("Actual domain %s elements names conform to expected values" % (self.domain_name))
# Temporary files deletion
f_DomainElements.close()
f_DomainElements_Backup.close()
os.remove("f_DomainElements_Backup")
os.remove("f_DomainElements")
|
Makohoek/parameter-framework
|
test/functional-tests/PfwTestCase/Domains/tDomain_Elements.py
|
Python
|
bsd-3-clause
| 16,700
|
# POK header
#
# The following file is a part of the POK project. Any modification should
# be made according to the POK licence. You CANNOT use this file or a part
# of a file for your own project.
#
# For more information on the POK licence, please see our LICENCE FILE
#
# Please follow the coding guidelines described in doc/CODING_GUIDELINES
#
# Copyright (c) 2007-2021 POK team
from avocado import Test, fail_on
from avocado.utils import build, process
import os
class ExecutionTest(Test):
def setUp(self):
self.dir = self.params.get("dir")
self.compiler = self.params.get("compiler")
self.qemu = self.params.get("qemu")
self.processors = self.params.get("proc")
if os.access("/dev/kvm", os.R_OK):
self.qemu += " -accel kvm"
self.expected = self.params.get("expected",
default=os.path.join(self.dir, "expected.txt"))
self.preserved_output = self.params.get("preserved-output",
default=os.path.join(self.dir, "preserved-output.txt"))
@fail_on(process.CmdError)
def test(self):
try:
os.unlink(self.preserved_output)
except OSError:
pass
build.make(self.dir, extra_args="clean")
build.make(self.dir, env={"MAKEFLAGS": "-j1"},
extra_args="CC='{}'".format(self.compiler))
output = process.system_output(
"{} -nographic -smp {} -kernel {}/pok.elf".format(self.qemu, self.processors, self.dir))
try:
output = output.split(b"POK kernel initialized\n", 1)[1]
except IndexError:
raise Exception("unable to find POK kernel startup message")
try:
expected = open(self.expected, "rb").read()
except FileNotFoundError:
expected = b""
self.log.debug("expected = {}".format(expected))
try:
self.assertEqual(output.rstrip(b"\n"), expected.rstrip(b"\n"))
except Exception as e:
open(self.preserved_output, "wb").write(output + b'\n')
raise e
|
pok-kernel/pok
|
misc/execution_test.py
|
Python
|
bsd-2-clause
| 2,197
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import datetime
import uuid
from keystoneclient.fixture import exception
from keystoneclient.openstack.common import timeutils
class _Service(dict):
def add_endpoint(self, public, admin=None, internal=None,
tenant_id=None, region=None):
data = {'tenantId': tenant_id or uuid.uuid4().hex,
'publicURL': public,
'adminURL': admin or public,
'internalURL': internal or public,
'region': region}
self.setdefault('endpoints', []).append(data)
return data
class Token(dict):
"""A V2 Keystone token that can be used for testing.
This object is designed to allow clients to generate a correct V2 token for
use in their test code. It saves clients from having to know the exact token
format and lets them test the portions of token handling that matter to them
without copying and pasting samples.
"""
def __init__(self, token_id=None,
expires=None, tenant_id=None, tenant_name=None, user_id=None,
user_name=None):
super(Token, self).__init__()
self.token_id = token_id or uuid.uuid4().hex
self.user_id = user_id or uuid.uuid4().hex
self.user_name = user_name or uuid.uuid4().hex
if not expires:
expires = timeutils.utcnow() + datetime.timedelta(hours=1)
try:
self.expires = expires
except (TypeError, AttributeError):
            # expires may already be a formatted string, so store it directly
self.expires_str = expires
if tenant_id or tenant_name:
self.set_scope(tenant_id, tenant_name)
@property
def root(self):
return self.setdefault('access', {})
@property
def _token(self):
return self.root.setdefault('token', {})
@property
def token_id(self):
return self._token['id']
@token_id.setter
def token_id(self, value):
self._token['id'] = value
@property
def expires_str(self):
return self._token['expires']
@expires_str.setter
def expires_str(self, value):
self._token['expires'] = value
@property
def expires(self):
return timeutils.parse_isotime(self.expires_str)
@expires.setter
def expires(self, value):
self.expires_str = timeutils.isotime(value)
@property
def _user(self):
return self.root.setdefault('user', {})
@property
def user_id(self):
return self._user['id']
@user_id.setter
def user_id(self, value):
self._user['id'] = value
@property
def user_name(self):
return self._user['name']
@user_name.setter
def user_name(self, value):
self._user['name'] = value
@property
def tenant_id(self):
return self._token.get('tenant', {}).get('id')
@tenant_id.setter
def tenant_id(self, value):
self._token.setdefault('tenant', {})['id'] = value
@property
def tenant_name(self):
return self._token.get('tenant', {}).get('name')
@tenant_name.setter
def tenant_name(self, value):
self._token.setdefault('tenant', {})['name'] = value
def validate(self):
        scoped = 'tenant' in self._token
catalog = self.root.get('serviceCatalog')
if catalog and not scoped:
msg = 'You cannot have a service catalog on an unscoped token'
raise exception.FixtureValidationError(msg)
        if scoped and not self._user.get('roles'):
msg = 'You must have roles on a token to scope it'
raise exception.FixtureValidationError(msg)
def add_role(self, name=None, id=None):
roles = self._user.setdefault('roles', [])
data = {'id': id or uuid.uuid4().hex,
'name': name or uuid.uuid4().hex}
roles.append(data)
return data
def add_service(self, type, name=None):
name = name or uuid.uuid4().hex
service = _Service(name=name, type=type)
self.root.setdefault('serviceCatalog', []).append(service)
return service
def set_scope(self, id=None, name=None):
self.tenant_id = id or uuid.uuid4().hex
self.tenant_name = name or uuid.uuid4().hex
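# Hedged usage sketch (illustrative values; every call exists on the classes
# above, but this block is not part of the upstream fixture):
#   token = Token(user_name='alice', tenant_id='t1', tenant_name='demo')
#   token.add_role(name='admin')
#   svc = token.add_service('identity', name='keystone')
#   svc.add_endpoint('http://localhost:5000/v2.0', region='RegionOne')
#   token.validate()  # scoped, has roles and a catalog, so validation passes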
|
metacloud/python-keystoneclient
|
keystoneclient/fixture/v2.py
|
Python
|
apache-2.0
| 4,823
|
'''
Copyright (c) Microsoft. All rights reserved.
This code is licensed under the MIT License (MIT).
THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF
ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY
IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR
PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.
Developed by Minigraph
Author: James Stanard
'''
import os
import sys
import shutil
from glob import glob
from uuid import uuid4
TEMPLATES_FOLDER = "./Tools/Scripts/ProjectTemplates"
def copy_template_file(filename, project, guid):
'''Copies one template file and replaces templated values'''
template_filename = os.path.join(TEMPLATES_FOLDER, filename)
output_filename = os.path.join(project, filename)
output_filename = output_filename.replace('AppTemplate', project)
output_filename = output_filename.replace('LibTemplate', project)
with open(template_filename, 'r') as infile:
with open(output_filename, 'w') as outfile:
contents = infile.read()
contents = contents.replace('TEMPLATE_NAME', project)
contents = contents.replace('TEMPLATE_GUID', guid)
outfile.write(contents)
def copy_app_template(project, guid):
'''Instantiates a new solution from a template'''
shutil.copy(os.path.join(TEMPLATES_FOLDER, 'packages.config'), project)
shutil.copy(os.path.join(TEMPLATES_FOLDER, 'pch.h'), project)
shutil.copy(os.path.join(TEMPLATES_FOLDER, 'pch.cpp'), project)
copy_template_file('Main.cpp', project, guid)
copy_template_file('AppTemplate_VS14.sln', project, guid)
copy_template_file('AppTemplate_VS14.vcxproj', project, guid)
copy_template_file('AppTemplate_VS14.vcxproj.filters', project, guid)
copy_template_file('AppTemplate_VS15.sln', project, guid)
copy_template_file('AppTemplate_VS15.vcxproj', project, guid)
copy_template_file('AppTemplate_VS15.vcxproj.filters', project, guid)
for file in glob(os.path.join(TEMPLATES_FOLDER, '*.png')):
shutil.copy(file, project)
def copy_lib_template(project, guid):
'''Instantiates a new library project from a template'''
shutil.copy(os.path.join(TEMPLATES_FOLDER, 'pch.h'), project)
shutil.copy(os.path.join(TEMPLATES_FOLDER, 'pch.cpp'), project)
copy_template_file('LibTemplate_VS14.vcxproj', project, guid)
copy_template_file('LibTemplate_VS14.vcxproj.filters', project, guid)
copy_template_file('LibTemplate_VS15.vcxproj', project, guid)
copy_template_file('LibTemplate_VS15.vcxproj.filters', project, guid)
def create_project():
    if len(sys.argv) != 3 or sys.argv[1].lower() not in ('app', 'lib'):
print('Usage: {0} [app|lib] <ProjectName>'.format(sys.argv[0]))
return
project_name = sys.argv[2]
folder_contents = set(os.listdir())
expected_folders = set(['3rdParty', 'Core', 'Tools'])
if not expected_folders.issubset(folder_contents):
print('Run this script from the root of MiniEngine')
elif project_name in folder_contents:
print('Project already exists')
else:
os.mkdir(project_name)
if sys.argv[1].lower() == 'app':
copy_app_template(project_name, str(uuid4()).upper())
else:
copy_lib_template(project_name, str(uuid4()).upper())
if __name__ == "__main__":
create_project()
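# Hedged usage sketch: the script is meant to be run from the MiniEngine root
# (it checks for the 3rdParty/Core/Tools folders); project names are hypothetical:
#   python Tools/Scripts/CreateNewProject.py app MyDemoApp
#   python Tools/Scripts/CreateNewProject.py lib MyUtilityLib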
|
KevindeQ/DirectX-Graphics-Samples
|
MiniEngine/Tools/Scripts/CreateNewProject.py
|
Python
|
mit
| 3,338
|
# -*- coding: utf-8 -*-
from datetime import datetime
from threading import Thread
from .follower import BaseFollower
from .log import logger
class RiceQuantFollower(BaseFollower):
def __init__(self):
super().__init__()
self.client = None
def login(self, user=None, password=None, **kwargs):
from rqopen_client import RQOpenClient
self.client = RQOpenClient(user, password, logger=logger)
def follow(
self,
users,
run_id,
track_interval=1,
trade_cmd_expire_seconds=120,
cmd_cache=True,
entrust_prop="limit",
send_interval=0,
):
"""跟踪ricequant对应的模拟交易,支持多用户多策略
:param users: 支持easytrader的用户对象,支持使用 [] 指定多个用户
:param run_id: ricequant 的模拟交易ID,支持使用 [] 指定多个模拟交易
:param track_interval: 轮训模拟交易时间,单位为秒
:param trade_cmd_expire_seconds: 交易指令过期时间, 单位为秒
:param cmd_cache: 是否读取存储历史执行过的指令,防止重启时重复执行已经交易过的指令
:param entrust_prop: 委托方式, 'limit' 为限价,'market' 为市价, 仅在银河实现
:param send_interval: 交易发送间隔, 默认为0s。调大可防止卖出买入时卖出单没有及时成交导致的买入金额不足
"""
users = self.warp_list(users)
run_ids = self.warp_list(run_id)
if cmd_cache:
self.load_expired_cmd_cache()
self.start_trader_thread(
users, trade_cmd_expire_seconds, entrust_prop, send_interval
)
workers = []
for id_ in run_ids:
strategy_name = self.extract_strategy_name(id_)
strategy_worker = Thread(
target=self.track_strategy_worker,
args=[id_, strategy_name],
kwargs={"interval": track_interval},
)
strategy_worker.start()
workers.append(strategy_worker)
logger.info("开始跟踪策略: %s", strategy_name)
for worker in workers:
worker.join()
def extract_strategy_name(self, run_id):
ret_json = self.client.get_positions(run_id)
if ret_json["code"] != 200:
logger.error(
"fetch data from run_id %s fail, msg %s",
run_id,
ret_json["msg"],
)
raise RuntimeError(ret_json["msg"])
return ret_json["resp"]["name"]
def extract_day_trades(self, run_id):
ret_json = self.client.get_day_trades(run_id)
if ret_json["code"] != 200:
logger.error(
"fetch day trades from run_id %s fail, msg %s",
run_id,
ret_json["msg"],
)
raise RuntimeError(ret_json["msg"])
return ret_json["resp"]["trades"]
def query_strategy_transaction(self, strategy, **kwargs):
transactions = self.extract_day_trades(strategy)
transactions = self.project_transactions(transactions, **kwargs)
return self.order_transactions_sell_first(transactions)
@staticmethod
def stock_shuffle_to_prefix(stock):
assert (
len(stock) == 11
), "stock {} must like 123456.XSHG or 123456.XSHE".format(stock)
code = stock[:6]
if stock.find("XSHG") != -1:
return "sh" + code
if stock.find("XSHE") != -1:
return "sz" + code
raise TypeError("not valid stock code: {}".format(code))
def project_transactions(self, transactions, **kwargs):
new_transactions = []
for transaction in transactions:
new_transaction = {}
new_transaction["price"] = transaction["price"]
new_transaction["amount"] = int(abs(transaction["quantity"]))
new_transaction["datetime"] = datetime.strptime(
transaction["time"], "%Y-%m-%d %H:%M:%S"
)
new_transaction["stock_code"] = self.stock_shuffle_to_prefix(
transaction["order_book_id"]
)
new_transaction["action"] = (
"buy" if transaction["quantity"] > 0 else "sell"
)
new_transactions.append(new_transaction)
return new_transactions
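# Hedged usage sketch (placeholder credentials and run ID; `trader` stands in
# for an easytrader user object created elsewhere):
#   follower = RiceQuantFollower()
#   follower.login(user='ricequant-account', password='secret')
#   follower.follow(users=[trader], run_id=[12345], track_interval=5)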
|
msincenselee/vnpy
|
vnpy/api/easytrader/ricequant_follower.py
|
Python
|
mit
| 4,413
|
#!/usr/bin/env python
"""
Script to get data from the KPI database and render dashboard HTML files.
"""
from __future__ import print_function
import click
from datetime import datetime
from distutils.dir_util import copy_tree
import logging
import jinja2
import json
import os
import urllib
import yaml
logging.basicConfig(level=logging.WARNING,
format='[%(levelname)s] [%(asctime)s]: %(message)s',
datefmt='%a, %d %b %Y %H:%M:%S')
# Get package version
script_dir = os.path.dirname(os.path.realpath(__file__))
with open(os.path.join(os.path.dirname(script_dir), 'version.txt')) as f:
p_version = f.read()
# Command line options
@click.command( context_settings = dict( help_option_names = ['-h', '--help'] ))
@click.option('--outdir', '-o', required=True, help = "Create dashboards in the specified output directory.")
@click.option('--demo', is_flag=True)
@click.option('--genstat_url', '-g', default="https://genomics-status.scilifelab.se")
@click.version_option(p_version)
def make_dashboards(outdir, demo, genstat_url):
"""
    Function to get data from the KPI database and render dashboard HTML files.
"""
### CONFIGURATION VARS
templates_dir = os.path.join(script_dir, 'templates')
outdir = os.path.realpath(outdir)
logging.info("Making reports in {}".format(outdir))
# Paths relative to make_dashboards/templates/
external_fn = os.path.join('external','index.html')
ngi_website_fn = os.path.join('ngi_website','index.html')
### GET THE EXTERNAL DATA
external_url = '{}/api/v1/stats'.format(genstat_url)
data_external = json.load(urllib.urlopen(external_url))
data_external['date_rendered'] = datetime.now().strftime("%Y-%m-%d, %H:%M")
data_external['p_version'] = p_version
# Translations for lowercase keys
with open("key_names.yaml", 'r') as f:
data_external['key_names'] = yaml.load(f, Loader=yaml.SafeLoader)
data_external['json'] = json.dumps(data_external, indent=4)
### GET THE DELIVERY TIMES DATA
dtimes_url = '{}/api/v1/stats/year_deliverytime_application'.format(genstat_url)
dtimes = json.load(urllib.urlopen(dtimes_url))
dtimes_json = json.dumps(dtimes, indent=4)
### RENDER THE TEMPLATES
# Copy across the templates - needed so that associated assets are there
copy_tree(templates_dir, outdir)
# Load the templates
try:
env = jinja2.Environment(loader=jinja2.FileSystemLoader(templates_dir))
external_template = env.get_template(external_fn)
ngi_website_template = env.get_template(ngi_website_fn)
except:
raise IOError ("Could not load dashboard template files")
# Render templates and save
external_output_fn = os.path.join(outdir, external_fn)
ngi_website_output_fn = os.path.join(outdir, ngi_website_fn)
# External template
external_output = external_template.render(d = data_external, dt_data = dtimes_json)
    try:
        with open(external_output_fn, 'w') as f:
            print(external_output, file=f)
    except IOError as e:
        raise IOError("Could not print report to '{}' - {}".format(external_output_fn, e))
# ngi_website template
ngi_website_output = ngi_website_template.render(d = data_external, dt_data = dtimes_json)
    try:
        with open(ngi_website_output_fn, 'w') as f:
            print(ngi_website_output, file=f)
    except IOError as e:
        raise IOError("Could not print report to '{}' - {}".format(ngi_website_output_fn, e))
if __name__ == '__main__':
try:
conf_file = os.path.join(os.environ.get('HOME'), '.dashboardrc')
with open(conf_file, "r") as f:
config = yaml.load(f, Loader=yaml.SafeLoader)
except IOError:
click.secho("Could not open the config file {}".format(conf_file), fg="red")
config = {}
make_dashboards(default_map=config)
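# Hedged usage sketch (output path is a placeholder; option names match the
# click declarations above):
#   python make_dashboards.py -o /tmp/dashboards
#   python make_dashboards.py --outdir ./out --genstat_url https://genomics-status.scilifelab.se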
|
NationalGenomicsInfrastructure/NGI_dashboards
|
make_dashboards/make_dashboards.py
|
Python
|
mit
| 3,999
|
# Copyright 2012 VMware, Inc.
#
# All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
import httplib
from neutron.openstack.common import log as logging
from neutron.plugins.vmware.api_client import base
from neutron.plugins.vmware.api_client import eventlet_client
from neutron.plugins.vmware.api_client import eventlet_request
from neutron.plugins.vmware.api_client import exception
from neutron.plugins.vmware.api_client import version
LOG = logging.getLogger(__name__)
class NsxApiClient(eventlet_client.EventletApiClient):
"""The Nsx API Client."""
def __init__(self, api_providers, user, password,
concurrent_connections=base.DEFAULT_CONCURRENT_CONNECTIONS,
gen_timeout=base.GENERATION_ID_TIMEOUT,
use_https=True,
connect_timeout=base.DEFAULT_CONNECT_TIMEOUT,
http_timeout=75, retries=2, redirects=2):
'''Constructor. Adds the following:
:param http_timeout: how long to wait before aborting an
unresponsive controller (and allow for retries to another
controller in the cluster)
        :param retries: the number of request retries.
        :param redirects: the maximum number of redirects to follow.
'''
super(NsxApiClient, self).__init__(
api_providers, user, password,
concurrent_connections=concurrent_connections,
gen_timeout=gen_timeout, use_https=use_https,
connect_timeout=connect_timeout)
self._request_timeout = http_timeout * retries
self._http_timeout = http_timeout
self._retries = retries
self._redirects = redirects
self._version = None
# NOTE(salvatore-orlando): This method is not used anymore. Login is now
# performed automatically inside the request eventlet if necessary.
def login(self, user=None, password=None):
'''Login to NSX controller.
Assumes same password is used for all controllers.
:param user: controller user (usually admin). Provided for
backwards compatibility. In the normal mode of operation
this should be None.
:param password: controller password. Provided for backwards
compatibility. In the normal mode of operation this should
be None.
'''
if user:
self._user = user
if password:
self._password = password
return self._login()
def request(self, method, url, body="", content_type="application/json"):
'''Issues request to controller.'''
g = eventlet_request.GenericRequestEventlet(
self, method, url, body, content_type, auto_login=True,
http_timeout=self._http_timeout,
retries=self._retries, redirects=self._redirects)
g.start()
response = g.join()
LOG.debug(_('Request returns "%s"'), response)
# response is a modified HTTPResponse object or None.
# response.read() will not work on response as the underlying library
# request_eventlet.ApiRequestEventlet has already called this
# method in order to extract the body and headers for processing.
# ApiRequestEventlet derived classes call .read() and
# .getheaders() on the HTTPResponse objects and store the results in
# the response object's .body and .headers data members for future
# access.
if response is None:
# Timeout.
LOG.error(_('Request timed out: %(method)s to %(url)s'),
{'method': method, 'url': url})
raise exception.RequestTimeout()
status = response.status
if status == httplib.UNAUTHORIZED:
raise exception.UnAuthorizedRequest()
# Fail-fast: Check for exception conditions and raise the
# appropriate exceptions for known error codes.
if status in exception.ERROR_MAPPINGS:
LOG.error(_("Received error code: %s"), status)
LOG.error(_("Server Error Message: %s"), response.body)
exception.ERROR_MAPPINGS[status](response)
# Continue processing for non-error condition.
if (status != httplib.OK and status != httplib.CREATED
and status != httplib.NO_CONTENT):
LOG.error(_("%(method)s to %(url)s, unexpected response code: "
"%(status)d (content = '%(body)s')"),
{'method': method, 'url': url,
'status': response.status, 'body': response.body})
return None
if not self._version:
self._version = version.find_version(response.headers)
return response.body
def get_version(self):
if not self._version:
# Determine the controller version by querying the
# cluster nodes. Currently, the version will be the
# one of the server that responds.
self.request('GET', '/ws.v1/control-cluster/node')
if not self._version:
LOG.error(_('Unable to determine NSX version. '
'Plugin might not work as expected.'))
return self._version
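# Hedged usage sketch (the api_providers element format, (host, port, is_ssl),
# and the credentials are assumptions for illustration):
#   client = NsxApiClient([('192.0.2.10', 443, True)], 'admin', 'secret')
#   body = client.request('GET', '/ws.v1/control-cluster/node')
#   print(client.get_version())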
|
samsu/neutron
|
plugins/vmware/api_client/client.py
|
Python
|
apache-2.0
| 5,785
|
"""mandelbrot benchmark"""
from time import time
try:
    # numpy is required by the CPython fallback (mandelbrot_numpy); guarded so
    # the PYTHONJS/GPU translation path is unaffected if it is unavailable.
    import numpy
except ImportError:
    pass
def pprint(arr, w):
x = []
for a in arr:
x.append(a)
if len(x) >= w:
print( [ round(y,2) for y in x] )
x = []
def mandelbrot_numpy(size=512, exit_limit=100):
img_array = numpy.zeros([size, size], int)
for y in range(size):
for x in range(size):
c = complex(x / float(size) * 4 - 2,
y / float(size) * 4 - 2)
z = c
for i in range(exit_limit):
z = (z**2) + c
img_array[y, x] += 1
if abs(z) > 2:
# z is escaping to infinity, so point is not in set
break
else:
                # if the loop is exhausted, the point is inside the set
img_array[y, x] = 0
return img_array
def main():
@returns( array=[512,512] )
@typedef( x=float, y=float, tempX=float, i=int, runaway=int, c=vec2)
@gpu.main
def gpufunc():
c = get_global_id()
x = 0.0
y = 0.0
tempX = 0.0
i = 0
runaway = 0
for i in range(100):
tempX = x * x - y * y + float(c.x)
y = 2.0 * x * y + float(c.y)
x = tempX
if runaway == 0 and x * x + y * y > 100.0:
runaway = i
return float(runaway) * 0.01
start = time()
if PYTHON == 'PYTHONJS':
res = gpufunc()
#pprint(res, 32)
else:
res = mandelbrot_numpy()
print(time()-start)
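# Hedged usage sketch (CPython path only; assumes numpy is installed):
#   img = mandelbrot_numpy(size=64)      # 64x64 array of escape iteration counts
#   print(img.max(), (img == 0).sum())   # deepest escape count / points in the set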
|
pombredanne/Rusthon
|
regtests/bench/mandelbrot.py
|
Python
|
bsd-3-clause
| 1,215
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Your application """
class Application:
def __init__(self):
self.plugins = {}
self.init_plugins()
def enable_debug(self):
from model.Common import Common
Common.set_debug(True)
def init_plugins(self):
from src.plugins.PluginWebmin import PluginWebmin
from src.plugins.PluginShell import PluginShell
from src.plugins.PluginPhpMyAdmin import PluginPhpMyAdmin
from src.plugins.PluginMonit import PluginMonit
self.plugins['phpmyadmin'] = PluginPhpMyAdmin()
self.plugins['shellinabox'] = PluginShell()
self.plugins['webmin'] = PluginWebmin()
self.plugins['monit'] = PluginMonit()
def show_plugins(self):
print "list of available plugins :"
for plugin in self.plugins.items():
print " * " + plugin[0] + " - " + plugin[1].description
def status(self, plugin_name):
if plugin_name in self.plugins.keys():
print "Status of the %s plugin :" % (plugin_name,)
plugin = self.plugins[plugin_name]
plugin.show_status()
elif plugin_name == "all":
print "Status of all plugins :"
for plugin in self.plugins.iteritems():
plugin[1].show_status()
else:
print "Plugin %s is unknown" % (plugin_name,)
return False
def install(self, plugin_name):
if plugin_name in self.plugins.keys():
print "Enable the %s plugin :" % (plugin_name,)
plugin = self.plugins[plugin_name]
plugin.install()
else:
print "Plugin %s is unknown" % (plugin_name,)
return False
def enable(self, plugin_name):
if plugin_name in self.plugins.keys():
print "Enable the %s plugin :" % (plugin_name,)
plugin = self.plugins[plugin_name]
plugin.enable()
elif plugin_name == "all":
print "Enable all the plugins :"
for plugin in self.plugins.iteritems():
plugin[1].enable()
else:
print "Plugin %s is unknown" % (plugin_name,)
return False
def disable(self, plugin_name):
if plugin_name in self.plugins.keys():
print "Disable the %s plugin :" % (plugin_name,)
plugin = self.plugins[plugin_name]
plugin.disable()
elif plugin_name == "all":
print "Disable all the plugins :"
for plugin in self.plugins.iteritems():
plugin[1].disable()
else:
print "Plugin %s is unknown" % (plugin_name,)
return False
def main(self, options, args):
if not args:
args = ['all']
if len(args) == 2:
options.action = args[1]
if options.action == "status":
self.status(args[0])
elif options.action == "install":
self.install(args[0])
elif options.action == "enable":
self.enable(args[0])
elif options.action == "disable":
self.disable(args[0])
else:
print "Action %s is unknown" % (options.action,)
#else:
# print "Hum ... You don't gave me anything to do :'("
# print "Use -h / --help if you wan't to see possibles parameters to feed me"
#just for development
if __name__ == "__main__":
app = Application()
app.show_plugins()
#app.status("all")
app.status("webmin")
app.enable("webmin")
app.status("webmin")
app.disable("webmin")
app.status("webmin")
|
kefniark/turnkey-tools
|
src/app.py
|
Python
|
mit
| 3,640
|
# encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
depends_on = (
("actstream", "0007_auto__add_field_follow_started"),
)
def forwards(self, orm):
# Removing unique constraint on 'ContactRole', fields ['contact', 'layer', 'role']
db.delete_unique('layers_contactrole', ['contact_id', 'layer_id', 'role_id'])
# Deleting model 'Link'
db.delete_table('layers_link')
# Deleting model 'TopicCategory'
db.delete_table('layers_topiccategory')
# Deleting model 'ContactRole'
db.delete_table('layers_contactrole')
# Deleting field 'Layer.csw_mdsource'
db.delete_column('layers_layer', 'csw_mdsource')
# Deleting field 'Layer.csw_schema'
db.delete_column('layers_layer', 'csw_schema')
# Deleting field 'Layer.csw_typename'
db.delete_column('layers_layer', 'csw_typename')
# Deleting field 'Layer.constraints_other'
db.delete_column('layers_layer', 'constraints_other')
# Deleting field 'Layer.date'
db.delete_column('layers_layer', 'date')
# Deleting field 'Layer.owner'
db.delete_column('layers_layer', 'owner_id')
# Deleting field 'Layer.uuid'
db.delete_column('layers_layer', 'uuid')
# Deleting field 'Layer.title'
db.delete_column('layers_layer', 'title')
# Deleting field 'Layer.date_type'
db.delete_column('layers_layer', 'date_type')
# Deleting field 'Layer.csw_insert_date'
db.delete_column('layers_layer', 'csw_insert_date')
# Deleting field 'Layer.temporal_extent_end'
db.delete_column('layers_layer', 'temporal_extent_end')
# Deleting field 'Layer.distribution_url'
db.delete_column('layers_layer', 'distribution_url')
# Deleting field 'Layer.metadata_xml'
db.delete_column('layers_layer', 'metadata_xml')
# Deleting field 'Layer.data_quality_statement'
db.delete_column('layers_layer', 'data_quality_statement')
# Deleting field 'Layer.temporal_extent_start'
db.delete_column('layers_layer', 'temporal_extent_start')
# Deleting field 'Layer.bbox_x1'
db.delete_column('layers_layer', 'bbox_x1')
# Deleting field 'Layer.bbox_x0'
db.delete_column('layers_layer', 'bbox_x0')
# Deleting field 'Layer.distribution_description'
db.delete_column('layers_layer', 'distribution_description')
# Deleting field 'Layer.abstract'
db.delete_column('layers_layer', 'abstract')
# Deleting field 'Layer.supplemental_information'
db.delete_column('layers_layer', 'supplemental_information')
# Deleting field 'Layer.edition'
db.delete_column('layers_layer', 'edition')
# Deleting field 'Layer.category'
db.delete_column('layers_layer', 'category_id')
# Deleting field 'Layer.spatial_representation_type'
db.delete_column('layers_layer', 'spatial_representation_type')
# Deleting field 'Layer.bbox_y0'
db.delete_column('layers_layer', 'bbox_y0')
# Deleting field 'Layer.bbox_y1'
db.delete_column('layers_layer', 'bbox_y1')
# Deleting field 'Layer.topic_category'
db.delete_column('layers_layer', 'topic_category')
# Deleting field 'Layer.purpose'
db.delete_column('layers_layer', 'purpose')
# Deleting field 'Layer.srid'
db.delete_column('layers_layer', 'srid')
# Deleting field 'Layer.language'
db.delete_column('layers_layer', 'language')
# Deleting field 'Layer.keywords_region'
db.delete_column('layers_layer', 'keywords_region')
# Deleting field 'Layer.maintenance_frequency'
db.delete_column('layers_layer', 'maintenance_frequency')
# Deleting field 'Layer.csw_anytext'
db.delete_column('layers_layer', 'csw_anytext')
# Deleting field 'Layer.csw_type'
db.delete_column('layers_layer', 'csw_type')
# Deleting field 'Layer.metadata_uploaded'
db.delete_column('layers_layer', 'metadata_uploaded')
# Deleting field 'Layer.csw_wkt_geometry'
db.delete_column('layers_layer', 'csw_wkt_geometry')
# Deleting field 'Layer.constraints_use'
db.delete_column('layers_layer', 'constraints_use')
# Remove stale contents
from django.contrib.contenttypes.models import ContentType
ct = ContentType.objects.get(app_label='layers', model='contactrole')
ct.delete()
def backwards(self, orm):
raise RuntimeError("Cannot reverse this migration.")
models = {
'actstream.action': {
'Meta': {'ordering': "('-timestamp',)", 'object_name': 'Action'},
'action_object_content_type': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'action_object'", 'null': 'True', 'to': "orm['contenttypes.ContentType']"}),
'action_object_object_id': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'actor_content_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'actor'", 'to': "orm['contenttypes.ContentType']"}),
'actor_object_id': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'data': ('jsonfield.fields.JSONField', [], {'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'public': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'target_content_type': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'target'", 'null': 'True', 'to': "orm['contenttypes.ContentType']"}),
'target_object_id': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'timestamp': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2013, 4, 15, 4, 29, 40, 959238)'}),
'verb': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2013, 4, 15, 4, 29, 40, 964863)'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2013, 4, 15, 4, 29, 40, 964803)'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'relationships': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'related_to'", 'symmetrical': 'False', 'through': "orm['relationships.Relationship']", 'to': "orm['auth.User']"}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'base.contactrole': {
'Meta': {'unique_together': "(('contact', 'resource', 'role'),)", 'object_name': 'ContactRole'},
'contact': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['people.Profile']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'resource': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['base.ResourceBase']"}),
'role': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['people.Role']"})
},
'base.resourcebase': {
'Meta': {'object_name': 'ResourceBase'},
'abstract': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'bbox_x0': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '19', 'decimal_places': '10', 'blank': 'True'}),
'bbox_x1': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '19', 'decimal_places': '10', 'blank': 'True'}),
'bbox_y0': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '19', 'decimal_places': '10', 'blank': 'True'}),
'bbox_y1': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '19', 'decimal_places': '10', 'blank': 'True'}),
'category': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['base.TopicCategory']", 'null': 'True', 'blank': 'True'}),
'constraints_other': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'constraints_use': ('django.db.models.fields.CharField', [], {'default': "'copyright'", 'max_length': '255'}),
'contacts': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['people.Profile']", 'through': "orm['base.ContactRole']", 'symmetrical': 'False'}),
'csw_anytext': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'csw_insert_date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
'csw_mdsource': ('django.db.models.fields.CharField', [], {'default': "'local'", 'max_length': '256'}),
'csw_schema': ('django.db.models.fields.CharField', [], {'default': "'http://www.isotc211.org/2005/gmd'", 'max_length': '64'}),
'csw_type': ('django.db.models.fields.CharField', [], {'default': "'dataset'", 'max_length': '32'}),
'csw_typename': ('django.db.models.fields.CharField', [], {'default': "'gmd:MD_Metadata'", 'max_length': '32'}),
'csw_wkt_geometry': ('django.db.models.fields.TextField', [], {'default': "'SRID=4326;POLYGON((-180 -90,-180 90,180 90,180 -90,-180 -90))'"}),
'data_quality_statement': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'date_type': ('django.db.models.fields.CharField', [], {'default': "'publication'", 'max_length': '255'}),
'distribution_description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'distribution_url': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'edition': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'keywords_region': ('django.db.models.fields.CharField', [], {'default': "'USA'", 'max_length': '3'}),
'language': ('django.db.models.fields.CharField', [], {'default': "'eng'", 'max_length': '3'}),
'maintenance_frequency': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'metadata_uploaded': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'metadata_xml': ('django.db.models.fields.TextField', [], {'default': '\'<gmd:MD_Metadata xmlns:gmd="http://www.isotc211.org/2005/gmd"/>\'', 'null': 'True', 'blank': 'True'}),
'owner': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True', 'blank': 'True'}),
'purpose': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'spatial_representation_type': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'srid': ('django.db.models.fields.CharField', [], {'default': "'EPSG:4326'", 'max_length': '255'}),
'supplemental_information': ('django.db.models.fields.TextField', [], {'default': "u'No information provided'"}),
'temporal_extent_end': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'temporal_extent_start': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'thumbnail': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['base.Thumbnail']", 'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'uuid': ('django.db.models.fields.CharField', [], {'max_length': '36'})
},
'base.thumbnail': {
'Meta': {'object_name': 'Thumbnail'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'thumb_file': ('django.db.models.fields.files.FileField', [], {'max_length': '100'}),
'thumb_spec': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'version': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0', 'null': 'True'})
},
'base.topiccategory': {
'Meta': {'ordering': "('name',)", 'object_name': 'TopicCategory'},
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50', 'db_index': 'True'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'layers.attribute': {
'Meta': {'object_name': 'Attribute'},
'attribute': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'}),
'attribute_label': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'}),
'attribute_type': ('django.db.models.fields.CharField', [], {'default': "'xsd:string'", 'max_length': '50'}),
'average': ('django.db.models.fields.CharField', [], {'default': "'NA'", 'max_length': '255', 'null': 'True'}),
'count': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
'display_order': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_stats_updated': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'layer': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'attribute_set'", 'to': "orm['layers.Layer']"}),
'max': ('django.db.models.fields.CharField', [], {'default': "'NA'", 'max_length': '255', 'null': 'True'}),
'median': ('django.db.models.fields.CharField', [], {'default': "'NA'", 'max_length': '255', 'null': 'True'}),
'min': ('django.db.models.fields.CharField', [], {'default': "'NA'", 'max_length': '255', 'null': 'True'}),
'stddev': ('django.db.models.fields.CharField', [], {'default': "'NA'", 'max_length': '255', 'null': 'True'}),
'sum': ('django.db.models.fields.CharField', [], {'default': "'NA'", 'max_length': '255', 'null': 'True'}),
'unique_values': ('django.db.models.fields.TextField', [], {'default': "'NA'", 'null': 'True', 'blank': 'True'}),
'visible': ('django.db.models.fields.BooleanField', [], {'default': 'True'})
},
'layers.layer': {
'Meta': {'object_name': 'Layer', '_ormbases': ['base.ResourceBase']},
'default_style': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'layer_default_style'", 'null': 'True', 'to': "orm['layers.Style']"}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'popular_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'resourcebase_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['base.ResourceBase']", 'unique': 'True', 'primary_key': 'True'}),
'share_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'store': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'storeType': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'styles': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'layer_styles'", 'symmetrical': 'False', 'to': "orm['layers.Style']"}),
'typename': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '128'}),
'workspace': ('django.db.models.fields.CharField', [], {'max_length': '128'})
},
'layers.style': {
'Meta': {'object_name': 'Style'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'sld_body': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'sld_title': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'sld_url': ('django.db.models.fields.CharField', [], {'max_length': '1000', 'null': 'True'}),
'sld_version': ('django.db.models.fields.CharField', [], {'max_length': '12', 'null': 'True', 'blank': 'True'}),
'workspace': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'})
},
'people.profile': {
'Meta': {'object_name': 'Profile'},
'area': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'city': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'country': ('django.db.models.fields.CharField', [], {'max_length': '3', 'null': 'True', 'blank': 'True'}),
'delivery': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True', 'blank': 'True'}),
'fax': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'organization': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'position': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'profile': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'blank': 'True', 'related_name': "'profile'", 'unique': 'True', 'null': 'True', 'to': "orm['auth.User']"}),
'voice': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'zipcode': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'})
},
'people.role': {
'Meta': {'object_name': 'Role'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'value': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'})
},
'relationships.relationship': {
'Meta': {'ordering': "('created',)", 'unique_together': "(('from_user', 'to_user', 'status', 'site'),)", 'object_name': 'Relationship'},
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'from_user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'from_users'", 'to': "orm['auth.User']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'site': ('django.db.models.fields.related.ForeignKey', [], {'default': '1', 'related_name': "'relationships'", 'to': "orm['sites.Site']"}),
'status': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['relationships.RelationshipStatus']"}),
'to_user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'to_users'", 'to': "orm['auth.User']"}),
'weight': ('django.db.models.fields.FloatField', [], {'default': '1.0', 'null': 'True', 'blank': 'True'})
},
'relationships.relationshipstatus': {
'Meta': {'ordering': "('name',)", 'object_name': 'RelationshipStatus'},
'from_slug': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'login_required': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'private': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'symmetrical_slug': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'to_slug': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'verb': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'sites.site': {
'Meta': {'ordering': "('domain',)", 'object_name': 'Site', 'db_table': "'django_site'"},
'domain': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'taggit.tag': {
'Meta': {'object_name': 'Tag'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '100', 'db_index': 'True'})
},
'taggit.taggeditem': {
'Meta': {'object_name': 'TaggedItem'},
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'taggit_taggeditem_tagged_items'", 'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_id': ('django.db.models.fields.IntegerField', [], {'db_index': 'True'}),
'tag': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'taggit_taggeditem_items'", 'to': "orm['taggit.Tag']"})
}
}
complete_apps = ['layers']
|
AnnalisaS/migration_geonode
|
geonode/layers/migrations/0008_auto__del_link__del_topiccategory__del_contactrole__del_unique_contact.py
|
Python
|
gpl-3.0
| 25,702
|
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Core Keras layers.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import copy
import types as python_types
import numpy as np
from tensorflow.contrib.keras.python.keras import activations
from tensorflow.contrib.keras.python.keras import backend as K
from tensorflow.contrib.keras.python.keras import constraints
from tensorflow.contrib.keras.python.keras import initializers
from tensorflow.contrib.keras.python.keras import regularizers
from tensorflow.contrib.keras.python.keras.engine import InputSpec
from tensorflow.contrib.keras.python.keras.engine import Layer
from tensorflow.contrib.keras.python.keras.utils.generic_utils import deserialize_keras_object
from tensorflow.contrib.keras.python.keras.utils.generic_utils import func_dump
from tensorflow.contrib.keras.python.keras.utils.generic_utils import func_load
from tensorflow.python.framework import tensor_shape
from tensorflow.python.layers import core as tf_core_layers
from tensorflow.python.util import tf_inspect
class Masking(Layer):
"""Masks a sequence by using a mask value to skip timesteps.
For each timestep in the input tensor (dimension #1 in the tensor),
if all values in the input tensor at that timestep
are equal to `mask_value`, then the timestep will be masked (skipped)
in all downstream layers (as long as they support masking).
If any downstream layer does not support masking yet receives such
an input mask, an exception will be raised.
Example:
Consider a Numpy data array `x` of shape `(samples, timesteps, features)`,
  to be fed to an LSTM layer.
  You want to mask timesteps #3 and #5 because you lack data for
these timesteps. You can:
- set `x[:, 3, :] = 0.` and `x[:, 5, :] = 0.`
- insert a `Masking` layer with `mask_value=0.` before the LSTM layer:
```python
model = Sequential()
model.add(Masking(mask_value=0., input_shape=(timesteps, features)))
model.add(LSTM(32))
```
"""
def __init__(self, mask_value=0., **kwargs):
super(Masking, self).__init__(**kwargs)
self.supports_masking = True
self.mask_value = mask_value
def compute_mask(self, inputs, mask=None):
return K.any(K.not_equal(inputs, self.mask_value), axis=-1)
def call(self, inputs):
boolean_mask = K.any(
K.not_equal(inputs, self.mask_value), axis=-1, keepdims=True)
return inputs * K.cast(boolean_mask, K.floatx())
def get_config(self):
config = {'mask_value': self.mask_value}
base_config = super(Masking, self).get_config()
return dict(list(base_config.items()) + list(config.items()))
class Dropout(tf_core_layers.Dropout, Layer):
"""Applies Dropout to the input.
Dropout consists in randomly setting
a fraction `rate` of input units to 0 at each update during training time,
which helps prevent overfitting.
Arguments:
rate: float between 0 and 1. Fraction of the input units to drop.
noise_shape: 1D integer tensor representing the shape of the
binary dropout mask that will be multiplied with the input.
For instance, if your inputs have shape
`(batch_size, timesteps, features)` and
you want the dropout mask to be the same for all timesteps,
you can use `noise_shape=(batch_size, 1, features)`.
seed: A Python integer to use as random seed.
"""
def __init__(self, rate, noise_shape=None, seed=None, **kwargs):
self.supports_masking = True
# Inheritance call order:
# 1) tf.layers.Dropout, 2) keras.layers.Layer, 3) tf.layers.Layer
    super(Dropout, self).__init__(rate=rate, noise_shape=noise_shape,
                                  seed=seed, **kwargs)
def call(self, inputs, training=None):
if training is None:
training = K.learning_phase()
output = super(Dropout, self).call(inputs, training=training)
if training is K.learning_phase():
output._uses_learning_phase = True # pylint: disable=protected-access
return output
def get_config(self):
config = {'rate': self.rate}
base_config = super(Dropout, self).get_config()
return dict(list(base_config.items()) + list(config.items()))
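# Hedged usage sketch mirroring the `noise_shape` documentation above
# (`batch_size` and `features` are placeholders): one dropout mask is drawn
# per sample and broadcast across all timesteps.
#   drop = Dropout(rate=0.5, noise_shape=(batch_size, 1, features))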
class SpatialDropout1D(Dropout):
"""Spatial 1D version of Dropout.
This version performs the same function as Dropout, however it drops
entire 1D feature maps instead of individual elements. If adjacent frames
within feature maps are strongly correlated (as is normally the case in
early convolution layers) then regular dropout will not regularize the
activations and will otherwise just result in an effective learning rate
decrease. In this case, SpatialDropout1D will help promote independence
between feature maps and should be used instead.
Arguments:
rate: float between 0 and 1. Fraction of the input units to drop.
Input shape:
3D tensor with shape:
`(samples, timesteps, channels)`
Output shape:
Same as input
References:
- [Efficient Object Localization Using Convolutional
Networks](https://arxiv.org/abs/1411.4280)
"""
def __init__(self, rate, **kwargs):
super(SpatialDropout1D, self).__init__(rate, **kwargs)
self.input_spec = InputSpec(ndim=3)
def _get_noise_shape(self, inputs):
input_shape = K.shape(inputs)
noise_shape = (input_shape[0], 1, input_shape[2])
return noise_shape
class SpatialDropout2D(Dropout):
"""Spatial 2D version of Dropout.
This version performs the same function as Dropout, however it drops
entire 2D feature maps instead of individual elements. If adjacent pixels
within feature maps are strongly correlated (as is normally the case in
early convolution layers) then regular dropout will not regularize the
activations and will otherwise just result in an effective learning rate
decrease. In this case, SpatialDropout2D will help promote independence
between feature maps and should be used instead.
Arguments:
rate: float between 0 and 1. Fraction of the input units to drop.
data_format: 'channels_first' or 'channels_last'.
In 'channels_first' mode, the channels dimension
(the depth) is at index 1,
      in 'channels_last' mode it is at index 3.
It defaults to the `image_data_format` value found in your
Keras config file at `~/.keras/keras.json`.
If you never set it, then it will be "channels_last".
Input shape:
4D tensor with shape:
`(samples, channels, rows, cols)` if data_format='channels_first'
or 4D tensor with shape:
`(samples, rows, cols, channels)` if data_format='channels_last'.
Output shape:
Same as input
References:
- [Efficient Object Localization Using Convolutional
Networks](https://arxiv.org/abs/1411.4280)
"""
def __init__(self, rate, data_format=None, **kwargs):
super(SpatialDropout2D, self).__init__(rate, **kwargs)
if data_format is None:
data_format = K.image_data_format()
if data_format not in {'channels_last', 'channels_first'}:
raise ValueError('data_format must be in '
'{"channels_last", "channels_first"}')
self.data_format = data_format
self.input_spec = InputSpec(ndim=4)
def _get_noise_shape(self, inputs):
input_shape = K.shape(inputs)
if self.data_format == 'channels_first':
noise_shape = (input_shape[0], input_shape[1], 1, 1)
elif self.data_format == 'channels_last':
noise_shape = (input_shape[0], 1, 1, input_shape[3])
else:
raise ValueError('Invalid data_format:', self.data_format)
return noise_shape
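# Hedged usage sketch: for 'channels_last' inputs of shape
# (samples, rows, cols, channels), SpatialDropout2D zeroes whole feature maps,
# i.e. its noise shape is (batch, 1, 1, channels) as computed above.
#   sd = SpatialDropout2D(rate=0.3, data_format='channels_last')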
class SpatialDropout3D(Dropout):
"""Spatial 3D version of Dropout.
This version performs the same function as Dropout, however it drops
entire 3D feature maps instead of individual elements. If adjacent voxels
within feature maps are strongly correlated (as is normally the case in
early convolution layers) then regular dropout will not regularize the
activations and will otherwise just result in an effective learning rate
decrease. In this case, SpatialDropout3D will help promote independence
between feature maps and should be used instead.
Arguments:
rate: float between 0 and 1. Fraction of the input units to drop.
data_format: 'channels_first' or 'channels_last'.
In 'channels_first' mode, the channels dimension (the depth)
      is at index 1, in 'channels_last' mode it is at index 4.
It defaults to the `image_data_format` value found in your
Keras config file at `~/.keras/keras.json`.
If you never set it, then it will be "channels_last".
Input shape:
5D tensor with shape:
`(samples, channels, dim1, dim2, dim3)` if data_format='channels_first'
or 5D tensor with shape:
`(samples, dim1, dim2, dim3, channels)` if data_format='channels_last'.
Output shape:
Same as input
References:
- [Efficient Object Localization Using Convolutional
Networks](https://arxiv.org/abs/1411.4280)
"""
def __init__(self, rate, data_format=None, **kwargs):
super(SpatialDropout3D, self).__init__(rate, **kwargs)
if data_format is None:
data_format = K.image_data_format()
if data_format not in {'channels_last', 'channels_first'}:
raise ValueError('data_format must be in '
'{"channels_last", "channels_first"}')
self.data_format = data_format
self.input_spec = InputSpec(ndim=5)
def _get_noise_shape(self, inputs):
input_shape = K.shape(inputs)
if self.data_format == 'channels_first':
noise_shape = (input_shape[0], input_shape[1], 1, 1, 1)
elif self.data_format == 'channels_last':
noise_shape = (input_shape[0], 1, 1, 1, input_shape[4])
else:
raise ValueError('Invalid data_format:', self.data_format)
return noise_shape
class Activation(Layer):
"""Applies an activation function to an output.
Arguments:
activation: name of activation function to use
or alternatively, a Theano or TensorFlow operation.
Input shape:
Arbitrary. Use the keyword argument `input_shape`
(tuple of integers, does not include the samples axis)
when using this layer as the first layer in a model.
Output shape:
Same shape as input.
"""
def __init__(self, activation, **kwargs):
super(Activation, self).__init__(**kwargs)
self.supports_masking = True
self.activation = activations.get(activation)
def call(self, inputs):
return self.activation(inputs)
def get_config(self):
config = {'activation': activations.serialize(self.activation)}
base_config = super(Activation, self).get_config()
return dict(list(base_config.items()) + list(config.items()))
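# Hedged usage sketch in the style of the other layer docstrings (Sequential
# and Dense are assumed to be imported from the same Keras namespace):
#   model = Sequential()
#   model.add(Dense(64, input_shape=(16,)))
#   model.add(Activation('relu'))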
class Reshape(Layer):
"""Reshapes an output to a certain shape.
Arguments:
target_shape: target shape. Tuple of integers,
does not include the samples dimension (batch size).
Input shape:
    Arbitrary, although all dimensions in the input shape must be fixed.
Use the keyword argument `input_shape`
(tuple of integers, does not include the samples axis)
when using this layer as the first layer in a model.
Output shape:
`(batch_size,) + target_shape`
Example:
```python
# as first layer in a Sequential model
model = Sequential()
model.add(Reshape((3, 4), input_shape=(12,)))
# now: model.output_shape == (None, 3, 4)
# note: `None` is the batch dimension
# as intermediate layer in a Sequential model
model.add(Reshape((6, 2)))
# now: model.output_shape == (None, 6, 2)
# also supports shape inference using `-1` as dimension
model.add(Reshape((-1, 2, 2)))
# now: model.output_shape == (None, 3, 2, 2)
```
"""
def __init__(self, target_shape, **kwargs):
super(Reshape, self).__init__(**kwargs)
self.target_shape = tuple(target_shape)
def _fix_unknown_dimension(self, input_shape, output_shape):
"""Find and replace a missing dimension in an output shape.
This is a near direct port of the internal Numpy function
`_fix_unknown_dimension` in `numpy/core/src/multiarray/shape.c`
Arguments:
input_shape: shape of array being reshaped
output_shape: desired shape of the array with at most
a single -1 which indicates a dimension that should be
derived from the input shape.
Returns:
The new output shape with a -1 replaced with its computed value.
      Raises a ValueError if the total array size of the output_shape is
      different from that of the input_shape, or if more than one unknown
      dimension is specified.
Raises:
ValueError: in case of invalid values
        for `input_shape` or `output_shape`.
"""
output_shape = list(output_shape)
msg = 'total size of new array must be unchanged'
known, unknown = 1, None
for index, dim in enumerate(output_shape):
if dim < 0:
if unknown is None:
unknown = index
else:
raise ValueError('Can only specify one unknown dimension.')
else:
known *= dim
original = np.prod(input_shape, dtype=int)
if unknown is not None:
if known == 0 or original % known != 0:
raise ValueError(msg)
output_shape[unknown] = original // known
elif original != known:
raise ValueError(msg)
return output_shape
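  # Hedged worked example for _fix_unknown_dimension (values illustrative):
  #   input_shape=(12,), output_shape=[-1, 2, 2]
  #   known = 2 * 2 = 4, original = 12  ->  output_shape[0] = 12 // 4 = 3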
def _compute_output_shape(self, input_shape):
input_shape = tensor_shape.TensorShape(input_shape).as_list()
output_shape = [input_shape[0]]
output_shape += self._fix_unknown_dimension(input_shape[1:],
self.target_shape)
return tensor_shape.TensorShape(output_shape)
def call(self, inputs):
# In case the target shape is not fully defined,
# we need access to the shape of x.
target_shape = self.target_shape
if -1 in target_shape:
# target shape not fully defined
target_shape = self._compute_output_shape(inputs.get_shape())
target_shape = target_shape.as_list()[1:]
return K.reshape(inputs, (-1,) + tuple(target_shape))
def get_config(self):
config = {'target_shape': self.target_shape}
base_config = super(Reshape, self).get_config()
return dict(list(base_config.items()) + list(config.items()))
class Permute(Layer):
"""Permutes the dimensions of the input according to a given pattern.
Useful for e.g. connecting RNNs and convnets together.
Example:
```python
model = Sequential()
model.add(Permute((2, 1), input_shape=(10, 64)))
# now: model.output_shape == (None, 64, 10)
# note: `None` is the batch dimension
```
Arguments:
dims: Tuple of integers. Permutation pattern, does not include the
samples dimension. Indexing starts at 1.
For instance, `(2, 1)` permutes the first and second dimension
of the input.
Input shape:
Arbitrary. Use the keyword argument `input_shape`
(tuple of integers, does not include the samples axis)
when using this layer as the first layer in a model.
Output shape:
Same as the input shape, but with the dimensions re-ordered according
to the specified pattern.
"""
def __init__(self, dims, **kwargs):
super(Permute, self).__init__(**kwargs)
self.dims = tuple(dims)
self.input_spec = InputSpec(ndim=len(self.dims) + 1)
def _compute_output_shape(self, input_shape):
input_shape = tensor_shape.TensorShape(input_shape).as_list()
output_shape = copy.copy(input_shape)
for i, dim in enumerate(self.dims):
target_dim = input_shape[dim]
output_shape[i + 1] = target_dim
return tensor_shape.TensorShape(output_shape)
def call(self, inputs):
return K.permute_dimensions(inputs, (0,) + self.dims)
def get_config(self):
config = {'dims': self.dims}
base_config = super(Permute, self).get_config()
return dict(list(base_config.items()) + list(config.items()))
class Flatten(Layer):
"""Flattens the input. Does not affect the batch size.
Example:
```python
model = Sequential()
model.add(Convolution2D(64, 3, 3,
border_mode='same',
input_shape=(3, 32, 32)))
# now: model.output_shape == (None, 64, 32, 32)
model.add(Flatten())
# now: model.output_shape == (None, 65536)
```
"""
def __init__(self, **kwargs):
super(Flatten, self).__init__(**kwargs)
self.input_spec = InputSpec(min_ndim=3)
def _compute_output_shape(self, input_shape):
input_shape = tensor_shape.TensorShape(input_shape).as_list()
if not all(input_shape[1:]):
raise ValueError('The shape of the input to "Flatten" '
'is not fully defined '
'(got ' + str(input_shape[1:]) + '). '
'Make sure to pass a complete "input_shape" '
'or "batch_input_shape" argument to the first '
'layer in your model.')
return tensor_shape.TensorShape([input_shape[0], np.prod(input_shape[1:])])
def call(self, inputs):
outputs = K.batch_flatten(inputs)
outputs.set_shape(self._compute_output_shape(inputs.get_shape()))
return outputs
class RepeatVector(Layer):
"""Repeats the input n times.
Example:
```python
model = Sequential()
model.add(Dense(32, input_dim=32))
# now: model.output_shape == (None, 32)
# note: `None` is the batch dimension
model.add(RepeatVector(3))
# now: model.output_shape == (None, 3, 32)
```
Arguments:
n: integer, repetition factor.
Input shape:
2D tensor of shape `(num_samples, features)`.
Output shape:
3D tensor of shape `(num_samples, n, features)`.
"""
def __init__(self, n, **kwargs):
super(RepeatVector, self).__init__(**kwargs)
self.n = n
self.input_spec = InputSpec(ndim=2)
def _compute_output_shape(self, input_shape):
input_shape = tensor_shape.TensorShape(input_shape).as_list()
return tensor_shape.TensorShape([input_shape[0], self.n, input_shape[1]])
def call(self, inputs):
return K.repeat(inputs, self.n)
def get_config(self):
config = {'n': self.n}
base_config = super(RepeatVector, self).get_config()
return dict(list(base_config.items()) + list(config.items()))
class Lambda(Layer):
"""Wraps arbitrary expression as a `Layer` object.
Examples:
```python
# add a x -> x^2 layer
model.add(Lambda(lambda x: x ** 2))
```
```python
# add a layer that returns the concatenation
# of the positive part of the input and
# the opposite of the negative part
def antirectifier(x):
x -= K.mean(x, axis=1, keepdims=True)
x = K.l2_normalize(x, axis=1)
pos = K.relu(x)
neg = K.relu(-x)
return K.concatenate([pos, neg], axis=1)
model.add(Lambda(antirectifier))
```
Arguments:
function: The function to be evaluated.
Takes input tensor as first argument.
arguments: optional dictionary of keyword arguments to be passed
to the function.
Input shape:
Arbitrary. Use the keyword argument input_shape
(tuple of integers, does not include the samples axis)
when using this layer as the first layer in a model.
Output shape:
Specified by `output_shape` argument
(or auto-inferred when using TensorFlow).
"""
def __init__(self, function, mask=None, arguments=None, **kwargs):
super(Lambda, self).__init__(**kwargs)
self.function = function
self.arguments = arguments if arguments else {}
if mask is not None:
self.supports_masking = True
self.mask = mask
def call(self, inputs, mask=None):
arguments = self.arguments
arg_spec = tf_inspect.getargspec(self.function)
if 'mask' in arg_spec.args:
arguments['mask'] = mask
return self.function(inputs, **arguments)
def compute_mask(self, inputs, mask=None):
if callable(self.mask):
return self.mask(inputs, mask)
return self.mask
def get_config(self):
if isinstance(self.function, python_types.LambdaType):
function = func_dump(self.function)
function_type = 'lambda'
else:
function = self.function.__name__
function_type = 'function'
config = {
'function': function,
'function_type': function_type,
'arguments': self.arguments
}
base_config = super(Lambda, self).get_config()
return dict(list(base_config.items()) + list(config.items()))
@classmethod
def from_config(cls, config, custom_objects=None):
globs = globals()
if custom_objects:
globs = dict(list(globs.items()) + list(custom_objects.items()))
function_type = config.pop('function_type')
if function_type == 'function':
# Simple lookup in custom objects
function = deserialize_keras_object(
config['function'],
custom_objects=custom_objects,
printable_module_name='function in Lambda layer')
elif function_type == 'lambda':
# Unsafe deserialization from bytecode
function = func_load(config['function'], globs=globs)
else:
raise TypeError('Unknown function type:', function_type)
config['function'] = function
return cls(**config)
class Dense(tf_core_layers.Dense, Layer):
"""Just your regular densely-connected NN layer.
`Dense` implements the operation:
`output = activation(dot(input, kernel) + bias)`
where `activation` is the element-wise activation function
passed as the `activation` argument, `kernel` is a weights matrix
created by the layer, and `bias` is a bias vector created by the layer
(only applicable if `use_bias` is `True`).
Note: if the input to the layer has a rank greater than 2, then
it is flattened prior to the initial dot product with `kernel`.
Example:
```python
# as first layer in a sequential model:
model = Sequential()
model.add(Dense(32, input_shape=(16,)))
# now the model will take as input arrays of shape (*, 16)
# and output arrays of shape (*, 32)
# after the first layer, you don't need to specify
# the size of the input anymore:
model.add(Dense(32))
```
Arguments:
units: Positive integer, dimensionality of the output space.
activation: Activation function to use.
If you don't specify anything, no activation is applied
(ie. "linear" activation: `a(x) = x`).
use_bias: Boolean, whether the layer uses a bias vector.
kernel_initializer: Initializer for the `kernel` weights matrix.
bias_initializer: Initializer for the bias vector.
kernel_regularizer: Regularizer function applied to
the `kernel` weights matrix.
bias_regularizer: Regularizer function applied to the bias vector.
activity_regularizer: Regularizer function applied to
the output of the layer (its "activation")..
kernel_constraint: Constraint function applied to
the `kernel` weights matrix.
bias_constraint: Constraint function applied to the bias vector.
Input shape:
nD tensor with shape: `(batch_size, ..., input_dim)`.
The most common situation would be
a 2D input with shape `(batch_size, input_dim)`.
Output shape:
nD tensor with shape: `(batch_size, ..., units)`.
For instance, for a 2D input with shape `(batch_size, input_dim)`,
the output would have shape `(batch_size, units)`.
"""
def __init__(self,
units,
activation=None,
use_bias=True,
kernel_initializer='glorot_uniform',
bias_initializer='zeros',
kernel_regularizer=None,
bias_regularizer=None,
activity_regularizer=None,
kernel_constraint=None,
bias_constraint=None,
**kwargs):
if 'input_shape' not in kwargs and 'input_dim' in kwargs:
kwargs['input_shape'] = (kwargs.pop('input_dim'),)
# Inheritance call order:
# 1) tf.layers.Dense, 2) keras.layers.Layer, 3) tf.layers.Layer
super(Dense, self).__init__(
units,
activation=activations.get(activation),
use_bias=use_bias,
kernel_initializer=initializers.get(kernel_initializer),
bias_initializer=initializers.get(bias_initializer),
kernel_regularizer=regularizers.get(kernel_regularizer),
bias_regularizer=regularizers.get(bias_regularizer),
activity_regularizer=regularizers.get(activity_regularizer),
**kwargs)
# TODO(fchollet): move weight constraint support to core layers.
self.kernel_constraint = constraints.get(kernel_constraint)
self.bias_constraint = constraints.get(bias_constraint)
self.supports_masking = True
def build(self, input_shape):
super(Dense, self).build(input_shape)
# TODO(fchollet): move weight constraint support to core layers.
if self.kernel_constraint:
self.constraints[self.kernel] = self.kernel_constraint
if self.use_bias and self.bias_constraint:
self.constraints[self.bias] = self.bias_constraint
def get_config(self):
config = {
'units': self.units,
'activation': activations.serialize(self.activation),
'use_bias': self.use_bias,
'kernel_initializer': initializers.serialize(self.kernel_initializer),
'bias_initializer': initializers.serialize(self.bias_initializer),
'kernel_regularizer': regularizers.serialize(self.kernel_regularizer),
'bias_regularizer': regularizers.serialize(self.bias_regularizer),
'activity_regularizer':
regularizers.serialize(self.activity_regularizer),
'kernel_constraint': constraints.serialize(self.kernel_constraint),
'bias_constraint': constraints.serialize(self.bias_constraint)
}
base_config = super(Dense, self).get_config()
return dict(list(base_config.items()) + list(config.items()))
class ActivityRegularization(Layer):
"""Layer that applies an update to the cost function based input activity.
Arguments:
l1: L1 regularization factor (positive float).
l2: L2 regularization factor (positive float).
Input shape:
Arbitrary. Use the keyword argument `input_shape`
(tuple of integers, does not include the samples axis)
when using this layer as the first layer in a model.
Output shape:
Same shape as input.
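Example:
```python
# a minimal sketch: penalize large activations of the previous layer
model = Sequential()
model.add(Dense(64, input_shape=(16,)))
model.add(ActivityRegularization(l1=0.01, l2=0.01))
# output shape is unchanged: (None, 64)
```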
"""
def __init__(self, l1=0., l2=0., **kwargs):
super(ActivityRegularization, self).__init__(**kwargs)
self.supports_masking = True
self.l1 = l1
self.l2 = l2
self.activity_regularizer = regularizers.L1L2(l1=l1, l2=l2)
def get_config(self):
config = {'l1': self.l1, 'l2': self.l2}
base_config = super(ActivityRegularization, self).get_config()
return dict(list(base_config.items()) + list(config.items()))
|
unnikrishnankgs/va
|
venv/lib/python3.5/site-packages/tensorflow/contrib/keras/python/keras/layers/core.py
|
Python
|
bsd-2-clause
| 27,677
|
"""
Copyright (c) 2007 Jan-Klaas Kollhof
This file is part of jsonrpc.
jsonrpc is free software; you can redistribute it and/or modify
it under the terms of the GNU Lesser General Public License as published by
the Free Software Foundation; either version 2.1 of the License, or
(at your option) any later version.
This software is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public License
along with this software; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
"""
import urllib
from jsonrpc.json import dumps, loads
class JSONRPCException(Exception):
def __init__(self, rpcError):
Exception.__init__(self)
self.error = rpcError
class ServiceProxy(object):
def __init__(self, serviceURL, serviceName=None):
self.__serviceURL = serviceURL
self.__serviceName = serviceName
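# Attribute access returns a nested proxy so that dotted names like
# proxy.math.add build the full JSON-RPC method name before the call.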
def __getattr__(self, name):
if self.__serviceName is not None:
name = "%s.%s" % (self.__serviceName, name)
return ServiceProxy(self.__serviceURL, name)
def __call__(self, *args):
postdata = dumps({"method": self.__serviceName, 'params': args, 'id':'jsonrpc'})
respdata = urllib.urlopen(self.__serviceURL, postdata).read()
resp = loads(respdata)
if resp['error'] is not None:
raise JSONRPCException(resp['error'])
else:
return resp['result']
|
mungerd/latbuilder
|
web-ui/share/latbuilder/web-ui/services/jsonrpc/proxy.py
|
Python
|
gpl-3.0
| 1,711
|
#! /usr/bin/env python
"""
The MIT License (MIT)
Copyright (c) 2015 creon (creon.nu@gmail.com)
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE
OR OTHER DEALINGS IN THE SOFTWARE.
"""
import sys
import json
import hmac
import time
import urllib
import urllib2
import random
import hashlib
import httplib
import threading
import datetime
class Exchange(object):
def __init__(self, fee):
self.fee = fee
self._shift = 1
self._nonce = 0
def adjust(self, error):
if 'exception caught:' not in error:
self._shift = ((self._shift + 7) % 200) - 100 # -92 15 -78 29 -64 43 -50 57 ...
def nonce(self, factor=1000.0):
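# Derive a monotonically increasing nonce from wall time plus the
# per-exchange clock shift; bump it if called twice within one tick.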
n = int((time.time() + self._shift) * float(factor))
if self._nonce >= n:
n = self._nonce + 10
self._nonce = n
return n
class Bittrex(Exchange):
def __init__(self):
super(Bittrex, self).__init__(0.0025)
self.placed = {}
self.closed = []
def __repr__(self):
return "bittrex"
def adjust(self, error):
pass
def post(self, method, params, key, secret, throttle=5):
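# Bittrex v1.1 auth: sign the full request URL (apikey + nonce + params)
# with HMAC-SHA512 and send the digest in the 'apisign' header.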
data = 'https://bittrex.com/api/v1.1' + method + '?apikey=%s&nonce=%d&' % (
key, self.nonce()) + urllib.urlencode(params)
sign = hmac.new(secret, data, hashlib.sha512).hexdigest()
headers = {'apisign': sign}
connection = httplib.HTTPSConnection('bittrex.com', timeout=10)
connection.request('GET', data, headers=headers)
response = json.loads(connection.getresponse().read())
if throttle > 0 and not response['success'] and 'THROTTLED' in response['message']:
time.sleep(2)
return self.post(method, params, key, secret, throttle - 1)
return response
def get(self, method, params):
data = 'https://bittrex.com/api/v1.1' + method + '?' + urllib.urlencode(params)
connection = httplib.HTTPSConnection('bittrex.com', timeout=10)
connection.request('GET', data, headers={})
return json.loads(connection.getresponse().read())
def cancel_orders(self, unit, side, key, secret):
response = self.post('/market/getopenorders', {'market': "%s-NBT" % unit.upper()}, key, secret)
if not response['success']:
response['error'] = response['message']
return response
if not response['result']:
response['result'] = []
response['removed'] = []
response['amount'] = 0.0
for order in response['result']:
if side == 'all' or (side == 'bid' and 'BUY' in order['OrderType']) or (
side == 'ask' and 'SELL' in order['OrderType']):
ret = self.post('/market/cancel', {'uuid': order['OrderUuid']}, key, secret)
if not ret['success'] and ret['message'] != "ORDER_NOT_OPEN":
if not 'error' in response: response = {'error': ""}
response['error'] += "," + ret['message']
else:
response['removed'].append(order['OrderUuid'])
response['amount'] += order['Quantity']
if not 'error' in response and key in self.placed and unit in self.placed[key]:
if side == 'all':
self.placed[key][unit]['bid'] = False
self.placed[key][unit]['ask'] = False
else:
self.placed[key][unit][side] = False
return response
def place_order(self, unit, side, key, secret, amount, price):
ret = self.cancel_orders(unit, side, key, secret)
if 'error' in ret: return ret
amount += ret['amount']
if side == 'bid':
amount *= (1.0 - self.fee)
params = {'market': "%s-NBT" % unit.upper(), "rate": price, "quantity": amount}
response = self.post('/market/buylimit' if side == 'bid' else '/market/selllimit', params, key, secret)
if response['success']:
response['id'] = response['result']['uuid']
if not key in self.placed:
self.placed[key] = {}
if not unit in self.placed[key]:
self.placed[key][unit] = {'bid': False, 'ask': False}
self.placed[key][unit][side] = response['id']
else:
response['error'] = response['message']
response['residual'] = ret['amount']
return response
def get_balance(self, unit, key, secret):
response = self.post('/account/getbalance', {'currency': unit.upper()}, key, secret)
if response['success']:
try:
response['balance'] = float(response['result']['Available'])
except:
response['balance'] = 0.0
else:
response['error'] = response['message']
return response
def get_price(self, unit):
response = self.get('/public/getticker', {'market': '%s-NBT' % unit})
if response['success']:
response.update({'bid': response['result']['Bid'], 'ask': response['result']['Ask']})
else:
response['error'] = response['message']
return response
def create_request(self, unit, key=None, secret=None):
if not secret or not key:
return None, None
uuids = []
if key in self.placed and unit in self.placed[key]:
if self.placed[key][unit]['bid']:
uuids.append(self.placed[key][unit]['bid'])
if self.placed[key][unit]['ask']:
uuids.append(self.placed[key][unit]['ask'])
requests = []
signatures = []
for uuid in uuids:
data = 'https://bittrex.com/api/v1.1/account/getorder?apikey=%s&nonce=%d&uuid=%s' % (
key, self.nonce(), uuid)
requests.append(data)
signatures.append(hmac.new(secret, data, hashlib.sha512).hexdigest())
return {'requests': json.dumps(requests), 'signs': json.dumps(signatures)}, None
def validate_request(self, key, unit, data, signs):
orders = []
last_error = ""
requests = json.loads(data['requests'])
signs = json.loads(data['signs'])
if len(requests) != len(signs):
return {
'error': 'mismatch between requests and signatures (%d vs %d)' % (len(requests), len(signs))}
if len(requests) > 2:
return {'error': 'too many requests received: %d' % len(requests)}
connection = httplib.HTTPSConnection('bittrex.com', timeout=5)
for data, sign in zip(requests, signs):
uuid = data.split('=')[-1]
if not uuid in self.closed:
headers = {'apisign': sign}
connection.request('GET', data, headers=headers)
response = json.loads(connection.getresponse().read())
if response['success']:
try:
opened = int(
datetime.datetime.strptime(response['result']['Opened'], '%Y-%m-%dT%H:%M:%S.%f').strftime(
"%s"))
except:
opened = 0
try:
closed = int(
datetime.datetime.strptime(response['result']['Closed'], '%Y-%m-%dT%H:%M:%S.%f').strftime(
"%s"))
except:
closed = sys.maxint
if closed < time.time() - 60:
self.closed.append(uuid)
orders.append({
'id': response['result']['OrderUuid'],
'price': response['result']['Limit'],
'type': 'ask' if 'SELL' in response['result']['Type'] else 'bid',
'amount': response['result']['QuantityRemaining'],
# if not closed == sys.maxint else response['result']['Quantity'],
'opened': opened,
'closed': closed,
})
else:
last_error = response['message']
if not orders and last_error != "":
return {'error': last_error}
return orders
class Poloniex(Exchange):
def __init__(self):
super(Poloniex, self).__init__(0.002)
def __repr__(self):
return "poloniex"
def adjust(self, error):
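# Parse "Nonce must be greater than <x>. You provided <y>." and move the
# clock shift forward by the observed gap plus a fixed safety margin.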
if "Nonce must be greater than" in error: # (TODO: regex)
if ':' in error: error = error.split(':')[1].strip()
error = error.replace('.', '').split()
self._shift += 100.0 + (int(error[5]) - int(error[8])) / 1000.0
else:
self._shift = self._shift + 100.0
def post(self, method, params, key, secret):
request = {'nonce': self.nonce(), 'command': method}
request.update(params)
data = urllib.urlencode(request)
sign = hmac.new(secret, data, hashlib.sha512).hexdigest()
headers = {'Sign': sign, 'Key': key}
return json.loads(urllib2.urlopen(urllib2.Request('https://poloniex.com/tradingApi', data, headers)).read())
def cancel_orders(self, unit, side, key, secret):
response = self.post('returnOpenOrders', {'currencyPair': "%s_NBT" % unit.upper()}, key, secret)
if 'error' in response: return response
for order in response:
if side == 'all' or (side == 'bid' and order['type'] == 'buy') or (
side == 'ask' and order['type'] == 'sell'):
ret = self.post('cancelOrder',
{'currencyPair': "%s_NBT" % unit.upper(), 'orderNumber': order['orderNumber']}, key,
secret)
if 'error' in ret:
if isinstance(response, list): response = {'error': ""}
response['error'] += "," + ret['error']
return response
def place_order(self, unit, side, key, secret, amount, price):
params = {'currencyPair': "%s_NBT" % unit.upper(), "rate": price, "amount": amount}
response = self.post('buy' if side == 'bid' else 'sell', params, key, secret)
if not 'error' in response:
response['id'] = int(response['orderNumber'])
return response
def get_balance(self, unit, key, secret):
response = self.post('returnBalances', {}, key, secret)
if not 'error' in response:
response['balance'] = float(response[unit.upper()])
return response
def get_price(self, unit):
response = json.loads(urllib2.urlopen('https://poloniex.com/public?' +
urllib.urlencode({'command': 'returnOrderBook',
'currencyPair': "%s_NBT" % unit.upper(), 'depth': 1}),
timeout=5).read())
if not 'error' in response:
response.update({'bid': None, 'ask': None})
if response['bid']: response['bid'] = float(response['bid'][0])
if response['ask']: response['ask'] = float(response['ask'][0])
return response
def create_request(self, unit, key=None, secret=None):
if not secret: return None, None
request = {'command': 'returnOpenOrders', 'nonce': self.nonce(), 'currencyPair': "%s_NBT" % unit.upper()}
data = urllib.urlencode(request)
sign = hmac.new(secret, data, hashlib.sha512).hexdigest()
return request, sign
def validate_request(self, key, unit, data, sign):
headers = {'Sign': sign, 'Key': key}
ret = urllib2.urlopen(urllib2.Request('https://poloniex.com/tradingApi', urllib.urlencode(data), headers),
timeout=5)
response = json.loads(ret.read())
if 'error' in response: return response
return [{
'id': int(order['orderNumber']),
'price': float(order['rate']),
'type': 'ask' if order['type'] == 'sell' else 'bid',
'amount': float(order['amount']),
} for order in response]
class CCEDK(Exchange):
def __init__(self):
super(CCEDK, self).__init__(0.002)
self.pair_id = {}
self.currency_id = {}
failed = False
while not self.pair_id or not self.currency_id:
try:
response = None
if not self.pair_id:
url = 'https://www.ccedk.com/api/v1/stats/marketdepthfull'
response = json.loads(urllib2.urlopen(urllib2.Request(url), timeout=15).read())
for unit in response['response']['entities']:
if unit['pair_name'][:4] == 'nbt/':
self.pair_id[unit['pair_name'][4:]] = unit['pair_id']
if not self.currency_id:
url = 'https://www.ccedk.com/api/v1/currency/list'
response = json.loads(urllib2.urlopen(urllib2.Request(url), timeout=15).read())
for unit in response['response']['entities']:
self.currency_id[unit['iso'].lower()] = unit['currency_id']
except Exception as e:
if response and not response['response']:
self.adjust(",".join(response['errors'].values()))
if failed:
print >> sys.stderr, "could not retrieve ccedk ids, will adjust shift to", self._shift, \
"reason:", ",".join(response['errors'].values())
else:
print >> sys.stderr, "could not retrieve ccedk ids, server is unreachable", e
failed = True
time.sleep(1)
def __repr__(self):
return "ccedk"
def nonce(self, factor=1.0):
n = int(time.time() + self._shift)
if n == self._nonce:
n = self._nonce + 1
self._nonce = n
return n
def adjust(self, error):
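# CCEDK reports the acceptable nonce window in the error text; steer the
# clock shift toward the inside of that window, or jitter randomly when
# the message cannot be parsed.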
if "incorrect range" in error: # (TODO: regex)
if ':' in error:
error = error.split(':')[1].strip()
try:
minimum = int(error.strip().split()[-3].replace('`', ''))
maximum = int(error.strip().split()[-1].replace('`', ''))
current = int(error.strip().split()[-7].split('`')[3])
except:
self._shift += random.randrange(-10, 10)
else:
if current < maximum:
new_shift = (minimum + 2 * maximum) / 3 - current
if new_shift < 0:
new_shift = 10
else:
new_shift = (2 * minimum + maximum) / 3 - current
if new_shift != 0:
self._shift += new_shift
else:
self._shift += random.randrange(-10, 10)
else:
self._shift += random.randrange(-10, 10)
def post(self, method, params, key, secret):
request = {'nonce': self.nonce()} # TODO: check for unique nonce
request.update(params)
data = urllib.urlencode(request)
sign = hmac.new(secret, data, hashlib.sha512).hexdigest()
headers = {"Content-type": "application/x-www-form-urlencoded", "Key": key, "Sign": sign}
url = 'https://www.ccedk.com/api/v1/' + method
response = json.loads(urllib2.urlopen(urllib2.Request(url, data, headers), timeout=15).read())
if response['errors'] is True:
response['error'] = ",".join(response['errors'].values())
return response
def cancel_orders(self, unit, side, key, secret):
response = self.post('order/list', {}, key, secret)
if not response['response'] or not response['response']['entities']:
return response
for order in response['response']['entities']:
if side == 'all' \
or (side == 'bid' and order['type'] == 'buy') \
or (side == 'ask' and order['type'] == 'sell'):
if order['pair_id'] == self.pair_id[unit.lower()]:
ret = self.post('order/cancel', {'order_id': order['order_id']}, key, secret)
if ret['errors'] is True:
if 'error' not in response:
response['error'] = ""
response['error'] += ",".join(ret['errors'].values())
return response
def place_order(self, unit, side, key, secret, amount, price):
params = {"type": 'buy' if side == 'bid' else 'sell',
"price": price,
"pair_id": int(self.pair_id[unit.lower()]),
"amount": amount}
response = self.post('order/new', params, key, secret)
if response['errors'] is True:
response['error'] = ",".join(response['errors'].values())
else:
response['id'] = int(response['response']['entity']['order_id'])
return response
def get_balance(self, unit, key, secret):
params = {"currency_id": self.currency_id[unit.lower()]}
response = self.post('balance/info', params, key, secret)
if response['errors'] is True:
response['error'] = ",".join(response['errors'].values())
else:
response['balance'] = float(response['response']['entity']['balance'])
return response
def get_price(self, unit):
url = 'https://www.ccedk.com/api/v1/orderbook/info?' + urllib.urlencode({'pair_id': self.pair_id[unit.lower()]})
response = json.loads(urllib2.urlopen(urllib2.Request(url), timeout=5).read())
if response['errors'] is True:
response['error'] = ",".join(response['errors'].values())
return response
response.update({'bid': None, 'ask': None})
if response['response']['entities']['bids']:
response['bid'] = float(response['response']['entities']['bids'][0]['price'])
if response['response']['entities']['asks']:
response['ask'] = float(response['response']['entities']['asks'][0]['price'])
return response
def create_request(self, unit, key=None, secret=None):
if not secret:
return None, None
request = {'nonce': self.nonce()}
data = urllib.urlencode(request)
sign = hmac.new(secret, data, hashlib.sha512).hexdigest()
return request, sign
def validate_request(self, key, unit, data, sign):
headers = {"Content-type": "application/x-www-form-urlencoded", "Key": key, "Sign": sign}
url = 'https://www.ccedk.com/api/v1/order/list'
response = json.loads(urllib2.urlopen(urllib2.Request(url, urllib.urlencode(data), headers), timeout=5).read())
if response['errors'] is True:
response['error'] = ",".join(response['errors'].values())
return response
if not response['response']['entities']:
response['response']['entities'] = []
return [{
'id': int(order['order_id']),
'price': float(order['price']),
'type': 'ask' if order['type'] == 'sell' else 'bid',
'amount': float(order['volume']),
} for order in response['response']['entities'] if order['pair_id'] == self.pair_id[unit.lower()]]
class BitcoinCoId(Exchange):
def __init__(self):
super(BitcoinCoId, self).__init__(0.0)
try:
ping = time.time()
response = json.loads(urllib2.urlopen(urllib2.Request('https://vip.bitcoin.co.id/api/summaries')).read())
self._shift = float(response['tickers']['btc_idr']['server_time']) - ping
except:
pass
def __repr__(self):
return "bitcoincoid"
def adjust(self, error):
if "Nonce must be greater than" in error: # (TODO: regex)
if ':' in error: error = error.split(':')[1].strip()
error = error.replace('.', '').split()
self._shift += 100.0 + (int(error[5]) - int(error[8])) / 1000.0
else:
self._shift = self._shift + 100.0
def nonce(self, factor=1000.0):
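# Keep successive nonces at least 300 apart; the server appears to
# require a minimum increment between requests.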
n = int((time.time() + self._shift) * float(factor))
if n - self._nonce < 300:
n = self._nonce + 300
self._nonce = n
return n
def post(self, method, params, key, secret):
request = {'nonce': self.nonce(), 'method': method}
request.update(params)
data = urllib.urlencode(request)
sign = hmac.new(secret, data, hashlib.sha512).hexdigest()
headers = {'Sign': sign, 'Key': key}
response = json.loads(urllib2.urlopen(urllib2.Request('https://vip.bitcoin.co.id/tapi', data, headers)).read())
return response
def cancel_orders(self, unit, side, key, secret):
response = self.post('openOrders', {'pair': 'nbt_' + unit.lower()}, key, secret)
if response['success'] == 0 or not response['return']['orders']: return response
for order in response['return']['orders']:
if side == 'all' or (side == 'bid' and order['type'] == 'buy') or (
side == 'ask' and order['type'] == 'sell'):
params = {'pair': 'nbt_' + unit.lower(), 'order_id': order['order_id'], 'type': order['type']}
ret = self.post('cancelOrder', params, key, secret)
if 'error' in ret:
if not 'error' in response: response['error'] = ""
response['error'] += "," + ret['error']
return response
def place_order(self, unit, side, key, secret, amount, price):
params = {'pair': 'nbt_' + unit.lower(), 'type': 'buy' if side == 'bid' else 'sell', 'price': price}
if side == 'bid':
params[unit.lower()] = amount * price
else:
params['nbt'] = amount
params[unit] = amount * price
response = self.post('trade', params, key, secret)
if response['success'] == 1:
response['id'] = int(response['return']['order_id'])
return response
def get_balance(self, unit, key, secret):
response = self.post('getInfo', {}, key, secret)
if response['success'] == 1:
response['balance'] = float(response['return']['balance'][unit.lower()])
return response
def get_price(self, unit):
response = json.loads(
urllib2.urlopen(urllib2.Request('https://vip.bitcoin.co.id/api/nbt_%s/depth' % unit.lower()),
timeout=5).read())
if 'error' in response:
return response
response.update({'bid': None, 'ask': None})
if response['buy']: response['bid'] = float(response['buy'][0][0])
if response['sell']: response['ask'] = float(response['sell'][0][0])
return response
def create_request(self, unit, key=None, secret=None):
if not secret: return None, None
request = {'nonce': self.nonce(), 'pair': 'nbt_' + unit.lower(), 'method': 'openOrders'}
data = urllib.urlencode(request)
sign = hmac.new(secret, data, hashlib.sha512).hexdigest()
return request, sign
def validate_request(self, key, unit, data, sign):
headers = {"Key": key, "Sign": sign}
response = json.loads(
urllib2.urlopen(urllib2.Request('https://vip.bitcoin.co.id/tapi', urllib.urlencode(data), headers),
timeout=5).read())
if response['success'] == 0:
return response
if not response['return']['orders']:
response['return']['orders'] = []
return [{
'id': int(order['order_id']),
'price': float(order['price']),
'type': 'ask' if order['type'] == 'sell' else 'bid',
'amount': float(order['remain_' + (unit.lower() if order['type'] == 'buy' else 'nbt')]) / (
float(order['price']) if order['type'] == 'buy' else 1.0),
} for order in response['return']['orders']]
class BTER(Exchange):
def __init__(self):
super(BTER, self).__init__(0.002)
def __repr__(self):
return "bter"
def adjust(self, error):
pass
def https_request(self, method, params, headers=None, timeout=None):
if not headers: headers = {}
connection = httplib.HTTPSConnection('data.bter.com', timeout=timeout)
connection.request('POST', '/api/1/private/' + method, params, headers)
response = connection.getresponse().read()
return json.loads(response)
def post(self, method, params, key, secret):
data = urllib.urlencode(params)
sign = hmac.new(secret, data, hashlib.sha512).hexdigest()
headers = {'Sign': sign, 'Key': key, "Content-type": "application/x-www-form-urlencoded"}
return self.https_request(method, data, headers)
def cancel_orders(self, unit, side, key, secret):
response = self.post('orderlist', {}, key, secret)
if not response['result']:
response['error'] = response['msg']
return response
if not response['orders']: response['orders'] = []
for order in response['orders']:
if side == 'all' or (side == 'ask' and order['sell_type'] != unit) or (
side == 'bid' and order['buy_type'] != unit):
if order['pair'] == 'nbt_' + unit.lower():
params = {'order_id': order['oid']}
ret = self.post('cancelorder', params, key, secret)
if not ret['result']:
if not 'error' in response: response['error'] = ""
response['error'] += "," + ret['msg']
return response
def place_order(self, unit, side, key, secret, amount, price):
params = {'pair': 'nbt_' + unit.lower(), 'type': 'buy' if side == 'bid' else 'sell', 'rate': price,
'amount': amount}
response = self.post('placeorder', params, key, secret)
if response['result']:
response['id'] = int(response['order_id'])
else:
response['error'] = response['msg']
return response
def get_balance(self, unit, key, secret):
response = self.post('getfunds', {}, key, secret)
if response['result']:
if unit.upper() in response['available_funds']:
response['balance'] = float(response['available_funds'][unit.upper()])
else:
response['balance'] = 0.0
else:
response['error'] = response['msg']
return response
def get_price(self, unit):
connection = httplib.HTTPSConnection('data.bter.com', timeout=5)
connection.request('GET', '/api/1/depth/nbt_' + unit.lower())
response = json.loads(connection.getresponse().read())
if not 'result' in response or not response['result']:
response['error'] = response['msg'] if 'msg' in response else 'invalid response: %s' % str(response)
return response
response.update({'bid': None, 'ask': None})
if response['bids']: response['bid'] = float(response['bids'][0][0])
if response['asks']: response['ask'] = float(response['asks'][-1][0])
return response
def create_request(self, unit, key=None, secret=None):
if not secret: return None, None
request = {} # no nonce required
data = urllib.urlencode(request)
sign = hmac.new(secret, data, hashlib.sha512).hexdigest()
return request, sign
def validate_request(self, key, unit, data, sign):
headers = {'Sign': sign, 'Key': key, "Content-type": "application/x-www-form-urlencoded"}
response = self.https_request('orderlist', urllib.urlencode(data), headers, timeout=15)
if not 'result' in response or not response['result']:
response['error'] = response['msg'] if 'msg' in response else 'invalid response: %s' % str(response)
return response
if not response['orders']:
response['orders'] = []
return [{
'id': int(order['oid']),
'price': float(order['rate']),
'type': 'ask' if order['buy_type'].lower() == unit.lower() else 'bid',
'amount': float(order['amount']) / (
1.0 if order['buy_type'].lower() == unit.lower() else float(order['rate'])),
} for order in response['orders'] if order['pair'] == 'nbt_' + unit.lower()]
class Peatio(Exchange):
def __init__(self):
super(Peatio, self).__init__(0.002)
def __repr__(self):
return "testing"
def adjust(self, error):
if "is invalid, current timestamp is" in error:
try:
tonce = int(error.split()[2])
times = int(error.split()[-1].replace('.', ''))
self._shift = int(float(times - tonce) / 1000.0)
except:
print error
pass
else:
print error
def urlencode(self, params): # from https://github.com/JohnnyZhao/peatio-client-python/blob/master/lib/auth.py#L11
keys = sorted(params.keys())
query = ''
for key in keys:
value = params[key]
if key != "orders":
query = "%s&%s=%s" % (query, key, value) if len(query) else "%s=%s" % (key, value)
else:
d = {key: params[key]}
for v in value:
ks = v.keys()
ks.sort()
for k in ks:
item = "orders[][%s]=%s" % (k, v[k])
query = "%s&%s" % (query, item) if len(query) else "%s" % item
return query
def query(self, qtype, method, params, key, secret):
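# Peatio-style auth: sign "VERB|/api/v2/<method>|<canonical query>" with
# HMAC-SHA256 and append the digest as the 'signature' parameter.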
request = {'tonce': self.nonce(), 'access_key': key}
request.update(params)
data = self.urlencode(request)
msg = "%s|/api/v2/%s|%s" % (qtype, method, data)
data += "&signature=" + hmac.new(secret, msg, hashlib.sha256).hexdigest()
connection = httplib.HTTPSConnection('178.62.140.24', timeout=5)
connection.request(qtype, '/api/v2/' + method + '?' + data)
return json.loads(connection.getresponse().read())
def post(self, method, params, key, secret):
return self.query('POST', method, params, key, secret)
def get(self, method, params, key, secret):
return self.query('GET', method, params, key, secret)
def cancel_orders(self, unit, side, key, secret):
response = self.get('orders.json', {'market': "nbt%s" % unit.lower()}, key, secret)
if 'error' in response:
response['error'] = response['error']['message']
return response
for order in response:
if side == 'all' or (side == 'bid' and order['side'] == 'buy') or (
side == 'ask' and order['side'] == 'sell'):
ret = self.post('order/delete.json', {'id': order['id']}, key, secret)
if 'error' in ret:
if isinstance(response, list): response = {'error': ""}
response['error'] += "," + ret['error']['message']
return response
def place_order(self, unit, side, key, secret, amount, price):
params = {'market': "nbt%s" % unit.lower(), "side": 'buy' if side == 'bid' else 'sell', "volume": amount,
"price": price}
response = self.post('orders', params, key, secret)
if 'error' in response:
response['error'] = response['error']['message']
else:
response['id'] = int(response['id'])
return response
def get_balance(self, unit, key, secret):
response = self.get('members/me.json', {}, key, secret)
if 'error' in response:
response['error'] = response['error']['message']
else:
response['balance'] = 0.0
for pair in response['accounts']:
if pair['currency'] == unit.lower():
response['balance'] = float(pair['balance'])
return response
def get_price(self, unit):
connection = httplib.HTTPSConnection('178.62.140.24', timeout=15)
connection.request('GET',
'/api/v2/depth.json?' + self.urlencode({'market': "nbt%s" % unit.lower(), 'limit': 1}))
response = json.loads(connection.getresponse().read())
if 'error' in response:
response['error'] = response['error']['message']
return response
response.update({'bid': None, 'ask': None})
if response['bids']: response['bid'] = float(response['bids'][0][0])
if response['asks']: response['ask'] = float(response['asks'][-1][0])
return response
def create_request(self, unit, key=None, secret=None):
if not secret: return None, None
request = {'tonce': self.nonce(), 'access_key': key, 'market': "nbt%s" % unit.lower()}
data = self.urlencode(request)
msg = "GET|/api/v2/orders.json|%s" % data
request['signature'] = hmac.new(secret, msg, hashlib.sha256).hexdigest()
return request, ''
def validate_request(self, key, unit, data, sign):
if not 'market' in data or data['market'] != "nbt%s" % unit.lower():
return {'error': 'invalid market'}
connection = httplib.HTTPSConnection('178.62.140.24', timeout=15)
connection.request('GET', '/api/v2/orders.json?' + self.urlencode(data))
response = json.loads(connection.getresponse().read())
if 'error' in response:
response['error'] = response['error']['message']
return response
return [{
'id': int(order['id']),
'price': float(order['price']),
'type': 'ask' if order['side'] == 'sell' else 'bid',
'amount': float(order['remaining_volume']),
} for order in response]
|
inuitwallet/plunge
|
client/exchanges.py
|
Python
|
mit
| 35,228
|
from tensorflow.keras import backend as K
abs_definitions = [
{'name': 'add_class',
'nargs': '+',
'type': int,
'help': 'flag to add abstention (per task)'},
{'name': 'alpha',
'nargs': '+',
'type': float,
'help': 'abstention penalty coefficient (per task)'},
{'name': 'min_acc',
'nargs': '+',
'type': float,
'help': 'minimum accuracy required (per task)'},
{'name': 'max_abs',
'nargs': '+',
'type': float,
'help': 'maximum abstention fraction allowed (per task)'},
{'name': 'alpha_scale_factor',
'nargs': '+',
'type': float,
'help': 'scaling factor for modifying alpha (per task)'},
{'name': 'init_abs_epoch',
'action': 'store',
'type': int,
'help': 'number of epochs to skip before modifying alpha'},
{'name': 'n_iters',
'action': 'store',
'type': int,
'help': 'number of iterations to iterate alpha'},
{'name': 'acc_gain',
'type': float,
'default': 5.0,
'help': 'factor to weight accuracy when determining new alpha scale'},
{'name': 'abs_gain',
'type': float,
'default': 1.0,
'help': 'factor to weight abstention fraction when determining new alpha scale'},
{'name': 'task_list',
'nargs': '+',
'type': int,
'help': 'list of task indices to use'},
{'name': 'task_names',
'nargs': '+',
'type': int,
'help': 'list of names corresponding to each task to use'},
]
def adjust_alpha(gParameters, X_test, truths_test, labels_val, model, alpha, add_index):
task_names = gParameters['task_names']
task_list = gParameters['task_list']
# retrieve truth-pred pair
avg_loss = 0.0
ret = []
ret_k = []
# set abstaining classifier parameters
max_abs = gParameters['max_abs']
min_acc = gParameters['min_acc']
alpha_scale_factor = gParameters['alpha_scale_factor']
# print('labels_test', labels_test)
# print('Add_index', add_index)
feature_test = X_test
# label_test = keras.utils.to_categorical(truths_test)
# loss = model.evaluate(feature_test, [label_test[0], label_test[1],label_test[2], label_test[3]])
loss = model.evaluate(feature_test, labels_val)
avg_loss = avg_loss + loss[0]
pred = model.predict(feature_test)
# print('pred',pred.shape, pred)
abs_gain = gParameters['abs_gain']
acc_gain = gParameters['acc_gain']
accs = []
abst = []
for k in range((alpha.shape[0])):
if k in task_list:
truth_test = truths_test[:, k]
alpha_k = K.eval(alpha[k])
pred_classes = pred[k].argmax(axis=-1)
# true_classes = labels_test[k].argmax(axis=-1)
true_classes = truth_test
# print('pred_classes',pred_classes.shape, pred_classes)
# print('true_classes',true_classes.shape, true_classes)
# print('labels',label_test.shape, label_test)
true = K.eval(K.sum(K.cast(K.equal(pred_classes, true_classes), 'int64')))
false = K.eval(K.sum(K.cast(K.not_equal(pred_classes, true_classes), 'int64')))
abstain = K.eval(K.sum(K.cast(K.equal(pred_classes, add_index[k] - 1), 'int64')))
print(true, false, abstain)
total = false + true
tot_pred = total - abstain
abs_acc = 0.0
abs_frac = abstain / total
if tot_pred > 0:
abs_acc = true / tot_pred
scale_k = alpha_scale_factor[k]
min_scale = scale_k
max_scale = 1. / scale_k
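# Scale alpha down when accuracy falls below min_acc and up when the
# abstention fraction exceeds max_abs; clamp the multiplicative step to
# [scale_k, 1/scale_k] to avoid large swings.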
acc_error = abs_acc - min_acc[k]
acc_error = min(acc_error, 0.0)
abs_error = abs_frac - max_abs[k]
abs_error = max(abs_error, 0.0)
new_scale = 1.0 + acc_gain * acc_error + abs_gain * abs_error
# threshold to avoid huge swings
new_scale = min(new_scale, max_scale)
new_scale = max(new_scale, min_scale)
print('Scaling factor: ', new_scale)
K.set_value(alpha[k], new_scale * alpha_k)
print_abs_stats(task_names[k], new_scale * alpha_k, true, false, abstain, max_abs[k])
ret_k.append(truth_test)
ret_k.append(pred)
ret.append(ret_k)
accs.append(abs_acc)
abst.append(abs_frac)
else:
accs.append(1.0)
abst.append(0.0)
write_abs_stats(gParameters['output_dir'] + 'abs_stats.csv', alpha, accs, abst)
return ret, alpha
def loss_param(alpha, mask):
def loss(y_true, y_pred):
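# Abstention loss: scale the usual cross-entropy by (1 - p_abs) and add
# an alpha-weighted penalty -log(1 - p_abs), where p_abs is the
# (axis-averaged) probability mass on the abstaining class; clipping
# keeps the log finite when the model abstains with certainty.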
cost = 0
base_pred = (1 - mask) * y_pred
# base_true = (1 - mask) * y_true
base_true = y_true
base_cost = K.sparse_categorical_crossentropy(base_true, base_pred)
abs_pred = K.mean(mask * (y_pred), axis=-1)
# add some small value to prevent NaN when prediction is abstained
abs_pred = K.clip(abs_pred, K.epsilon(), 1. - K.epsilon())
cost = (1. - abs_pred) * base_cost - (alpha) * K.log(1. - abs_pred)
return cost
return loss
def print_abs_stats(
task_name,
alpha,
num_true,
num_false,
num_abstain,
max_abs):
# Compute interesting values
total = num_true + num_false
tot_pred = total - num_abstain
abs_frac = num_abstain / total
abs_acc = 1.0
if tot_pred > 0:
abs_acc = num_true / tot_pred
print(' task, alpha, true, false, abstain, total, tot_pred, abs_frac, max_abs, abs_acc')
print('{:>12s}, {:10.5e}, {:8d}, {:8d}, {:8d}, {:8d}, {:8d}, {:10.5f}, {:10.5f}, {:10.5f}'
.format(task_name, alpha,
num_true, num_false - num_abstain, num_abstain, total,
tot_pred, abs_frac, max_abs, abs_acc))
def write_abs_stats(stats_file, alphas, accs, abst):
# Open file for appending
abs_file = open(stats_file, 'a')
# we write all the results
for k in range((alphas.shape[0])):
abs_file.write("%10.5e," % K.get_value(alphas[k]))
for k in range((alphas.shape[0])):
abs_file.write("%10.5e," % accs[k])
for k in range((alphas.shape[0])):
abs_file.write("%10.5e," % abst[k])
abs_file.write("\n")
|
ECP-CANDLE/Benchmarks
|
Pilot1/NT3/abstain_functions.py
|
Python
|
mit
| 6,278
|
nd<caret>
|
siosio/intellij-community
|
python/testData/completion/className/pythonSkeletonsVariantsNotSuggested/pythonSkeletonsVariantsNotSuggested.py
|
Python
|
apache-2.0
| 10
|
import requests
import time
while 1:
r = requests.put("http://localhost:3000/api/4", data={"temperature": 24, "led": 1})
print r.text
time.sleep(1)
|
phodal/iot-code
|
chapter5/test-post.py
|
Python
|
mit
| 160
|
# Download the Python helper library from twilio.com/docs/python/install
from twilio.rest import Client
# Your Account Sid and Auth Token from twilio.com/user/account
account_sid = "ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX"
auth_token = "your_auth_token"
client = Client(account_sid, auth_token)
token = client.tokens.create(ttl=3600)
print(token.username)
|
teoreteetik/api-snippets
|
rest/token/list-post-1-hour-example/list-post-1-hour-example.6.x.py
|
Python
|
mit
| 354
|
# coding: utf-8
from collections import namedtuple
from pandas.io.msgpack.exceptions import * # noqa
from pandas.io.msgpack._version import version # noqa
class ExtType(namedtuple("ExtType", "code data")):
"""ExtType represents ext type in msgpack."""
def __new__(cls, code, data):
if not isinstance(code, int):
raise TypeError("code must be int")
if not isinstance(data, bytes):
raise TypeError("data must be bytes")
if not 0 <= code <= 127:
raise ValueError("code must be 0~127")
return super().__new__(cls, code, data)
import os # noqa
from pandas.io.msgpack._packer import Packer # noqa
from pandas.io.msgpack._unpacker import unpack, unpackb, Unpacker # noqa
def pack(o, stream, **kwargs):
"""
Pack object `o` and write it to `stream`
See :class:`Packer` for options.
"""
packer = Packer(**kwargs)
stream.write(packer.pack(o))
def packb(o, **kwargs):
"""
Pack object `o` and return packed bytes
See :class:`Packer` for options.
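Example: ``packb([1, 2, 3])`` returns ``b'\x93\x01\x02\x03'``.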
"""
return Packer(**kwargs).pack(o)
# alias for compatibility to simplejson/marshal/pickle.
load = unpack
loads = unpackb
dump = pack
dumps = packb
|
toobaz/pandas
|
pandas/io/msgpack/__init__.py
|
Python
|
bsd-3-clause
| 1,223
|
from __future__ import absolute_import
import operator
from django.db import models
from django.db.models import Q
from django.db.models.signals import post_delete, post_save
from django.utils import timezone
from sentry.db.models import Model, sane_repr
from sentry.db.models.fields import FlexibleForeignKey, JSONField
from sentry.ownership.grammar import load_schema
from sentry.utils.cache import cache
from functools import reduce
READ_CACHE_DURATION = 3600
class ProjectOwnership(Model):
__core__ = True
project = FlexibleForeignKey("sentry.Project", unique=True)
raw = models.TextField(null=True)
schema = JSONField(null=True)
fallthrough = models.BooleanField(default=True)
auto_assignment = models.BooleanField(default=False)
date_created = models.DateTimeField(default=timezone.now)
last_updated = models.DateTimeField(default=timezone.now)
is_active = models.BooleanField(default=True)
# An object to indicate ownership is implicitly everyone
Everyone = object()
class Meta:
app_label = "sentry"
db_table = "sentry_projectownership"
__repr__ = sane_repr("project_id", "is_active")
@classmethod
def get_cache_key(cls, project_id):
return u"projectownership_project_id:1:{}".format(project_id)
@classmethod
def get_ownership_cached(cls, project_id):
"""
Cached read access to projectownership.
This method implements a negative cache which saves us
a pile of read queries in post_processing as most projects
don't have ownership rules.
See the post_save and post_delete signals below for additional
cache updates.
"""
cache_key = cls.get_cache_key(project_id)
ownership = cache.get(cache_key)
if ownership is None:
try:
ownership = cls.objects.get(project_id=project_id)
except cls.DoesNotExist:
ownership = False
cache.set(cache_key, ownership, READ_CACHE_DURATION)
return ownership or None
@classmethod
def get_owners(cls, project_id, data):
"""
Return the owners (and matching rules) for a given project_id and event data blob.
If Everyone is returned, this means we implicitly are
falling through our rules and everyone is responsible.
If an empty list is returned, this means there are explicitly
no owners.
"""
ownership = cls.get_ownership_cached(project_id)
if not ownership:
ownership = cls(project_id=project_id)
rules = cls._matching_ownership_rules(ownership, project_id, data)
if not rules:
return cls.Everyone if ownership.fallthrough else [], None
owners = {o for rule in rules for o in rule.owners}
owners_to_actors = resolve_actors(owners, project_id)
ordered_actors = []
for rule in rules:
for o in rule.owners:
if o in owners and owners_to_actors.get(o) is not None:
ordered_actors.append(owners_to_actors[o])
owners.remove(o)
return ordered_actors, rules
@classmethod
def get_autoassign_owner(cls, project_id, data):
"""
Get the auto-assign owner for a project, if any.
Returns None when no ownership rule matches, otherwise the resolved
owner of the most specific matching rule.
"""
ownership = cls.get_ownership_cached(project_id)
if not ownership or not ownership.auto_assignment:
return None
rules = cls._matching_ownership_rules(ownership, project_id, data)
if not rules:
return None
score = 0
owners = None
# Automatic assignment prefers the owner with the longest
# matching pattern as the match is more specific.
for rule in rules:
candidate = len(rule.matcher.pattern)
if candidate > score:
score = candidate
owners = rule.owners
actors = [_f for _f in resolve_actors(owners, project_id).values() if _f]
# Can happen if the ownership rule references a user/team that no longer
# is assigned to the project or has been removed from the org.
if not actors:
return None
return actors[0].resolve()
@classmethod
def _matching_ownership_rules(cls, ownership, project_id, data):
rules = []
if ownership.schema is not None:
for rule in load_schema(ownership.schema):
if rule.test(data):
rules.append(rule)
return rules
def resolve_actors(owners, project_id):
""" Convert a list of Owner objects into a dictionary
of {Owner: Actor} pairs. Actors not identified are returned
as None. """
from sentry.api.fields.actor import Actor
from sentry.models import User, Team
if not owners:
return {}
users, teams = [], []
owners_lookup = {}
for owner in owners:
# teams aren't technically case-insensitive, but team slugs are not
# allowed to contain non-lowercase characters, so lowercasing here
# works out correctly since mismatched cases would never match anyway
owners_lookup[(owner.type, owner.identifier.lower())] = owner
if owner.type == "user":
users.append(owner)
elif owner.type == "team":
teams.append(owner)
actors = {}
if users:
actors.update(
{
("user", email.lower()): Actor(u_id, User)
for u_id, email in User.objects.filter(
reduce(operator.or_, [Q(emails__email__iexact=o.identifier) for o in users]),
# We don't require verified emails
# emails__is_verified=True,
is_active=True,
sentry_orgmember_set__organizationmemberteam__team__projectteam__project_id=project_id,
)
.distinct()
.values_list("id", "emails__email")
}
)
if teams:
actors.update(
{
("team", slug): Actor(t_id, Team)
for t_id, slug in Team.objects.filter(
slug__in=[o.identifier for o in teams], projectteam__project_id=project_id
).values_list("id", "slug")
}
)
return {o: actors.get((o.type, o.identifier.lower())) for o in owners}
# Signals update the cached reads used in post_processing
post_save.connect(
lambda instance, **kwargs: cache.set(
ProjectOwnership.get_cache_key(instance.project_id), instance, READ_CACHE_DURATION
),
sender=ProjectOwnership,
weak=False,
)
post_delete.connect(
lambda instance, **kwargs: cache.set(
ProjectOwnership.get_cache_key(instance.project_id), False, READ_CACHE_DURATION
),
sender=ProjectOwnership,
weak=False,
)
|
beeftornado/sentry
|
src/sentry/models/projectownership.py
|
Python
|
bsd-3-clause
| 6,907
|
"""
Description of the video:
Mimic of Star Wars' opening title. A text with a (false)
perspective effect goes towards the end of space, on a
background made of stars. Slight fading effect on the text.
"""
import numpy as np
from skimage import transform as tf
from moviepy.editor import *
from moviepy.video.tools.drawing import color_gradient
# RESOLUTION
w = 720
h = w*9/16 # 16/9 screen
moviesize = w,h
# THE RAW TEXT
txt = "\n".join([
"A long time ago, in a faraway galaxy,",
"there lived a prince and a princess",
"who had never seen the stars, for they",
"lived deep underground.",
"",
"Many years before, the prince's",
"grandfather had ventured out to the",
"surface and had been burnt to ashes by",
"solar winds.",
"",
"One day, as the princess was coding",
"and the prince was shopping online, a",
"meteor landed just a few megameters",
"from the couple's flat."
])
# Add blanks
txt = 10*"\n" +txt + 10*"\n"
# CREATE THE TEXT IMAGE
clip_txt = TextClip(txt,color='white', align='West',fontsize=25,
font='Xolonium-Bold', method='label')
# SCROLL THE TEXT IMAGE BY CROPPING A MOVING AREA
txt_speed = 27
fl = lambda gf,t : gf(t)[int(txt_speed*t):int(txt_speed*t)+h,:]
moving_txt= clip_txt.fl(fl, apply_to=['mask'])
# ADD A VANISHING EFFECT ON THE TEXT WITH A GRADIENT MASK
grad = color_gradient(moving_txt.size,p1=(0,2*h/3),
p2=(0,h/4),col1=0.0,col2=1.0)
gradmask = ImageClip(grad,ismask=True)
fl = lambda pic : np.minimum(pic,gradmask.img)
moving_txt.mask = moving_txt.mask.fl_image(fl)
# WARP THE TEXT INTO A TRAPEZOID (PERSPECTIVE EFFECT)
def trapzWarp(pic,cx,cy,ismask=False):
""" Complicated function (will be latex packaged as a fx) """
Y,X = pic.shape[:2]
src = np.array([[0,0],[X,0],[X,Y],[0,Y]])
dst = np.array([[cx*X,cy*Y],[(1-cx)*X,cy*Y],[X,Y],[0,Y]])
tform = tf.ProjectiveTransform()
tform.estimate(src,dst)
im = tf.warp(pic, tform.inverse, output_shape=(Y,X))
return im if ismask else (im*255).astype('uint8')
fl_im = lambda pic : trapzWarp(pic,0.2,0.3)
fl_mask = lambda pic : trapzWarp(pic,0.2,0.3, ismask=True)
warped_txt= moving_txt.fl_image(fl_im)
warped_txt.mask = warped_txt.mask.fl_image(fl_mask)
# BACKGROUND IMAGE, DARKENED AT 60%
stars = ImageClip('../../videos/stars.jpg')
stars_darkened = stars.fl_image(lambda pic: (0.6*pic).astype('int16'))
# COMPOSE THE MOVIE
final = CompositeVideoClip([
stars, warped_txt.set_pos(('center','bottom'))],
size = moviesize)
# WRITE TO A FILE
final.set_duration(8).write_videofile("starworms.avi", fps=5)
# This script is heavy (30s of computations to render 8s of video)
"""=====================================================================
CODE FOR THE VIDEO TUTORIAL
We will now code the video tutorial for this video.
When you think about it, it is a code for a video explaining how to
make another video using some code (this is so meta !).
This code uses the variables of the previous code (it should be placed
after that previous code to work).
====================================================================="""
def annotate(clip,txt,txt_color='white',bg_color=(0,0,255)):
""" Writes a text at the bottom of the clip. """
txtclip = TextClip(txt, fontsize=20, font='Ubuntu-bold',
color=txt_color)
    txtclip = txtclip.on_color((clip.w,txtclip.h+6), color=bg_color,
                    pos=(6,'center'))
cvc = CompositeVideoClip([clip , txtclip.set_pos((0,'bottom'))])
return cvc.set_duration(clip.duration)
def resizeCenter(clip):
return clip.resize( height=h).set_pos('center')
def composeCenter(clip):
return CompositeVideoClip([clip.set_pos('center')],size=moviesize)
annotated_clips = [ annotate(clip,text) for clip,text in [
(composeCenter(resizeCenter(stars)).subclip(0,3),
"This is a public domain picture of stars"),
(CompositeVideoClip([stars],moviesize).subclip(0,3),
"We only keep one part."),
(CompositeVideoClip([stars_darkened],moviesize).subclip(0,3),
"We darken it a little."),
(composeCenter(resizeCenter(clip_txt)).subclip(0,3),
"We generate a text image."),
(composeCenter(moving_txt.set_mask(None)).subclip(6,9),
"We scroll the text by cropping a moving region of it."),
(composeCenter(gradmask.to_RGB()).subclip(0,2),
"We add this mask to the clip."),
(composeCenter(moving_txt).subclip(6,9),
"Here is the result"),
(composeCenter(warped_txt).subclip(6,9),
"We now warp this clip in a trapezoid."),
(final.subclip(6,9),
"We finally superimpose with the stars.")
]]
# Concatenate and write to a file
concatenate(annotated_clips).write_videofile('tutorial.avi', fps=5)
|
DevinGeo/moviepy
|
examples/star_worms.py
|
Python
|
mit
| 4,800
|
# yaranullin/game/tmx_wrapper.py
#
# Copyright (c) 2012 Marco Scopesi <marco.scopesi@gmail.com>
#
# Permission to use, copy, modify, and distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
import os
from xml.etree import ElementTree
from yaranullin.config import YR_SAVE_DIR
from yaranullin.event_system import post, connect
class ParseError(SyntaxError):
''' Error parsing tmx file '''
def _get_object_layer(tag, layer_name):
''' Get a layer of a tmx map '''
for objectgroup in tag.findall('objectgroup'):
if objectgroup.attrib['name'] == layer_name:
return objectgroup
def _get_property(tag, name):
''' Get a property of a tmx map '''
properties = tag.find('properties')
if properties is None:
raise KeyError("No child element 'properties' inside tag '%s'" %
repr(tag))
for prop in properties.findall('property'):
if prop.attrib['name'] == name:
return prop.attrib['value']
raise KeyError("Property '%s' is not available" % name)
def _set_property(tag, name, value):
''' Set a property of a tmx map '''
properties = tag.find('properties')
    if properties is None:  # 'not properties' is also True for an empty element
properties = ElementTree.Element('properties')
tag.append(properties)
properties.append(ElementTree.Element('property', name=name, value=value))
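# Illustrative shape of the XML handled by the property helpers above:
#   <properties>
#       <property name="initiative" value="12"/>
#   </properties>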
class TmxWrapper(object):
def __init__(self):
self._maps = {}
connect('game-event-pawn-moved', self.move_pawn)
def move_pawn(self, event_dict):
''' Change pawn position '''
bname = event_dict['bname']
pname = event_dict['pname']
pos = event_dict['pos']
try:
xml_board = self._maps[bname]
except KeyError:
# The board was not loaded from a file
return
pawn_layer = _get_object_layer(xml_board, 'pawns')
for pawn in pawn_layer.findall('object'):
name = pawn.attrib['name']
if name == pname:
                # ElementTree attribute values must be strings to serialize
                pawn.attrib['x'] = str(pos[0])
                pawn.attrib['y'] = str(pos[1])
def load_board_from_file(self, fname):
''' Load and return a board from a tmx file '''
complete_path = os.path.join(YR_SAVE_DIR, fname)
with open(complete_path) as tmx_file:
tmx_map = tmx_file.read()
bname = os.path.splitext(os.path.basename(fname))[0]
self.load_board_from_tmx(bname, tmx_map)
def load_board_from_tmx(self, bname, tmx_map):
''' Load and return a board from a string '''
try:
tmx_map = ElementTree.fromstring(tmx_map)
        except ElementTree.ParseError:
raise ParseError("Error parsing '%s'" % bname)
events = []
        # Save basic board attributes
size = int(tmx_map.attrib['width']), int(tmx_map.attrib['height'])
tilewidth = int(tmx_map.attrib['tilewidth'])
if tilewidth != int(tmx_map.attrib['tileheight']):
raise ParseError("tilewidth != tileheight: tiles must be square")
# Append a new board event
board_event = ('game-request-board-new', dict(name=bname, size=size))
events.append(board_event)
# Find pawn object group
pawn_layer = _get_object_layer(tmx_map, 'pawns')
if pawn_layer is not None:
for pawn in pawn_layer.findall('object'):
name = pawn.attrib['name']
# Minimum width and height must be 1
size = (max(int(pawn.attrib['width']) // tilewidth, 1),
max(int(pawn.attrib['height']) // tilewidth, 1))
pos = (int(pawn.attrib['x']) // tilewidth,
int(pawn.attrib['y']) // tilewidth)
try:
initiative = int(_get_property(pawn, 'initiative'))
except KeyError:
raise ParseError("Error parsing pawn '%s': missing "
"initiative value" % name)
new_pawn_event = ('game-request-pawn-new', dict(bname=bname,
pname=name, initiative=initiative, pos=pos,
size=size))
events.append(new_pawn_event)
# Now add the board to _maps
self._maps[bname] = tmx_map
for event in events:
post(event[0], event[1])
def get_tmx_board(self, bname):
        ''' Return a tmx version of the board '''
if bname in self._maps:
return ElementTree.tostring(self._maps[bname])
|
ciappi/Yaranullin
|
yaranullin/game/tmx_wrapper.py
|
Python
|
isc
| 5,096
|
RERUN_ERRORS = [
"can't receive further commands",
'Original error: Error: ESOCKETTIMEDOUT',
"The server didn't respond in time.",
'An unknown server-side error occurred while processing the command.',
'Could not proxy command to remote server. Original error: Error: socket hang up',
'The server returned an invalid or incomplete response.',
'502 Bad Gateway',
'Unexpected server error',
'504 Gateway Time-out',
'Internal Server Error',
'failed to start the browser or device',
'ERROR The test with session id',
"503 Service Unavailable",
"object has no attribute",
"[Errno 104] Connection reset by peer",
"Sauce could not start your job",
"HTTP Error 303",
"http.client.RemoteDisconnected: Remote end closed connection without response",
"[Errno 110] Connection timed out",
"replacement transaction underpriced",
"StaleElementReferenceException",
"'GetStartedButton' is not found on the screen",
"'AccessKeyButton' is not found on the screen",
"'SignInPhraseText' is not found on the screen"
]
def should_rerun_test(test_error):
for rerun_error in RERUN_ERRORS:
if rerun_error in test_error:
return True
return False
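# Minimal usage sketch (error strings are hypothetical):
#   should_rerun_test("Original error: Error: ESOCKETTIMEDOUT") -> True
#   should_rerun_test("assertion failed: wrong balance") -> False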
|
status-im/status-react
|
test/appium/support/test_rerun.py
|
Python
|
mpl-2.0
| 1,245
|
# wiener.py - functions related to the Wiener index of a graph
#
# Copyright 2015 NetworkX developers.
#
# This file is part of NetworkX.
#
# NetworkX is distributed under a BSD license; see LICENSE.txt for more
# information.
"""Functions related to the Wiener index of a graph."""
from __future__ import division
from itertools import chain
from .components import is_connected
from .components import is_strongly_connected
from .shortest_paths import shortest_path_length as spl
__all__ = ['wiener_index']
#: Rename the :func:`chain.from_iterable` function for the sake of
#: brevity.
chaini = chain.from_iterable
def wiener_index(G, weight=None):
"""Returns the Wiener index of the given graph.
The *Wiener index* of a graph is the sum of the shortest-path
distances between each pair of reachable nodes. For pairs of nodes
in undirected graphs, only one orientation of the pair is counted.
Parameters
----------
G : NetworkX graph
weight : object
The edge attribute to use as distance when computing
shortest-path distances. This is passed directly to the
:func:`networkx.shortest_path_length` function.
Returns
-------
float
The Wiener index of the graph `G`.
Raises
------
NetworkXError
If the graph `G` is not connected.
Notes
-----
If a pair of nodes is not reachable, the distance is assumed to be
infinity. This means that for graphs that are not
strongly-connected, this function returns ``inf``.
    The Wiener index is not usually defined for directed graphs; however,
    this function uses the natural generalization of the Wiener index to
    directed graphs.
Examples
--------
The Wiener index of the (unweighted) complete graph on *n* nodes
equals the number of pairs of the *n* nodes, since each pair of
nodes is at distance one::
>>> import networkx as nx
>>> n = 10
>>> G = nx.complete_graph(n)
>>> nx.wiener_index(G) == n * (n - 1) / 2
True
Graphs that are not strongly-connected have infinite Wiener index::
>>> G = nx.empty_graph(2)
>>> nx.wiener_index(G)
inf
"""
is_directed = G.is_directed()
if (is_directed and not is_strongly_connected(G)) or \
(not is_directed and not is_connected(G)):
return float('inf')
total = sum(chaini(p.values() for v, p in spl(G, weight=weight)))
# Need to account for double counting pairs of nodes in undirected graphs.
return total if is_directed else total / 2
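# For example, the path graph on three nodes has pairwise distances
# 1, 1 and 2, so wiener_index(nx.path_graph(3)) == 4.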
|
cmtm/networkx
|
networkx/algorithms/wiener.py
|
Python
|
bsd-3-clause
| 2,586
|
from django.conf import settings
from django.conf.urls import patterns, url
from django.core.exceptions import ImproperlyConfigured
from django.db.models.loading import get_models
extra_views_available = True
try:
from extra_views import InlineFormSet
except ImportError:
extra_views_available = False
from .forms import get_form_class
from .models import EasyCrudModel
from .views import ListView, CreateView, DetailView, UpdateView, DeleteView, EasyCrudFormsetMixin
if extra_views_available:
from .views import CreateWithInlinesView, UpdateWithInlinesView
from .utils import get_model_by_name
def easycrud_urlpatterns():
model_list = [m for m in get_models() if issubclass(m, EasyCrudModel)]
pattern_list = []
for model in model_list:
name = model.model_name.replace(' ', '')
url_list = []
url_list.append(url('^%s/$' % name, ListView.as_view(model=model), name='%s_list' % name))
url_list.append(url('^%s/(?P<pk>\d+)/$' % name, DetailView.as_view(model=model), name='%s_detail' % name))
if model._easycrud_meta.inline_models:
if 'dynamic_formset' not in settings.INSTALLED_APPS:
raise ImproperlyConfigured('The dynamic-formset app needs to be installed to use inline models')
if not extra_views_available:
raise ImproperlyConfigured('The extra-views app needs to be available to use inline models')
inlines = []
for inline in model._easycrud_meta.inline_models:
if isinstance(inline, dict):
model_name = inline['model']
attrs = inline.copy()
if 'form_class' in attrs and isinstance(attrs['form_class'], basestring):
attrs['form_class'] = get_form_class(attrs['form_class'])
else:
model_name = inline
attrs = {}
attrs['model'] = get_model_by_name(model_name)
attrs['extra'] = 0
inlines.append(type(model_name + 'Inline', (EasyCrudFormsetMixin, InlineFormSet), attrs))
if model.has_create:
url_list.append(url('^%s/create/$' % name, CreateWithInlinesView.as_view(model=model, inlines=inlines), name='%s_create' % name))
if model.has_update:
url_list.append(url('^%s/(?P<pk>\d+)/update/$' % name, UpdateWithInlinesView.as_view(model=model, inlines=inlines), name='%s_update' % name))
else:
if model.has_create:
url_list.append(url('^%s/create/$' % name, CreateView.as_view(model=model), name='%s_create' % name))
if model.has_update:
url_list.append(url('^%s/(?P<pk>\d+)/update/$' % name, UpdateView.as_view(model=model), name='%s_update' % name))
if model.has_delete:
url_list.append(url('^%s/(?P<pk>\d+)/delete/$' % name, DeleteView.as_view(model=model), name='%s_delete' % name))
pattern_list += patterns('', *url_list)
return pattern_list
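# For a model whose model_name is "book", the patterns above expose URL
# names such as book_list, book_detail, book_create, book_update and
# book_delete (illustrative; the actual names depend on each model).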
|
dekkers/django-easycrud
|
easycrud/urls.py
|
Python
|
bsd-2-clause
| 3,053
|
#! /usr/bin/env python
# -*- coding:Utf8 -*-
# Find the index of a given character in a string
def trouve(ch, car, deb=0):
    "finds the index of the character car in the string ch"
    i = deb
    while i < len(ch):
        if ch[i] == car:
            return i # character found -> stop
        i = i + 1
    return -1 # the whole string was scanned without success
# Test:
if __name__ == '__main__':
print(trouve("Coucou c'est moi", "z"))
print(trouve("Juliette & Roméo", "&"))
print(trouve("César & Cléopâtre", "r", 5))
|
widowild/messcripts
|
exercice/python3/solutions_exercices/exercice_10_03.py
|
Python
|
gpl-3.0
| 583
|
from django.views.generic.detail import DetailView
from django.views.generic.edit import UpdateView, DeleteView
from catalog.views.base import GenericListView, GenericCreateView
from catalog.models import Astronaut, CrewedMission
from catalog.forms import AstronautForm
from catalog.filters import AstronautFilter
from django.core.urlresolvers import reverse_lazy
from django.core.urlresolvers import reverse
from django.http import Http404
class AstronautListView(GenericListView):
model = Astronaut
f = AstronautFilter
display_data = ('organization', 'nationality', 'birth_date')
class AstronautDetailView(DetailView):
model = Astronaut
template_name = "catalog/astronaut_detail.html"
class AstronautCreateView(GenericCreateView):
model = Astronaut
form_class = AstronautForm
success_url = reverse_lazy("astronaut_list")
def form_valid(self, form):
obj = form.save(commit=False)
obj.creator = self.request.user
obj.save()
        return super(AstronautCreateView, self).form_valid(form)
def get_success_url(self):
return reverse("astronaut_detail", args=(self.object.pk,))
class AstronautUpdateView(UpdateView):
model = Astronaut
form_class = AstronautForm
template_name = "catalog/generic_update.html"
initial = {}
def form_valid(self, form):
obj = form.save(commit=False)
obj.modifier = self.request.user
obj.save()
return super(AstronautUpdateView, self).form_valid(form)
def get_success_url(self):
return reverse("astronaut_detail", args=(self.object.pk,))
class AstronautDeleteView(DeleteView):
model = Astronaut
template_name = "catalog/generic_delete.html"
success_url = reverse_lazy("astronaut_list")
|
Hattivat/hypergolic-django
|
hypergolic/catalog/views/astronaut_views.py
|
Python
|
agpl-3.0
| 1,773
|
# -*- coding: utf-8 -*-
"""
:copyright: (c) 2014 by Openlabs Technologies & Consulting (P) Limited
:license: BSD, see LICENSE for more details.
"""
from trytond.model import ModelSQL, ModelView, fields
from trytond.pool import PoolMeta, Pool
from trytond.pyson import Eval, Bool
from trytond.transaction import Transaction
__all__ = ['Carrier', 'CarrierZonePriceList']
__metaclass__ = PoolMeta
class Carrier:
__name__ = 'carrier'
zone_currency = fields.Many2One(
'currency.currency', 'Currency',
states={
'invisible': Eval('carrier_cost_method') != 'zone',
'required': Eval('carrier_cost_method') == 'zone',
'readonly': Bool(Eval('zone_price_list', [])),
},
depends=['carrier_cost_method', 'zone_price_list'])
zone_price_list = fields.One2Many(
'carrier.zone_price_list', 'carrier',
'Price List',
states={
'invisible': Eval('carrier_cost_method') != 'zone',
},
depends=['carrier_cost_method'])
zone_currency_digits = fields.Function(
fields.Integer(
'Zone Currency Digits', on_change_with=['zone_currency']
), 'on_change_with_zone_currency_digits'
)
@classmethod
def __setup__(cls):
super(Carrier, cls).__setup__()
selection = ('zone', 'Zone')
if selection not in cls.carrier_cost_method.selection:
cls.carrier_cost_method.selection.append(selection)
def on_change_with_zone_currency_digits(self, name=None):
if self.zone_currency:
return self.zone_currency.digits
return 2
def get_sale_price(self):
Address = Pool().get('party.address')
ZonePriceList = Pool().get('carrier.zone_price_list')
price, currency_id = super(Carrier, self).get_sale_price()
if self.carrier_cost_method == 'zone':
zone = None
if 'address' in Transaction().context:
zone = self.find_zone_for_address(
Address(Transaction().context['address'])
)
elif 'zone' in Transaction().context:
zone, = ZonePriceList.search([
('carrier', '=', self.id),
('id', '=', Transaction().context['zone']),
])
if zone is not None:
return zone.price, self.zone_currency.id
return price, currency_id
def get_purchase_price(self):
Address = Pool().get('party.address')
ZonePriceList = Pool().get('carrier.zone_price_list')
price, currency_id = super(Carrier, self).get_purchase_price()
if self.carrier_cost_method == 'zone':
zone = None
if 'address' in Transaction().context:
zone = self.find_zone_for_address(
Address(Transaction().context['address'])
)
elif 'zone' in Transaction().context:
zone, = ZonePriceList.search([
('carrier', '=', self.id),
('id', '=', Transaction().context['zone']),
])
if zone is not None:
return zone.price, self.zone_currency.id
return price, currency_id
def find_zone_for_address(self, address):
"""
        A helper function that finds the best matching zone for the given
        address.
:param address: Active Record of the address
:return: Active Record of the zone_price_list
"""
CarrierZone = Pool().get('carrier.zone_price_list')
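        # Try an exact (country, subdivision) match first, then fall back
        # to a country-wide zone with no subdivision.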
zones = CarrierZone.search([
('country', '=', address.country),
('subdivision', '=', address.subdivision),
], limit=1)
if not zones:
zones = CarrierZone.search([
('country', '=', address.country),
('subdivision', '=', None),
], limit=1)
if zones:
return zones[0]
class CarrierZonePriceList(ModelSQL, ModelView):
    'Carrier Zone Price List'
__name__ = 'carrier.zone_price_list'
carrier = fields.Many2One('carrier', 'Carrier', required=True, select=True)
country = fields.Many2One(
'country.country', 'Country', required=True, select=True
)
subdivision = fields.Many2One(
'country.subdivision', 'Subdivision', select=True,
domain=[('country', '=', Eval('country'))],
depends=['country']
)
price = fields.Numeric(
'Price', required=True,
digits=(16, Eval('_parent_carrier.weight_currency_digits', 2))
)
# TODO add a sequence and order by sequence
|
openlabs/trytond-carrier-zone
|
carrier.py
|
Python
|
bsd-3-clause
| 4,654
|
"""
This python script adds a new gdb command, "dump-guest-memory". It
should be loaded with "source dump-guest-memory.py" at the (gdb)
prompt.
Copyright (C) 2013, Red Hat, Inc.
Authors:
Laszlo Ersek <lersek@redhat.com>
Janosch Frank <frankja@linux.vnet.ibm.com>
This work is licensed under the terms of the GNU GPL, version 2 or later. See
the COPYING file in the top-level directory.
"""
import ctypes
UINTPTR_T = gdb.lookup_type("uintptr_t")
TARGET_PAGE_SIZE = 0x1000
TARGET_PAGE_MASK = 0xFFFFFFFFFFFFF000
# Special value for e_phnum. This indicates that the real number of
# program headers is too large to fit into e_phnum. Instead the real
# value is in the field sh_info of section 0.
PN_XNUM = 0xFFFF
EV_CURRENT = 1
ELFCLASS32 = 1
ELFCLASS64 = 2
ELFDATA2LSB = 1
ELFDATA2MSB = 2
ET_CORE = 4
PT_LOAD = 1
PT_NOTE = 4
EM_386 = 3
EM_PPC = 20
EM_PPC64 = 21
EM_S390 = 22
EM_AARCH = 183
EM_X86_64 = 62
class ELF(object):
"""Representation of a ELF file."""
def __init__(self, arch):
self.ehdr = None
self.notes = []
self.segments = []
self.notes_size = 0
self.endianness = None
self.elfclass = ELFCLASS64
if arch == 'aarch64-le':
self.endianness = ELFDATA2LSB
self.elfclass = ELFCLASS64
self.ehdr = get_arch_ehdr(self.endianness, self.elfclass)
self.ehdr.e_machine = EM_AARCH
elif arch == 'aarch64-be':
self.endianness = ELFDATA2MSB
self.ehdr = get_arch_ehdr(self.endianness, self.elfclass)
self.ehdr.e_machine = EM_AARCH
elif arch == 'X86_64':
self.endianness = ELFDATA2LSB
self.ehdr = get_arch_ehdr(self.endianness, self.elfclass)
self.ehdr.e_machine = EM_X86_64
elif arch == '386':
self.endianness = ELFDATA2LSB
self.elfclass = ELFCLASS32
self.ehdr = get_arch_ehdr(self.endianness, self.elfclass)
self.ehdr.e_machine = EM_386
elif arch == 's390':
self.endianness = ELFDATA2MSB
self.ehdr = get_arch_ehdr(self.endianness, self.elfclass)
self.ehdr.e_machine = EM_S390
elif arch == 'ppc64-le':
self.endianness = ELFDATA2LSB
self.ehdr = get_arch_ehdr(self.endianness, self.elfclass)
self.ehdr.e_machine = EM_PPC64
elif arch == 'ppc64-be':
self.endianness = ELFDATA2MSB
self.ehdr = get_arch_ehdr(self.endianness, self.elfclass)
self.ehdr.e_machine = EM_PPC64
else:
raise gdb.GdbError("No valid arch type specified.\n"
"Currently supported types:\n"
"aarch64-be, aarch64-le, X86_64, 386, s390, "
"ppc64-be, ppc64-le")
self.add_segment(PT_NOTE, 0, 0)
def add_note(self, n_name, n_desc, n_type):
"""Adds a note to the ELF."""
note = get_arch_note(self.endianness, len(n_name), len(n_desc))
note.n_namesz = len(n_name) + 1
note.n_descsz = len(n_desc)
note.n_name = n_name.encode()
note.n_type = n_type
# Desc needs to be 4 byte aligned (although the 64bit spec
# specifies 8 byte). When defining n_desc as uint32 it will be
# automatically aligned but we need the memmove to copy the
# string into it.
ctypes.memmove(note.n_desc, n_desc.encode(), len(n_desc))
self.notes.append(note)
self.segments[0].p_filesz += ctypes.sizeof(note)
self.segments[0].p_memsz += ctypes.sizeof(note)
def add_segment(self, p_type, p_paddr, p_size):
"""Adds a segment to the elf."""
phdr = get_arch_phdr(self.endianness, self.elfclass)
phdr.p_type = p_type
phdr.p_paddr = p_paddr
phdr.p_filesz = p_size
phdr.p_memsz = p_size
self.segments.append(phdr)
self.ehdr.e_phnum += 1
def to_file(self, elf_file):
"""Writes all ELF structures to the the passed file.
Structure:
Ehdr
Segment 0:PT_NOTE
Segment 1:PT_LOAD
Segment N:PT_LOAD
Note 0..N
Dump contents
"""
elf_file.write(self.ehdr)
off = ctypes.sizeof(self.ehdr) + \
len(self.segments) * ctypes.sizeof(self.segments[0])
for phdr in self.segments:
phdr.p_offset = off
elf_file.write(phdr)
off += phdr.p_filesz
for note in self.notes:
elf_file.write(note)
def get_arch_note(endianness, len_name, len_desc):
"""Returns a Note class with the specified endianness."""
if endianness == ELFDATA2LSB:
superclass = ctypes.LittleEndianStructure
else:
superclass = ctypes.BigEndianStructure
len_name = len_name + 1
class Note(superclass):
"""Represents an ELF note, includes the content."""
_fields_ = [("n_namesz", ctypes.c_uint32),
("n_descsz", ctypes.c_uint32),
("n_type", ctypes.c_uint32),
("n_name", ctypes.c_char * len_name),
("n_desc", ctypes.c_uint32 * ((len_desc + 3) // 4))]
return Note()
class Ident(ctypes.Structure):
"""Represents the ELF ident array in the ehdr structure."""
_fields_ = [('ei_mag0', ctypes.c_ubyte),
('ei_mag1', ctypes.c_ubyte),
('ei_mag2', ctypes.c_ubyte),
('ei_mag3', ctypes.c_ubyte),
('ei_class', ctypes.c_ubyte),
('ei_data', ctypes.c_ubyte),
('ei_version', ctypes.c_ubyte),
('ei_osabi', ctypes.c_ubyte),
('ei_abiversion', ctypes.c_ubyte),
('ei_pad', ctypes.c_ubyte * 7)]
def __init__(self, endianness, elfclass):
self.ei_mag0 = 0x7F
self.ei_mag1 = ord('E')
self.ei_mag2 = ord('L')
self.ei_mag3 = ord('F')
self.ei_class = elfclass
self.ei_data = endianness
self.ei_version = EV_CURRENT
def get_arch_ehdr(endianness, elfclass):
"""Returns a EHDR64 class with the specified endianness."""
if endianness == ELFDATA2LSB:
superclass = ctypes.LittleEndianStructure
else:
superclass = ctypes.BigEndianStructure
class EHDR64(superclass):
"""Represents the 64 bit ELF header struct."""
_fields_ = [('e_ident', Ident),
('e_type', ctypes.c_uint16),
('e_machine', ctypes.c_uint16),
('e_version', ctypes.c_uint32),
('e_entry', ctypes.c_uint64),
('e_phoff', ctypes.c_uint64),
('e_shoff', ctypes.c_uint64),
('e_flags', ctypes.c_uint32),
('e_ehsize', ctypes.c_uint16),
('e_phentsize', ctypes.c_uint16),
('e_phnum', ctypes.c_uint16),
('e_shentsize', ctypes.c_uint16),
('e_shnum', ctypes.c_uint16),
('e_shstrndx', ctypes.c_uint16)]
def __init__(self):
super(superclass, self).__init__()
self.e_ident = Ident(endianness, elfclass)
self.e_type = ET_CORE
self.e_version = EV_CURRENT
self.e_ehsize = ctypes.sizeof(self)
self.e_phoff = ctypes.sizeof(self)
self.e_phentsize = ctypes.sizeof(get_arch_phdr(endianness, elfclass))
self.e_phnum = 0
class EHDR32(superclass):
"""Represents the 32 bit ELF header struct."""
_fields_ = [('e_ident', Ident),
('e_type', ctypes.c_uint16),
('e_machine', ctypes.c_uint16),
('e_version', ctypes.c_uint32),
('e_entry', ctypes.c_uint32),
('e_phoff', ctypes.c_uint32),
('e_shoff', ctypes.c_uint32),
('e_flags', ctypes.c_uint32),
('e_ehsize', ctypes.c_uint16),
('e_phentsize', ctypes.c_uint16),
('e_phnum', ctypes.c_uint16),
('e_shentsize', ctypes.c_uint16),
('e_shnum', ctypes.c_uint16),
('e_shstrndx', ctypes.c_uint16)]
def __init__(self):
super(superclass, self).__init__()
self.e_ident = Ident(endianness, elfclass)
self.e_type = ET_CORE
self.e_version = EV_CURRENT
self.e_ehsize = ctypes.sizeof(self)
self.e_phoff = ctypes.sizeof(self)
self.e_phentsize = ctypes.sizeof(get_arch_phdr(endianness, elfclass))
self.e_phnum = 0
# End get_arch_ehdr
if elfclass == ELFCLASS64:
return EHDR64()
else:
return EHDR32()
def get_arch_phdr(endianness, elfclass):
"""Returns a 32 or 64 bit PHDR class with the specified endianness."""
if endianness == ELFDATA2LSB:
superclass = ctypes.LittleEndianStructure
else:
superclass = ctypes.BigEndianStructure
class PHDR64(superclass):
"""Represents the 64 bit ELF program header struct."""
_fields_ = [('p_type', ctypes.c_uint32),
('p_flags', ctypes.c_uint32),
('p_offset', ctypes.c_uint64),
('p_vaddr', ctypes.c_uint64),
('p_paddr', ctypes.c_uint64),
('p_filesz', ctypes.c_uint64),
('p_memsz', ctypes.c_uint64),
('p_align', ctypes.c_uint64)]
class PHDR32(superclass):
"""Represents the 32 bit ELF program header struct."""
_fields_ = [('p_type', ctypes.c_uint32),
('p_offset', ctypes.c_uint32),
('p_vaddr', ctypes.c_uint32),
('p_paddr', ctypes.c_uint32),
('p_filesz', ctypes.c_uint32),
('p_memsz', ctypes.c_uint32),
('p_flags', ctypes.c_uint32),
('p_align', ctypes.c_uint32)]
# End get_arch_phdr
if elfclass == ELFCLASS64:
return PHDR64()
else:
return PHDR32()
def int128_get64(val):
"""Returns low 64bit part of Int128 struct."""
assert val["hi"] == 0
return val["lo"]
def qlist_foreach(head, field_str):
"""Generator for qlists."""
var_p = head["lh_first"]
while var_p != 0:
var = var_p.dereference()
var_p = var[field_str]["le_next"]
yield var
def qemu_get_ram_block(ram_addr):
"""Returns the RAMBlock struct to which the given address belongs."""
ram_blocks = gdb.parse_and_eval("ram_list.blocks")
for block in qlist_foreach(ram_blocks, "next"):
if (ram_addr - block["offset"]) < block["used_length"]:
return block
raise gdb.GdbError("Bad ram offset %x" % ram_addr)
def qemu_get_ram_ptr(ram_addr):
"""Returns qemu vaddr for given guest physical address."""
block = qemu_get_ram_block(ram_addr)
return block["host"] + (ram_addr - block["offset"])
def memory_region_get_ram_ptr(memory_region):
if memory_region["alias"] != 0:
return (memory_region_get_ram_ptr(memory_region["alias"].dereference())
+ memory_region["alias_offset"])
return qemu_get_ram_ptr(memory_region["ram_block"]["offset"])
def get_guest_phys_blocks():
"""Returns a list of ram blocks.
Each block entry contains:
'target_start': guest block phys start address
'target_end': guest block phys end address
'host_addr': qemu vaddr of the block's start
"""
guest_phys_blocks = []
print("guest RAM blocks:")
print("target_start target_end host_addr message "
"count")
print("---------------- ---------------- ---------------- ------- "
"-----")
current_map_p = gdb.parse_and_eval("address_space_memory.current_map")
current_map = current_map_p.dereference()
# Conversion to int is needed for python 3
# compatibility. Otherwise range doesn't cast the value itself and
# breaks.
for cur in range(int(current_map["nr"])):
flat_range = (current_map["ranges"] + cur).dereference()
memory_region = flat_range["mr"].dereference()
# we only care about RAM
if not memory_region["ram"]:
continue
section_size = int128_get64(flat_range["addr"]["size"])
target_start = int128_get64(flat_range["addr"]["start"])
target_end = target_start + section_size
host_addr = (memory_region_get_ram_ptr(memory_region)
+ flat_range["offset_in_region"])
predecessor = None
# find continuity in guest physical address space
if len(guest_phys_blocks) > 0:
predecessor = guest_phys_blocks[-1]
predecessor_size = (predecessor["target_end"] -
predecessor["target_start"])
# the memory API guarantees monotonically increasing
# traversal
assert predecessor["target_end"] <= target_start
# we want continuity in both guest-physical and
# host-virtual memory
if (predecessor["target_end"] < target_start or
predecessor["host_addr"] + predecessor_size != host_addr):
predecessor = None
if predecessor is None:
# isolated mapping, add it to the list
guest_phys_blocks.append({"target_start": target_start,
"target_end": target_end,
"host_addr": host_addr})
message = "added"
else:
# expand predecessor until @target_end; predecessor's
# start doesn't change
predecessor["target_end"] = target_end
message = "joined"
print("%016x %016x %016x %-7s %5u" %
(target_start, target_end, host_addr.cast(UINTPTR_T),
message, len(guest_phys_blocks)))
return guest_phys_blocks
# The leading docstring doesn't have idiomatic Python formatting. It is
# printed by gdb's "help" command (the first line is printed in the
# "help data" summary), and it should match how other help texts look in
# gdb.
class DumpGuestMemory(gdb.Command):
"""Extract guest vmcore from qemu process coredump.
The two required arguments are FILE and ARCH:
FILE identifies the target file to write the guest vmcore to.
ARCH specifies the architecture for which the core will be generated.
This GDB command reimplements the dump-guest-memory QMP command in
python, using the representation of guest memory as captured in the qemu
coredump. The qemu process that has been dumped must have had the
command line option "-machine dump-guest-core=on", which is the default.
For simplicity, the "paging", "begin" and "end" parameters of the QMP
command are not supported -- no attempt is made to get the guest's
internal paging structures (ie. paging=false is hard-wired), and guest
memory is always fully dumped.
Currently aarch64-be, aarch64-le, X86_64, 386, s390, ppc64-be,
ppc64-le guests are supported.
The CORE/NT_PRSTATUS and QEMU notes (that is, the VCPUs' statuses) are
not written to the vmcore. Preparing these would require context that is
only present in the KVM host kernel module when the guest is alive. A
fake ELF note is written instead, only to keep the ELF parser of "crash"
happy.
Depending on how busted the qemu process was at the time of the
coredump, this command might produce unpredictable results. If qemu
deliberately called abort(), or it was dumped in response to a signal at
a halfway fortunate point, then its coredump should be in reasonable
shape and this command should mostly work."""
def __init__(self):
super(DumpGuestMemory, self).__init__("dump-guest-memory",
gdb.COMMAND_DATA,
gdb.COMPLETE_FILENAME)
self.elf = None
self.guest_phys_blocks = None
def dump_init(self, vmcore):
"""Prepares and writes ELF structures to core file."""
# Needed to make crash happy, data for more useful notes is
# not available in a qemu core.
self.elf.add_note("NONE", "EMPTY", 0)
# We should never reach PN_XNUM for paging=false dumps,
# there's just a handful of discontiguous ranges after
# merging.
# The constant is needed to account for the PT_NOTE segment.
phdr_num = len(self.guest_phys_blocks) + 1
assert phdr_num < PN_XNUM
for block in self.guest_phys_blocks:
block_size = block["target_end"] - block["target_start"]
self.elf.add_segment(PT_LOAD, block["target_start"], block_size)
self.elf.to_file(vmcore)
def dump_iterate(self, vmcore):
"""Writes guest core to file."""
qemu_core = gdb.inferiors()[0]
for block in self.guest_phys_blocks:
cur = block["host_addr"]
left = block["target_end"] - block["target_start"]
print("dumping range at %016x for length %016x" %
(cur.cast(UINTPTR_T), left))
while left > 0:
chunk_size = min(TARGET_PAGE_SIZE, left)
chunk = qemu_core.read_memory(cur, chunk_size)
vmcore.write(chunk)
cur += chunk_size
left -= chunk_size
def invoke(self, args, from_tty):
"""Handles command invocation from gdb."""
# Unwittingly pressing the Enter key after the command should
# not dump the same multi-gig coredump to the same file.
self.dont_repeat()
argv = gdb.string_to_argv(args)
if len(argv) != 2:
raise gdb.GdbError("usage: dump-guest-memory FILE ARCH")
self.elf = ELF(argv[1])
self.guest_phys_blocks = get_guest_phys_blocks()
with open(argv[0], "wb") as vmcore:
self.dump_init(vmcore)
self.dump_iterate(vmcore)
DumpGuestMemory()
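# Typical session (illustrative):
#   (gdb) source dump-guest-memory.py
#   (gdb) dump-guest-memory /tmp/vmcore X86_64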
|
afaerber/qemu-cpu
|
scripts/dump-guest-memory.py
|
Python
|
gpl-2.0
| 18,166
|
from pyjamas.ui.Sink import Sink, SinkInfo
from pyjamas.ui.Image import Image
from pyjamas.ui.HTML import HTML
from pyjamas.ui.VerticalPanel import VerticalPanel
from pyjamas.ui.HorizontalPanel import HorizontalPanel
from pyjamas.ui.RootPanel import RootPanel
from pyjamas.Canvas2D import Canvas, CanvasImage, ImageLoadListener
from pyjamas.Timer import Timer
from math import floor, cos, sin
import time
class CanvasTab(Sink):
def __init__(self):
Sink.__init__(self)
colour_grid = ColourGridCanvas()
rotated = RotatedCanvas()
spheres = SpheresCanvas()
pattern = PatternCanvas()
spiro = SpiroCanvas()
self.solar = SolarCanvas()
row0 = HorizontalPanel()
row0.setSpacing(8)
row0.add(colour_grid)
row0.add(rotated)
row0.add(spheres)
row0.add(pattern)
row1 = HorizontalPanel()
row1.setSpacing(8)
row1.add(self.solar)
row1.add(spiro)
panel = VerticalPanel()
panel.add(row0)
panel.add(row1)
self.setWidget(panel)
def onShow(self):
self.solar.isActive = True
self.solar.onTimer()
def onHide(self):
self.solar.isActive = False
def init():
text="""
<b>Canvas vector drawing component: Canvas2D</b>
<p>Gives python access to the browser's native canvas tag.
<p>There are two canvas drawing libraries: Canvas2D (oldest) and Canvas
(preferred). Both work in modern browsers (that support canvas). For IE
the canvas functionality is emulated in javascript. This leads to
lower performance in IE and missing/broken functionality. Canvas2D
depends on <a href=\"http://excanvas.sourceforge.net\">Explorer Canvas</a>
in IE.
<p>Originally by Alexei Sokolov at <a href=\"http://gwt.components.googlepages.com\">gwt.components.googlepages.com</a>
<br>Samples ported from the <a href=\"http://developer.mozilla.org/en/docs/Canvas_tutorial\">Mozilla canvas tutorial</a>
"""
return SinkInfo("Canvas2D", text, CanvasTab)
class ColourGridCanvas(Canvas):
def __init__(self):
Canvas.__init__(self, 150, 150)
self.draw()
self.addMouseListener(self)
self.addKeyboardListener(self)
def draw(self):
for i in range(0, 6):
for j in range(0, 6):
self.context.fillStyle = u'rgb(%d,%d,0)' % \
( floor(255-42.5*i), floor(255-42.5*j))
self.context.fillRect(j*25,i*25,25,25)
def onMouseDown(self, sender, x, y):
pass
def onMouseEnter(self, sender):
RootPanel().add(HTML("mouseenter: setting focus (keyboard input accepted)"))
self.setFocus(True)
def onMouseLeave(self, sender):
RootPanel().add(HTML("mouseleave: clearing focus (keyboard input not accepted)"))
self.setFocus(False)
def onMouseMove(self, sender, x, y):
RootPanel().add(HTML("move: x %d " % x + "y %d" % y))
def onMouseUp(self, sender, x, y):
pass
def onKeyUp(self, sender, keyCode, modifiers):
RootPanel().add(HTML("keyup: %s" % keyCode))
def onKeyDown(self, sender, keyCode, modifiers):
RootPanel().add(HTML("keydown: %s" % keyCode))
def onClick(self, sender):
RootPanel().add(HTML("click"))
def onKeyPress(self, sender, keyCode, modifiers):
RootPanel().add(HTML("keypressed: %s" % keyCode))
class RotatedCanvas(Canvas):
def __init__(self):
Canvas.__init__(self, 150, 150)
self.context.translate(75,75)
self.draw()
def draw(self):
pi = 3.14159265358979323
# Loop through rings (from inside to out)
for i in range(1,6):
self.context.save()
self.context.fillStyle = 'rgb(%d,%d,255)'%((51*i), (255-51*i))
# draw individual dots
for j in range(0,i*6):
self.context.rotate(pi*2/(i*6))
self.context.beginPath()
self.context.arc(0,i*12.5,5,0,pi*2,True)
self.context.fill()
self.context.restore()
class SpheresCanvas(Canvas):
def __init__(self):
Canvas.__init__(self, 150, 150)
self.draw()
def draw(self):
# create gradients
radgrad = self.context.createRadialGradient(45,45,10,52,50,30)
radgrad.addColorStop(0, '#A7D30C')
radgrad.addColorStop(0.9, '#019F62')
radgrad.addColorStop(1, 'rgba(1,159,98,0)')
radgrad2 = self.context.createRadialGradient(105,105,20,112,120,50)
radgrad2.addColorStop(0, '#FF5F98')
radgrad2.addColorStop(0.75, '#FF0188')
radgrad2.addColorStop(1, 'rgba(255,1,136,0)')
radgrad3 = self.context.createRadialGradient(95,15,15,102,20,40)
radgrad3.addColorStop(0, '#00C9FF')
radgrad3.addColorStop(0.8, '#00B5E2')
radgrad3.addColorStop(1, 'rgba(0,201,255,0)')
radgrad4 = self.context.createRadialGradient(0,150,50,0,140,90)
radgrad4.addColorStop(0, '#F4F201')
radgrad4.addColorStop(0.8, '#E4C700')
radgrad4.addColorStop(1, 'rgba(228,199,0,0)')
# draw shapes
self.context.fillStyle = radgrad4
self.context.fillRect(0,0,150,150)
self.context.fillStyle = radgrad3
self.context.fillRect(0,0,150,150)
self.context.fillStyle = radgrad2
self.context.fillRect(0,0,150,150)
self.context.fillStyle = radgrad
self.context.fillRect(0,0,150,150)
class PatternCanvas(Canvas):
def __init__(self):
Canvas.__init__(self, 150, 150)
self.img = CanvasImage('images/wallpaper.png', self)
def onLoad(self, sender=None):
if sender==self.img:
self.draw()
def onError(self):
pass
def draw(self):
ptrn = self.context.createPattern(self.img.getElement(), 'repeat')
self.context.fillStyle = ptrn
self.context.fillRect(0,0,200,200)
class SpiroCanvas(Canvas):
def __init__(self):
Canvas.__init__(self, 300, 300)
self.draw()
def draw(self):
self.context.fillRect(0,0,300,300)
for i in range(0, 3):
for j in range(0, 3):
self.context.save()
self.context.strokeStyle = "#9CFF00"
self.context.translate(50+j*100,50+i*100)
self.drawSpirograph(20*(j+2)/(j+1),-8*(i+3)/(i+1),10)
self.context.restore()
def drawSpirograph(self, R, r, O):
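        # Spirograph-style trochoid: the path of a pen offset O from the
        # centre of a circle of radius r rolling around a circle of
        # radius R (r may be negative for the inside-rolling case).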
pi = 3.14159265358979323
x1 = R-O
y1 = 0
x2 = -1
y2 = -1
i = 1
self.context.beginPath()
self.context.moveTo(x1,y1)
while x2 != R-O and y2 != 0:
if i>20000:
break
x2 = (R+r)*cos(i*pi/72) - (r+O)*cos(((R+r)/r)*(i*pi/72))
y2 = (R+r)*sin(i*pi/72) - (r+O)*sin(((R+r)/r)*(i*pi/72))
self.context.lineTo(x2,y2)
x1 = x2
y1 = y2
i+=1
self.context.stroke()
class SolarCanvas(Canvas):
def __init__(self):
Canvas.__init__(self, 300, 300)
self.sun = CanvasImage('images/sun.png')
self.moon = CanvasImage('images/moon.png')
self.earth = CanvasImage('images/earth.png')
self.loader = ImageLoadListener()
self.loader.add(self.sun)
self.loader.add(self.moon)
self.loader.add(self.earth)
self.isActive = True
self.onTimer()
def onTimer(self, t=None):
if not self.isActive:
return
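        # re-arm a 100 ms timer; pyjamas notifies this object (onTimer)
        # when it fires, driving the animation loop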
Timer(100, self)
self.draw()
def getTimeSeconds(self):
return time.time() % 60
def getTimeMilliseconds(self):
        return (time.time() * 1000.0) % 1000.0  # ms within the current second
def draw(self):
pi = 3.14159265358979323
if not self.loader.isLoaded():
return
self.context.globalCompositeOperation = 'destination-over'
# clear canvas
self.context.clearRect(0,0,300,300)
self.context.save()
self.context.fillStyle = 'rgba(0,0,0,0.4)'
self.context.strokeStyle = 'rgba(0,153,255,0.4)'
self.context.translate(150,150)
# Earth
self.context.rotate( ((2*pi)/60)*self.getTimeSeconds() + ((2*pi)/60000)*self.getTimeMilliseconds() )
self.context.translate(105,0)
self.context.fillRect(0,-12,50,24) # Shadow
self.context.drawImage(self.earth.getElement() ,-12,-12)
# Moon
self.context.save()
self.context.rotate( ((2*pi)/6)*self.getTimeSeconds() + ((2*pi)/6000)*self.getTimeMilliseconds() )
self.context.translate(0,28.5)
self.context.drawImage(self.moon.getElement(),-3.5,-3.5)
self.context.restore()
self.context.restore()
self.context.beginPath()
self.context.arc(150,150,105,0,pi*2,False) # Earth orbit
self.context.stroke()
self.context.drawImage(self.sun.getElement(),0,0)
|
minghuascode/pyj
|
examples/addonsgallery/Canvas2DTab.py
|
Python
|
apache-2.0
| 9,256
|
from .gsm_action import GsmAction
class CallAction(GsmAction):
def __init__(self, app, id_, serial_url, number, seconds):
super().__init__(app, id_, serial_url)
self.number = number
self.seconds = seconds
async def run(self):
self.logger.info('make_call')
await self.gsm_modem.call(self.number, self.seconds)
|
insolite/alarme
|
alarme/extras/action/gsm/call_action.py
|
Python
|
mit
| 361
|
from time import time as timestamp
import hashlib
from api.web import APIHandler
from api.exceptions import APIException
from api.server import handle_api_url
from libs import config
from libs import db
@handle_api_url("test/create_anon_tuned_in/(\d+)")
class CreateAnonTunedIn(APIHandler):
description = "Creates a fake tune-in record for an anonymous user at 127.0.0.1."
local_only = True
sid_required = False
auth_required = False
allow_get = True
return_name = "create_anon_tuned_in_result"
def post(self, sid): #pylint: disable=W0221
if db.c.fetch_var("SELECT COUNT(*) FROM r4_listeners WHERE listener_ip = '127.0.0.1' AND user_id = 1") == 0:
db.c.update("INSERT INTO r4_listeners (listener_ip, user_id, sid, listener_icecast_id) VALUES ('127.0.0.1', 1, %s, 1)", (int(sid),))
self.append_standard("dev_anon_user_tunein_ok", "Anonymous user tune in 127.0.0.1 record completed.")
return
if db.c.fetch_var("SELECT COUNT(*) FROM r4_listeners WHERE listener_ip = '::1' AND user_id = 1") == 0:
db.c.update("INSERT INTO r4_listeners (listener_ip, user_id, sid, listener_icecast_id) VALUES ('::1', 1, %s, 1)", (int(sid),))
self.append_standard("dev_anon_user_tunein_ok", "Anonymous user tune in ::1 record completed.")
return
if db.c.fetch_var("SELECT COUNT(*) FROM r4_listeners WHERE listener_ip = 'localhost' AND user_id = 1") == 0:
db.c.update("INSERT INTO r4_listeners (listener_ip, user_id, sid, listener_icecast_id) VALUES ('localhost', 1, %s, 1)", (int(sid),))
self.append_standard("dev_anon_user_tunein_ok", "Anonymous user tune in localhost record completed.")
return
raise APIException(500, "internal_error", "Anonymous user tune in record already exists.")
class TestUserRequest(APIHandler):
description = "Login as a user."
local_only = True
sid_required = False
auth_required = False
allow_get = True
def post(self, sid): #pylint: disable=W0221
user_id = db.c.fetch_var("SELECT MAX(user_id) FROM phpbb_users")
if user_id and user_id < 2:
user_id = user_id + 1
db.c.update("INSERT INTO phpbb_users (username, user_id, group_id) VALUES ('Test" + str(user_id) + "', %s, 5)", (user_id,))
elif not user_id:
user_id = 2
db.c.update("INSERT INTO phpbb_users (username, user_id, group_id) VALUES ('Test" + str(user_id) + "', %s, 5)", (user_id,))
self.set_cookie(config.get("phpbb_cookie_name") + "_u", user_id)
session_id = db.c.fetch_var("SELECT session_id FROM phpbb_sessions WHERE session_user_id = %s", (user_id,))
if not session_id:
session_id = hashlib.md5(repr(timestamp())).hexdigest()
db.c.update("INSERT INTO phpbb_sessions (session_id, session_user_id) VALUES (%s, %s)", (session_id, user_id))
self.set_cookie(config.get("phpbb_cookie_name") + "_u", user_id)
self.set_cookie(config.get("phpbb_cookie_name") + "_sid", session_id)
self.execute(user_id, sid)
self.append_standard("dev_login_ok", "You are now user ID %s session ID %s" % (user_id, session_id))
def execute(self, user_id, sid):
pass
@handle_api_url("test/login_tuned_in/(\d+)")
class CreateLoginTunedIn(TestUserRequest):
description = "Creates or uses a user account with a tuned in record and sets the appropriate cookies so you're that user."
auth_required = False
sid_required = False
return_name = "login_tuned_in_result"
def execute(self, user_id, sid):
if db.c.fetch_var("SELECT COUNT(*) FROM r4_listeners WHERE user_id = %s", (user_id,)) == 0:
db.c.update("INSERT INTO r4_listeners (listener_ip, user_id, sid, listener_icecast_id) VALUES ('127.0.0.1', %s, %s, 1)", (user_id, sid))
@handle_api_url("test/login_tuned_out/(\d+)")
class CreateLoginTunedOut(TestUserRequest):
description = "Creates or uses a user account with no tuned in record sets the appropriate cookies so you're that user."
auth_required = False
return_name = "login_tuned_out_result"
def execute(self, user_id, sid):
if db.c.fetch_var("SELECT COUNT(*) FROM r4_listeners WHERE user_id = %s", (user_id,)) > 0:
db.c.update("DELETE FROM r4_listeners WHERE user_id = %s ", (user_id,))
|
williamjacksn/rainwave
|
api_requests/admin_web/developer.py
|
Python
|
gpl-2.0
| 4,050
|
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Tests for metadata service."""
import base64
import hashlib
import hmac
import re
try:
import cPickle as pickle
except ImportError:
import pickle
import mock
from oslo_config import cfg
from oslo_serialization import jsonutils
import webob
from nova.api.metadata import base
from nova.api.metadata import handler
from nova.api.metadata import password
from nova import block_device
from nova.compute import flavors
from nova.conductor import api as conductor_api
from nova import context
from nova import db
from nova.db.sqlalchemy import api
from nova import exception
from nova.network import api as network_api
from nova.network import model as network_model
from nova import objects
from nova import test
from nova.tests.unit.api.openstack import fakes
from nova.tests.unit import fake_block_device
from nova.tests.unit import fake_network
from nova.tests.unit.objects import test_security_group
from nova.virt import netutils
CONF = cfg.CONF
USER_DATA_STRING = ("This is an encoded string")
ENCODE_USER_DATA_STRING = base64.b64encode(USER_DATA_STRING)
def fake_inst_obj(context):
inst = objects.Instance(
context=context,
id=1,
user_id='fake_user',
uuid='b65cee2f-8c69-4aeb-be2f-f79742548fc2',
project_id='test',
key_name="key",
key_data="ssh-rsa AAAAB3Nzai....N3NtHw== someuser@somehost",
host='test',
launch_index=1,
reservation_id='r-xxxxxxxx',
user_data=ENCODE_USER_DATA_STRING,
image_ref=7,
kernel_id=None,
ramdisk_id=None,
vcpus=1,
fixed_ips=[],
root_device_name='/dev/sda1',
hostname='test.novadomain',
display_name='my_displayname',
metadata={},
default_ephemeral_device=None,
default_swap_device=None,
system_metadata={})
nwinfo = network_model.NetworkInfo([])
inst.info_cache = objects.InstanceInfoCache(context=context,
instance_uuid=inst.uuid,
network_info=nwinfo)
with mock.patch.object(inst, 'save'):
inst.set_flavor(flavors.get_default_flavor())
return inst
def return_non_existing_address(*args, **kwarg):
raise exception.NotFound()
def fake_InstanceMetadata(stubs, inst_data, address=None,
sgroups=None, content=None, extra_md=None,
vd_driver=None, network_info=None):
content = content or []
extra_md = extra_md or {}
if sgroups is None:
sgroups = [dict(test_security_group.fake_secgroup,
name='default')]
def sg_get(*args, **kwargs):
return sgroups
stubs.Set(api, 'security_group_get_by_instance', sg_get)
return base.InstanceMetadata(inst_data, address=address,
content=content, extra_md=extra_md,
vd_driver=vd_driver, network_info=network_info)
def fake_request(stubs, mdinst, relpath, address="127.0.0.1",
fake_get_metadata=None, headers=None,
fake_get_metadata_by_instance_id=None, app=None):
def get_metadata_by_remote_address(address):
return mdinst
if app is None:
app = handler.MetadataRequestHandler()
if fake_get_metadata is None:
fake_get_metadata = get_metadata_by_remote_address
if stubs:
stubs.Set(app, 'get_metadata_by_remote_address', fake_get_metadata)
if fake_get_metadata_by_instance_id:
stubs.Set(app, 'get_metadata_by_instance_id',
fake_get_metadata_by_instance_id)
request = webob.Request.blank(relpath)
request.remote_addr = address
if headers is not None:
request.headers.update(headers)
response = request.get_response(app)
return response
class MetadataTestCase(test.TestCase):
def setUp(self):
super(MetadataTestCase, self).setUp()
self.context = context.RequestContext('fake', 'fake')
self.instance = fake_inst_obj(self.context)
self.flags(use_local=True, group='conductor')
fake_network.stub_out_nw_api_get_instance_nw_info(self.stubs)
def test_can_pickle_metadata(self):
# Make sure that InstanceMetadata is possible to pickle. This is
# required for memcache backend to work correctly.
md = fake_InstanceMetadata(self.stubs, self.instance.obj_clone())
pickle.dumps(md, protocol=0)
def test_user_data(self):
inst = self.instance.obj_clone()
inst['user_data'] = base64.b64encode("happy")
md = fake_InstanceMetadata(self.stubs, inst)
self.assertEqual(
md.get_ec2_metadata(version='2009-04-04')['user-data'], "happy")
def test_no_user_data(self):
inst = self.instance.obj_clone()
inst.user_data = None
md = fake_InstanceMetadata(self.stubs, inst)
obj = object()
self.assertEqual(
md.get_ec2_metadata(version='2009-04-04').get('user-data', obj),
obj)
def test_security_groups(self):
inst = self.instance.obj_clone()
sgroups = [dict(test_security_group.fake_secgroup, name='default'),
dict(test_security_group.fake_secgroup, name='other')]
expected = ['default', 'other']
md = fake_InstanceMetadata(self.stubs, inst, sgroups=sgroups)
data = md.get_ec2_metadata(version='2009-04-04')
self.assertEqual(data['meta-data']['security-groups'], expected)
def test_local_hostname_fqdn(self):
md = fake_InstanceMetadata(self.stubs, self.instance.obj_clone())
data = md.get_ec2_metadata(version='2009-04-04')
self.assertEqual(data['meta-data']['local-hostname'],
"%s.%s" % (self.instance['hostname'], CONF.dhcp_domain))
def test_format_instance_mapping(self):
# Make sure that _format_instance_mappings works.
ctxt = None
instance_ref0 = objects.Instance(**{'id': 0,
'uuid': 'e5fe5518-0288-4fa3-b0c4-c79764101b85',
'root_device_name': None,
'default_ephemeral_device': None,
'default_swap_device': None})
instance_ref1 = objects.Instance(**{'id': 0,
'uuid': 'b65cee2f-8c69-4aeb-be2f-f79742548fc2',
'root_device_name': '/dev/sda1',
'default_ephemeral_device': None,
'default_swap_device': None})
def fake_bdm_get(ctxt, uuid, use_slave=False):
return [fake_block_device.FakeDbBlockDeviceDict(
{'volume_id': 87654321,
'snapshot_id': None,
'no_device': None,
'source_type': 'volume',
'destination_type': 'volume',
'delete_on_termination': True,
'device_name': '/dev/sdh'}),
fake_block_device.FakeDbBlockDeviceDict(
{'volume_id': None,
'snapshot_id': None,
'no_device': None,
'source_type': 'blank',
'destination_type': 'local',
'guest_format': 'swap',
'delete_on_termination': None,
'device_name': '/dev/sdc'}),
fake_block_device.FakeDbBlockDeviceDict(
{'volume_id': None,
'snapshot_id': None,
'no_device': None,
'source_type': 'blank',
'destination_type': 'local',
'guest_format': None,
'delete_on_termination': None,
'device_name': '/dev/sdb'})]
self.stubs.Set(db, 'block_device_mapping_get_all_by_instance',
fake_bdm_get)
expected = {'ami': 'sda1',
'root': '/dev/sda1',
'ephemeral0': '/dev/sdb',
'swap': '/dev/sdc',
'ebs0': '/dev/sdh'}
conductor_api.LocalAPI()
self.assertEqual(base._format_instance_mapping(ctxt,
instance_ref0), block_device._DEFAULT_MAPPINGS)
self.assertEqual(base._format_instance_mapping(ctxt,
instance_ref1), expected)
def test_pubkey(self):
md = fake_InstanceMetadata(self.stubs, self.instance.obj_clone())
pubkey_ent = md.lookup("/2009-04-04/meta-data/public-keys")
self.assertEqual(base.ec2_md_print(pubkey_ent),
"0=%s" % self.instance['key_name'])
self.assertEqual(base.ec2_md_print(pubkey_ent['0']['openssh-key']),
self.instance['key_data'])
def test_image_type_ramdisk(self):
inst = self.instance.obj_clone()
inst['ramdisk_id'] = 'ari-853667c0'
md = fake_InstanceMetadata(self.stubs, inst)
data = md.lookup("/latest/meta-data/ramdisk-id")
self.assertIsNotNone(data)
self.assertTrue(re.match('ari-[0-9a-f]{8}', data))
def test_image_type_kernel(self):
inst = self.instance.obj_clone()
inst['kernel_id'] = 'aki-c2e26ff2'
md = fake_InstanceMetadata(self.stubs, inst)
data = md.lookup("/2009-04-04/meta-data/kernel-id")
self.assertTrue(re.match('aki-[0-9a-f]{8}', data))
self.assertEqual(
md.lookup("/ec2/2009-04-04/meta-data/kernel-id"), data)
def test_image_type_no_kernel_raises(self):
inst = self.instance.obj_clone()
md = fake_InstanceMetadata(self.stubs, inst)
self.assertRaises(base.InvalidMetadataPath,
md.lookup, "/2009-04-04/meta-data/kernel-id")
def test_check_version(self):
inst = self.instance.obj_clone()
md = fake_InstanceMetadata(self.stubs, inst)
self.assertTrue(md._check_version('1.0', '2009-04-04'))
self.assertFalse(md._check_version('2009-04-04', '1.0'))
self.assertFalse(md._check_version('2009-04-04', '2008-09-01'))
self.assertTrue(md._check_version('2008-09-01', '2009-04-04'))
self.assertTrue(md._check_version('2009-04-04', '2009-04-04'))
def test_InstanceMetadata_uses_passed_network_info(self):
network_info = []
self.mox.StubOutWithMock(netutils, "get_injected_network_template")
netutils.get_injected_network_template(network_info).AndReturn(False)
self.mox.ReplayAll()
base.InstanceMetadata(fake_inst_obj(self.context),
network_info=network_info)
def test_InstanceMetadata_invoke_metadata_for_config_drive(self):
fakes.stub_out_key_pair_funcs(self.stubs)
inst = self.instance.obj_clone()
inst_md = base.InstanceMetadata(inst)
for (path, value) in inst_md.metadata_for_config_drive():
self.assertIsNotNone(path)
def test_InstanceMetadata_queries_network_API_when_needed(self):
network_info_from_api = []
self.mox.StubOutWithMock(netutils, "get_injected_network_template")
netutils.get_injected_network_template(
network_info_from_api).AndReturn(False)
self.mox.ReplayAll()
base.InstanceMetadata(fake_inst_obj(self.context))
def test_local_ipv4(self):
nw_info = fake_network.fake_get_instance_nw_info(self.stubs,
num_networks=2)
expected_local = "192.168.1.100"
md = fake_InstanceMetadata(self.stubs, self.instance,
network_info=nw_info, address="fake")
data = md.get_ec2_metadata(version='2009-04-04')
self.assertEqual(expected_local, data['meta-data']['local-ipv4'])
def test_local_ipv4_from_nw_info(self):
nw_info = fake_network.fake_get_instance_nw_info(self.stubs,
num_networks=2)
expected_local = "192.168.1.100"
md = fake_InstanceMetadata(self.stubs, self.instance,
network_info=nw_info)
data = md.get_ec2_metadata(version='2009-04-04')
self.assertEqual(data['meta-data']['local-ipv4'], expected_local)
def test_local_ipv4_from_address(self):
expected_local = "fake"
md = fake_InstanceMetadata(self.stubs, self.instance,
network_info=[], address="fake")
data = md.get_ec2_metadata(version='2009-04-04')
self.assertEqual(data['meta-data']['local-ipv4'], expected_local)
class OpenStackMetadataTestCase(test.TestCase):
def setUp(self):
super(OpenStackMetadataTestCase, self).setUp()
self.context = context.RequestContext('fake', 'fake')
self.instance = fake_inst_obj(self.context)
self.flags(use_local=True, group='conductor')
fake_network.stub_out_nw_api_get_instance_nw_info(self.stubs)
def test_top_level_listing(self):
        # request for /openstack should show the available versions
inst = self.instance.obj_clone()
mdinst = fake_InstanceMetadata(self.stubs, inst)
result = mdinst.lookup("/openstack")
# trailing / should not affect anything
self.assertEqual(result, mdinst.lookup("/openstack/"))
# the 'content' should not show up in directory listing
self.assertNotIn(base.CONTENT_DIR, result)
self.assertIn('2012-08-10', result)
self.assertIn('latest', result)
def test_version_content_listing(self):
# request for /openstack/<version>/ should show metadata.json
inst = self.instance.obj_clone()
mdinst = fake_InstanceMetadata(self.stubs, inst)
listing = mdinst.lookup("/openstack/2012-08-10")
self.assertIn("meta_data.json", listing)
def test_returns_apis_supported_in_havana_version(self):
mdinst = fake_InstanceMetadata(self.stubs, self.instance)
havana_supported_apis = mdinst.lookup("/openstack/2013-10-17")
self.assertEqual([base.MD_JSON_NAME, base.UD_NAME, base.PASS_NAME,
base.VD_JSON_NAME], havana_supported_apis)
def test_returns_apis_supported_in_folsom_version(self):
mdinst = fake_InstanceMetadata(self.stubs, self.instance)
folsom_supported_apis = mdinst.lookup("/openstack/2012-08-10")
self.assertEqual([base.MD_JSON_NAME, base.UD_NAME],
folsom_supported_apis)
def test_returns_apis_supported_in_grizzly_version(self):
mdinst = fake_InstanceMetadata(self.stubs, self.instance)
grizzly_supported_apis = mdinst.lookup("/openstack/2013-04-04")
self.assertEqual([base.MD_JSON_NAME, base.UD_NAME, base.PASS_NAME],
grizzly_supported_apis)
def test_metadata_json(self):
fakes.stub_out_key_pair_funcs(self.stubs)
inst = self.instance.obj_clone()
content = [
('/etc/my.conf', "content of my.conf"),
('/root/hello', "content of /root/hello"),
]
mdinst = fake_InstanceMetadata(self.stubs, inst,
content=content)
mdjson = mdinst.lookup("/openstack/2012-08-10/meta_data.json")
mdjson = mdinst.lookup("/openstack/latest/meta_data.json")
mddict = jsonutils.loads(mdjson)
self.assertEqual(mddict['uuid'], self.instance['uuid'])
self.assertIn('files', mddict)
self.assertIn('public_keys', mddict)
self.assertEqual(mddict['public_keys'][self.instance['key_name']],
self.instance['key_data'])
self.assertIn('launch_index', mddict)
self.assertEqual(mddict['launch_index'], self.instance['launch_index'])
# verify that each of the things we put in content
# resulted in an entry in 'files', that their content
# there is as expected, and that /content lists them.
for (path, content) in content:
fent = [f for f in mddict['files'] if f['path'] == path]
self.assertEqual(1, len(fent))
fent = fent[0]
found = mdinst.lookup("/openstack%s" % fent['content_path'])
self.assertEqual(found, content)
def test_x509_keypair(self):
        # check that the x509 content is set when the keypair type is x509.
fakes.stub_out_key_pair_funcs(self.stubs, type='x509')
inst = self.instance.obj_clone()
mdinst = fake_InstanceMetadata(self.stubs, inst)
mdjson = mdinst.lookup("/openstack/2012-08-10/meta_data.json")
mddict = jsonutils.loads(mdjson)
        # keypair is stubbed out, so its public_key is 'public_key'.
expected = {'name': self.instance['key_name'],
'type': 'x509',
'data': 'public_key'}
self.assertEqual([expected], mddict['keys'])
def test_extra_md(self):
# make sure extra_md makes it through to metadata
fakes.stub_out_key_pair_funcs(self.stubs)
inst = self.instance.obj_clone()
extra = {'foo': 'bar', 'mylist': [1, 2, 3],
'mydict': {"one": 1, "two": 2}}
mdinst = fake_InstanceMetadata(self.stubs, inst, extra_md=extra)
mdjson = mdinst.lookup("/openstack/2012-08-10/meta_data.json")
mddict = jsonutils.loads(mdjson)
for key, val in extra.iteritems():
self.assertEqual(mddict[key], val)
def test_password(self):
        # make sure the password path resolves to the password handler
inst = self.instance.obj_clone()
mdinst = fake_InstanceMetadata(self.stubs, inst)
result = mdinst.lookup("/openstack/latest/password")
self.assertEqual(result, password.handle_password)
def test_userdata(self):
inst = self.instance.obj_clone()
mdinst = fake_InstanceMetadata(self.stubs, inst)
userdata_found = mdinst.lookup("/openstack/2012-08-10/user_data")
self.assertEqual(USER_DATA_STRING, userdata_found)
# since we had user-data in this instance, it should be in listing
self.assertIn('user_data', mdinst.lookup("/openstack/2012-08-10"))
inst.user_data = None
mdinst = fake_InstanceMetadata(self.stubs, inst)
# since this instance had no user-data it should not be there.
self.assertNotIn('user_data', mdinst.lookup("/openstack/2012-08-10"))
self.assertRaises(base.InvalidMetadataPath,
mdinst.lookup, "/openstack/2012-08-10/user_data")
def test_random_seed(self):
fakes.stub_out_key_pair_funcs(self.stubs)
inst = self.instance.obj_clone()
mdinst = fake_InstanceMetadata(self.stubs, inst)
# verify that 2013-04-04 has the 'random' field
mdjson = mdinst.lookup("/openstack/2013-04-04/meta_data.json")
mddict = jsonutils.loads(mdjson)
self.assertIn("random_seed", mddict)
self.assertEqual(len(base64.b64decode(mddict["random_seed"])), 512)
        # verify that older versions do not have it
mdjson = mdinst.lookup("/openstack/2012-08-10/meta_data.json")
self.assertNotIn("random_seed", jsonutils.loads(mdjson))
def test_no_dashes_in_metadata(self):
# top level entries in meta_data should not contain '-' in their name
fakes.stub_out_key_pair_funcs(self.stubs)
inst = self.instance.obj_clone()
mdinst = fake_InstanceMetadata(self.stubs, inst)
mdjson = jsonutils.loads(
mdinst.lookup("/openstack/latest/meta_data.json"))
self.assertEqual([], [k for k in mdjson.keys() if k.find("-") != -1])
def test_vendor_data_presence(self):
inst = self.instance.obj_clone()
mdinst = fake_InstanceMetadata(self.stubs, inst)
# verify that 2013-10-17 has the vendor_data.json file
result = mdinst.lookup("/openstack/2013-10-17")
self.assertIn('vendor_data.json', result)
        # verify that older versions do not have it
result = mdinst.lookup("/openstack/2013-04-04")
self.assertNotIn('vendor_data.json', result)
def test_vendor_data_response(self):
inst = self.instance.obj_clone()
mydata = {'mykey1': 'value1', 'mykey2': 'value2'}
class myVdriver(base.VendorDataDriver):
def __init__(self, *args, **kwargs):
super(myVdriver, self).__init__(*args, **kwargs)
data = mydata.copy()
uuid = kwargs['instance']['uuid']
data.update({'inst_uuid': uuid})
self.data = data
def get(self):
return self.data
mdinst = fake_InstanceMetadata(self.stubs, inst, vd_driver=myVdriver)
# verify that 2013-10-17 has the vendor_data.json file
vdpath = "/openstack/2013-10-17/vendor_data.json"
vd = jsonutils.loads(mdinst.lookup(vdpath))
# the instance should be passed through, and our class copies the
# uuid through to 'inst_uuid'.
self.assertEqual(vd['inst_uuid'], inst['uuid'])
# check the other expected values
for k, v in mydata.items():
self.assertEqual(vd[k], v)
class MetadataHandlerTestCase(test.TestCase):
"""Test that metadata is returning proper values."""
def setUp(self):
super(MetadataHandlerTestCase, self).setUp()
fake_network.stub_out_nw_api_get_instance_nw_info(self.stubs)
self.context = context.RequestContext('fake', 'fake')
self.instance = fake_inst_obj(self.context)
self.flags(use_local=True, group='conductor')
self.mdinst = fake_InstanceMetadata(self.stubs, self.instance,
address=None, sgroups=None)
def test_callable(self):
def verify(req, meta_data):
self.assertIsInstance(meta_data, CallableMD)
return "foo"
class CallableMD(object):
def lookup(self, path_info):
return verify
response = fake_request(self.stubs, CallableMD(), "/bar")
self.assertEqual(response.status_int, 200)
self.assertEqual(response.body, "foo")
def test_root(self):
expected = "\n".join(base.VERSIONS) + "\nlatest"
response = fake_request(self.stubs, self.mdinst, "/")
self.assertEqual(response.body, expected)
response = fake_request(self.stubs, self.mdinst, "/foo/../")
self.assertEqual(response.body, expected)
def test_root_metadata_proxy_enabled(self):
self.flags(service_metadata_proxy=True,
group='neutron')
expected = "\n".join(base.VERSIONS) + "\nlatest"
response = fake_request(self.stubs, self.mdinst, "/")
self.assertEqual(response.body, expected)
response = fake_request(self.stubs, self.mdinst, "/foo/../")
self.assertEqual(response.body, expected)
def test_version_root(self):
response = fake_request(self.stubs, self.mdinst, "/2009-04-04")
response_ctype = response.headers['Content-Type']
self.assertTrue(response_ctype.startswith("text/plain"))
self.assertEqual(response.body, 'meta-data/\nuser-data')
response = fake_request(self.stubs, self.mdinst, "/9999-99-99")
self.assertEqual(response.status_int, 404)
def test_json_data(self):
fakes.stub_out_key_pair_funcs(self.stubs)
response = fake_request(self.stubs, self.mdinst,
"/openstack/latest/meta_data.json")
response_ctype = response.headers['Content-Type']
self.assertTrue(response_ctype.startswith("application/json"))
response = fake_request(self.stubs, self.mdinst,
"/openstack/latest/vendor_data.json")
response_ctype = response.headers['Content-Type']
self.assertTrue(response_ctype.startswith("application/json"))
def test_user_data_non_existing_fixed_address(self):
self.stubs.Set(network_api.API, 'get_fixed_ip_by_address',
return_non_existing_address)
response = fake_request(None, self.mdinst, "/2009-04-04/user-data",
"127.1.1.1")
self.assertEqual(response.status_int, 404)
def test_fixed_address_none(self):
response = fake_request(None, self.mdinst,
relpath="/2009-04-04/user-data", address=None)
self.assertEqual(response.status_int, 500)
def test_invalid_path_is_404(self):
response = fake_request(self.stubs, self.mdinst,
relpath="/2009-04-04/user-data-invalid")
self.assertEqual(response.status_int, 404)
def test_user_data_with_use_forwarded_header(self):
expected_addr = "192.192.192.2"
def fake_get_metadata(address):
if address == expected_addr:
return self.mdinst
else:
raise Exception("Expected addr of %s, got %s" %
(expected_addr, address))
self.flags(use_forwarded_for=True)
response = fake_request(self.stubs, self.mdinst,
relpath="/2009-04-04/user-data",
address="168.168.168.1",
fake_get_metadata=fake_get_metadata,
headers={'X-Forwarded-For': expected_addr})
self.assertEqual(response.status_int, 200)
response_ctype = response.headers['Content-Type']
self.assertTrue(response_ctype.startswith("text/plain"))
self.assertEqual(response.body,
base64.b64decode(self.instance['user_data']))
response = fake_request(self.stubs, self.mdinst,
relpath="/2009-04-04/user-data",
address="168.168.168.1",
fake_get_metadata=fake_get_metadata,
headers=None)
self.assertEqual(response.status_int, 500)
@mock.patch('nova.utils.constant_time_compare')
def test_by_instance_id_uses_constant_time_compare(self, mock_compare):
mock_compare.side_effect = test.TestingException
req = webob.Request.blank('/')
hnd = handler.MetadataRequestHandler()
req.headers['X-Instance-ID'] = 'fake-inst'
req.headers['X-Instance-ID-Signature'] = 'fake-sig'
req.headers['X-Tenant-ID'] = 'fake-proj'
self.assertRaises(test.TestingException,
hnd._handle_instance_id_request, req)
self.assertEqual(1, mock_compare.call_count)
def test_user_data_with_neutron_instance_id(self):
expected_instance_id = 'a-b-c-d'
def fake_get_metadata(instance_id, remote_address):
if remote_address is None:
                raise Exception('Expected X-Forwarded-For header')
elif instance_id == expected_instance_id:
return self.mdinst
else:
# raise the exception to aid with 500 response code test
raise Exception("Expected instance_id of %s, got %s" %
(expected_instance_id, instance_id))
signed = hmac.new(
CONF.neutron.metadata_proxy_shared_secret,
expected_instance_id,
hashlib.sha256).hexdigest()
# try a request with service disabled
response = fake_request(
self.stubs, self.mdinst,
relpath="/2009-04-04/user-data",
address="192.192.192.2",
headers={'X-Instance-ID': 'a-b-c-d',
'X-Tenant-ID': 'test',
'X-Instance-ID-Signature': signed})
self.assertEqual(response.status_int, 200)
# now enable the service
self.flags(service_metadata_proxy=True,
group='neutron')
response = fake_request(
self.stubs, self.mdinst,
relpath="/2009-04-04/user-data",
address="192.192.192.2",
fake_get_metadata_by_instance_id=fake_get_metadata,
headers={'X-Forwarded-For': '192.192.192.2',
'X-Instance-ID': 'a-b-c-d',
'X-Tenant-ID': 'test',
'X-Instance-ID-Signature': signed})
self.assertEqual(response.status_int, 200)
response_ctype = response.headers['Content-Type']
self.assertTrue(response_ctype.startswith("text/plain"))
self.assertEqual(response.body,
base64.b64decode(self.instance['user_data']))
# mismatched signature
response = fake_request(
self.stubs, self.mdinst,
relpath="/2009-04-04/user-data",
address="192.192.192.2",
fake_get_metadata_by_instance_id=fake_get_metadata,
headers={'X-Forwarded-For': '192.192.192.2',
'X-Instance-ID': 'a-b-c-d',
'X-Tenant-ID': 'test',
'X-Instance-ID-Signature': ''})
self.assertEqual(response.status_int, 403)
# missing X-Tenant-ID from request
response = fake_request(
self.stubs, self.mdinst,
relpath="/2009-04-04/user-data",
address="192.192.192.2",
fake_get_metadata_by_instance_id=fake_get_metadata,
headers={'X-Forwarded-For': '192.192.192.2',
'X-Instance-ID': 'a-b-c-d',
'X-Instance-ID-Signature': signed})
self.assertEqual(response.status_int, 400)
# mismatched X-Tenant-ID
response = fake_request(
self.stubs, self.mdinst,
relpath="/2009-04-04/user-data",
address="192.192.192.2",
fake_get_metadata_by_instance_id=fake_get_metadata,
headers={'X-Forwarded-For': '192.192.192.2',
'X-Instance-ID': 'a-b-c-d',
'X-Tenant-ID': 'FAKE',
'X-Instance-ID-Signature': signed})
self.assertEqual(response.status_int, 404)
# without X-Forwarded-For
response = fake_request(
self.stubs, self.mdinst,
relpath="/2009-04-04/user-data",
address="192.192.192.2",
fake_get_metadata_by_instance_id=fake_get_metadata,
headers={'X-Instance-ID': 'a-b-c-d',
'X-Tenant-ID': 'test',
'X-Instance-ID-Signature': signed})
self.assertEqual(response.status_int, 500)
# unexpected Instance-ID
signed = hmac.new(
CONF.neutron.metadata_proxy_shared_secret,
'z-z-z-z',
hashlib.sha256).hexdigest()
response = fake_request(
self.stubs, self.mdinst,
relpath="/2009-04-04/user-data",
address="192.192.192.2",
fake_get_metadata_by_instance_id=fake_get_metadata,
headers={'X-Forwarded-For': '192.192.192.2',
'X-Instance-ID': 'z-z-z-z',
'X-Tenant-ID': 'test',
'X-Instance-ID-Signature': signed})
self.assertEqual(response.status_int, 500)
def test_get_metadata(self):
def _test_metadata_path(relpath):
            # recursively confirm an HTTP 200 from all meta-data elements
# available at relpath.
response = fake_request(self.stubs, self.mdinst,
relpath=relpath)
for item in response.body.split('\n'):
if 'public-keys' in relpath:
# meta-data/public-keys/0=keyname refers to
# meta-data/public-keys/0
item = item.split('=')[0]
if item.endswith('/'):
path = relpath + '/' + item
_test_metadata_path(path)
continue
path = relpath + '/' + item
response = fake_request(self.stubs, self.mdinst, relpath=path)
self.assertEqual(response.status_int, 200, message=path)
_test_metadata_path('/2009-04-04/meta-data')
def _metadata_handler_with_instance_id(self, hnd):
expected_instance_id = 'a-b-c-d'
signed = hmac.new(
CONF.neutron.metadata_proxy_shared_secret,
expected_instance_id,
hashlib.sha256).hexdigest()
self.flags(service_metadata_proxy=True, group='neutron')
response = fake_request(
None, self.mdinst,
relpath="/2009-04-04/user-data",
address="192.192.192.2",
fake_get_metadata=False,
app=hnd,
headers={'X-Forwarded-For': '192.192.192.2',
'X-Instance-ID': 'a-b-c-d',
'X-Tenant-ID': 'test',
'X-Instance-ID-Signature': signed})
self.assertEqual(200, response.status_int)
self.assertEqual(base64.b64decode(self.instance['user_data']),
response.body)
@mock.patch.object(base, 'get_metadata_by_instance_id')
def test_metadata_handler_with_instance_id(self, get_by_uuid):
# test twice to ensure that the cache works
get_by_uuid.return_value = self.mdinst
self.flags(metadata_cache_expiration=15)
hnd = handler.MetadataRequestHandler()
self._metadata_handler_with_instance_id(hnd)
self._metadata_handler_with_instance_id(hnd)
self.assertEqual(1, get_by_uuid.call_count)
@mock.patch.object(base, 'get_metadata_by_instance_id')
def test_metadata_handler_with_instance_id_no_cache(self, get_by_uuid):
# test twice to ensure that disabling the cache works
get_by_uuid.return_value = self.mdinst
self.flags(metadata_cache_expiration=0)
hnd = handler.MetadataRequestHandler()
self._metadata_handler_with_instance_id(hnd)
self._metadata_handler_with_instance_id(hnd)
self.assertEqual(2, get_by_uuid.call_count)
def _metadata_handler_with_remote_address(self, hnd):
response = fake_request(
None, self.mdinst,
fake_get_metadata=False,
app=hnd,
relpath="/2009-04-04/user-data",
address="192.192.192.2")
self.assertEqual(200, response.status_int)
self.assertEqual(base64.b64decode(self.instance.user_data),
response.body)
@mock.patch.object(base, 'get_metadata_by_address')
def test_metadata_handler_with_remote_address(self, get_by_uuid):
# test twice to ensure that the cache works
get_by_uuid.return_value = self.mdinst
self.flags(metadata_cache_expiration=15)
hnd = handler.MetadataRequestHandler()
self._metadata_handler_with_remote_address(hnd)
self._metadata_handler_with_remote_address(hnd)
self.assertEqual(1, get_by_uuid.call_count)
@mock.patch.object(base, 'get_metadata_by_address')
def test_metadata_handler_with_remote_address_no_cache(self, get_by_uuid):
# test twice to ensure that disabling the cache works
get_by_uuid.return_value = self.mdinst
self.flags(metadata_cache_expiration=0)
hnd = handler.MetadataRequestHandler()
self._metadata_handler_with_remote_address(hnd)
self._metadata_handler_with_remote_address(hnd)
self.assertEqual(2, get_by_uuid.call_count)
class MetadataPasswordTestCase(test.TestCase):
def setUp(self):
super(MetadataPasswordTestCase, self).setUp()
fake_network.stub_out_nw_api_get_instance_nw_info(self.stubs)
self.context = context.RequestContext('fake', 'fake')
self.instance = fake_inst_obj(self.context)
self.flags(use_local=True, group='conductor')
self.mdinst = fake_InstanceMetadata(self.stubs, self.instance,
address=None, sgroups=None)
def test_get_password(self):
request = webob.Request.blank('')
self.mdinst.password = 'foo'
result = password.handle_password(request, self.mdinst)
self.assertEqual(result, 'foo')
def test_bad_method(self):
request = webob.Request.blank('')
request.method = 'PUT'
self.assertRaises(webob.exc.HTTPBadRequest,
password.handle_password, request, self.mdinst)
@mock.patch('nova.objects.Instance.get_by_uuid')
def _try_set_password(self, get_by_uuid, val='bar'):
request = webob.Request.blank('')
request.method = 'POST'
request.body = val
get_by_uuid.return_value = self.instance
with mock.patch.object(self.instance, 'save') as save:
password.handle_password(request, self.mdinst)
save.assert_called_once_with()
self.assertIn('password_0', self.instance.system_metadata)
def test_set_password(self):
self.mdinst.password = ''
self._try_set_password()
def test_conflict(self):
self.mdinst.password = 'foo'
self.assertRaises(webob.exc.HTTPConflict,
self._try_set_password)
def test_too_large(self):
self.mdinst.password = ''
self.assertRaises(webob.exc.HTTPBadRequest,
self._try_set_password,
val=('a' * (password.MAX_SIZE + 1)))
|
bgxavier/nova
|
nova/tests/unit/test_metadata.py
|
Python
|
apache-2.0
| 38,119
|
from capstone.game.games import TicTacToe
from capstone.game.players import RandPlayer
from capstone.game.utils import play_series
game = TicTacToe()
players = [RandPlayer(), RandPlayer()]
play_series(game, players)
|
davidrobles/mlnd-capstone-code
|
experiments/play_tic_tac_toe_series.py
|
Python
|
mit
| 217
|
"""add friendships table
Revision ID: 553bdd8a749f
Revises: 553bdd8a749e
Create Date: 2018-04-19 14:24:33.050913
"""
# revision identifiers, used by Alembic.
revision = '553bdd8a749f'
down_revision = '553bdd8a749e'
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
utc_now = sa.text("(now() at time zone 'utc')")
def upgrade(engine_name):
print "Upgrading {}".format(engine_name)
# your upgrade script goes here
op.create_index('ix_ck_clients_ip_address', 'ck_clients', ['ip_address'])
op.create_index('ix_ck_clients_user_id', 'ck_clients', ['user_id'])
op.create_index('ix_ck_clients_player_id', 'ck_clients', ['player_id'])
op.create_index('ix_ck_clients_build', 'ck_clients', ['build'])
op.create_index('ix_ck_clients_identity_id', 'ck_clients', ['identity_id'])
def downgrade(engine_name):
print "Downgrading {}".format(engine_name)
op.drop_index('ix_ck_clients_ip_address')
op.drop_index('ix_ck_clients_user_id')
op.drop_index('ix_ck_clients_player_id')
op.drop_index('ix_ck_clients_build')
op.drop_index('ix_ck_clients_identity_id')
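# A usage sketch (hedged; the exact invocation depends on this project's
# alembic.ini and the multi-database setup implied by engine_name):
#   alembic upgrade 553bdd8a749f
#   alembic downgrade 553bdd8a749e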
|
dgnorth/drift-base
|
alembic/versions/553bdd8a749f_add_clients_indices.py
|
Python
|
mit
| 1,137
|
# -*- coding: utf-8 -*-
import os
import struct
from .base import FirmwareObject, BaseObject, StructuredObject
from .utils import *
from .structs.flash_structs import *
class RegionSection(StructuredObject):
size = 20
def __init__(self, data):
self.parse_structure(data, FlashRegionSectionType)
class MasterSection(StructuredObject):
size = 12
def __init__(self, data):
self.parse_structure(data, FlashMasterSectionType)
class DescriptorMap(StructuredObject):
size = 16
def __init__(self, data):
self.parse_structure(data, FlashDescriptorMapType)
class FlashRegion(FirmwareObject, BaseObject):
def __init__(self, data, region_name, region_details):
self.sections = []
self.data = data
self.attrs = region_details
self.name = region_name
@property
def objects(self):
return self.sections
def process(self):
from .uefi import FirmwareVolume
if self.name == "bios":
data = self.data
while True:
volume_index = search_firmware_volumes(data, limit=1)
if len(volume_index) == 0:
break
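                # search_firmware_volumes returns the offset of the _FVH
                # signature; the volume header starts 40 bytes earlier
                # (16-byte zero vector + 16-byte filesystem GUID +
                # 8-byte length field)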
fv = FirmwareVolume(data[volume_index[0] - 40:])
if fv.valid_header:
self.sections.append(fv)
data = data[volume_index[0] - 40 + fv.size:]
else:
data = data[volume_index[0] + 8:]
if self.name == "me":
pass
for section in self.sections:
section.process()
return True
def showinfo(self, ts='', index=None):
print "%s%s type= %s, size= 0x%x (%d bytes) details[ %s ]" % (
ts, blue("Flash Region"), green(self.name),
len(self.data), len(self.data),
", ".join(["%s: %s" % (k, v) for k, v in self.attrs.iteritems()])
)
for section in self.sections:
section.showinfo(ts="%s " % ts)
pass
def dump(self, parent=""):
dump_data(os.path.join(parent, "region-%s.fd" % self.name), self.data)
parent = os.path.join(parent, "region-%s" % self.name)
for section in self.sections:
section.dump(parent)
pass
pass
class FlashDescriptor(FirmwareObject):
def __init__(self, data):
self.valid_header = False
if len(data) < 20:
return
self.padding, self.header = struct.unpack("<16s4s", data[:16 + 4])
if self.header != FLASH_HEADER:
return
self.valid_header = True
self.regions = []
self.data = data
@property
def objects(self):
return self.regions
def process(self):
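        # note: region base/limit values below are in 4 KiB (0x1000)
        # blocks, while the map's RegionBase/MasterBase offsets are in
        # 16-byte units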
def _region_size(base, limit):
if limit:
return (limit + 1 - base) * 0x1000
return 0
def _region_offset(base):
return base * 0x1000
self.map = DescriptorMap(self.data[20:20 + DescriptorMap.size])
region_offset = (self.map.structure.RegionBase * 0x10)
self.region = RegionSection(
self.data[region_offset:region_offset + RegionSection.size])
master_offset = (self.map.structure.MasterBase * 0x10)
self.master = MasterSection(
self.data[master_offset:master_offset + MasterSection.size])
bios_base = self.region.structure.BiosBase
bios_limit = self.region.structure.BiosLimit
bios_size = _region_offset(
bios_base) + _region_size(bios_base, bios_limit)
bios = self.data[_region_offset(bios_base): bios_size]
bios_region = FlashRegion(bios, "bios", {
"base": bios_base,
"limit": bios_limit,
"id": self.master.structure.BiosId,
"read": self.master.structure.BiosRead,
"write": self.master.structure.BiosWrite
})
bios_region.process()
self.regions.append(bios_region)
me_base = self.region.structure.MeBase
me_limit = self.region.structure.MeLimit
me_size = _region_offset(me_base) + _region_size(me_base, me_limit)
me = self.data[_region_offset(me_base): me_size]
me_region = FlashRegion(me, "me", {
"base": me_base,
"limit": me_limit,
"id": self.master.structure.MeId,
"read": self.master.structure.MeRead,
"write": self.master.structure.MeWrite
})
me_region.process()
self.regions.append(me_region)
gbe_base = self.region.structure.GbeBase
gbe_limit = self.region.structure.GbeLimit
gbe_size = _region_offset(gbe_base) + _region_size(gbe_base, gbe_limit)
gbe = self.data[_region_offset(gbe_base): gbe_size]
gbe_region = FlashRegion(gbe, "gbe", {
"base": gbe_base,
"limit": gbe_limit,
"id": self.master.structure.GbeId,
"read": self.master.structure.GbeRead,
"write": self.master.structure.GbeWrite
})
gbe_region.process()
self.regions.append(gbe_region)
return True
def showinfo(self, ts='', index=None):
print (("%s%s chips %d, regions %d, masters %d, PCH straps %d, "
"PROC straps %d, ICC entries %d") % (
ts, blue("Flash Descriptor (Intel PCH)"),
self.map.structure.NumberOfFlashChips,
self.map.structure.NumberOfRegions,
self.map.structure.NumberOfMasters,
self.map.structure.NumberOfPchStraps,
self.map.structure.NumberOfProcStraps,
self.map.structure.NumberOfIccTableEntries))
for region in self.regions:
region.showinfo(ts="%s " % ts)
def dump(self, parent, index=None):
dump_data(os.path.join(parent, "flash.fd"), self.data)
parent = os.path.join(parent, "regions")
for region in self.regions:
region.dump(parent)
pass
|
RafaelRMachado/uefi-firmware-parser
|
uefi_firmware/flash.py
|
Python
|
mit
| 5,981
|
#!/usr/bin/env python
"""
Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
See the file 'doc/COPYING' for copying permission
"""
import os
from lib.core.common import singleTimeWarnMessage
from lib.core.enums import DBMS
from lib.core.enums import PRIORITY
__priority__ = PRIORITY.LOW
def dependencies():
singleTimeWarnMessage("tamper script '%s' is only meant to be run against %s" % (os.path.basename(__file__).split(".")[0], DBMS.MYSQL))
def tamper(payload, **kwargs):
"""
Replaces space character (' ') with a dash comment ('--') followed by
a new line ('\n')
Requirement:
* MySQL
* MSSQL
Tested against:
Notes:
* Useful to bypass several web application firewalls.
>>> tamper('1 AND 9227=9227')
'1--%0AAND--%0A9227=9227'
"""
retVal = ""
if payload:
for i in xrange(len(payload)):
if payload[i].isspace():
retVal += "--%0A"
elif payload[i] == '#' or payload[i:i + 3] == '-- ':
retVal += payload[i:]
break
else:
retVal += payload[i]
return retVal
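# A worked example of the inline-comment branch above (illustrative):
# once a '#' or '-- ' comment starts, the rest of the payload is copied
# verbatim, e.g. tamper('1 AND 2>1 -- x') -> '1--%0AAND--%0A2>1--%0A-- x'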
|
V11/volcano
|
server/sqlmap/tamper/space2mysqldash.py
|
Python
|
mit
| 1,161
|
import numpy as np
# Malisiewicz et al.
def non_max_suppression(boxes, overlapThresh):
# if there are no boxes, return an empty list
if len(boxes) == 0:
return []
	# if the bounding boxes are integers, convert them to floats --
# this is important since we'll be doing a bunch of divisions
if boxes.dtype.kind == "i":
boxes = boxes.astype("float")
# initialize the list of picked indexes
pick = []
# grab the coordinates of the bounding boxes
x1 = boxes[:,0]
y1 = boxes[:,1]
x2 = boxes[:,2]
y2 = boxes[:,3]
# compute the area of the bounding boxes and sort the bounding
# boxes by the bottom-right y-coordinate of the bounding box
area = (x2 - x1 + 1) * (y2 - y1 + 1)
idxs = np.argsort(y2)
# keep looping while some indexes still remain in the indexes
# list
while len(idxs) > 0:
# grab the last index in the indexes list and add the
# index value to the list of picked indexes
last = len(idxs) - 1
i = idxs[last]
pick.append(i)
# find the largest (x, y) coordinates for the start of
# the bounding box and the smallest (x, y) coordinates
# for the end of the bounding box
xx1 = np.maximum(x1[i], x1[idxs[:last]])
yy1 = np.maximum(y1[i], y1[idxs[:last]])
xx2 = np.minimum(x2[i], x2[idxs[:last]])
yy2 = np.minimum(y2[i], y2[idxs[:last]])
# compute the width and height of the bounding box
w = np.maximum(0, xx2 - xx1 + 1)
h = np.maximum(0, yy2 - yy1 + 1)
# compute the ratio of overlap
overlap = (w * h) / area[idxs[:last]]
	# delete all indexes from the index list that have an overlap
	# greater than the provided overlap threshold
idxs = np.delete(idxs, np.concatenate(([last],
np.where(overlap > overlapThresh)[0])))
	# return only the indexes of the bounding boxes that were picked
return pick
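# A minimal usage sketch (the boxes below are hypothetical, not part of
# the original module): rows are (x1, y1, x2, y2); the function returns
# the indexes of the kept boxes, so the caller selects boxes[pick].
if __name__ == "__main__":
    boxes = np.array([
        [10, 10, 50, 50],
        [12, 12, 52, 52],     # overlaps heavily with the first box
        [100, 100, 150, 150],
    ])
    pick = non_max_suppression(boxes, overlapThresh=0.3)
    print(boxes[pick].astype("int"))  # two boxes survive suppression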
|
rawcoder/object-detection
|
VOCdevkit/nms.py
|
Python
|
gpl-2.0
| 1,737
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2011 OpenStack Foundation.
# Copyright 2012, Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Exception related utilities.
"""
import logging
import sys
import time
import traceback
import six
from payload.openstack.common.gettextutils import _ # noqa
class save_and_reraise_exception(object):
"""Save current exception, run some code and then re-raise.
In some cases the exception context can be cleared, resulting in None
being attempted to be re-raised after an exception handler is run. This
can happen when eventlet switches greenthreads or when running an
exception handler, code raises and catches an exception. In both
cases the exception context will be cleared.
To work around this, we save the exception state, run handler code, and
then re-raise the original exception. If another exception occurs, the
saved exception is logged and the new exception is re-raised.
In some cases the caller may not want to re-raise the exception, and
for those circumstances this context provides a reraise flag that
can be used to suppress the exception. For example:
except Exception:
with save_and_reraise_exception() as ctxt:
decide_if_need_reraise()
if not should_be_reraised:
ctxt.reraise = False
"""
def __init__(self):
self.reraise = True
def __enter__(self):
self.type_, self.value, self.tb, = sys.exc_info()
return self
def __exit__(self, exc_type, exc_val, exc_tb):
if exc_type is not None:
logging.error(_('Original exception being dropped: %s'),
traceback.format_exception(self.type_,
self.value,
self.tb))
return False
if self.reraise:
six.reraise(self.type_, self.value, self.tb)
def forever_retry_uncaught_exceptions(infunc):
def inner_func(*args, **kwargs):
last_log_time = 0
last_exc_message = None
exc_count = 0
while True:
try:
return infunc(*args, **kwargs)
except Exception as exc:
this_exc_message = six.u(str(exc))
if this_exc_message == last_exc_message:
exc_count += 1
else:
exc_count = 1
# Do not log any more frequently than once a minute unless
# the exception message changes
cur_time = int(time.time())
if (cur_time - last_log_time > 60 or
this_exc_message != last_exc_message):
logging.exception(
_('Unexpected exception occurred %d time(s)... '
'retrying.') % exc_count)
last_log_time = cur_time
last_exc_message = this_exc_message
exc_count = 0
# This should be a very rare event. In case it isn't, do
# a sleep.
time.sleep(1)
return inner_func
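# A minimal usage sketch (the flaky worker below is hypothetical, not
# part of this module): the decorator keeps retrying the function,
# logging a repeating failure at most once a minute.
if __name__ == '__main__':
    state = {'calls': 0}
    @forever_retry_uncaught_exceptions
    def flaky_worker():
        state['calls'] += 1
        if state['calls'] < 3:
            raise RuntimeError('transient failure')
        return 'succeeded on call %d' % state['calls']
    print(flaky_worker())  # retries twice, then returns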
|
gorocacher/payload
|
payload/openstack/common/excutils.py
|
Python
|
apache-2.0
| 3,748
|
# -*- coding: utf-8 -*-
# The MIT License (MIT)
#
# Copyright (c) 2014-2018 Shi Chi(Mack Stone)
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import sys
from .func_exponential import *
if sys.version_info > (3, 0):
long = int
def length(x):
"""Returns the length of x, i.e., sqrt(x * x).
:param x: Floating-point vector types.
.. seealso::
`GLSL length man page <http://www.opengl.org/sdk/docs/manglsl/xhtml/length.xml>`_
`GLSL 4.20.8 specification, section 8.5 Geometric Functions <http://www.opengl.org/registry/doc/GLSLangSpec.4.20.8.pdf>`_"""
# TODO: implement vec2 type
# if isinstance(x, Vec2):
# sqr = x.x * x.x + x.y * x.y
# return math.sqrt(sqr)
if isinstance(x, Vec3):
sqr = x.x * x.x + x.y * x.y + x.z * x.z
return math.sqrt(sqr)
elif isinstance(x, Vec4):
sqr = x.x * x.x + x.y * x.y + x.z * x.z + x.w * x.w
return math.sqrt(sqr)
elif isinstance(x, float) or isinstance(x, int) or isinstance(x, long):
return abs(x)
else:
        raise TypeError('unsupported type %s' % type(x))
def dot(x, y):
"""Returns the dot product of x and y, i.e., result = x * y.
:param x: Floating-point vector types.
.. seealso::
`GLSL dot man page <http://www.opengl.org/sdk/docs/manglsl/xhtml/dot.xml>`_
`GLSL 4.20.8 specification, section 8.5 Geometric Functions <http://www.opengl.org/registry/doc/GLSLangSpec.4.20.8.pdf>`_"""
# TODO: implement vec2
# if isinstance(x, Vec2) and isinstance(y, Vec2):
# tmp = Vec2(x * y)
# return tmp.x + tmp.y
if isinstance(x, Vec3) and isinstance(y, Vec3):
tmp = Vec3(x * y)
return tmp.x + tmp.y + tmp.z
elif isinstance(x, Vec4) and isinstance(y, Vec4):
tmp = Vec4(x * y)
return (tmp.x + tmp.y) + (tmp.z + tmp.w)
elif isinstance(x, float) or isinstance(x, int) or isinstance(x, long):
return x * y
else:
        raise TypeError('unsupported type %s' % type(x))
def normalize(x):
"""Returns a vector in the same direction as x but with length of 1.
.. seealso::
`GLSL normalize man page <http://www.opengl.org/sdk/docs/manglsl/xhtml/normalize.xml>`_
`GLSL 4.20.8 specification, section 8.5 Geometric Functions <http://www.opengl.org/registry/doc/GLSLangSpec.4.20.8.pdf>`_"""
if isinstance(x, float) or isinstance(x, int) or isinstance(x, long):
return -1.0 if x < 0.0 else 1.0
#elif isinstance(x, Vec2):
#sqr = x.x * x.x + x.y * x.y
#return x * inversesqrt(sqr)
elif isinstance(x, Vec3):
sqr = x.x * x.x + x.y * x.y + x.z * x.z
return x * inversesqrt(sqr)
elif isinstance(x, Vec4):
sqr = x.x * x.x + x.y * x.y + x.z * x.z + x.w * x.w
return x * inversesqrt(sqr)
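# A minimal usage sketch (assumes the Vec3 type is re-exported through
# the wildcard import above, which the functions here already rely on):
#   v = Vec3(3.0, 0.0, 4.0)
#   length(v)      # -> 5.0
#   dot(v, v)      # -> 25.0
#   normalize(v)   # -> v scaled to unit length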
|
mackst/glm
|
glm/detail/func_geometric.py
|
Python
|
mit
| 3,822
|
# coding=utf-8
"""This module, trading_model.py, represents a trading model used in both training and testing."""
FINANCE_MODEL_TYPE_M0 = 'm0_net_resistance'
class FinanceModel(object):
"""Represents a financial model that can be both trained and tested."""
def __init__(self, model_type, types_of_data_needed):
self._model_type = model_type
self._types_of_data_needed = types_of_data_needed
self._weights = []
self._required_libraries = []
self._required_defines = []
def add_required_library(self, l):
"""Adds a required library."""
self._required_libraries.append(l)
def add_required_define_statement(self, d, v):
"""Adds a required define statement."""
self._required_defines.append([d, v])
@property
def required_libraries(self):
"""Returns the required libraries of this model."""
return self._required_libraries
@property
def required_define_statements(self):
"""Returns the required define statements of this model."""
return self._required_defines
@property
def model_name(self) -> str:
"""Returns the name of this model."""
return self._model_type
@property
def file_name(self) -> str:
"""Returns the file name of this model."""
return self._model_type + '.c'
@property
def type_of_data_needed(self):
"""Returns the type of data this model needs."""
return self._types_of_data_needed
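# A minimal usage sketch (the argument values below are illustrative only):
if __name__ == '__main__':
    model = FinanceModel(FINANCE_MODEL_TYPE_M0, ['price_history'])
    model.add_required_library('math.h')
    model.add_required_define_statement('WINDOW_SIZE', 30)
    print(model.file_name)  # -> m0_net_resistance.c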
|
utarsuno/quasar_source
|
deprecated/finance/finance_simulations/models/trading_model.py
|
Python
|
mit
| 1,363
|
from flask import Blueprint
users = Blueprint("users", __name__,
template_folder='templates',
static_folder='static',
static_url_path='/static/users')
from views import *
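# A minimal registration sketch (the application module is hypothetical):
#   from flask import Flask
#   app = Flask(__name__)
#   app.register_blueprint(users)  # static files served at /static/users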
|
GautamAnghore/clic-o-matic
|
apps/users/__init__.py
|
Python
|
gpl-2.0
| 227
|
# -*- coding: utf-8 -*-
import _global as g
def foo(a, b, c):
return 2 + (256 + 3 + (444 + 34));
def foo(a, b, c):
return 2 + (256 + 3 + (444 + 34));
def bar(a, b):
return a * b;
g.bar = bar
|
niwinz/cobrascript
|
samples/sample1.py
|
Python
|
bsd-3-clause
| 210
|
#encoding:utf-8
from Configs.GlobalConfig import Hosts, DataStorages, IsoStorages, ExportStorages
'''
---------------------------------------------------------------------------------------------------
@note: ModuleTestData
---------------------------------------------------------------------------------------------------
'''
########################################################################
# Information for one data center
########################################################################
dc_nfs_name = 'DC-ITC10'
dc_name_list = [dc_nfs_name]
xml_dc_info = '''
<data_center>
<name>%s</name>
<storage_type>nfs</storage_type>
<version minor="4" major="3"/>
</data_center>
''' % (dc_nfs_name)
########################################################################
# Information for one cluster
########################################################################
cluster_nfs_name = 'Cluster-ITC10'
cluster_name_list = [cluster_nfs_name]
xml_cluster_info = '''
<cluster>
<name>%s</name>
<cpu id="Intel Conroe Family"/>
<data_center>
<name>%s</name>
</data_center>
<gluster_service>true</gluster_service>
</cluster>
''' % (cluster_nfs_name, dc_nfs_name)
########################################################################
# Host information (node1 joins the NFS data center)
########################################################################
host1 = Hosts['node1']
host1_name = 'node-ITC10-1'
host1_ip = host1['ip']
host1_password = host1['password']
host2 = Hosts['node2']
host2_name = 'node-ITC10-2'
host2_ip = host2['ip']
host2_password = host2['password']
xml_host1_info = '''
<host>
<cluster>
<name>%s</name>
</cluster>
<name>%s</name>
<address>%s</address>
<root_password>%s</root_password>
</host>
''' % (cluster_nfs_name, host1_name, host1_ip, host1_password)
xml_host2_info = '''
<host>
<cluster>
<name>%s</name>
</cluster>
<name>%s</name>
<address>%s</address>
<root_password>%s</root_password>
</host>
''' % (cluster_nfs_name, host2_name, host2_ip, host2_password)
#######################################################################################
# Information for four storage domains (data1/data2, one ISO and one Export domain)
#######################################################################################
data1_nfs_name = 'data1-nfs-ITC10'
data1_nfs = DataStorages['nfs']['data1']
data1_nfs_ip = data1_nfs['ip']
data1_nfs_path = data1_nfs['path']
data2_nfs_name = 'data2-nfs-ITC10'
data2_nfs = DataStorages['nfs']['data2']
data2_nfs_ip = data2_nfs['ip']
data2_nfs_path = data2_nfs['path']
export1_name = 'export1-ITC10'
export1 = ExportStorages['Export-Storage2']
export1_ip = export1['ip']
export1_path = export1['path']
iso1_name = 'iso1-ITC10'
iso1 = IsoStorages['ISO-Storage1']
iso1_ip = iso1['ip']
iso1_path = iso1['path']
xml_storage_info = '''
<data_driver>
<storage_domain>
<name>%s</name>
<type>data</type>
<host>
<name>%s</name>
</host>
<storage>
<type>nfs</type>
<address>%s</address>
<path>%s</path>
</storage>
</storage_domain>
<storage_domain>
<name>%s</name>
<type>data</type>
<host>
<name>%s</name>
</host>
<storage>
<type>nfs</type>
<address>%s</address>
<path>%s</path>
</storage>
</storage_domain>
<storage_domain>
<name>%s</name>
<type>export</type>
<host>
<name>%s</name>
</host>
<storage>
<type>nfs</type>
<address>%s</address>
<path>%s</path>
</storage>
</storage_domain>
<storage_domain>
<name>%s</name>
<type>iso</type>
<host>
<name>%s</name>
</host>
<storage>
<type>nfs</type>
<address>%s</address>
<path>%s</path>
</storage>
</storage_domain>
</data_driver>
''' % (data1_nfs_name, host1_name, data1_nfs_ip, data1_nfs_path,
data2_nfs_name, host1_name, data2_nfs_ip, data2_nfs_path,
export1_name, host1_name, export1_ip, export1_path,
iso1_name, host1_name, iso1_ip, iso1_path)
'''
@note: The storage domain names should be obtained from this module's Setup case during initialization; literal strings are used here for now
'''
vm_name = 'VM-ITC10'
vm_info='''
<vm>
<name>%s</name>
<description>Virtual Machine for Module Test.</description>
<type>server</type>
<memory>536870912</memory>
<cluster>
<name>%s</name>
</cluster>
<template>
<name>Blank</name>
</template>
<cpu>
<topology sockets="2" cores="1"/>
</cpu>
<os>
<boot dev="cdrom"/>
<boot dev="hd"/>
</os>
</vm>
''' % (vm_name, cluster_nfs_name)
'''
---------------------------------------------------------------------------------------------------
@note: Post-Test-Data
---------------------------------------------------------------------------------------------------
'''
xml_del_host_option = '''
<action>
<force>true</force>
<async>false</async>
</action>
'''
xml_del_sd_option = '''
<storage_domain>
<host>
<name>%s</name>
</host>
<format>true</format>
</storage_domain>
'''
'''
---------------------------------------------------------------------------------------------------
@note: ExpectedResult
---------------------------------------------------------------------------------------------------
'''
expected_status_code_create_dc = 201
expected_status_code_create_cluster = 201
expected_status_code_del_dc = 200
expected_status_code_del_cluster = 200
|
faylau/oVirt3.3WebAPITest
|
src/TestData/Volume/ITC10_SetUp.py
|
Python
|
apache-2.0
| 6,407
|
# This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
import sqlalchemy as sa
from migrate import changeset
def upgrade(migrate_engine):
metadata = sa.MetaData()
metadata.bind = migrate_engine
# the old (non-sqlalchemy-migrate) migration scripts messed up the
# lengths of these columns, so fix them here.
changeset.alter_column(
sa.Column('class_name', sa.String(128), nullable=False),
table="schedulers",
metadata=metadata,
engine=migrate_engine)
changeset.alter_column(
sa.Column('name', sa.String(128), nullable=False),
table="schedulers",
metadata=metadata,
engine=migrate_engine)
# sqlalchemy's reflection gets the server_defaults wrong, so this
# table has to be included here.
changes = sa.Table('changes', metadata,
sa.Column('changeid', sa.Integer, primary_key=True),
sa.Column('author', sa.String(256), nullable=False),
sa.Column('comments', sa.String(1024), nullable=False),
sa.Column('is_dir', sa.SmallInteger, nullable=False),
sa.Column('branch', sa.String(256)),
sa.Column('revision', sa.String(256)),
sa.Column('revlink', sa.String(256)),
sa.Column('when_timestamp', sa.Integer, nullable=False),
sa.Column('category', sa.String(256)),
sa.Column('repository', sa.String(length=512), nullable=False,
server_default=''),
sa.Column('project', sa.String(length=512), nullable=False,
server_default=''),
)
changeset.alter_column(
sa.Column('author', sa.String(256), nullable=False),
table=changes,
metadata=metadata,
engine=migrate_engine)
changeset.alter_column(
sa.Column('branch', sa.String(256)),
table=changes,
metadata=metadata,
engine=migrate_engine)
|
zozo123/buildbot
|
master/buildbot/db/migrate/versions/010_fix_column_lengths.py
|
Python
|
gpl-3.0
| 2,740
|
"""
Name: Abanoub Milad Nassief
Email: abanoubcs@gmail.com
Description: citation paper analyser
Imports physics citation graph
"""
# general imports
from pylab import *
import math
###################################
# Code for loading citation graph
def load_graph():
"""
    Function that loads a graph from a local text
    representation of the graph (phy.txt)
Returns a dictionary that models a graph
"""
graph_file = open("phy.txt","r")
graph_text = graph_file.read()
graph_lines = graph_text.split('\n')
graph_lines = graph_lines[ : -1]
print ("Loaded graph with", len(graph_lines), "nodes")
answer_graph = {}
for line in graph_lines:
neighbors = line.split(' ')
node = int(neighbors[0])
answer_graph[node] = set([])
for neighbor in neighbors[1 : -1]:
answer_graph[node].add(int(neighbor))
return answer_graph
def compute_in_degrees(digraph):
""" take in param graph, output in degree dictionary """
graph = {}
for node in digraph:
graph[node] = 0
for node in digraph:
for itr in digraph[node]:
graph[itr] += 1
return graph
def in_degree_distribution(digraph):
"""" take in param graph, output in degree distribution dictionary """
digraph = compute_in_degrees(digraph)
graph = {}
for degree in range(len(digraph)):
count = 0
for node in digraph:
if digraph[node] == degree:
count += 1
if count >0:
graph[degree] = count
return graph
def build_plot(graph):
    """
    Build log/log plot points from the distribution graph
    """
    points = []
    for degree in graph:
        if degree > 0:
            # degree 0 has no logarithm, so it is skipped
            points.append([math.log(degree), math.log(graph[degree])])
    return points
citation_graph = load_graph()
result_graph = in_degree_distribution(citation_graph)
# Pass the distribution in as a parameter and draw it on a log/log scale
# (plot/xlabel/ylabel/title/show come from the pylab wildcard import)
points = build_plot(result_graph)
plot([p[0] for p in points], [p[1] for p in points], 'o')
xlabel("log(in-degree)")
ylabel("log(distribution)")
title("In-degree distribution of the citation graph")
show()
|
AbanoubM/Algorithmic-Thinking
|
citation paper analyser/citation_analyser.py
|
Python
|
mit
| 2,051
|
import sys; sys.path.append("../")
import unittest
from collections import OrderedDict
from baemo.references import Reference
from baemo.references import References
from baemo.exceptions import ReferencesMalformed
class TestReferences(unittest.TestCase):
# __init__
def test___init___no_params(self):
r = References()
self.assertEqual(r.__dict__, {})
self.assertEqual(type(r), References)
def test___init___dict_param(self):
r = References({
"k": {
"type": "one_to_one",
"entity": "user"
}
})
self.assertEqual(r.__dict__, {
"k": Reference({
"type": "one_to_one",
"entity": "user"
})
})
# __call__
def test___call___no_params(self):
r = References({
"k": {
"type": "one_to_one",
"entity": "user"
}
})
r()
self.assertEqual(r.__dict__, {})
def test___call___dict_param(self):
r = References()
r({
"k": {
"type": "one_to_one",
"entity": "user"
}
})
self.assertEqual(r.__dict__, {
"k": Reference({
"type": "one_to_one",
"entity": "user"
})
})
# _wrap
def test__wrap__dict_param(self):
r = {
"k": {
"type": "one_to_one",
"entity": "user"
}
}
self.assertEqual(References._wrap(r), {
"k": Reference({
"type": "one_to_one",
"entity": "user"
})
})
def test__wrap__nested_dict_param(self):
r = {
"k1": {
"k2": {
"k3": {
"type": "one_to_one",
"entity": "user"
}
}
}
}
self.assertEqual(References._wrap(r), {
"k1": {
"k2": {
"k3": Reference({
"type": "one_to_one",
"entity": "user"
})
}
}
})
# wrap
def test_wrap(self):
r = References()
r.__dict__ = {
"k": {
"type": "one_to_one",
"entity": "user"
}
}
r.wrap()
self.assertEqual(r.__dict__, {
"k": Reference({
"type": "one_to_one",
"entity": "user"
})
})
# _validate
def test__validate__dict_param(self):
r = {
"k": Reference({
"type": "one_to_one",
"entity": "user"
}
)}
try:
References._validate(r)
except ReferencesMalformed:
self.fail("exception raised")
def test__validate__dict_param__raises_ReferencesMalformed(self):
with self.assertRaises(ReferencesMalformed):
References._validate({"foo": "bar"})
# validate
def test_validate__dict_param(self):
r = References()
r.__dict__ = {
"k": Reference({
"type": "one_to_one",
"entity": "user"
}
)}
try:
r.validate()
except ReferencesMalformed:
self.fail("exception raised")
def test_validate__dict_param__raises_ReferencesMalformed(self):
r = References()
r.__dict__ = {"foo": "bar"}
with self.assertRaises(ReferencesMalformed):
r.validate()
if __name__ == "__main__":
unittest.main()
|
chrisantonellis/pymongo_basemodel
|
test/test_references.py
|
Python
|
mit
| 3,808
|
# coding: utf-8
#
# Credit to Jess Teale for original idea
# (c) Igor Smolinski 2017
# (c) Jess Teale 2017
#
from subprocess import call#for importing colour
from time import sleep #For waiting
import random #For randomisation of question
import os #For cls/clear
import sys #For sys.exit
import site #Imported but not used
import ctypes #For setting the console window title
#Sets all variables to null just in case
word = None
key_pos = None
var1 = None
var2 = None
answer = None
la = None
te = None
foo = None
ctypes.windll.kernel32.SetConsoleTitleW("Spanish and French Verb Ending(s) Revision")#Shell window title
def go():#Used instead of os.system('pause') to not show errors
foo = input("Press enter to continue...")
def oops():#Error message for when problem exists between keyboard and chair
print("Oops! There was an error. :(")
sleep(1)
go()
os.system('cls')
sys.exit(0)
def main():#Defined as function to be able to loop program
word = input("Your Infinitive: ")
key_pos = len(word) - 2
suggestions = ["I", "You (s.)", "He/She/It", "We", "You (Pl.)", "They"]
var1 = random.choice(suggestions)
var2 = ""
if la == "a":# SPANISH
if te == "1":# SP IMPERFECT
if word.endswith("ar"):
inf = "ar"
if var1 == "I":
var2 = "aba"
elif var1 == "You":
var2 = "abas"
elif var1 == "He/She/It":
var2 = "aba"
elif var1 == "We":
var2 = "ábamos"
elif var1 == "You (Pl.)":
var2 = "abais"
elif var1 == "They":
var2 = "aban"
elif word.endswith("er"):
inf = "er"
if var1 == "I":
var2 = "ía"
elif var1 == "You":
var2 = "ías"
elif var1 == "He/She/It":
var2 = "ía"
elif var1 == "We":
var2 = "íamos"
elif var1 == "You (Pl.)":
var2 = "íais"
elif var1 == "They":
var2 = "ían"
elif word.endswith("ir"):
inf = "ir"
if var1 == "I":
var2 = "ía"
elif var1 == "You":
var2 = "ías"
elif var1 == "He/She/It":
var2 = "ía"
elif var1 == "We":
var2 = "íamos"
elif var1 == "You (Pl.)":
var2 = "íais"
elif var1 == "They":
var2 = "ían"
elif te == "2":# SP PRETERITE
if word.endswith("ar"):
inf = "ar"
if var1 == "I":
var2 = "é"
elif var1 == "You":
var2 = "aste"
elif var1 == "He/She/It":
var2 = "ó"
elif var1 == "We":
var2 = "amos"
elif var1 == "You (Pl.)":
var2 = "asteis"
elif var1 == "They":
var2 = "aron"
elif word.endswith("er"):
inf = "er"
if var1 == "I":
var2 = "í"
elif var1 == "You":
var2 = "íste"
elif var1 == "He/She/It":
var2 = "ió"
elif var1 == "We":
var2 = "imos"
elif var1 == "You (Pl.)":
var2 = "isteis"
elif var1 == "They":
var2 = "ieron"
elif word.endswith("ir"):
inf = "ir"
if var1 == "I":
var2 = "í"
elif var1 == "You":
var2 = "iste"
elif var1 == "He/She/It":
var2 = "ió"
elif var1 == "We":
var2 = "imos"
elif var1 == "You (Pl.)":
var2 = "isteis"
elif var1 == "They":
var2 = "ieron"
elif te == "3":# SP PRESENT
if word.endswith("ar"):
inf = "ar"
if var1 == "I":
var2 = "o"
elif var1 == "You":
var2 = "as"
elif var1 == "He/She/It":
var2 = "a"
elif var1 == "We":
var2 = "amos"
elif var1 == "You (Pl.)":
var2 = "áis"
elif var1 == "They":
var2 = "an"
elif word.endswith("er"):
inf = "er"
if var1 == "I":
var2 = "o"
elif var1 == "You":
var2 = "es"
elif var1 == "He/She/It":
var2 = "e"
elif var1 == "We":
var2 = "emos"
elif var1 == "You (Pl.)":
var2 = "éis"
elif var1 == "They":
var2 = "en"
elif word.endswith("ir"):
inf = "ir"
if var1 == "I":
var2 = "o"
elif var1 == "You":
var2 = "es"
elif var1 == "He/She/It":
var2 = "e"
elif var1 == "We":
var2 = "imos"
elif var1 == "You (Pl.)":
var2 = "ís"
elif var1 == "They":
var2 = "en"
elif te == "4":# SP FUTURE
if word.endswith("ar"):
inf = "ar"
if var1 == "I":
var2 = "aré"
elif var1 == "You":
var2 = "arás"
elif var1 == "He/She/It":
var2 = "ará"
elif var1 == "We":
var2 = "emos"
elif var1 == "You (Pl.)":
var2 = "éis"
elif var1 == "They":
var2 = "án"
elif word.endswith("er"):
inf = "er"
if var1 == "I":
var2 = "eré"
elif var1 == "You":
var2 = "erás"
elif var1 == "He/She/It":
var2 = "erá"
elif var1 == "We":
var2 = "emos"
elif var1 == "You (Pl.)":
var2 = "éis"
elif var1 == "They":
var2 = "án"
elif word.endswith("ir"):
inf = "ir"
if var1 == "I":
var2 = "iré"
elif var1 == "You":
var2 = "irás"
elif var1 == "He/She/It":
var2 = "irá"
elif var1 == "We":
var2 = "iremos"
elif var1 == "You (Pl.)":
var2 = "iréis"
elif var1 == "They":
var2 = "án"
elif te == "5":#SP CONDITIONAL
if word.endswith("ar"):
inf = "ar"
if var1 == "I":
var2 = "aría"
elif var1 == "You":
var2 = "arías"
elif var1 == "He/She/It":
var2 = "aría"
elif var1 == "We":
var2 = "aríamos"
elif var1 == "You (Pl.)":
var2 = "aríais"
elif var1 == "They":
var2 = "arían"
elif word.endswith("er"):
inf = "er"
if var1 == "I":
var2 = "ería"
elif var1 == "You":
var2 = "erías"
elif var1 == "He/She/It":
var2 = "ería"
elif var1 == "We":
var2 = "eríamos"
elif var1 == "You (Pl.)":
var2 = "eríais"
elif var1 == "They":
var2 = "erían"
elif word.endswith("ir"):
inf = "ir"
if var1 == "I":
var2 = "iría"
elif var1 == "You":
var2 = "irías"
elif var1 == "He/She/It":
var2 = "iría"
elif var1 == "We":
var2 = "iríamos"
elif var1 == "You (Pl.)":
var2 = "iríais"
elif var1 == "They":
var2 = "irían"
else:
oops()
else:
oops()
elif la == "b":# FRENCH:
if te == "1":# FR PAST
if word.endswith("er"):
inf = "er"
if var1 == "I":
var2 = "é"
elif var1 == "You":
var2 = "é"
elif var1 == "He/She/It":
var2 = "é"
elif var1 == "We":
var2 = "é"
elif var1 == "You (Pl.)":
var2 = "é"
elif var1 == "They":
var2 = "é"
elif word.endswith("ir"):
inf = "ir"
if var1 == "I":
var2 = "i"
elif var1 == "You":
var2 = "i"
elif var1 == "He/She/It":
var2 = "i"
elif var1 == "We":
var2 = "i"
elif var1 == "You (Pl.)":
var2 = "i"
elif var1 == "They":
var2 = "i"
elif word.endswith("re"):
inf = "re"
if var1 == "I":
var2 = "u"
elif var1 == "You":
var2 = "u"
elif var1 == "He/She/It":
var2 = "u"
elif var1 == "We":
var2 = "u"
elif var1 == "You (Pl.)":
var2 = "u"
elif var1 == "They":
var2 = "u"
elif te == "2":# FR PRESENT
if word.endswith("er"):
inf = "er"
if var1 == "I":
var2 = "e"
elif var1 == "You":
var2 = "es"
elif var1 == "He/She/It":
var2 = "e"
elif var1 == "We":
var2 = "ons"
elif var1 == "You (Pl.)":
var2 = "ez"
elif var1 == "They":
var2 = "ent"
elif word.endswith("ir"):
inf = "ir"
if var1 == "I":
var2 = "is"
elif var1 == "You":
var2 = "is"
elif var1 == "He/She/It":
var2 = "it"
elif var1 == "We":
var2 = "issons"
elif var1 == "You (Pl.)":
var2 = "issez"
elif var1 == "They":
var2 = "issent"
elif word.endswith("re"):
inf = "re"
if var1 == "I":
var2 = "s"
elif var1 == "You":
var2 = "s"
elif var1 == "He/She/It":
var2 = ""
elif var1 == "We":
var2 = "ons"
elif var1 == "You (Pl.)":
var2 = "ez"
elif var1 == "They":
var2 = "ent"
elif te == "3":# FR FUTURE
if word.endswith("er"):
inf = "er"
if var1 == "I":
var2 = "ai"
elif var1 == "You":
var2 = "as"
elif var1 == "He/She/It":
var2 = "a"
elif var1 == "We":
var2 = "ons"
elif var1 == "You (Pl.)":
var2 = "ez"
elif var1 == "They":
var2 = "ont"
elif word.endswith("ir"):
inf = "ir"
if var1 == "I":
var2 = "ai"
elif var1 == "You":
var2 = "as"
elif var1 == "He/She/It":
var2 = "a"
elif var1 == "We":
var2 = "ons"
elif var1 == "You (Pl.)":
var2 = "ez"
elif var1 == "They":
var2 = "ont"
elif word.endswith("re"):
inf = "re"
if var1 == "I":
var2 = "ai"
elif var1 == "You":
var2 = "as"
elif var1 == "He/She/It":
var2 = "a"
elif var1 == "We":
var2 = "ons"
elif var1 == "You (Pl.)":
var2 = "ez"
elif var1 == "They":
var2 = "ont"
else:
oops()
else:
oops()
else:
oops()
#Removes ar, er, ir and re
if word.endswith("ar"):
stem = word.replace('ar', '')#Removes AR
elif word.endswith("er"):
stem = word.replace('er', '')#Removes ER
elif word.endswith("ir"):
stem = word.replace('ir', '')#Removes IR
elif word.endswith("re"):
stem = word.replace('re', '')#Removes RE
else:
oops()
#Tells user what to change verb to
print("Change " + word + " to <" + var1 + ">")
answer = input(stem)
if answer == var2:
print("Correct! :)")
call('color A0', shell = True)
sleep(2)
if la == 'a':
call('color 4E', shell = True)
elif la =='b':
call('color 17', shell = True)
os.system('cls')
main()
    else:
print("Incorrect. :(")
print("Correct Answer: " + stem + var2 + " ")
call('color C0', shell = True)
sleep(2)
if la == 'a':
call('color 4E', shell = True)
elif la =='b':
call('color 17', shell = True)
go()
os.system('cls')
main()
os.system('cls')
print("Verb Ending Revision ^.^")
print("")
print("Languages and Tenses:")
print("a) Spanish")
print(" 1) Imperfect")
print(" 2) Preterite")
print(" 3) Present")
print(" 4) Future")
print(" 5) Conditional")
print("b) French")
print(" 1) Past")
print(" 2) Present")
print(" 3) Future")
print("")
try:#Used try as there were errors previously
la = input("Language: ")
te = input("Tense: ")
except Exception:#StandardError only exists in Python 2
oops()
sleep(2)
os.system('cls')
if la == 'a':
call('color 4E', shell = True)
elif la =='b':
call('color 17', shell = True)
#Alt codes for people without those sybols on their keyboard map
print("!!! Important !!!")
print("Some characters have special symbols.")
print("Please take note of the following Alt Codes")
print("if your keyboard is not set to Spanish/French.")
print("")
print("Alt + 0225 | á")
print("Alt + -131 | â")
print("Alt + 0233 | é")
print("Alt + -136 | ê")
print("Alt + 0237 | í")
print("Alt + -140 | î")
print("Alt + -162 | ó")
print("Alt + -147 | ô")
print("Alt + -163 | ú")
print("Alt + -150 | û")
print("")
sleep(2)
go()
os.system('cls')
main()#Sends to main part
|
igor-dot-gz/spfr
|
version/sp-fr-revision.6.py
|
Python
|
mit
| 16,167
|
# -*- coding: utf-8 -*-
import os
import json
import requests
import logging
from rest_framework import serializers
from rest_framework.exceptions import APIException
from django.core.files.base import ContentFile
from django.contrib.auth.models import User
from django.utils.translation import ugettext_noop
from django.conf import settings
import sputnik
from booktype.importer import utils as importer_utils
from booktype.importer.delegate import Delegate
from booktype.importer.notifier import CollectNotifier
from booktype.apps.account import utils as account_utils
from booktype.apps.edit.forms import AdditionalMetadataForm
from booktype.utils.book import create_book
from booktype.utils.misc import booktype_slugify
from booki.utils.log import logBookHistory, logChapterHistory
from booki.editor.models import (Book, BookToc, Language, Chapter, BookStatus,
Info, METADATA_FIELDS, Attachment,
get_attachment_url)
from ..core.serializers import SimpleBookRoleSerializer
try:
from django.urls import reverse
except ImportError:
from django.core.urlresolvers import reverse
try:
from PIL import Image
except ImportError:
import Image
logger = logging.getLogger('api.editor.serializers')
class LanguageSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Language
fields = ['id', 'abbrevation', 'name', 'url']
class BookListSerializer(serializers.ModelSerializer):
editor_url = serializers.SerializerMethodField(read_only=True)
owner = serializers.HyperlinkedRelatedField(
view_name='user-detail',
read_only=True
)
language = LanguageSerializer(read_only=True)
class Meta:
model = Book
fields = [
'id', 'owner', 'title', 'url', 'description',
'created', 'editor_url', 'language'
]
read_only_fields = ['id', 'url', 'created']
def get_editor_url(self, obj):
request = self.context['request']
url = reverse('edit:editor', args=[obj.url_title])
return request.build_absolute_uri(url)
class BookSerializer(BookListSerializer):
toc = serializers.SerializerMethodField(read_only=True)
metadata = serializers.SerializerMethodField(read_only=True)
language_id = serializers.PrimaryKeyRelatedField(
queryset=Language.objects.all(),
source='language')
users_by_role = serializers.HyperlinkedIdentityField(
view_name='book-users-by-role')
class Meta(BookListSerializer.Meta):
parent = BookListSerializer.Meta
fields = parent.fields + ['toc', 'metadata', 'language_id', 'users_by_role']
depth = 1
def get_toc(self, obj):
book_url = self.get_editor_url(obj)
def _build_toc_entry(item):
if item.is_chapter():
return {
'title': item.chapter.title,
'url_title': item.chapter.url_title,
'typeof': item.typeof,
'typeof_label': ugettext_noop('Chapter'),
'editor_url': '{0}#edit/{1}'.format(book_url, item.chapter.id),
'current_editor': item.chapter.get_current_editor_username()
}
else:
entry = {
'name': item.name,
'editor_url': None,
'children': [],
'typeof': item.typeof,
'typeof_label': ugettext_noop('Section')
}
if item.has_children():
                entry['children'] = list(map(_build_toc_entry, item.children()))
return entry
version = obj.get_version()
items = BookToc.objects.filter(version=version, parent__isnull=True).order_by("-weight")
        return list(map(_build_toc_entry, items))
def get_metadata(self, obj):
return [{'name': x.name, 'value': x.get_value()} for x in obj.metadata]
class BookCreateSerializer(BookSerializer):
owner_id = serializers.PrimaryKeyRelatedField(
queryset=User.objects.all(),
source='owner')
import_book_url = serializers.URLField(write_only=True, required=False)
import_book_format = serializers.ChoiceField(
choices=['epub', 'docx'], write_only=True, required=False)
class Meta(BookSerializer.Meta):
parent = BookSerializer.Meta
fields = parent.fields + [
'owner_id', 'import_book_url', 'import_book_format']
def validate(self, data):
data = super(BookCreateSerializer, self).validate(data)
fields = data.keys()
if 'import_book_url' in fields and 'import_book_format' not in fields:
error = {'import_book_format': ["This field is required."]}
logger.warn('BookCreateSerializer validate: {}'.format(error))
raise serializers.ValidationError(error)
return data
def create(self, validated_data):
n = Book.objects.count()
book_title = validated_data['title']
owner = validated_data['owner']
url_title = '%s-%s' % (n, booktype_slugify(book_title))
book = create_book(owner, book_title, book_url=url_title)
book.language = validated_data.get('language', None)
book.save()
import_book_url = validated_data.get('import_book_url')
import_format = validated_data.get('import_book_format')
if import_book_url:
book_file = self._get_book_file(import_book_url)
try:
book_importer = importer_utils.get_importer_module(import_format)
except Exception as err:
error = "Wrong importer format {}".format(err)
logger.warn('BookCreateSerializer create: {}'.format(error))
raise serializers.ValidationError(error)
delegate = Delegate()
notifier = CollectNotifier()
try:
book_importer(book_file, book, notifier=notifier, delegate=delegate)
except Exception as err:
error_msg = "Unexpected error while importing the file {}".format(err)
logger.warn('BookCreateSerializer create: {}'.format(error_msg))
raise APIException(error_msg)
if len(notifier.errors) > 0:
err = "\n".join(notifier.errors)
error_msg = "Something went wrong: {}".format(err)
logger.warn('BookCreateSerializer create: {}'.format(error_msg))
raise APIException(error_msg)
return book
def _get_book_file(self, url):
try:
response = requests.get(url)
book_file = ContentFile(response.content)
except Exception as err:
error_msg = "Error while retrieving the file {}".format(err)
logger.warn('BookCreateSerializer create: {}'.format(error_msg))
raise serializers.ValidationError(error_msg)
return book_file
class ChapterListCreateSerializer(serializers.ModelSerializer):
class Meta:
model = Chapter
fields = ('id', 'title', 'version', 'status', 'revision', 'url_title', 'created', 'modified')
read_only_fields = ('version', 'status', 'revision', 'url_title', 'created', 'modified')
def validate(self, attrs):
attrs['book'] = self.context['view']._book
attrs['content'] = u'<h1>{}</h1><p><br/></p>'.format(attrs['title'])
attrs['content_json'] = u'''{
"entityMap": {},
"blocks": [
{
"key": "bm8jb",
"text": "",
"type": "datablock",
"depth": 0,
"inlineStyleRanges": [],
"entityRanges": [],
"data": {}
},
{
"key": "f29sf",
"text": "Chapter Title",
"type": "heading1",
"depth": 0,
"inlineStyleRanges": [],
"entityRanges": [],
"data": {
"attributes": {
"style": {}
}
}
},
{
"key": "a4d8p",
"text": "",
"type": "unstyled",
"depth": 0,
"inlineStyleRanges": [],
"entityRanges": [],
"data": {}
}
]
}'''
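        # The literal above is a Draft.js-style raw ContentState: an initial
        # "datablock", a "heading1" block holding the visible chapter title,
        # and a trailing empty "unstyled" block (the "key" values are arbitrary).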
attrs['url_title'] = booktype_slugify(attrs['title'])
attrs['version'] = attrs['book'].version
attrs['status'] = BookStatus.objects.filter(book=attrs['book']).order_by("-weight")[0]
# validate title/url_title
if not len(attrs['url_title']):
error_msg = {'title': 'Title is empty or contains wrong characters.'}
logger.warn('ChapterListCreateSerializer validate: {}'.format(error_msg))
raise serializers.ValidationError(error_msg)
# validate title/url_title
chapter_exists = Chapter.objects.filter(
book=self.context['view']._book, version=attrs['book'].version, url_title=attrs['url_title']
).exists()
if chapter_exists:
error_msg = {'title': 'Chapter with this title already exists.'}
logger.warn('ChapterListCreateSerializer validate: {}'.format(error_msg))
raise serializers.ValidationError(error_msg)
return attrs
def create(self, validated_data):
chapter = super(ChapterListCreateSerializer, self).create(validated_data)
# create toc
book = self.context['view']._book
book_version = self.context['view']._book.version
weight = len(book_version.get_toc()) + 1
for itm in BookToc.objects.filter(version=book_version, book=book).order_by("-weight"):
itm.weight = weight
itm.save()
weight -= 1
toc_item = BookToc(
version=book_version, book=book, name=chapter.title, chapter=chapter, weight=1, typeof=1
)
toc_item.save()
# create chapter history
history = logChapterHistory(
chapter=chapter,
content=chapter.content,
user=self.context['request'].user,
comment="created via api",
revision=chapter.revision
)
# create book history
if history:
logBookHistory(
book=book,
version=book_version,
chapter=chapter,
chapter_history=history,
user=self.context['request'].user,
kind='chapter_create'
)
# TODO
# this is just playground
# we must create separate tool to push messages through the sputnik channel from API endpoints
# without having clientID in request
# message_info
channel_name = "/chat/{}/".format(book.id)
clnts = sputnik.smembers("sputnik:channel:{}:channel".format(channel_name))
message = {
'channel': channel_name,
"command": "message_info",
"from": self.context['request'].user.username,
"email": self.context['request'].user.email,
"message_id": "user_new_chapter",
"message_args": [self.context['request'].user.username, chapter.title]
}
for c in clnts:
if c.strip() != '':
sputnik.push("ses:%s:messages" % c, json.dumps(message))
# chapter_create
channel_name = "/booktype/book/{}/{}/".format(book.id, book_version.get_version())
clnts = sputnik.smembers("sputnik:channel:{}:channel".format(channel_name))
message = {
'channel': channel_name,
"command": "chapter_create",
"chapter": (chapter.id, chapter.title, chapter.url_title, 1, chapter.status.id,
chapter.lock_type, chapter.lock_username, "root", toc_item.id, "normal", None)
}
for c in clnts:
if c.strip() != '':
sputnik.push("ses:%s:messages" % c, json.dumps(message))
        # notification message
message = {
'channel': channel_name,
'command': 'notification',
'message': 'notification_chapter_was_created',
'username': self.context['request'].user.username,
'message_args': (chapter.title,)
}
for c in clnts:
if c.strip() != '':
sputnik.push("ses:%s:messages" % c, json.dumps(message))
return chapter
class ChapterRetrieveUpdateDestroySerializer(serializers.ModelSerializer):
class Meta:
model = Chapter
fields = '__all__'
read_only_fields = ('version', 'book', 'revision', 'url_title', 'created', 'modified')
def validate_status(self, status):
        if self.context['view']._book.id != status.book.id:
error_msg = 'Wrong status id. Options are {}'.format(
[i['id'] for i in BookStatus.objects.filter(book=self.context['view']._book).values('id')]
)
logger.warn('ChapterRetrieveUpdateDestroySerializer validate_status: {}'.format(error_msg))
raise serializers.ValidationError(error_msg)
return status
def validate_content_json(self, content_json):
try:
json.loads(content_json)
except ValueError as e:
error_msg = "Not valid json: {}".format(e)
logger.warn('ChapterRetrieveUpdateDestroySerializer validate_content_json: {}'.format(error_msg))
raise serializers.ValidationError(error_msg)
return content_json
class MetadataListCreateSerializer(serializers.ModelSerializer):
class Meta:
model = Info
fields = ('id', 'name', 'value_string')
def validate_name(self, name):
metadata_keys = set()
# metadata keys
for field, _, standard in METADATA_FIELDS:
metadata_keys.add('%s.%s' % (standard, field))
# additional metadata keys
for field, attrs in getattr(settings, 'ADDITIONAL_METADATA', {}).items():
metadata_keys.add('%s.%s' % (AdditionalMetadataForm.META_PREFIX, field))
if name not in metadata_keys:
raise serializers.ValidationError('Wrong metadata name. Options are: {}'.format(
', '.join(metadata_keys)
))
book = self.context['view']._book
if book.info_set.filter(name__exact=name).exists():
raise serializers.ValidationError('{} already exist. You can update or delete this metadata entry'.format(
name
))
return name
def validate(self, attrs):
_string = 0
attrs['kind'] = _string
attrs['book'] = self.context['view']._book
return attrs
class MetadataRetrieveUpdateDestroySerializer(serializers.ModelSerializer):
class Meta:
model = Info
fields = ('id', 'name', 'value_string')
def validate_name(self, name):
metadata_keys = set()
# metadata keys
for field, _, standard in METADATA_FIELDS:
metadata_keys.add('%s.%s' % (standard, field))
# additional metadata keys
for field, attrs in getattr(settings, 'ADDITIONAL_METADATA', {}).items():
metadata_keys.add('%s.%s' % (AdditionalMetadataForm.META_PREFIX, field))
if name not in metadata_keys:
raise serializers.ValidationError('Wrong metadata name. Options are: {}'.format(
', '.join(metadata_keys)
))
book = self.context['view']._book
if book.info_set.filter(name__exact=name).exclude(id=self.instance.id).exists():
raise serializers.ValidationError('{} already exist. You can update or delete this metadata entry'.format(
name
))
return name
def validate(self, attrs):
_string = 0
attrs['kind'] = _string
attrs['book'] = self.context['view']._book
return attrs
class BookUserListSerializer(serializers.ModelSerializer):
book_roles = serializers.SerializerMethodField()
profile_image_url = serializers.SerializerMethodField()
profile_url = serializers.SerializerMethodField()
class Meta:
model = User
fields = (
'id', 'url', 'username', 'email', 'first_name', 'last_name', 'profile_url',
'profile_image_url', 'get_full_name', 'book_roles'
)
def get_book_roles(self, obj):
book_roles = []
for role in obj.roles.filter(book=self.context['view']._book):
book_roles.append(SimpleBookRoleSerializer(role).data)
return book_roles
def get_profile_image_url(self, obj):
return account_utils.get_profile_image(obj)
def get_profile_url(self, obj):
return reverse('accounts:view_profile', args=[obj.username])
class BookAttachmentListSerializer(serializers.ModelSerializer):
attachment = serializers.SerializerMethodField()
thumbnail = serializers.SerializerMethodField()
size = serializers.SerializerMethodField()
dimension = serializers.SerializerMethodField()
class Meta:
model = Attachment
fields = (
'id', 'attachment', 'created', 'version', 'status', 'size',
'dimension', 'thumbnail'
)
def get_attachment(self, obj):
im_url = get_attachment_url(obj, os.path.split(obj.attachment.name)[1])
return im_url
def get_thumbnail(self, obj):
return obj.thumbnail()
def get_size(self, obj):
return obj.attachment.size
def get_dimension(self, obj):
try:
im = Image.open(obj.attachment.name)
return im.size
        except Exception:
            # Not an image (or the file is missing); no dimensions available
            return None
|
eos87/Booktype
|
lib/booktype/api/editor/serializers.py
|
Python
|
agpl-3.0
| 17,781
|
from itertools import combinations_with_replacement
class Main:
def __init__(self):
self.a, self.n = input().split()
def output(self):
for i in combinations_with_replacement(sorted(self.a), int(self.n)):
print(''.join(i))
if __name__ == '__main__':
obj = Main()
obj.output()
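# Example run (hypothetical input): "HACK 2" prints, one combination per line,
# AA AC AH AK CC CH CK HH HK KK -- combinations_with_replacement walks the
# sorted letters in lexicographic order.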
|
MrinmoiHossain/HackerRank
|
Python/Itertools/itertools.combinations_with_replacement().py
|
Python
|
mit
| 347
|
#!/usr/bin/python
# Copyright (c) 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import collections
import json
import optparse
import os
import shutil
import subprocess
import sys
import tempfile
import urllib2
SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
DOC_DIR = os.path.dirname(SCRIPT_DIR)
ChannelInfo = collections.namedtuple('ChannelInfo', ['branch', 'version'])
def Trace(msg):
if Trace.verbose:
sys.stderr.write(str(msg) + '\n')
Trace.verbose = False
def GetChannelInfo():
url = 'http://omahaproxy.appspot.com/json'
u = urllib2.urlopen(url)
try:
data = json.loads(u.read())
finally:
u.close()
channel_info = {}
for os_row in data:
osname = os_row['os']
if osname not in ('win', 'mac', 'linux'):
continue
for version_row in os_row['versions']:
channel = version_row['channel']
# We don't display canary docs.
if channel == 'canary':
continue
version = version_row['version'].split('.')[0] # Major version
branch = version_row['true_branch']
if branch is None:
branch = 'trunk'
if channel in channel_info:
existing_info = channel_info[channel]
if branch != existing_info.branch:
sys.stderr.write('Warning: found different branch numbers for '
'channel %s: %s vs %s. Using %s.\n' % (
channel, branch, existing_info.branch, existing_info.branch))
else:
channel_info[channel] = ChannelInfo(branch, version)
return channel_info
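# For reference, the omahaproxy JSON consumed above is shaped roughly like:
# [{"os": "win", "versions": [{"channel": "stable", "version": "35.0.1916.153",
#   "true_branch": "1916"}, ...]}, ...]; only the fields read here are assumed.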
def RemoveFile(filename):
if os.path.exists(filename):
os.remove(filename)
def RemoveDir(dirname):
if os.path.exists(dirname):
shutil.rmtree(dirname)
def GetSVNRepositoryRoot(branch):
if branch == 'trunk':
return 'http://src.chromium.org/chrome/trunk/src'
return 'http://src.chromium.org/chrome/branches/%s/src' % branch
def CheckoutPepperDocs(branch, doc_dirname):
Trace('Removing directory %s' % doc_dirname)
RemoveDir(doc_dirname)
svn_root_url = GetSVNRepositoryRoot(branch)
for subdir in ('api', 'generators', 'cpp', 'utility'):
url = svn_root_url + '/ppapi/%s' % subdir
cmd = ['svn', 'co', url, os.path.join(doc_dirname, subdir)]
Trace('Checking out docs into %s:\n %s' % (doc_dirname, ' '.join(cmd)))
subprocess.check_call(cmd)
# The IDL generator needs PLY (a python lexing library); check it out into
# generators.
url = svn_root_url + '/third_party/ply'
ply_dirname = os.path.join(doc_dirname, 'generators', 'ply')
cmd = ['svn', 'co', url, ply_dirname]
Trace('Checking out PLY into %s:\n %s' % (ply_dirname, ' '.join(cmd)))
subprocess.check_call(cmd)
def FixPepperDocLinks(doc_dirname):
# TODO(binji): We can remove this step when the correct links are in the
# stable branch.
Trace('Looking for links to fix in Pepper headers...')
for root, dirs, filenames in os.walk(doc_dirname):
# Don't recurse into .svn
if '.svn' in dirs:
dirs.remove('.svn')
for filename in filenames:
header_filename = os.path.join(root, filename)
Trace(' Checking file %r...' % header_filename)
replacements = {
'<a href="/native-client/{{pepperversion}}/devguide/coding/audio">':
'<a href="/native-client/devguide/coding/audio.html">',
'<a href="/native-client/devguide/coding/audio">':
'<a href="/native-client/devguide/coding/audio.html">',
'<a href="/native-client/{{pepperversion}}/pepperc/globals_defs"':
'<a href="globals_defs.html"',
'<a href="../pepperc/ppb__image__data_8h.html">':
'<a href="../c/ppb__image__data_8h.html">'}
with open(header_filename) as f:
lines = []
replaced = False
for line in f:
for find, replace in replacements.iteritems():
pos = line.find(find)
if pos != -1:
Trace(' Found %r...' % find)
replaced = True
line = line[:pos] + replace + line[pos + len(find):]
lines.append(line)
if replaced:
Trace(' Writing new file.')
with open(header_filename, 'w') as f:
f.writelines(lines)
def GenerateCHeaders(pepper_version, doc_dirname):
script = os.path.join(os.pardir, 'generators', 'generator.py')
cwd = os.path.join(doc_dirname, 'api')
out_dirname = os.path.join(os.pardir, 'c')
cmd = [sys.executable, script, '--cgen', '--release', 'M' + pepper_version,
'--wnone', '--dstroot', out_dirname]
Trace('Generating C Headers for version %s\n %s' % (
pepper_version, ' '.join(cmd)))
subprocess.check_call(cmd, cwd=cwd)
def GenerateDoxyfile(template_filename, out_dirname, doc_dirname, doxyfile):
Trace('Writing Doxyfile "%s" (from template %s)' % (
doxyfile, template_filename))
with open(template_filename) as f:
data = f.read()
with open(doxyfile, 'w') as f:
f.write(data % {
'out_dirname': out_dirname,
'doc_dirname': doc_dirname,
'script_dirname': SCRIPT_DIR})
def RunDoxygen(out_dirname, doxyfile):
Trace('Removing old output directory %s' % out_dirname)
RemoveDir(out_dirname)
Trace('Making new output directory %s' % out_dirname)
os.makedirs(out_dirname)
cmd = ['doxygen', doxyfile]
Trace('Running Doxygen:\n %s' % ' '.join(cmd))
subprocess.check_call(cmd)
def RunDoxyCleanup(out_dirname):
script = os.path.join(SCRIPT_DIR, 'doxy_cleanup.py')
cmd = [sys.executable, script, out_dirname]
if Trace.verbose:
cmd.append('-v')
Trace('Running doxy_cleanup:\n %s' % ' '.join(cmd))
subprocess.check_call(cmd)
def RunRstIndex(kind, channel, pepper_version, out_dirname, out_rst_filename):
assert kind in ('root', 'c', 'cpp')
script = os.path.join(SCRIPT_DIR, 'rst_index.py')
cmd = [sys.executable, script,
'--' + kind,
'--channel', channel,
'--version', pepper_version,
out_dirname,
'-o', out_rst_filename]
Trace('Running rst_index:\n %s' % ' '.join(cmd))
subprocess.check_call(cmd)
def GenerateDocs(root_dirname, channel, pepper_version, branch):
Trace('Generating docs for %s (branch %s)' % (channel, branch))
pepper_dirname = 'pepper_%s' % channel
out_dirname = os.path.join(root_dirname, pepper_dirname)
  svn_dirname = tempfile.mkdtemp(prefix=pepper_dirname)
  doxyfile_dirname = tempfile.mkdtemp(prefix='%s_doxyfiles' % pepper_dirname)
  try:
CheckoutPepperDocs(branch, svn_dirname)
FixPepperDocLinks(svn_dirname)
GenerateCHeaders(pepper_version, svn_dirname)
doxyfile_c = ''
doxyfile_cpp = ''
# Generate Root index
rst_index_root = os.path.join(DOC_DIR, pepper_dirname, 'index.rst')
RunRstIndex('root', channel, pepper_version, out_dirname, rst_index_root)
# Generate C docs
out_dirname_c = os.path.join(out_dirname, 'c')
doxyfile_c = os.path.join(doxyfile_dirname, 'Doxyfile.c.%s' % channel)
doxyfile_c_template = os.path.join(SCRIPT_DIR, 'Doxyfile.c.template')
rst_index_c = os.path.join(DOC_DIR, pepper_dirname, 'c', 'index.rst')
GenerateDoxyfile(doxyfile_c_template, out_dirname_c, svn_dirname,
doxyfile_c)
RunDoxygen(out_dirname_c, doxyfile_c)
RunDoxyCleanup(out_dirname_c)
RunRstIndex('c', channel, pepper_version, out_dirname_c, rst_index_c)
# Generate C++ docs
out_dirname_cpp = os.path.join(out_dirname, 'cpp')
doxyfile_cpp = os.path.join(doxyfile_dirname, 'Doxyfile.cpp.%s' % channel)
doxyfile_cpp_template = os.path.join(SCRIPT_DIR, 'Doxyfile.cpp.template')
rst_index_cpp = os.path.join(DOC_DIR, pepper_dirname, 'cpp', 'index.rst')
GenerateDoxyfile(doxyfile_cpp_template, out_dirname_cpp, svn_dirname,
doxyfile_cpp)
RunDoxygen(out_dirname_cpp, doxyfile_cpp)
RunDoxyCleanup(out_dirname_cpp)
RunRstIndex('cpp', channel, pepper_version, out_dirname_cpp, rst_index_cpp)
finally:
# Cleanup
RemoveDir(svn_dirname)
RemoveDir(doxyfile_dirname)
def main(argv):
parser = optparse.OptionParser(usage='Usage: %prog [options] <out_directory>')
parser.add_option('-v', '--verbose',
help='Verbose output', action='store_true')
options, dirs = parser.parse_args(argv)
if options.verbose:
Trace.verbose = True
if len(dirs) != 1:
parser.error('Expected an output directory')
channel_info = GetChannelInfo()
for channel, info in channel_info.iteritems():
GenerateDocs(dirs[0], channel, info.version, info.branch)
return 0
if __name__ == '__main__':
try:
rtn = main(sys.argv[1:])
except KeyboardInterrupt:
sys.stderr.write('%s: interrupted\n' % os.path.basename(__file__))
rtn = 1
sys.exit(rtn)
|
7kbird/chrome
|
native_client_sdk/src/doc/doxygen/generate_docs.py
|
Python
|
bsd-3-clause
| 8,799
|
from django.conf import settings
from django.db import migrations, models
from opaque_keys.edx.django.models import CourseKeyField
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Permission',
fields=[
('name', models.CharField(max_length=30, serialize=False, primary_key=True)),
],
options={
'db_table': 'django_comment_client_permission',
},
),
migrations.CreateModel(
name='Role',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(max_length=30)),
('course_id', CourseKeyField(db_index=True, max_length=255, blank=True)),
('users', models.ManyToManyField(related_name='roles', to=settings.AUTH_USER_MODEL)),
],
options={
'db_table': 'django_comment_client_role',
},
),
migrations.AddField(
model_name='permission',
name='roles',
field=models.ManyToManyField(related_name='permissions', to='django_comment_common.Role'),
),
]
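    # Sketch of the schema in use (hypothetical instances): role.users.add(user)
    # populates the django_comment_client_role M2M table, and role.permissions
    # is the reverse accessor created by the AddField above.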
|
edx/edx-platform
|
openedx/core/djangoapps/django_comment_common/migrations/0001_initial.py
|
Python
|
agpl-3.0
| 1,369
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function, unicode_literals
import utool as ut
import numpy as np
import vtool as vt
from six.moves import zip, map, range
from scipy.spatial import distance
import scipy.cluster.hierarchy
import sklearn.cluster
(print, rrr, profile) = ut.inject2(__name__, '[preproc_occurrence]')
def ibeis_compute_occurrences(ibs, gid_list, config=None, verbose=None):
"""
    clusters occurrences together (by time, not yet space)
An occurrence is a meeting, localized in time and space between a camera
and a group of animals.
Animals are identified within each occurrence.
Does not modify database state, just returns cluster ids
Args:
ibs (IBEISController): ibeis controller object
gid_list (list):
Returns:
tuple: (None, None)
CommandLine:
python -m ibeis --tf ibeis_compute_occurrences:0 --show
TODO: FIXME: good example of autogen doctest return failure
"""
if config is None:
config = {'use_gps': False, 'seconds_thresh': 600}
#from ibeis.algo import Config
#config = Config.OccurrenceConfig().asdict()
occur_labels, occur_gids = compute_occurrence_groups(ibs, gid_list, config,
verbose=verbose)
if True:
gid2_label = {gid: label for label, gids in zip(occur_labels, occur_gids)
for gid in gids}
# Assert that each gid only belongs to one occurrence
flat_imgsetids = ut.dict_take(gid2_label, gid_list)
flat_gids = gid_list
else:
        # Flatten gids list by encounter
flat_imgsetids, flat_gids = ut.flatten_membership_mapping(occur_labels, occur_gids)
return flat_imgsetids, flat_gids
def compute_occurrence_groups(ibs, gid_list, config={}, use_gps=False,
verbose=None):
r"""
Args:
ibs (IBEISController): ibeis controller object
gid_list (list):
Returns:
tuple: (None, None)
CommandLine:
python -m ibeis compute_occurrence_groups
Example:
>>> # DISABLE_DOCTEST
>>> from ibeis.algo.preproc.preproc_occurrence import * # NOQA
>>> import ibeis
>>> ibs = ibeis.opendb(defaultdb='testdb1')
>>> verbose = True
>>> images = ibs.images()
>>> gid_list = images.gids
>>> config = {} # ibeis.algo.Config.OccurrenceConfig().asdict()
        >>> tup = ibeis_compute_occurrences(ibs, gid_list)
        >>> (flat_imgsetids, flat_gids) = tup
>>> aids_list = list(ut.group_items(aid_list_, flat_imgsetids).values())
>>> metric = list(map(len, aids_list))
>>> sortx = ut.list_argsort(metric)[::-1]
>>> index = sortx[1]
>>> aids = aids_list[index]
>>> gids = list(set(ibs.get_annot_gids(aids)))
"""
if verbose is None:
verbose = ut.NOT_QUIET
# Config info
gid_list = np.unique(gid_list)
if verbose:
print('[occur] Computing occurrences on %r images.' % (len(gid_list)))
print('[occur] config = ' + ut.repr3(config))
    use_gps = config.get('use_gps', use_gps)
datas = prepare_X_data(ibs, gid_list, use_gps=use_gps)
from ibeis.algo.preproc import occurrence_blackbox
cluster_algo = config.get('cluster_algo', 'agglomerative')
km_per_sec = config.get('km_per_sec', occurrence_blackbox.KM_PER_SEC)
thresh_sec = config.get('seconds_thresh', 30 * 60.0)
min_imgs_per_occurence = config.get('min_imgs_per_occurence', 1)
# 30 minutes = 3.6 kilometers
# 5 minutes = 0.6 kilometers
assert cluster_algo == 'agglomerative', 'only agglomerative is supported'
# Group datas with different values separately
all_gids = []
all_labels = []
for key in datas.keys():
val = datas[key]
        gids, posixtimes, latlons = val  # order matches prepare_X_data's tuples
labels = occurrence_blackbox.cluster_timespace_sec(
latlons, posixtimes, thresh_sec, km_per_sec=km_per_sec)
if labels is None:
labels = np.zeros(len(gids), dtype=np.int)
all_gids.append(gids)
all_labels.append(labels)
# Combine labels across different groups
pads = [vt.safe_max(ys, fill=0) + 1 for ys in all_labels]
offsets = np.array([0] + pads[:-1]).cumsum()
all_labels_ = [ys + offset for ys, offset in zip(all_labels, offsets)]
label_arr = np.array(ut.flatten(all_labels_))
gid_arr = np.array(ut.flatten(all_gids))
# Group images by unique label
labels, label_gids = group_images_by_label(label_arr, gid_arr)
# Remove occurrences less than the threshold
occur_labels = labels
occur_gids = label_gids
occur_unixtimes = compute_occurrence_unixtime(ibs, occur_gids)
occur_labels, occur_gids = filter_and_relabel(
labels, label_gids, min_imgs_per_occurence, occur_unixtimes)
if verbose:
print('[occur] Found %d clusters.' % len(occur_labels))
if len(label_gids) > 0 and verbose:
print('[occur] Cluster image size stats:')
ut.print_dict(
ut.get_stats(list(map(len, occur_gids)), use_median=True,
use_sum=True),
'occur image stats')
return occur_labels, occur_gids
def compute_occurrence_unixtime(ibs, occur_gids):
#assert isinstance(ibs, IBEISController)
# TODO: account for -1
from ibeis.other import ibsfuncs
unixtimes = ibsfuncs.unflat_map(ibs.get_image_unixtime, occur_gids)
time_arrs = list(map(np.array, unixtimes))
occur_unixtimes = list(map(np.mean, time_arrs))
return occur_unixtimes
def _compute_occurrence_datetime(ibs, occur_gids):
#assert isinstance(ibs, IBEISController)
#from ibeis.other import ibsfuncs
occur_unixtimes = compute_occurrence_unixtime(ibs, occur_gids)
occur_datetimes = list(map(ut.unixtime_to_datetimestr, occur_unixtimes))
return occur_datetimes
def prepare_X_data(ibs, gid_list, use_gps=True):
"""
Splits data into groups with/without gps and time
Example:
>>> # ENABLE_DOCTEST
>>> from ibeis.algo.preproc.preproc_occurrence import * # NOQA
>>> import ibeis
>>> ibs = ibeis.opendb(defaultdb='testdb1')
>>> images = ibs.images()
>>> # ibeis.control.accessor_decors.DEBUG_GETTERS = True
>>> use_gps = True
>>> gid_list = images.gids
>>> datas = prepare_X_data(ibs, gid_list, use_gps)
>>> print(ut.repr2(datas, nl=2, precision=2))
>>> assert len(datas['both'][0]) == 12
>>> assert len(datas['neither'][0]) == 0
"""
images = ibs.images(gid_list, caching=True)
gps_list_ = images.gps2
unixtime_list_ = images.unixtime2
has_gps = np.all(np.logical_not(np.isnan(gps_list_)), axis=1)
has_time = np.logical_not(np.isnan(unixtime_list_))
if not use_gps:
has_gps[:] = False
has_both = np.logical_and(has_time, has_gps)
has_either = np.logical_or(has_time, has_gps)
has_gps_only = np.logical_and(has_gps, np.logical_not(has_both))
has_time_only = np.logical_and(has_time, np.logical_not(has_both))
has_neither = np.logical_not(has_either)
both = images.compress(has_both)
xgps = images.compress(has_gps_only)
xtime = images.compress(has_time_only)
neither = images.compress(has_neither)
    # Group images with different attributes separately
datas = {
'both' : (both.gids, both.unixtime2, both.gps2),
'gps_only' : (xgps.gids, None, xgps.gps2),
'time_only' : (xtime.gids, xtime.unixtime2, None),
'neither' : (neither.gids, None, None),
}
return datas
def agglomerative_cluster_occurrences(X_data, thresh_sec):
"""
Agglomerative occurrence clustering algorithm
Args:
X_data (ndarray): Length N array of data to cluster
thresh_sec (float):
Returns:
ndarray: (label_arr) - Length N array of cluster indexes
CommandLine:
python -m ibeis.algo.preproc.preproc_occurrence --exec-agglomerative_cluster_occurrences
References:
https://docs.scipy.org/doc/scipy-0.9.0/reference/generated/scipy.cluster.hierarchy.fclusterdata.html#scipy.cluster.hierarchy.fclusterdata
http://docs.scipy.org/doc/scipy-0.14.0/reference/generated/scipy.cluster.hierarchy.fcluster.html
Example:
>>> # DISABLE_DOCTEST
>>> from ibeis.algo.preproc.preproc_occurrence import * # NOQA
>>> X_data = '?'
>>> thresh_sec = '?'
>>> (occur_ids, occur_gids) = agglomerative_cluster_occurrences(X_data, thresh_sec)
>>> result = ('(occur_ids, occur_gids) = %s' % (str((occur_ids, occur_gids)),))
>>> print(result)
"""
label_arr = scipy.cluster.hierarchy.fclusterdata(
X_data, thresh_sec, criterion='distance')
return label_arr
def meanshift_cluster_occurrences(X_data, quantile):
""" Meanshift occurrence clustering algorithm
Args:
X_data (ndarray): Length N array of data to cluster
quantile (float): quantile should be between [0, 1].
eg: quantile=.5 represents the median of all pairwise distances
Returns:
ndarray : Length N array of labels
CommandLine:
python -m ibeis.algo.preproc.preproc_occurrence --exec-meanshift_cluster_occurrences
Example:
>>> # DISABLE_DOCTEST
>>> from ibeis.algo.preproc.preproc_occurrence import * # NOQA
>>> X_data = '?'
>>> quantile = '?'
>>> result = meanshift_cluster_occurrences(X_data, quantile)
>>> print(result)
"""
try:
bandwidth = sklearn.cluster.estimate_bandwidth(X_data, quantile=quantile, n_samples=500)
assert bandwidth != 0, ('[occur] bandwidth is 0. Cannot cluster')
# bandwidth is with respect to the RBF used in clustering
#ms = sklearn.cluster.MeanShift(bandwidth=bandwidth, bin_seeding=True, cluster_all=True)
ms = sklearn.cluster.MeanShift(bandwidth=bandwidth, bin_seeding=True, cluster_all=False)
ms.fit(X_data)
label_arr = ms.labels_
unique_labels = np.unique(label_arr)
max_label = max(0, unique_labels.max())
num_orphans = (label_arr == -1).sum()
label_arr[label_arr == -1] = np.arange(max_label + 1, max_label + 1 + num_orphans)
except Exception as ex:
ut.printex(ex, 'error computing meanshift',
key_list=['X_data', 'quantile'],
iswarning=True)
# Fallback to all from same occurrence
label_arr = np.zeros(X_data.size)
return label_arr
def group_images_by_label(label_arr, gid_arr):
"""
Input: Length N list of labels and ids
    Output: Length M list of unique labels, and length M list of lists of ids
"""
# Reverse the image to cluster index mapping
import vtool as vt
labels_, groupxs_ = vt.group_indices(label_arr)
sortx = np.array(list(map(len, groupxs_))).argsort()[::-1]
labels = labels_.take(sortx, axis=0)
groupxs = ut.take(groupxs_, sortx)
label_gids = vt.apply_grouping(gid_arr, groupxs)
return labels, label_gids
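# Illustrative sketch (assuming vtool's group_indices semantics): with
# label_arr=[1, 1, 2, 2, 2] and gid_arr=[11, 12, 13, 14, 15], the largest
# cluster sorts first: labels=[2, 1], label_gids=[[13, 14, 15], [11, 12]].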
def filter_and_relabel(labels, label_gids, min_imgs_per_occurence, occur_unixtimes=None):
"""
Removes clusters with too few members.
Relabels clusters-labels such that label 0 has the most members
"""
label_nGids = np.array(list(map(len, label_gids)))
label_isvalid = label_nGids >= min_imgs_per_occurence
occur_gids = ut.compress(label_gids, label_isvalid)
    if occur_unixtimes is not None:
        occur_unixtimes = ut.compress(occur_unixtimes, label_isvalid)
        # Reorder occurrences so the oldest has the lowest label
        unixtime_arr = np.array(occur_unixtimes)
        occur_gids = ut.take(occur_gids, unixtime_arr.argsort())
    occur_ids = list(range(len(occur_gids)))
return occur_ids, occur_gids
def timespace_distance(pt1, pt2):
(sec1, lat1, lon1) = pt1
(sec2, lat2, lon2) = pt2
km_dist = vt.haversine((lat1, lon1), (lat2, lon2))
km_per_sec = .002 # conversion ratio for reasonable animal walking speed
#sec_dist = (((sec1 - sec2) * km_per_sec) ** 2)
sec_dist = np.abs(sec1 - sec2) * km_per_sec
timespace_dist = km_dist + sec_dist
return timespace_dist
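# Quick sanity check on the metric (assuming vt.haversine returns kilometers):
# two sightings 1 km and 600 sec apart score 1 + 600 * 0.002 = 2.2, so an hour
# of elapsed time weighs the same as 7.2 km of separation.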
def timespace_pdist(X_data):
if X_data.shape[1] == 3:
return distance.pdist(X_data, timespace_distance)
if X_data.shape[1] == 1:
        return distance.pdist(X_data, 'euclidean')
def cluster_timespace(X_data, thresh):
"""
References:
http://docs.scipy.org/doc/scipy-0.14.0/reference/generated/
scipy.cluster.hierarchy.linkage.html
CommandLine:
python -m ibeis.algo.preproc.preproc_occurrence cluster_timespace --show
Example:
>>> # DISABLE_DOCTEST
>>> from ibeis.algo.preproc.preproc_occurrence import * # NOQA
>>> X_data = testdata_gps()
>>> thresh = 10
>>> X_labels = cluster_timespace(X_data, thresh)
>>> fnum = pt.ensure_fnum(None)
>>> fig = pt.figure(fnum=fnum, doclf=True, docla=True)
>>> hier.dendrogram(linkage_mat, orientation='top')
>>> plot_annotaiton_gps(X_data)
>>> ut.show_if_requested()
"""
    condensed_dist_mat = distance.pdist(X_data, timespace_distance)
    # Compute hierarchical linkages
    linkage_mat = scipy.cluster.hierarchy.linkage(condensed_dist_mat,
                                                  method='centroid')
# Cluster linkages
X_labels = scipy.cluster.hierarchy.fcluster(linkage_mat, thresh,
criterion='inconsistent',
depth=2, R=None, monocrit=None)
return X_labels
def testdata_gps():
r"""
Simple data to test GPS algorithm.
Returns:
X_name (ndarray): Nx1 matrix denoting groundtruth locations
X_data (ndarray): Nx3 matrix where each columns are (time, lat, lon)
"""
lon = np.array([4.54, 104.0, -14.9, 56.26, 103.46, 103.37, 54.22, 23.3,
25.53, 23.31, 118.0, 103.53, 54.40, 103.48, 6.14, 7.25,
2.38, 18.18, 103.54, 103.40, 28.59, 25.21, 29.35, 25.20, ])
lat = np.array([52.22, 1.14, 27.34, 25.16, 1.16, 1.11, 24.30, 37.54, 37.26,
38.1, 24.25, 1.13, 24.49, 1.13, 42.33, 43.44, 39.34, 70.30,
1.16, 1.10, 40.58, 37.34, 41.18, 38.35, ])
time = np.zeros(len(lon))
X_data = np.vstack((time, lat, lon)).T
X_name = np.array([0, 1, 2, 2, 2, 2, 3, 3, 3]) # NOQA
X_data = np.array([
(0, 42.727985, -73.683994), # MRC
(0, 42.657872, -73.764148), # Home
(0, 42.657414, -73.774448), # Park1
(0, 42.658333, -73.770993), # Park2
(0, 42.654384, -73.768919), # Park3
(0, 42.655039, -73.769048), # Park4
(0, 42.876974, -73.819311), # CP1
(0, 42.862946, -73.804977), # CP2
(0, 42.849809, -73.758486), # CP3
])
return X_name, X_data
def plot_gps_html(gps_list):
""" Plots gps coordinates on a map projection
InstallBasemap:
sudo apt-get install libgeos-dev
pip install git+https://github.com/matplotlib/basemap
http://matplotlib.org/basemap/users/examples.html
pip install gmplot
sudo apt-get install netcdf-bin
sudo apt-get install libnetcdf-dev
pip install netCDF4
Ignore:
pip install git+git://github.com/myuser/foo.git@v123
Example:
>>> from ibeis.algo.preproc.preproc_occurrence import * # NOQA
>>> import ibeis
>>> ibs = ibeis.opendb(defaultdb='testdb1')
>>> images = ibs.images()
>>> # Setup GPS points to draw
>>> print('Setup GPS points')
>>> gps_list_ = np.array(images.gps2)
>>> unixtime_list_ = np.array(images.unixtime2)
>>> has_gps = np.all(np.logical_not(np.isnan(gps_list_)), axis=1)
>>> has_unixtime = np.logical_not(np.isnan(unixtime_list_))
>>> isvalid = np.logical_and(has_gps, has_unixtime)
>>> gps_list = gps_list_.compress(isvalid, axis=0)
>>> unixtime_list = unixtime_list_.compress(isvalid) # NOQA
        >>> plot_gps_html(gps_list)
"""
import plottool as pt
import gmplot
import matplotlib as mpl
import vtool as vt
pt.qt4ensure()
lat = gps_list.T[0]
lon = gps_list.T[1]
# Get extent of
bbox = vt.bbox_from_verts(gps_list)
centerx, centery = vt.bbox_center(bbox)
gmap = gmplot.GoogleMapPlotter(centerx, centery, 13)
color = mpl.colors.rgb2hex(pt.ORANGE)
gmap.scatter(lat, lon, color=color, size=100, marker=False)
gmap.draw("mymap.html")
ut.startfile('mymap.html')
## Scale
#bbox = vt.scale_bbox(bbox, 10.0)
#extent = vt.extent_from_bbox(bbox)
#basemap_extent = dict(llcrnrlon=extent[2], urcrnrlon=extent[3],
# llcrnrlat=extent[0], urcrnrlat=extent[1])
## Whole globe
##basemap_extent = dict(llcrnrlon=0, llcrnrlat=-80,
## urcrnrlon=360, urcrnrlat=80)
#from mpl_toolkits.basemap import Basemap
#from matplotlib.colors import LightSource # NOQA
#from mpl_toolkits.basemap import shiftgrid, cm # NOQA
#from netCDF4 import Dataset
## Read information to make background pretty
#print('Grab topo information')
#etopodata = Dataset('http://ferret.pmel.noaa.gov/thredds/dodsC/data/PMEL/etopo5.nc')
#print('Read topo information')
#topoin = etopodata.variables['ROSE'][:]
#lons = etopodata.variables['ETOPO05_X'][:]
#lats = etopodata.variables['ETOPO05_Y'][:]
## shift data so lons go from -180 to 180 instead of 20 to 380.
#print('Shift data')
#topoin, lons = shiftgrid(180., topoin, lons, start=False)
#print('Make figure')
#fnum = pt.ensure_fnum(None)
#fig = pt.figure(fnum=fnum, doclf=True, docla=True) # NOQA
#print('Draw projection')
#m = Basemap(projection='mill', **basemap_extent)
## setup Lambert Conformal basemap.
##m = Basemap(projection='cea',resolution='h', **basemap_extent)
## transform to nx x ny regularly spaced 5km native projection grid
#print('projection grid')
#nx = int((m.xmax - m.xmin) / 5000.) + 1
#ny = int((m.ymax - m.ymin) / 5000.) + 1
#topodat = m.transform_scalar(topoin, lons, lats, nx, ny)
## plot image over map with imshow.
#im = m.imshow(topodat, cm.GMT_haxby) # NOQA
## draw coastlines and political boundaries.
#m.drawcoastlines()
#m.drawcountries()
#m.drawstates()
# transform to nx x ny regularly spaced 5km native projection grid
#ls = LightSource(azdeg=90, altdeg=20)
#rgb = ls.shade(topodat, cm.GMT_haxby)
#im = m.imshow(rgb)
# draw coastlines and political boundaries.
#m.drawcoastlines()
#m.drawcountries()
#m.drawstates()
# draw a boundary around the map, fill the background.
# this background will end up being the ocean color, since
# the continents will be drawn on top.
#m.bluemarble()
#m.drawmapboundary(fill_color='aqua')
#m.fillcontinents(color='coral', lake_color='aqua')
# Convert GPS to projected coordinates
#x1, y1 = m(lon, lat) # convert to meters # lon==X, lat==Y
#m.plot(x1, y1, '*', markersize=10)
#fig.zoom_fac = pt.zoom_factory()
#fig.pan_fac = pt.pan_factory()
#fig.show()
if __name__ == '__main__':
"""
python -m ibeis.algo.preproc.preproc_occurrence
python -m ibeis.algo.preproc.preproc_occurrence --allexamples
"""
import utool as ut
import multiprocessing
multiprocessing.freeze_support()
ut.doctest_funcs()
|
SU-ECE-17-7/ibeis
|
ibeis/algo/preproc/preproc_occurrence.py
|
Python
|
apache-2.0
| 19,943
|
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'About.ui'
#
# Created: Sun Aug 16 22:14:37 2015
# by: PyQt4 UI code generator 4.11.3
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
try:
_fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
def _fromUtf8(s):
return s
try:
_encoding = QtGui.QApplication.UnicodeUTF8
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig, _encoding)
except AttributeError:
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig)
class Ui_About(object):
def setupUi(self, About):
About.setObjectName(_fromUtf8("About"))
About.resize(250, 130)
About.setMinimumSize(QtCore.QSize(250, 130))
About.setMaximumSize(QtCore.QSize(250, 130))
self.verticalLayout = QtGui.QVBoxLayout(About)
self.verticalLayout.setObjectName(_fromUtf8("verticalLayout"))
self.lblHeadline = QtGui.QLabel(About)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Preferred, QtGui.QSizePolicy.Maximum)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.lblHeadline.sizePolicy().hasHeightForWidth())
self.lblHeadline.setSizePolicy(sizePolicy)
self.lblHeadline.setAlignment(QtCore.Qt.AlignCenter)
self.lblHeadline.setWordWrap(True)
self.lblHeadline.setObjectName(_fromUtf8("lblHeadline"))
self.verticalLayout.addWidget(self.lblHeadline)
self.lblSummary = QtGui.QLabel(About)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Preferred, QtGui.QSizePolicy.Maximum)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.lblSummary.sizePolicy().hasHeightForWidth())
self.lblSummary.setSizePolicy(sizePolicy)
self.lblSummary.setScaledContents(False)
self.lblSummary.setAlignment(QtCore.Qt.AlignCenter)
self.lblSummary.setObjectName(_fromUtf8("lblSummary"))
self.verticalLayout.addWidget(self.lblSummary)
self.lblInfo = QtGui.QLabel(About)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Preferred, QtGui.QSizePolicy.Maximum)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.lblInfo.sizePolicy().hasHeightForWidth())
self.lblInfo.setSizePolicy(sizePolicy)
self.lblInfo.setScaledContents(False)
self.lblInfo.setAlignment(QtCore.Qt.AlignCenter)
self.lblInfo.setObjectName(_fromUtf8("lblInfo"))
self.verticalLayout.addWidget(self.lblInfo)
self.retranslateUi(About)
QtCore.QMetaObject.connectSlotsByName(About)
def retranslateUi(self, About):
About.setWindowTitle(_translate("About", "About", None))
self.lblHeadline.setText(_translate("About", "Mouse Autoclicker v1.1", None))
self.lblSummary.setText(_translate("About", "A tool to automatize mouse clicks.", None))
self.lblInfo.setText(_translate("About", "Author: Kari Vatjus-Anttila\n"
"Email: kari.vatjusanttila@gmail.com", None))
import resources_rc
|
karivatj/MouseAutoClicker
|
src/AboutUI.py
|
Python
|
gpl-2.0
| 3,313
|
#! /usr/bin/env python3
# -*- coding: utf-8 -*-
# vim:fenc=utf-8
#
# Copyright © 2016 Martine Lenders <mail@martine-lenders.eu>
#
# Distributed under terms of the MIT license.
from __future__ import print_function
import os
import sys
import random
import subprocess
import time
import types
import pexpect
DEFAULT_TIMEOUT = 5
class Strategy(object):
def __init__(self, func=None):
if func is not None:
if sys.version_info < (3,):
self.__class__.execute = types.MethodType(func, self, self.__class__)
else:
self.__class__.execute = types.MethodType(func, self)
def execute(self, *args, **kwargs):
raise NotImplementedError()
class ApplicationStrategy(Strategy):
def __init__(self, app_dir=os.getcwd(), func=None):
super(ApplicationStrategy, self).__init__(func)
self.app_dir = app_dir
class BoardStrategy(Strategy):
def __init__(self, board, func=None):
super(BoardStrategy, self).__init__(func)
self.board = board
    def _run_make(self, application, make_targets, env=None):
        # Single leading underscore: the old double-underscore name was
        # mangled and therefore unreachable from the subclasses below.
        make_env = os.environ.copy()
        if env is not None:
            make_env.update(env)
        make_env.update(self.board.to_env())
        cmd = ("make", "-C", application) + make_targets
        print(' '.join(cmd))
        print(subprocess.check_output(cmd, env=make_env))
    def execute(self, application):
        super(BoardStrategy, self).execute(application)
class CleanStrategy(BoardStrategy):
    def execute(self, application, env=None):
        self._run_make(application, ("-B", "clean"), env)
class BuildStrategy(BoardStrategy):
    def execute(self, application, env=None):
        self._run_make(application, ("all",), env)
class FlashStrategy(BoardStrategy):
    def execute(self, application, env=None):
        # "flash" actually programs the board; "all" only built the firmware
        self._run_make(application, ("flash",), env)
class ResetStrategy(BoardStrategy):
    def execute(self, application, env=None):
        self._run_make(application, ("reset",), env)
class Board(object):
def __init__(self, name, port=None, serial=None, clean=None,
build=None, flash=None,
reset=None, term=None):
def _reset_native_execute(obj, application, env=None, *args, **kwargs):
pass
if (name == "native") and (reset is None):
reset = _reset_native_execute
self.name = name
self.port = port
self.serial = serial
self.clean_strategy = CleanStrategy(self, clean)
self.build_strategy = BuildStrategy(self, build)
self.flash_strategy = FlashStrategy(self, flash)
self.reset_strategy = ResetStrategy(self, reset)
def __len__(self):
return 1
def __iter__(self):
return self
    def __next__(self):  # Python 3 iterator protocol
        raise StopIteration()
    next = __next__  # Python 2 fallback
def __repr__(self):
return ("<Board %s,port=%s,serial=%s>" %
(repr(self.name), repr(self.port), repr(self.serial)))
def to_env(self):
env = {}
if self.name:
env['BOARD'] = self.name
if self.port:
env['PORT'] = self.port
if self.serial:
env['SERIAL'] = self.serial
return env
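    # For example (hypothetical board): Board("samr21-xpro",
    # port="/dev/ttyACM0").to_env() gives {'BOARD': 'samr21-xpro',
    # 'PORT': '/dev/ttyACM0'}; unset fields are simply omitted.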
def clean(self, application=os.getcwd(), env=None):
        self.clean_strategy.execute(application, env)
def build(self, application=os.getcwd(), env=None):
self.build_strategy.execute(application, env)
def flash(self, application=os.getcwd(), env=None):
self.flash_strategy.execute(application, env)
def reset(self, application=os.getcwd(), env=None):
self.reset_strategy.execute(application, env)
class BoardGroup(object):
def __init__(self, boards):
self.boards = boards
def __len__(self):
return len(self.boards)
def __iter__(self):
return iter(self.boards)
def __repr__(self):
return str(self.boards)
def clean(self, application=os.getcwd(), env=None):
for board in self.boards:
board.clean(application, env)
def build(self, application=os.getcwd(), env=None):
for board in self.boards:
board.build(application, env)
def flash(self, application=os.getcwd(), env=None):
for board in self.boards:
board.flash(application, env)
def reset(self, application=os.getcwd(), env=None):
for board in self.boards:
board.reset(application, env)
def default_test_case(board_group, application, env=None):
for board in board_group:
        run_env = os.environ.copy()
        if env is not None:
            run_env.update(env)
        run_env.update(board.to_env())
        with pexpect.spawnu("make", ["-C", application, "term"], env=run_env,
timeout=DEFAULT_TIMEOUT,
logfile=sys.stdout) as spawn:
spawn.expect("TEST: SUCCESS")
class TestStrategy(ApplicationStrategy):
def execute(self, board_groups, test_cases=[default_test_case],
timeout=DEFAULT_TIMEOUT, env=None):
for board_group in board_groups:
print("Testing for %s: " % board_group)
for test_case in test_cases:
board_group.reset()
                test_case(board_group, self.app_dir, env=env)
sys.stdout.write('.')
sys.stdout.flush()
print()
def get_ipv6_address(spawn):
spawn.sendline(u"ifconfig")
spawn.expect(u"[A-Za-z0-9]{2}_[0-9]+: inet6 (fe80::[0-9a-f:]+)")
return spawn.match.group(1)
def test_ipv6_send(board_group, application, env=None):
env_sender = os.environ.copy()
if env is not None:
env_sender.update(env)
env_sender.update(board_group.boards[0].to_env())
env_receiver = os.environ.copy()
if env is not None:
env_receiver.update(env)
env_receiver.update(board_group.boards[1].to_env())
with pexpect.spawnu("make", ["-C", application, "term"], env=env_sender,
timeout=DEFAULT_TIMEOUT) as sender, \
pexpect.spawnu("make", ["-C", application, "term"], env=env_receiver,
timeout=DEFAULT_TIMEOUT) as receiver:
ipprot = random.randint(0x00, 0xff)
receiver_ip = get_ipv6_address(receiver)
receiver.sendline(u"ip server start %d" % ipprot)
# wait for neighbor discovery to be done
time.sleep(5)
sender.sendline(u"ip send %s %d 01:23:45:67:89:ab:cd:ef" % (receiver_ip, ipprot))
sender.expect_exact(u"Success: send 8 byte over IPv6 to %s (next header: %d)" %
(receiver_ip, ipprot))
receiver.expect(u"00000000 01 23 45 67 89 AB CD EF")
def test_udpv6_send(board_group, application, env=None):
env_sender = os.environ.copy()
if env is not None:
env_sender.update(env)
env_sender.update(board_group.boards[0].to_env())
env_receiver = os.environ.copy()
if env is not None:
env_receiver.update(env)
env_receiver.update(board_group.boards[1].to_env())
with pexpect.spawnu("make", ["-C", application, "term"], env=env_sender,
timeout=DEFAULT_TIMEOUT) as sender, \
pexpect.spawnu("make", ["-C", application, "term"], env=env_receiver,
timeout=DEFAULT_TIMEOUT) as receiver:
port = random.randint(0x0000, 0xffff)
receiver_ip = get_ipv6_address(receiver)
receiver.sendline(u"udp server start %d" % port)
# wait for neighbor discovery to be done
time.sleep(5)
sender.sendline(u"udp send %s %d ab:cd:ef" % (receiver_ip, port))
sender.expect_exact(u"Success: send 3 byte over UDP to [%s]:%d" %
(receiver_ip, port))
receiver.expect(u"00000000 AB CD EF")
def test_tcpv6_send(board_group, application, env=None):
env_client = os.environ.copy()
if env is not None:
env_client.update(env)
env_client.update(board_group.boards[0].to_env())
env_server = os.environ.copy()
if env is not None:
env_server.update(env)
env_server.update(board_group.boards[1].to_env())
with pexpect.spawnu("make", ["-C", application, "term"], env=env_client,
timeout=DEFAULT_TIMEOUT) as client, \
pexpect.spawnu("make", ["-C", application, "term"], env=env_server,
timeout=DEFAULT_TIMEOUT) as server:
port = random.randint(0x0000, 0xffff)
server_ip = get_ipv6_address(server)
client_ip = get_ipv6_address(client)
server.sendline(u"tcp server start %d" % port)
# wait for neighbor discovery to be done
time.sleep(5)
client.sendline(u"tcp connect %s %d" % (server_ip, port))
server.expect(u"TCP client \\[%s\\]:[0-9]+ connected" % client_ip)
client.sendline(u"tcp send affe:abe")
client.expect_exact(u"Success: send 4 byte over TCP to server")
server.expect(u"00000000 AF FE AB E0")
client.sendline(u"tcp disconnect")
client.sendline(u"tcp send affe:abe")
client.expect_exact(u"could not send")
def test_triple_send(board_group, application, env=None):
env_sender = os.environ.copy()
if env is not None:
env_sender.update(env)
env_sender.update(board_group.boards[0].to_env())
env_receiver = os.environ.copy()
if env is not None:
env_receiver.update(env)
env_receiver.update(board_group.boards[1].to_env())
with pexpect.spawnu("make", ["-C", application, "term"], env=env_sender,
timeout=DEFAULT_TIMEOUT) as sender, \
pexpect.spawnu("make", ["-C", application, "term"], env=env_receiver,
timeout=DEFAULT_TIMEOUT) as receiver:
udp_port = random.randint(0x0000, 0xffff)
tcp_port = random.randint(0x0000, 0xffff)
ipprot = random.randint(0x00, 0xff)
receiver_ip = get_ipv6_address(receiver)
sender_ip = get_ipv6_address(sender)
receiver.sendline(u"ip server start %d" % ipprot)
receiver.sendline(u"udp server start %d" % udp_port)
receiver.sendline(u"tcp server start %d" % tcp_port)
# wait for neighbor discovery to be done
time.sleep(5)
sender.sendline(u"udp send %s %d 01:23" % (receiver_ip, udp_port))
sender.expect_exact(u"Success: send 2 byte over UDP to [%s]:%d" %
(receiver_ip, udp_port))
receiver.expect(u"00000000 01 23")
sender.sendline(u"ip send %s %d 01:02:03:04" % (receiver_ip, ipprot))
sender.expect_exact(u"Success: send 4 byte over IPv6 to %s (next header: %d)" %
(receiver_ip, ipprot))
receiver.expect(u"00000000 01 02 03 04")
sender.sendline(u"tcp connect %s %d" % (receiver_ip, tcp_port))
receiver.expect(u"TCP client \\[%s\\]:[0-9]+ connected" % sender_ip)
sender.sendline(u"tcp send dead:beef")
sender.expect_exact(u"Success: send 4 byte over TCP to server")
receiver.expect(u"00000000 DE AD BE EF")
if __name__ == "__main__":
TestStrategy().execute([BoardGroup((Board("native", "tap0"),
Board("native", "tap1")))],
[test_ipv6_send, test_udpv6_send, test_tcpv6_send,
test_triple_send])
|
BytesGalore/RIOT
|
tests/lwip/tests/01-run.py
|
Python
|
lgpl-2.1
| 11,453
|
from jsonschema import validate # type: ignore
from jsonschema.exceptions import ValidationError # type: ignore
from django.test import TestCase
from django.conf import settings
from .api_tests import *
from .test_load_datamodel import *
if settings.TEST_RUNNER == 'selenium_testsuite_runner.SeleniumTestSuiteRunner':
from .selenium_tests import *
class OpenApiTests(TestCase):
def test_tables_spec(self) -> None:
from .schema import generate_openapi_for_tables
from specifyweb.context.openapi_schema import schema
spec = generate_openapi_for_tables()
validate(instance=spec, schema=schema)
|
specify/specify7
|
specifyweb/specify/tests.py
|
Python
|
gpl-2.0
| 638
|
#!/usr/bin/env python
#
# Electrum - lightweight Bitcoin client
# Copyright (C) 2011 thomasv@gitorious
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import android
from interface import WalletSynchronizer
from wallet import Wallet, format_satoshis
import mnemonic
from decimal import Decimal
import datetime, re
def modal_dialog(title, msg = None):
droid.dialogCreateAlert(title,msg)
droid.dialogSetPositiveButtonText('OK')
droid.dialogShow()
droid.dialogGetResponse()
droid.dialogDismiss()
def modal_input(title, msg, value = None, etype=None):
droid.dialogCreateInput(title, msg, value, etype)
droid.dialogSetPositiveButtonText('OK')
droid.dialogSetNegativeButtonText('Cancel')
droid.dialogShow()
response = droid.dialogGetResponse().result
droid.dialogDismiss()
if response.get('which') == 'positive':
return response.get('value')
def modal_question(q, msg, pos_text = 'OK', neg_text = 'Cancel'):
droid.dialogCreateAlert(q, msg)
droid.dialogSetPositiveButtonText(pos_text)
droid.dialogSetNegativeButtonText(neg_text)
droid.dialogShow()
response = droid.dialogGetResponse().result
droid.dialogDismiss()
return response.get('which') == 'positive'
def edit_label(addr):
v = modal_input('Edit label',None,wallet.labels.get(addr))
if v is not None:
if v:
wallet.labels[addr] = v
else:
if addr in wallet.labels.keys():
wallet.labels.pop(addr)
wallet.update_tx_history()
wallet.save()
droid.fullSetProperty("labelTextView", "text", v)
def select_from_contacts():
title = 'Contacts:'
droid.dialogCreateAlert(title)
l = []
for i in range(len(wallet.addressbook)):
addr = wallet.addressbook[i]
label = wallet.labels.get(addr,addr)
l.append( label )
droid.dialogSetItems(l)
droid.dialogSetPositiveButtonText('New contact')
droid.dialogShow()
response = droid.dialogGetResponse().result
droid.dialogDismiss()
if response.get('which') == 'positive':
return 'newcontact'
result = response.get('item')
print result
if result is not None:
addr = wallet.addressbook[result]
return addr
def select_from_addresses():
droid.dialogCreateAlert("Addresses:")
l = []
for i in range(len(wallet.addresses)):
addr = wallet.addresses[i]
label = wallet.labels.get(addr,addr)
l.append( label )
droid.dialogSetItems(l)
droid.dialogShow()
response = droid.dialogGetResponse()
result = response.result.get('item')
droid.dialogDismiss()
if result is not None:
addr = wallet.addresses[result]
return addr
def protocol_name(p):
if p == 't': return 'TCP/stratum'
if p == 'h': return 'HTTP/Stratum'
if p == 'n': return 'TCP/native'
def protocol_dialog(host, protocol, z):
droid.dialogCreateAlert('Protocol',host)
if z:
protocols = z.keys()
else:
protocols = ['t','h','n']
l = []
current = protocols.index(protocol)
for p in protocols:
l.append(protocol_name(p))
droid.dialogSetSingleChoiceItems(l, current)
droid.dialogSetPositiveButtonText('OK')
droid.dialogSetNegativeButtonText('Cancel')
droid.dialogShow()
response = droid.dialogGetResponse().result
    if not response or response.get('which') != 'positive':
        droid.dialogDismiss()
        return
    selected = droid.dialogGetSelectedItems().result[0]
    droid.dialogDismiss()
    p = protocols[selected]
port = z[p]
return host + ':' + port + ':' + p
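# For example (hypothetical values): protocol_dialog('ecdsa.org', 't',
# {'t': '50001'}) returns 'ecdsa.org:50001:t' if the user confirms the
# selection, or None if the dialog is cancelled.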
def make_layout(s, scrollable = False):
content = """
<LinearLayout
android:id="@+id/zz"
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:background="#ff222222">
<TextView
android:id="@+id/textElectrum"
android:text="Electrum"
android:textSize="7pt"
android:textColor="#ff4444ff"
android:gravity="left"
android:layout_height="wrap_content"
android:layout_width="match_parent"
/>
</LinearLayout>
%s """%s
if scrollable:
content = """
<ScrollView
android:id="@+id/scrollview"
android:layout_width="match_parent"
android:layout_height="match_parent" >
<LinearLayout
android:orientation="vertical"
android:layout_width="match_parent"
android:layout_height="wrap_content" >
%s
</LinearLayout>
</ScrollView>
"""%content
return """<?xml version="1.0" encoding="utf-8"?>
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
android:id="@+id/background"
android:orientation="vertical"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:background="#ff000022">
%s
</LinearLayout>"""%content
def main_layout():
return make_layout("""
<TextView android:id="@+id/balanceTextView"
android:layout_width="match_parent"
android:text=""
android:textColor="#ffffffff"
android:textAppearance="?android:attr/textAppearanceLarge"
android:padding="7dip"
android:textSize="8pt"
android:gravity="center_vertical|center_horizontal|left">
</TextView>
<TextView android:id="@+id/historyTextView"
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:text="Recent transactions"
android:textAppearance="?android:attr/textAppearanceLarge"
android:gravity="center_vertical|center_horizontal|center">
</TextView>
%s """%get_history_layout(15),True)
def qr_layout(addr):
return make_layout("""
<TextView android:id="@+id/addrTextView"
android:layout_width="match_parent"
android:layout_height="50"
android:text="%s"
android:textAppearance="?android:attr/textAppearanceLarge"
android:gravity="center_vertical|center_horizontal|center">
</TextView>
<ImageView
android:id="@+id/qrView"
android:gravity="center"
android:layout_width="match_parent"
android:layout_height="350"
android:antialias="false"
android:src="file:///sdcard/sl4a/qrcode.bmp" />
<TextView android:id="@+id/labelTextView"
android:layout_width="match_parent"
android:layout_height="50"
android:text="%s"
android:textAppearance="?android:attr/textAppearanceLarge"
android:gravity="center_vertical|center_horizontal|center">
</TextView>
"""%(addr,wallet.labels.get(addr,'')), True)
payto_layout = make_layout("""
<TextView android:id="@+id/recipientTextView"
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:text="Pay to:"
android:textAppearance="?android:attr/textAppearanceLarge"
android:gravity="left">
</TextView>
<EditText android:id="@+id/recipient"
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:tag="Tag Me" android:inputType="text">
</EditText>
<LinearLayout android:id="@+id/linearLayout1"
android:layout_width="match_parent"
android:layout_height="wrap_content">
<Button android:id="@+id/buttonQR" android:layout_width="wrap_content"
android:layout_height="wrap_content" android:text="From QR code"></Button>
<Button android:id="@+id/buttonContacts" android:layout_width="wrap_content"
android:layout_height="wrap_content" android:text="From Contacts"></Button>
</LinearLayout>
<TextView android:id="@+id/labelTextView"
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:text="Description:"
android:textAppearance="?android:attr/textAppearanceLarge"
android:gravity="left">
</TextView>
<EditText android:id="@+id/label"
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:tag="Tag Me" android:inputType="text">
</EditText>
<TextView android:id="@+id/amountLabelTextView"
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:text="Amount:"
android:textAppearance="?android:attr/textAppearanceLarge"
android:gravity="left">
</TextView>
<EditText android:id="@+id/amount"
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:tag="Tag Me" android:inputType="numberDecimal">
</EditText>
<LinearLayout android:layout_width="match_parent"
android:layout_height="wrap_content" android:id="@+id/linearLayout1">
<Button android:id="@+id/buttonPay" android:layout_width="wrap_content"
android:layout_height="wrap_content" android:text="Send"></Button>
</LinearLayout>""",False)
settings_layout = make_layout(""" <ListView
android:id="@+id/myListView"
android:layout_width="match_parent"
android:layout_height="wrap_content" />""")
def get_history_values(n):
values = []
h = wallet.get_tx_history()
length = min(n, len(h))
for i in range(length):
line = h[-i-1]
v = line['value']
try:
dt = datetime.datetime.fromtimestamp( line['timestamp'] )
if dt.date() == dt.today().date():
time_str = str( dt.time() )
else:
time_str = str( dt.date() )
conf = 'v'
except:
print line['timestamp']
time_str = 'pending'
conf = 'o'
tx_hash = line['tx_hash']
label = wallet.labels.get(tx_hash)
is_default_label = (label == '') or (label is None)
if is_default_label: label = line['default_label']
values.append((conf, ' ' + time_str, ' ' + format_satoshis(v,True), ' ' + label ))
return values
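# Each returned row is a (conf, time, amount, label) tuple, e.g. (hypothetical):
#   ('v', ' 14:02:11', ' 0.05000000', ' groceries')
# where 'v' marks a confirmed transaction and 'o' a pending one.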
def get_history_layout(n):
rows = ""
i = 0
values = get_history_values(n)
for v in values:
a,b,c,d = v
color = "#ff00ff00" if a == 'v' else "#ffff0000"
rows += """
<TableRow>
<TextView
android:id="@+id/hl_%d_col1"
android:layout_column="0"
android:text="%s"
android:textColor="%s"
android:padding="3" />
<TextView
android:id="@+id/hl_%d_col2"
android:layout_column="1"
android:text="%s"
android:padding="3" />
<TextView
android:id="@+id/hl_%d_col3"
android:layout_column="2"
android:text="%s"
android:padding="3" />
<TextView
android:id="@+id/hl_%d_col4"
android:layout_column="3"
android:text="%s"
android:padding="4" />
</TableRow>"""%(i,a,color,i,b,i,c,i,d)
i += 1
output = """
<TableLayout xmlns:android="http://schemas.android.com/apk/res/android"
android:layout_width="fill_parent"
android:layout_height="wrap_content"
android:stretchColumns="0,1,2,3">
%s
</TableLayout>"""% rows
return output
def set_history_layout(n):
values = get_history_values(n)
i = 0
for v in values:
a,b,c,d = v
droid.fullSetProperty("hl_%d_col1"%i,"text", a)
if a == 'v':
droid.fullSetProperty("hl_%d_col1"%i, "textColor","#ff00ff00")
else:
droid.fullSetProperty("hl_%d_col1"%i, "textColor","#ffff0000")
droid.fullSetProperty("hl_%d_col2"%i,"text", b)
droid.fullSetProperty("hl_%d_col3"%i,"text", c)
droid.fullSetProperty("hl_%d_col4"%i,"text", d)
i += 1
status_text = ''
def update_layout():
global status_text
if not wallet.interface.is_connected:
text = "Not connected..."
elif wallet.blocks == 0:
text = "Server not ready"
elif not wallet.up_to_date:
text = "Synchronizing..."
else:
c, u = wallet.get_balance()
text = "Balance:"+format_satoshis(c)
if u : text += ' [' + format_satoshis(u,True).strip() + ']'
# vibrate if status changed
if text != status_text:
if status_text and wallet.interface.is_connected and wallet.up_to_date:
droid.vibrate()
status_text = text
droid.fullSetProperty("balanceTextView", "text", status_text)
if wallet.up_to_date:
set_history_layout(15)
def pay_to(recipient, amount, fee, label):
if wallet.use_encryption:
password = droid.dialogGetPassword('Password').result
if not password: return
else:
password = None
droid.dialogCreateSpinnerProgress("Electrum", "signing transaction...")
droid.dialogShow()
try:
tx = wallet.mktx( recipient, amount, label, password, fee)
except BaseException, e:
modal_dialog('error', e.message)
droid.dialogDismiss()
return
droid.dialogDismiss()
r, h = wallet.sendtx( tx )
if r:
modal_dialog('Payment sent', h)
return True
else:
modal_dialog('Error', h)
def recover():
droid.dialogCreateAlert("Wallet not found","Do you want to create a new wallet, or restore an existing one?")
droid.dialogSetPositiveButtonText('Create')
droid.dialogSetNeutralButtonText('Restore')
droid.dialogSetNegativeButtonText('Cancel')
droid.dialogShow()
response = droid.dialogGetResponse().result
droid.dialogDismiss()
if response.get('which') == 'negative':
exit(1)
is_recovery = response.get('which') == 'neutral'
if not is_recovery:
wallet.new_seed(None)
else:
if modal_question("Input method",None,'QR Code', 'mnemonic'):
code = droid.scanBarcode()
r = code.result
if r:
seed = r['extras']['SCAN_RESULT']
else:
exit(1)
else:
m = modal_input('Mnemonic','please enter your code')
try:
seed = mnemonic.mn_decode(m.split(' '))
except:
modal_dialog('error', 'could not decode this seed')
exit(1)
wallet.seed = str(seed)
modal_dialog('Your seed is:', wallet.seed)
modal_dialog('Mnemonic code:', ' '.join(mnemonic.mn_encode(wallet.seed)) )
msg = "recovering wallet..." if is_recovery else "creating wallet..."
droid.dialogCreateSpinnerProgress("Electrum", msg)
droid.dialogShow()
wallet.init_mpk( wallet.seed )
WalletSynchronizer(wallet,True).start()
wallet.update()
droid.dialogDismiss()
droid.vibrate()
if is_recovery:
if wallet.is_found():
wallet.update_tx_history()
wallet.fill_addressbook()
modal_dialog("recovery successful")
else:
if not modal_question("no transactions found for this seed","do you want to keep this wallet?"):
exit(1)
change_password_dialog()
wallet.save()
def make_new_contact():
code = droid.scanBarcode()
r = code.result
if r:
data = r['extras']['SCAN_RESULT']
if data:
if re.match('^bitcoin:', data):
address, _, _, _, _, _, _ = wallet.parse_url(data, None, None)
elif wallet.is_valid(data):
address = data
else:
address = None
if address:
if modal_question('Add to contacts?', address):
wallet.addressbook.append(address)
wallet.save()
else:
modal_dialog('Invalid address', data)
do_refresh = False
def update_callback():
global do_refresh
print "gui callback", wallet.interface.is_connected, wallet.up_to_date
do_refresh = True
droid.eventPost("refresh",'z')
def main_loop():
global do_refresh
update_layout()
out = None
quitting = False
while out is None:
event = droid.eventWait(1000).result
if event is None:
if do_refresh:
update_layout()
do_refresh = False
continue
print "got event in main loop", repr(event)
if event == 'OK': continue
#if event["name"]=="refresh":
# request 2 taps before we exit
if event["name"]=="key":
if event["data"]["key"] == '4':
if quitting:
out = 'quit'
else:
quitting = True
else: quitting = False
if event["name"]=="click":
id=event["data"]["id"]
elif event["name"]=="settings":
out = 'settings'
elif event["name"] in menu_commands:
out = event["name"]
if out == 'contacts':
global contact_addr
contact_addr = select_from_contacts()
if contact_addr == 'newcontact':
make_new_contact()
contact_addr = None
if not contact_addr:
out = None
elif out == "receive":
global receive_addr
receive_addr = select_from_addresses()
if receive_addr:
amount = modal_input('Amount', 'Amount you want to receive. ', '', "numberDecimal")
if amount:
receive_addr = 'bitcoin:%s?amount=%s'%(receive_addr, amount)
if not receive_addr:
out = None
return out
def payto_loop():
global recipient
if recipient:
droid.fullSetProperty("recipient","text",recipient)
recipient = None
out = None
while out is None:
event = droid.eventWait().result
print "got event in payto loop", event
if event["name"] == "click":
id = event["data"]["id"]
if id=="buttonPay":
droid.fullQuery()
recipient = droid.fullQueryDetail("recipient").result.get('text')
label = droid.fullQueryDetail("label").result.get('text')
amount = droid.fullQueryDetail('amount').result.get('text')
if not wallet.is_valid(recipient):
modal_dialog('Error','Invalid Bitcoin address')
continue
try:
amount = int( 100000000 * Decimal(amount) )
except:
modal_dialog('Error','Invalid amount')
continue
result = pay_to(recipient, amount, wallet.fee, label)
if result:
out = 'main'
elif id=="buttonContacts":
addr = select_from_contacts()
droid.fullSetProperty("recipient","text",addr)
elif id=="buttonQR":
code = droid.scanBarcode()
r = code.result
if r:
data = r['extras']['SCAN_RESULT']
if data:
if re.match('^bitcoin:', data):
payto, amount, label, _, _, _, _ = wallet.parse_url(data, None, None)
droid.fullSetProperty("recipient", "text",payto)
droid.fullSetProperty("amount", "text", amount)
droid.fullSetProperty("label", "text", label)
else:
droid.fullSetProperty("recipient", "text", data)
elif event["name"] in menu_commands:
out = event["name"]
elif event["name"]=="key":
if event["data"]["key"] == '4':
out = 'main'
#elif event["name"]=="screen":
# if event["data"]=="destroy":
# out = 'main'
return out
receive_addr = ''
contact_addr = ''
recipient = ''
def receive_loop():
out = None
while out is None:
event = droid.eventWait().result
print "got event", event
if event["name"]=="key":
if event["data"]["key"] == '4':
out = 'main'
elif event["name"]=="clipboard":
droid.setClipboard(receive_addr)
modal_dialog('Address copied to clipboard',receive_addr)
elif event["name"]=="edit":
edit_label(receive_addr)
return out
def contacts_loop():
global recipient
out = None
while out is None:
event = droid.eventWait().result
print "got event", event
if event["name"]=="key":
if event["data"]["key"] == '4':
out = 'main'
elif event["name"]=="clipboard":
droid.setClipboard(contact_addr)
modal_dialog('Address copied to clipboard',contact_addr)
elif event["name"]=="edit":
edit_label(contact_addr)
elif event["name"]=="paytocontact":
recipient = contact_addr
out = 'send'
elif event["name"]=="deletecontact":
if modal_question('delete contact', contact_addr):
out = 'main'
return out
def server_dialog(plist):
droid.dialogCreateAlert("Public servers")
droid.dialogSetItems( plist.keys() )
droid.dialogSetPositiveButtonText('Private server')
droid.dialogShow()
response = droid.dialogGetResponse().result
droid.dialogDismiss()
if response.get('which') == 'positive':
return modal_input('Private server', None)
i = response.get('item')
if i is not None:
response = plist.keys()[i]
return response
def seed_dialog():
if wallet.use_encryption:
password = droid.dialogGetPassword('Seed').result
if not password: return
else:
password = None
try:
seed = wallet.pw_decode( wallet.seed, password)
except:
modal_dialog('error','incorrect password')
return
modal_dialog('Your seed is',seed)
modal_dialog('Mnemonic code:', ' '.join(mnemonic.mn_encode(seed)) )
def change_password_dialog():
if wallet.use_encryption:
password = droid.dialogGetPassword('Your wallet is encrypted').result
if password is None: return
else:
password = None
try:
seed = wallet.pw_decode( wallet.seed, password)
except:
modal_dialog('error','incorrect password')
return
new_password = droid.dialogGetPassword('Choose a password').result
if new_password is None:
return
if new_password != '':
password2 = droid.dialogGetPassword('Confirm new password').result
if new_password != password2:
modal_dialog('error','passwords do not match')
return
wallet.update_password(seed, password, new_password)
if new_password:
modal_dialog('Password updated','your wallet is encrypted')
else:
modal_dialog('No password','your wallet is not encrypted')
return True
def settings_loop():
def set_listview():
server, port, p = wallet.server.split(':')
fee = str( Decimal( wallet.fee)/100000000 )
is_encrypted = 'yes' if wallet.use_encryption else 'no'
protocol = protocol_name(p)
droid.fullShow(settings_layout)
droid.fullSetList("myListView",['Server: ' + server, 'Protocol: '+ protocol, 'Port: '+port, 'Transaction fee: '+fee, 'Password: '+is_encrypted, 'Seed'])
set_listview()
out = None
while out is None:
event = droid.eventWait().result
print "got event", event
if event == 'OK': continue
if not event: continue
plist = {}
for item in wallet.interface.servers:
host, pp = item
z = {}
for item2 in pp:
protocol, port = item2
z[protocol] = port
plist[host] = z
if event["name"] == "itemclick":
pos = event["data"]["position"]
host, port, protocol = wallet.server.split(':')
if pos == "0": #server
host = server_dialog(plist)
if host:
p = plist[host]
port = p['t']
srv = host + ':' + port + ':t'
try:
wallet.set_server(srv)
except:
modal_dialog('error','invalid server')
set_listview()
elif pos == "1": #protocol
if host in plist:
srv = protocol_dialog(host, protocol, plist[host])
if srv:
try:
wallet.set_server(srv)
except:
modal_dialog('error','invalid server')
set_listview()
elif pos == "2": #port
a_port = modal_input('Port number', 'If you use a public server, this field is set automatically when you set the protocol', port, "number")
if a_port:
if a_port != port:
srv = host + ':' + a_port + ':'+ protocol
try:
wallet.set_server(srv)
except:
modal_dialog('error','invalid port number')
set_listview()
elif pos == "3": #fee
fee = modal_input('Transaction fee', 'The fee will be this amount multiplied by the number of inputs in your transaction. ', str( Decimal( wallet.fee)/100000000 ), "numberDecimal")
if fee:
try:
fee = int( 100000000 * Decimal(fee) )
except:
modal_dialog('error','invalid fee value')
fee = None
if fee is not None and wallet.fee != fee:
wallet.fee = fee
wallet.save()
set_listview()
elif pos == "4":
if change_password_dialog():
set_listview()
elif pos == "5":
seed_dialog()
elif event["name"] in menu_commands:
out = event["name"]
elif event["name"] == 'cancel':
out = 'main'
elif event["name"] == "key":
if event["data"]["key"] == '4':
out = 'main'
return out
menu_commands = ["send", "receive", "settings", "contacts", "main"]
droid = android.Android()
wallet = Wallet()
wallet.register_callback(update_callback)
wallet.set_path("/sdcard/electrum.dat")
wallet.read()
if not wallet.file_exists:
recover()
else:
WalletSynchronizer(wallet,True).start()
s = 'main'
def add_menu(s):
droid.clearOptionsMenu()
if s == 'main':
droid.addOptionsMenuItem("Send","send",None,"")
droid.addOptionsMenuItem("Receive","receive",None,"")
droid.addOptionsMenuItem("Contacts","contacts",None,"")
droid.addOptionsMenuItem("Settings","settings",None,"")
elif s == 'receive':
droid.addOptionsMenuItem("Copy","clipboard",None,"")
droid.addOptionsMenuItem("Label","edit",None,"")
elif s == 'contacts':
droid.addOptionsMenuItem("Copy","clipboard",None,"")
droid.addOptionsMenuItem("Label","edit",None,"")
droid.addOptionsMenuItem("Pay to","paytocontact",None,"")
#droid.addOptionsMenuItem("Delete","deletecontact",None,"")
def make_bitmap(addr):
# fixme: this is highly inefficient
droid.dialogCreateSpinnerProgress("please wait")
droid.dialogShow()
try:
import pyqrnative, bmp
qr = pyqrnative.QRCode(4, pyqrnative.QRErrorCorrectLevel.L)
qr.addData(addr)
qr.make()
k = qr.getModuleCount()
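# a version-4 QR symbol is always 33x33 modules (21 + 4 * (version - 1))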
assert k == 33
bmp.save_qrcode(qr,"/sdcard/sl4a/qrcode.bmp")
finally:
droid.dialogDismiss()
while True:
add_menu(s)
if s == 'main':
droid.fullShow(main_layout())
s = main_loop()
#droid.fullDismiss()
elif s == 'send':
droid.fullShow(payto_layout)
s = payto_loop()
#droid.fullDismiss()
elif s == 'receive':
make_bitmap(receive_addr)
droid.fullShow(qr_layout(receive_addr))
s = receive_loop()
elif s == 'contacts':
make_bitmap(contact_addr)
droid.fullShow(qr_layout(contact_addr))
s = contacts_loop()
elif s == 'settings':
#droid.fullShow(settings_layout)
s = settings_loop()
#droid.fullDismiss()
else:
break
droid.makeToast("Bye!")
|
matholroyd/electrum
|
electrum4a.py
|
Python
|
gpl-3.0
| 30,081
|
# Copyright (c) 2014 Montavista Software, LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
from congress.datasources import cinder_driver
from congress.tests import base
from congress.tests.datasources import util
from congress.tests import helper
ResponseObj = util.ResponseObj
class TestCinderDriver(base.TestCase):
def setUp(self):
super(TestCinderDriver, self).setUp()
args = helper.datasource_openstack_args()
args['poll_time'] = 0
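# a poll_time of 0 keeps the driver from sleeping between polls in these unit tests (assumed semantics)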
self.driver = cinder_driver.CinderDriver(name='testcinder', args=args)
def test_list_volumes(self):
volumes_data = [
ResponseObj({'id': '8bf2eddb-0e1a-46f9-a49a-853f8016f476',
'size': '1',
'user_id': 'b75055d5f0834d99ae874f085cf95272',
'status': 'available',
'description': 'foo',
'name': 'bar',
'bootable': 'False',
'created_at': '2014-10-09T12:16:23.000000',
'volume_type': 'lvmdriver-1'}),
ResponseObj({'id': '7cd8f73d-3243-49c9-a25b-a77ceb6ad1fa',
'size': '1',
'user_id': '6e14edb203a84aa6a5a6a90872cbae79',
'status': 'creating',
'description': 'wonder',
'name': 'alice',
'bootable': 'True',
'created_at': '2014-10-12T06:54:55.000000',
'volume_type': 'None'})]
volume_list = self.driver._translate_volumes(volumes_data)
self.assertIsNotNone(volume_list)
self.assertEqual(2, len(volume_list))
self.assertEqual({('8bf2eddb-0e1a-46f9-a49a-853f8016f476', '1',
'b75055d5f0834d99ae874f085cf95272', 'available',
'foo', 'bar', 'False', '2014-10-09T12:16:23.000000',
'lvmdriver-1'),
('7cd8f73d-3243-49c9-a25b-a77ceb6ad1fa', '1',
'6e14edb203a84aa6a5a6a90872cbae79', 'creating',
'wonder', 'alice', 'True',
'2014-10-12T06:54:55.000000', 'None')},
self.driver.state['volumes'])
def test_list_snapshots(self):
snapshots_data = [
ResponseObj({'status': 'available',
'created_at': '2014-10-12T06:54:55.000000',
'volume_id': 'b75055d5f0834d99ae874f085cf95272',
'size': '1',
'id': '7cd8f73d-3243-49c9-a25b-a77ceb6ad1fa',
'name': 'foo'}),
ResponseObj({'status': 'creating',
'created_at': '2014-10-12T06:54:55.000000',
'volume_id': '6e14edb203a84aa6a5a6a90872cbae79',
'size': '1',
'id': '7cd8f73d-3243-49c9-a25b-a77ceb6ad1fa',
'name': 'baar'})]
snapshot_list = self.driver._translate_snapshots(snapshots_data)
self.assertIsNotNone(snapshot_list)
self.assertEqual(2, len(snapshot_list))
self.assertEqual({('7cd8f73d-3243-49c9-a25b-a77ceb6ad1fa', '1',
'available', 'b75055d5f0834d99ae874f085cf95272',
'foo', '2014-10-12T06:54:55.000000'),
('7cd8f73d-3243-49c9-a25b-a77ceb6ad1fa', '1',
'creating', '6e14edb203a84aa6a5a6a90872cbae79',
'baar', '2014-10-12T06:54:55.000000')},
self.driver.state['snapshots'])
def test_list_services(self):
services_data = [
ResponseObj({'status': 'enabled',
'binary': 'cinder-scheduler',
'zone': 'nova',
'state': 'up',
'updated_at': '2014-10-10T06:25:08.000000',
'host': 'openstack@lvmdriver-1',
'disabled_reason': 'None'}),
ResponseObj({'status': 'enabled',
'binary': 'cinder-scheduler',
'zone': 'nova',
'state': 'up',
'updated_at': '2014-10-10T06:25:08.000000',
'host': 'openstack',
'disabled_reason': 'None'})]
service_list = self.driver._translate_services(services_data)
self.assertIsNotNone(service_list)
self.assertEqual(2, len(service_list))
self.assertEqual({('enabled', 'cinder-scheduler', 'nova',
'up', '2014-10-10T06:25:08.000000',
'openstack@lvmdriver-1', 'None'),
('enabled', 'cinder-scheduler', 'nova',
'up', '2014-10-10T06:25:08.000000',
'openstack', 'None')},
self.driver.state['services'])
def test_execute(self):
class CinderClient(object):
def __init__(self):
self.testkey = None
def createVolume(self, arg1):
self.testkey = 'arg1=%s' % arg1
cinder_client = CinderClient()
self.driver.cinder_client = cinder_client
api_args = {
'positional': ['1']
}
expected_ans = 'arg1=1'
self.driver.execute('createVolume', api_args)
self.assertEqual(cinder_client.testkey, expected_ans)
|
ekcs/congress
|
congress/tests/datasources/test_cinder_driver.py
|
Python
|
apache-2.0
| 6,149
|
# -*- coding: utf-8 -*-
"""Providing automated testing functionality
.. module:: yoda.yoda
:platform: Unix
:synopsis: Providing automated testing functionality
.. moduleauthor:: Petr Czaderna <pc@hydratk.org>
"""
"""
Events:
-------
yoda_before_init_tests
yoda_before_append_test_file
yoda_before_process_tests
yoda_before_check_results
yoda_on_check_results
yoda_before_append_helpers_dir
yoda_before_append_lib_dir
yoda_before_parse_test_file
yoda_before_exec_ts_prereq
yoda_before_exec_tco_test
yoda_before_exec_validate_test
yoda_before_exec_ts_postreq
yoda_on_test_run_completed
"""
import os
import yaml
import traceback
import sys
import time
from hydratk.core import extension, bootstrapper
from hydratk.core import event
from hydratk.core import const
from hydratk.lib.console.commandlinetool import CommandlineTool
from hydratk.extensions.yoda.testengine import TestEngine
from hydratk.extensions.yoda.testresults.testresults import TestResultsDB
from hydratk.extensions.yoda.testresults.testresults import TestResultsOutputFactory
from hydratk.lib.debugging.simpledebug import dmsg
from hydratk.extensions.yoda.testobject import BreakTestRun
from hydratk.extensions.yoda.testobject import BreakTestSet
from hydratk.lib.database.dbo.dbo import DBO
from hydratk.lib.system.fs import file_get_contents
import hydratk.lib.system.config as syscfg
from sqlite3 import Error
dep_modules = {
'hydratk': {
'min-version': '0.5.0',
'package': 'hydratk'
},
'lxml': {
'min-version': '3.3.3',
'package': 'lxml'
},
'pytz': {
'min-version': '2016.6.1',
'package': 'pytz'
},
'simplejson': {
'min-version': '3.8.2',
'package': 'simplejson'
}
}
class Extension(extension.Extension):
"""Class Extension
"""
_test_repo_root = None
_templates_repo = None
_helpers_repo = None
_libs_repo = None
_test_run = None
_current_test_base_path = None
_use_helpers_dir = []
_use_lib_dir = []
_test_engine = None
_test_results_db = None
_test_results_output_create = True
_test_results_output_handler = ['console']
_run_mode = const.CORE_RUN_MODE_SINGLE_APP
_pp_got_ticket = False # Check if there was at least one ticket processed
_pp_attr = {
'test_run_started': False,
'test_run_completed': False
}
_active_tickets = []
def __getstate__(self):
return self.__dict__
def __setstate__(self, d):
self.__dict__.update(d)
def _init_extension(self):
"""Method initializes extension
Args:
none
Returns:
void
"""
self._ext_id = 'yoda'
self._ext_name = 'Yoda'
self._ext_version = '0.2.3'
self._ext_author = 'Petr Czaderna <pc@hydratk.org>, HydraTK team <team@hydratk.org>'
self._ext_year = '2014 - 2018'
if not self._check_dependencies():
exit(0)
self._run_mode = self._mh.run_mode # synchronizing run mode
if int(self._mh.cfg['Extensions']['Yoda']['test_results_output_create']) in (0, 1):
self._test_results_output_create = bool(
int(self._mh.cfg['Extensions']['Yoda']['test_results_output_create']))
if type(self._mh.cfg['Extensions']['Yoda']['test_results_output_handler']).__name__ == 'list':
self._test_results_output_handler = self._mh.cfg[
'Extensions']['Yoda']['test_results_output_handler']
self._init_repos()
def _check_dependencies(self):
"""Method checks dependent modules
Args:
none
Returns:
bool
"""
return bootstrapper._check_dependencies(dep_modules, 'hydratk-ext-yoda')
def _uninstall(self):
"""Method returns additional uninstall data
Args:
none
Returns:
tuple: list (files), list (modules)
"""
files = [
'/usr/share/man/man1/yoda.1',
'{0}/hydratk/conf.d/hydratk-ext-yoda.conf'.format(syscfg.HTK_ETC_DIR),
'{0}/hydratk/yoda'.format(syscfg.HTK_VAR_DIR),
'/tmp/test_output'
]
if (self._test_repo_root != '{0}/hydratk/yoda'.format(syscfg.HTK_VAR_DIR)):
files.append(self._test_repo_root)
return files, dep_modules
def _init_repos(self):
"""Method initializes test repositories
Configuration option Extensions/Yoda/test_repo_root
lib - low level auxiliary test methods
helpers - high level auxiliary test methods
yoda-tests - test scripts
Args:
none
Returns:
void
"""
root = self._mh.cfg['Extensions']['Yoda']['test_repo_root'].format(var_dir=syscfg.HTK_VAR_DIR)
self._test_repo_root = root
self._libs_repo = root + '/lib'
self._templates_repo = root + '/yoda-tests/'
self._helpers_repo = root + '/helpers'
msg = '''
Init repos: test_repo_root: {0}
libs_repo: {1}
templates_repo: {2}
helpers_repo: {3}
'''.format(self._test_repo_root, self._libs_repo, self._templates_repo, self._helpers_repo)
self._mh.demsg('htk_on_debug_info', msg, self._mh.fromhere())
def _update_repos(self):
"""Method updates test repositories
Args:
none
Returns:
void
"""
self._libs_repo = self._test_repo_root + '/lib'
self._templates_repo = self._test_repo_root + '/yoda-tests/'
self._helpers_repo = self._test_repo_root + '/helpers'
msg = '''
Update repos: test_repo_root: {0}
libs_repo: {1}
templates_repo: {2}
helpers_repo: {3}
'''.format(self._test_repo_root, self._libs_repo, self._templates_repo, self._helpers_repo)
self._mh.demsg('htk_on_debug_info', msg, self._mh.fromhere())
def _do_imports(self):
pass
# def __getstate__(self):
# odict = self.__dict__.copy() # copy the dict since we change it
# odict['_mh'] = None # remove filehandle entry
# return odict
# def __setstate__(self, d):
# self.__dict__.update(d)
def _register_actions(self):
"""Method registers event hooks
Args:
none
Returns:
void
"""
hook = [
{'event': 'htk_on_cmd_options', 'callback': self.init_check},
{'event': 'yoda_before_init_tests',
'callback': self.check_test_results_db},
{'event': 'htk_on_cworker_init', 'callback': self.pp_actions},
{'event': 'htk_after_load_extensions',
'callback': self.check_pp_mode},
]
self._mh.register_event_hook(hook)
if self._mh.cli_cmdopt_profile == 'yoda':
self._register_standalone_actions()
else:
self._register_htk_actions()
self._test_engine = TestEngine()
# def __getinitargs__(self):
# return (None,)
def check_pp_mode(self, ev):
"""Method registers event hooks for parallel processing
Args:
ev (obj): not used
Returns:
void
"""
if self._mh.run_mode == const.CORE_RUN_MODE_PP_APP:
hook = [{'event': 'htk_on_cobserver_ctx_switch', 'callback': self.pp_app_check},
#{'event' : 'htk_on_cobserver_ctx_switch', 'callback' : self.pp_app_check2 }
]
self._mh.register_event_hook(hook)
#self._mh.register_async_fn('pp_test', worker1)
#self._mh.register_async_fn_ex('pp_test2',worker2, Extension.worker_result)
self.init_libs()
self.init_helpers()
def _register_htk_actions(self):
"""Method registers command hooks
Args:
none
Returns:
void
"""
dmsg(self._mh._trn.msg('yoda_registering_actions', 'htk'))
self._mh.match_cli_command('yoda-run')
self._mh.match_cli_command('yoda-simul')
self._mh.match_cli_command('yoda-create-test-results-db')
self._mh.match_cli_command('yoda-create-testdata-db')
hook = [
{'command': 'yoda-run', 'callback': self.init_tests},
{'command': 'yoda-simul', 'callback': self.init_test_simul},
{'command': 'yoda-create-test-results-db',
'callback': self.create_test_results_db},
{'command': 'yoda-create-testdata-db',
'callback': self.create_testdata_db}
]
self._mh.register_command_hook(hook)
self._mh.match_long_option('yoda-test-path', True, 'yoda-test-path')
self._mh.match_long_option(
'yoda-test-repo-root-dir', True, 'yoda-test-repo-root-dir')
self._mh.match_long_option(
'yoda-db-results-dsn', True, 'yoda-db-results-dsn')
self._mh.match_long_option(
'yoda-db-testdata-dsn', True, 'yoda-db-testdata-dsn')
self._mh.match_long_option(
'yoda-test-run-name', True, 'yoda-test-run-name')
self._mh.match_long_option(
'yoda-multiply-tests', True, 'yoda-multiply-tests')
self._mh.match_long_option(
'yoda-test-results-output-create', True, 'yoda-test-results-output-create')
self._mh.match_long_option(
'yoda-test-results-output-handler', True, 'yoda-test-results-output-handler')
def _register_standalone_actions(self):
"""Method registers command hooks for standalone mode
Args:
none
Returns:
void
"""
dmsg(self._mh._trn.msg('yoda_registering_actions', 'standalone'))
option_profile = 'yoda'
help_title = '{h}' + self._ext_name + ' v' + self._ext_version + '{e}'
cp_string = '{u}' + "(c) " + self._ext_year + \
" " + self._ext_author + '{e}'
self._mh.set_cli_appl_title(help_title, cp_string)
self._mh.match_cli_command('run', option_profile)
self._mh.match_cli_command('simul', option_profile)
self._mh.match_cli_command('create-test-results-db', option_profile)
self._mh.match_cli_command('create-testdata-db', option_profile)
self._mh.match_cli_command('help', option_profile)
hook = [
{'command': 'run', 'callback': self.init_tests},
{'command': 'simul', 'callback': self.init_test_simul},
{'command': 'create-test-results-db',
'callback': self.create_test_results_db},
{'command': 'create-testdata-db',
'callback': self.create_testdata_db}
]
self._mh.register_command_hook(hook)
self._mh.match_cli_option(
('tp', 'test-path'), True, 'yoda-test-path', False, option_profile)
self._mh.match_cli_option(
('rd', 'test-repo-root-dir'), True, 'yoda-test-repo-root-dir', False, option_profile)
self._mh.match_cli_option(('oc', 'test-results-output-create'),
True, 'yoda-test-results-output-create', False, option_profile)
self._mh.match_cli_option(('oh', 'test-results-output-handler'),
True, 'yoda-test-results-output-handler', False, option_profile)
self._mh.match_long_option(
'db-results-dsn', True, 'yoda-db-results-dsn', False, option_profile)
self._mh.match_long_option(
'db-testdata-dsn', True, 'yoda-db-testdata-dsn', False, option_profile)
self._mh.match_cli_option(
('rn', 'test-run-name'), True, 'yoda-test-run-name', False, option_profile)
self._mh.match_long_option(
'multiply-tests', True, 'yoda-multiply-tests', False, option_profile)
self._mh.match_cli_option(
('c', 'config'), True, 'config', False, option_profile)
self._mh.match_cli_option(
('d', 'debug'), True, 'debug', False, option_profile)
self._mh.match_cli_option(
('e', 'debug-channel'), True, 'debug-channel', False, option_profile)
self._mh.match_cli_option(
('l', 'language'), True, 'language', False, option_profile)
self._mh.match_cli_option(
('m', 'run-mode'), True, 'run-mode', False, option_profile)
self._mh.match_cli_option(
('f', 'force'), False, 'force', False, option_profile)
self._mh.match_cli_option(
('i', 'interactive'), False, 'interactive', False, option_profile)
self._mh.match_cli_option(
('h', 'home'), False, 'home', False, option_profile)
def pp_actions(self, ev):
pass
def pp_app_check(self, ev):
"""Method ensures test run completion when all parallel execution are completed
Args:
ev (obj): not used
Returns:
void
Raises:
exception: Exception
event: yoda_before_check_results
"""
dmsg(
self._mh._trn.msg('yoda_context_switch', len(self._active_tickets)))
if len(self._active_tickets) > 0:
# iterate over a copy: deleting by index while enumerating skips the next ticket
for ticket_id in self._active_tickets[:]:
dmsg(self._mh._trn.msg('yoda_checking_ticket', ticket_id))
if self._mh.async_ticket_completed(ticket_id):
self._mh.delete_async_ticket(ticket_id)
self._active_tickets.remove(ticket_id)
else:
dmsg(
self._mh._trn.msg('yoda_waiting_tickets', len(self._active_tickets)))
else:
print(self._pp_attr)
self._pp_attr['test_run_completed'] = True
try:
self._test_engine.test_run.end_time = time.time()
self._test_engine.test_run.update_db_record()
self._test_engine.test_run.write_custom_data()
except:
print(sys.exc_info())
ex_type, ex, tb = sys.exc_info()
traceback.print_tb(tb)
raise Exception(
self._mh._trn.msg('yoda_update_test_run_db_error'))
ev = event.Event('yoda_before_check_results')
self._mh.fire_event(ev)
if ev.will_run_default():
self._check_results()
self._mh.stop_pp_app()
ev = event.Event('yoda_on_test_run_completed', self._test_engine.test_run.id)
self._mh.fire_event(ev)
def create_test_results_db(self):
"""Method creates results database
Args:
none
Returns:
obj: database
"""
dsn = self._mh.ext_cfg['Yoda']['db_results_dsn'].format(var_dir=syscfg.HTK_VAR_DIR)
dmsg(self._mh._trn.msg('yoda_create_db', dsn))
trdb = TestResultsDB(dsn)
trdb.create_database()
return trdb
def create_testdata_db(self):
"""Method creates testdata database
Database DSN is read from the yoda-db-testdata-dsn command option or from configuration
An existing database is recreated when the force command option is given
Args:
none
Returns:
bool
"""
try:
dsn = CommandlineTool.get_input_option('yoda-db-testdata-dsn')
force = CommandlineTool.get_input_option('force')
if (not dsn):
dsn = self._mh.ext_cfg['Yoda']['db_testdata_dsn'].format(var_dir=syscfg.HTK_VAR_DIR)
db = DBO(dsn)._dbo_driver
db._parse_dsn(dsn)
result = True
if (not db.database_exists() or force):
if (force):
dmsg(self._mh._trn.msg('yoda_remove_testdata_db', dsn))
db.remove_database()
print(self._mh._trn.msg('yoda_create_testdata_db', dsn))
db.connect()
dbdir = os.path.join(self._mh.ext_cfg['Yoda']['test_repo_root'].format(var_dir=syscfg.HTK_VAR_DIR), 'db_testdata')
script = file_get_contents(
os.path.join(dbdir, 'db_struct.sql'))
db._cursor.executescript(script)
script = file_get_contents(os.path.join(dbdir, 'db_data.sql'))
db._cursor.executescript(script)
print(self._mh._trn.msg('yoda_testdata_db_created'))
else:
print(self._mh._trn.msg('yoda_testdata_db_exists', dsn))
result = False
return result
except Error as ex:
print(self._mh._trn.msg('yoda_testdata_db_error', ex))
return False
def init_check(self, ev):
"""Event listener waiting for htk_on_cmd_options event
If there's --yoda-test-repo-root-dir parameter presence, it will try to override current settings
Args:
ev (object): hydratk.core.event.Event
Returns:
void
"""
test_repo = CommandlineTool.get_input_option('yoda-test-repo-root-dir')
if test_repo != False and os.path.exists(test_repo) and os.path.isdir(test_repo):
self._test_repo_root = test_repo
self._mh.demsg('htk_on_debug_info', self._mh._trn.msg(
'yoda_test_repo_root_override', test_repo), self._mh.fromhere())
self._update_repos()
test_results_output_create = CommandlineTool.get_input_option(
'yoda-test-results-output-create')
if test_results_output_create != False and int(test_results_output_create) in (0, 1):
self._mh.ext_cfg['Yoda']['test_results_output_create'] = int(
test_results_output_create)
self._test_results_output_create = bool(
int(test_results_output_create))
dmsg(self._mh._trn.msg('yoda_test_results_output_override',
self._mh.ext_cfg['Yoda']['test_results_output_create']), 3)
test_results_output_handler = CommandlineTool.get_input_option(
'yoda-test-results-output-handler')
if test_results_output_handler != False and test_results_output_handler not in (None, ''):
# the handler option is a name such as 'console', not a 0/1 flag;
# casting it to int would raise ValueError, so store it as a list of names
self._mh.ext_cfg['Yoda']['test_results_output_handler'] = test_results_output_handler
self._test_results_output_handler = test_results_output_handler.split(',')
dmsg(self._mh._trn.msg('yoda_test_results_handler_override',
self._mh.ext_cfg['Yoda']['test_results_output_handler']), 3)
db_results_dsn = CommandlineTool.get_input_option(
'yoda-db-results-dsn')
if db_results_dsn != False and db_results_dsn not in (None, ''):
self._mh.ext_cfg['Yoda']['db_results_dsn'] = db_results_dsn
dmsg(self._mh._trn.msg('yoda_test_results_db_override',
self._mh.ext_cfg['Yoda']['db_results_dsn']), 3)
test_run_name = CommandlineTool.get_input_option('yoda-test-run-name')
if test_run_name != False:
self._test_engine.test_run.name = test_run_name
def init_test_simul(self):
"""Method enables simulated execution
Args:
none
Returns:
void
"""
self._test_engine.test_simul_mode = True
self.init_tests()
def init_test_results_db(self):
"""Method initialized results database
Configuration option - Yoda/db_results_dsn
Args:
none
Returns:
void
Raises:
exception: Exception
"""
dsn = self._mh.ext_cfg['Yoda']['db_results_dsn'].format(var_dir=syscfg.HTK_VAR_DIR)
dmsg(self._mh._trn.msg('yoda_test_results_db_init', dsn))
trdb = TestResultsDB(dsn)
if trdb.db_check_ok() == False:
raise Exception(
self._mh._trn.msg('yoda_test_results_db_check_fail', dsn))
else:
dmsg(self._mh._trn.msg('yoda_test_results_db_check_ok', dsn))
self._test_engine.test_results_db = trdb
def check_test_results_db(self, ev):
"""Method check if results database is successfully created
Configuration option - Yoda/db_results_autocreate
It is created if autocreate enabled
Args:
ev: not used
Returns:
void
Raises:
exception: Exception
"""
dsn = self._mh.ext_cfg['Yoda']['db_results_dsn'].format(var_dir=syscfg.HTK_VAR_DIR)
dmsg(self._mh._trn.msg('yoda_test_results_db_init', dsn))
trdb = TestResultsDB(dsn)
if trdb.db_check_ok() == False:
if int(self._mh.ext_cfg['Yoda']['db_results_autocreate']) == 1:
try:
dmsg(self._mh._trn.msg('yoda_create_db', dsn))
trdb.create_database()
self._test_engine.test_results_db = trdb
except:
print(str(sys.exc_info()))
else:
raise Exception(
self._mh._trn.msg('yoda_test_results_db_check_fail', dsn))
else:
dmsg(self._mh._trn.msg('yoda_test_results_db_check_ok', dsn))
self._test_engine.test_results_db = trdb
def init_tests(self):
"""Method is initializing tests
Args:
none
Returns:
void
Raises:
event: yoda_before_init_tests
event: yoda_before_process_tests
event: yoda_before_check_results
"""
self._test_engine.test_repo_root = self._test_repo_root
self._test_engine.libs_repo = self._libs_repo
self._test_engine.templates_repo = self._templates_repo
self._test_engine.helpers_repo = self._helpers_repo
ev = event.Event('yoda_before_init_tests')
self._mh.fire_event(ev)
if ev.will_run_default():
test_path = CommandlineTool.get_input_option('yoda-test-path')
if test_path == False:
test_path = ''
self.init_libs()
self.init_helpers()
if test_path != '' and test_path[0] == '/': # global test set
self._test_engine.run_mode_area = 'global'
self._mh.demsg('htk_on_debug_info', self._mh._trn.msg(
'yoda_running_tset_global', test_path), self._mh.fromhere())
else:
self._test_engine.run_mode_area = 'inrepo'
test_path = self._templates_repo + test_path
self._mh.demsg('htk_on_debug_info', self._mh._trn.msg(
'yoda_running_tset_repo', test_path), self._mh.fromhere())
multiply_tests = CommandlineTool.get_input_option(
'yoda-multiply-tests')
test_files = []
test_file_id = []
if multiply_tests != False:
multiply_tests = int(multiply_tests)
if multiply_tests > 0:
for i in range(multiply_tests):
tfiles, tfile_id = self._test_engine.get_all_tests_from_path(
test_path)
test_files += tfiles
test_file_id += tfile_id
self._mh.demsg('htk_on_debug_info', self._mh._trn.msg(
'yoda_multiply_tests', i), self._mh.fromhere())
else:
test_files, test_file_id = self._test_engine.get_all_tests_from_path(
test_path)
ev = event.Event(
'yoda_before_process_tests', test_files, test_file_id)
if (self._mh.fire_event(ev) > 0):
test_files = ev.argv(0)
test_file_id = ev.argv(1)
if ev.will_run_default():
self.process_tests(test_files, test_file_id)
if self._mh.run_mode == const.CORE_RUN_MODE_SINGLE_APP:
ev = event.Event('yoda_before_check_results')
self._mh.fire_event(ev)
if ev.will_run_default():
self._check_results()
ev = event.Event('yoda_on_test_run_completed',self._test_engine.test_run.id)
self._mh.fire_event(ev)
def init_global_tests(self, test_base_path):
pass
def init_inrepo_tests(self, test_base_path):
if os.path.exists(self._test_repo_root):
if os.path.exists(test_base_path):
self._mh.demsg('htk_on_debug_info', self._mh._trn.msg(
'yoda_start_test_from', test_base_path), self._mh.fromhere())
else:
self._mh.demsg('htk_on_error', self._mh._trn.msg(
'yoda_invalid_test_base_path', self._current_test_base_path), self._mh.fromhere())
else:
self._mh.demsg('htk_on_error', self._mh._trn.msg(
'yoda_invalid_test_repo_root', self._test_repo_root), self._mh.fromhere())
def init_helpers(self):
"""Method initializes helpers repository
Args:
none
Returns:
void
Raises:
event: yoda_before_append_helpers_dir
"""
self._use_helpers_dir.append(self._helpers_repo)
ev = event.Event(
'yoda_before_append_helpers_dir', self._use_helpers_dir)
if (self._mh.fire_event(ev) > 0):
self._use_helpers_dir = ev.argv(0)
if ev.will_run_default():
if isinstance(self._use_helpers_dir, list):
for helpers_dir in self._use_helpers_dir:
'''TODO also check with warning helpers_dir/__init__.py presence to see if it's proper package directory'''
if os.path.exists(helpers_dir):
sys.path.append(helpers_dir)
self._mh.demsg('htk_on_debug_info', self._mh._trn.msg(
'yoda_added_helpers_dir', helpers_dir), self._mh.fromhere())
else:
self._mh.demsg('htk_on_debug_info', self._mh._trn.msg(
'yoda_helpers_dir_not_exists', helpers_dir), self._mh.fromhere())
def init_libs(self):
"""Method initializes libraries repository
Args:
none
Returns:
void
Raises:
event: yoda_before_append_lib_dir
"""
self._use_lib_dir.append(self._libs_repo)
ev = event.Event('yoda_before_append_lib_dir', self._use_lib_dir)
if (self._mh.fire_event(ev) > 0):
self._use_lib_dir = ev.argv(0)
if ev.will_run_default():
if isinstance(self._use_lib_dir, list):
for lib_dir in self._use_lib_dir:
'''TODO also check with warning lib_dir/__init__.py presence to see if it's proper package directory'''
if os.path.exists(lib_dir):
sys.path.append(lib_dir)
self._mh.demsg('htk_on_debug_info', self._mh._trn.msg(
'yoda_added_lib_dir', lib_dir), self._mh.fromhere())
else:
self._mh.demsg('htk_on_debug_info', self._mh._trn.msg(
'yoda_lib_dir_not_exists', lib_dir), self._mh.fromhere())
def process_tests(self, test_files, test_file_id):
"""Method determines whether test sets will be executed in single or parallel mode
Args:
test_files (obj): list or str, test files
test_file_id (obj): list, test file identifiers
Returns:
void
Raises:
exception: Exception
event: yoda_before_parse_test_file
"""
dmsg(self._mh._trn.msg('yoda_parsing_test_case',
self._test_engine._test_simul_mode, self._mh.run_mode))
total_ts = len(test_files)
if total_ts > 0:
self._test_engine.test_run.total_test_sets = total_ts
if self._test_engine.have_test_results_db:
try:
self._test_engine.test_run.create_db_record()
except:
print(sys.exc_info())
raise Exception(
self._mh._trn.msg('yoda_create_test_run_db_error'))
self._mh.demsg('htk_on_debug_info', self._mh._trn.msg(
'yoda_process_test_sets_total', total_ts), self._mh.fromhere())
for tf, tfid in zip(test_files, test_file_id):
if type(tf).__name__ == 'list':
for ctf, ctfid in zip(tf, tfid):
ev = event.Event(
'yoda_before_parse_test_file', ctf, ctfid)
if (self._mh.fire_event(ev) > 0):
ctf = ev.argv(0)
ctfid = ev.argv(1)
if ev.will_run_default():
try:
if self._mh.run_mode == const.CORE_RUN_MODE_SINGLE_APP:
self.process_test_set(ctf, ctfid)
else:
self.pp_process_test_set(ctf, ctfid)
except BreakTestSet as exc:
dmsg(
self._mh._trn.msg('yoda_received_break', 'test set'))
continue
except BreakTestRun as exc:
dmsg(
self._mh._trn.msg('yoda_received_break', 'test run'))
break
else:
ev = event.Event('yoda_before_parse_test_file', tf, tfid)
if (self._mh.fire_event(ev) > 0):
tf = ev.argv(0)
tfid = ev.argv(1)
if ev.will_run_default():
try:
if self._mh.run_mode == const.CORE_RUN_MODE_SINGLE_APP:
self.process_test_set(tf, tfid)
else:
self.pp_process_test_set(tf, tfid)
except BreakTestSet as exc:
dmsg(
self._mh._trn.msg('yoda_received_break', 'test set'))
continue
except BreakTestRun as exc:
dmsg(
self._mh._trn.msg('yoda_received_break', 'test run'))
break
if self._mh.run_mode == const.CORE_RUN_MODE_SINGLE_APP:
try:
self._test_engine.test_run.end_time = time.time()
self._test_engine.test_run.update_db_record()
self._test_engine.test_run.write_custom_data()
except:
print(sys.exc_info())
ex_type, ex, tb = sys.exc_info()
traceback.print_tb(tb)
raise Exception(
self._mh._trn.msg('yoda_update_test_run_db_error'))
else:
self._mh.demsg('htk_on_debug_info', self._mh._trn.msg(
'yoda_no_tests_found_in_path', self._current_test_base_path), self._mh.fromhere())
def pp_process_test_set(self, test_set_file, test_set_file_id):
"""Method creates ticket to execute test set in parallel mode
Args:
test_set_file (str): filename
Returns:
void
"""
dmsg(self._mh._trn.msg('yoda_processing_tset_parallel', test_set_file))
ticket_id = self._mh.async_ext_fn(
(self, 'pp_run_test_set'), None, test_set_file, test_set_file_id)
dmsg(self._mh._trn.msg('yoda_got_ticket', ticket_id, test_set_file))
self._active_tickets.append(ticket_id)
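# The ticket returned by async_ext_fn is polled in pp_app_check() on every
# context switch; the run is finalized once the active ticket list drains.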
def pp_run_test_set(self, test_set_file, test_set_file_id):
"""Method executes test set in parallel mode
Args:
test_set_file (str): filename
Returns:
void
Raises:
exception: Exception
"""
self.init_test_results_db()
dmsg(self._mh._trn.msg('yoda_processing_tset', test_set_file), 1)
tset_struct = self._test_engine.load_tset_from_file(test_set_file)
if tset_struct != False:
tset_obj = self._test_engine.parse_tset_struct(
tset_struct, test_set_file_id)
self._test_engine.test_run.norun_tests += tset_obj.parsed_tests[
'total_tco']
if tset_obj != False:
if self._test_engine.have_test_results_db:
try:
dmsg(
self._mh._trn.msg('yoda_create_test_set_db', test_set_file), 1)
tset_obj.create_db_record()
except:
print(sys.exc_info())
raise Exception(
self._mh._trn.msg('yoda_create_test_set_db_error'))
else:
raise Exception(
self._mh._trn.msg('yoda_test_results_db_missing'))
tset_obj.run()
if self._test_engine.have_test_results_db:
try:
tset_obj.end_time = time.time()
tset_obj.update_db_record()
tset_obj.write_custom_data()
except:
print(sys.exc_info())
raise Exception(
self._mh._trn.msg('yoda_update_test_set_db_error'))
else:
raise Exception("Failed to load tset_struct")
def process_test_set(self, test_set_file, test_set_file_id):
"""Method executes test set in single mode
Args:
test_set_file (str): filename
Returns:
void
Raises:
exception: Exception
"""
tset_struct = self._test_engine.load_tset_from_file(test_set_file)
if tset_struct != False:
tset_obj = self._test_engine.parse_tset_struct(
tset_struct, test_set_file_id)
self._test_engine.test_run.norun_tests += tset_obj.parsed_tests[
'total_tco']
if tset_obj != False:
if self._test_engine.have_test_results_db:
try:
tset_obj.create_db_record()
except:
print(sys.exc_info())
raise Exception(
self._mh._trn.msg('yoda_create_test_set_db_error'))
tset_obj.run()
if self._test_engine.have_test_results_db:
try:
tset_obj.end_time = time.time()
tset_obj.update_db_record()
tset_obj.write_custom_data()
except:
print(sys.exc_info())
raise Exception(
self._mh._trn.msg('yoda_update_test_set_db_error'))
def _check_results(self):
"""Method prepares results in requested format
Args:
none
Returns:
void
Raises:
event: yoda_on_check_results
"""
ev = event.Event(
'yoda_on_check_results', self._test_engine.test_run.id)
self._mh.fire_event(ev)
if ev.will_run_default():
if self._test_results_output_create == True:
for output_handler in self._test_results_output_handler:
trof = TestResultsOutputFactory(self._mh.ext_cfg['Yoda']['db_results_dsn'].format(var_dir=syscfg.HTK_VAR_DIR), output_handler)
trof.create(self._test_engine.test_run)
|
hydratk/hydratk-ext-yoda
|
src/hydratk/extensions/yoda/yoda.py
|
Python
|
bsd-3-clause
| 36,383
|
# Mark Recapture Helper Scripts
import json
import DeriveFinalResultSet as DRS, mongod_helper as mh
import DataStructsHelperAPI as DS
import importlib
import pandas as pd
import warnings
import sys, math
importlib.reload(mh)
def PRINT(jsonLike):
print(json.dumps(jsonLike, indent=4))
def genNidMarkRecapDict(mongo_client, source, days_dict, filter_species=None):
exif_tab_obj = mh.mongod_table(mongo_client, "exif_tab", source)
cursor = exif_tab_obj.query(cols=['date'])
img_dt_dict = mh.key_val_converter(cursor, 'date')
# population estimation using GGR and GZC datasets are done using dates
if source in ["GZC", "GGR"]:
img_dt_dict = {gid: DS.getDateFromStr(img_dt_dict[gid], '%Y-%m-%d %H:%M:%S', '%Y-%m-%d') for gid in img_dt_dict.keys()}
else:
'''
generally, for population estimation using Flickr/Bing images, the images were divided into annual epochs,
this could change and in that case the below line should be modified
'''
img_dt_dict = {gid: DS.getDateFromStr(img_dt_dict[gid], '%Y-%m-%d %H:%M:%S', '%Y') for gid in img_dt_dict.keys()}
# Retain only the gids for the dates in the days_dict
filtered_gid = list(filter(lambda x: img_dt_dict[x] in days_dict.keys(), img_dt_dict.keys()))
gid_days_num = {gid: days_dict[img_dt_dict[gid]] for gid in filtered_gid}
gid_nid = DRS.getCountingLogic(mongo_client, "NID", source, False, mongo=True)
if filter_species != None:
try:
gid_species = DRS.getCountingLogic(mongo_client, "SPECIES", source, False, mongo=True)
except Exception as e:
print("Exception occured at counting logic step")
print(e)
return
gid_days_num = {gid: gid_days_num[gid] for gid in gid_days_num if
gid in gid_species.keys() and filter_species in gid_species[gid]}
nidMarkRecap = {}
for gid in gid_days_num.keys(): # only iterate over the GIDs of interest
if gid in gid_nid.keys(): # not all images with valid EXIF feature will have an annotation
for nid in gid_nid[gid]:
if int(nid) > 0: # ignore all the false positives (non-positive NIDs); a specific NID, e.g. 45, could also be excluded here
nidMarkRecap[nid] = nidMarkRecap.get(nid, []) + [gid_days_num[gid]]
nidMarkRecapSet = {nid: list(set(nidMarkRecap[nid])) for nid in nidMarkRecap.keys()}
return nidMarkRecapSet
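# Example of the returned structure (hypothetical IDs): {'17': [1, 2], '23': [2]}
# means individual 17 was sighted on both capture occasions and individual 23
# only on the second one.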
# Return Petersen-Lincoln Index for mark-recapture
def applyMarkRecap(nidMarkRecapSet):
uniqueIndsDay1 = {nid for nid in nidMarkRecapSet if 1 in nidMarkRecapSet[nid]}
uniqueIndsDay2 = {nid for nid in nidMarkRecapSet if 2 in nidMarkRecapSet[nid]}
marks = len(uniqueIndsDay1)
recaptures = len(uniqueIndsDay1 & uniqueIndsDay2)
day2_sights = len(uniqueIndsDay2)
try:
population = day2_sights * marks / recaptures
# the standard Lincoln-Petersen variance has recaptures cubed in the denominator
confidence = 1.96 * math.sqrt(marks ** 2 * day2_sights * (day2_sights - recaptures) / recaptures ** 3)
except ZeroDivisionError:
warnings.warn("There are no recaptures for this case.")
population = 0
confidence = 0
return marks, recaptures, population, confidence
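# Worked example (hypothetical sightings): with
#   nidMarkRecapSet = {'a': [1, 2], 'b': [1], 'c': [2]}
# marks = 2 ({'a', 'b'} on day 1), day2_sights = 2 ({'a', 'c'}) and
# recaptures = 1 ({'a'}), so the population estimate is 2 * 2 / 1 = 4.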
def genSharedGids(gidList, gidPropMapFl, shareData='proportion', probabThreshold=1):
df = pd.DataFrame.from_csv(gidPropMapFl)
if shareData == 'proportion':
gidPropDict = df['Proportion'].to_dict()
highSharedGids = {str(gid) for gid in gidPropDict.keys() if float(gidPropDict[gid]) >= 80.0}
else:
gidShrDict = df['share'].to_dict()
highSharedGids = {str(gid) for gid in gidShrDict.keys() if float(gidShrDict[gid]) >= probabThreshold}
return list(set(gidList) & highSharedGids)
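# Usage sketch (hypothetical file and ids): genSharedGids(['123', '456'], 'gid_props.csv')
# keeps only the gids whose 'Proportion' column value is at least 80.0.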
def runMarkRecap(source, days_dict, filter_species=None):
client = mh.mongod_instance()
return applyMarkRecap(genNidMarkRecapDict(client, source, days_dict, filter_species=filter_species))
if __name__ == "__main__":
client = mh.mongod_instance()
source = "flickr_giraffe"
days_dict = {'2014':1, "2015" : 2}
print(genNidMarkRecapDict(client, source, days_dict, filter_species="giraffe_reticulated"))
|
smenon8/AnimalWildlifeEstimator
|
script/MarkRecapHelper.py
|
Python
|
bsd-3-clause
| 4,133
|
# coding=utf-8
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
from __future__ import unicode_literals
from blueman.Constants import *
from blueman.Functions import dprint
import gi
gi.require_version("Gtk", "3.0")
from gi.repository import Gtk
class ManagerToolbar:
def __init__(self, blueman):
self.blueman = blueman
self.blueman.List.connect("device-selected", self.on_device_selected)
self.blueman.List.connect("device-property-changed", self.on_device_propery_changed)
self.blueman.List.connect("adapter-changed", self.on_adapter_changed)
self.blueman.List.connect("adapter-property-changed", self.on_adapter_property_changed)
#toolbar = blueman.Builder.get_object("toolbar2")
#for c in toolbar.get_children():
# c.set_expand(True)
self.b_search = blueman.Builder.get_object("b_search")
self.b_search.connect("clicked", lambda button: blueman.inquiry())
self.b_bond = blueman.Builder.get_object("b_bond")
self.b_bond.connect("clicked", self.on_action, self.blueman.bond)
self.b_trust = blueman.Builder.get_object("b_trust")
self.b_trust.connect("clicked", self.on_action, self.blueman.toggle_trust)
self.b_trust.set_homogeneous(False)
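# measure the button with both possible labels and pin its width to the wider
# one, so the toolbar does not resize when the label toggles Trust/Untrust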
self.b_trust.props.label = _("Untrust")
size = Gtk.Requisition()
(size, nsize) = Gtk.Widget.get_preferred_size(self.b_trust)
self.b_trust.props.label = _("Trust")
size2 = Gtk.Requisition()
(size2, nsize2) = Gtk.Widget.get_preferred_size(self.b_trust)
self.b_trust.props.width_request = max(size.width, size2.width)
self.b_remove = blueman.Builder.get_object("b_remove")
self.b_remove.connect("clicked", self.on_action, self.blueman.remove)
self.b_setup = blueman.Builder.get_object("b_setup")
self.b_setup.connect("clicked", self.on_action, self.blueman.setup)
self.b_setup.set_homogeneous(False)
self.b_send = blueman.Builder.get_object("b_send")
self.b_send.props.sensitive = False
self.b_send.connect("clicked", self.on_action, self.blueman.send)
self.b_send.set_homogeneous(False)
self.on_adapter_changed(blueman.List, blueman.List.GetAdapterPath())
def on_action(self, button, func):
device = self.blueman.List.GetSelectedDevice()
if device is not None:
func(device)
def on_adapter_property_changed(self, List, adapter, key_value):
key, value = key_value
if key == "Discovering":
# the search button is unusable while a discovery is already running
self.b_search.props.sensitive = not value
def on_adapter_changed(self, lst, adapter_path):
dprint("toolbar adapter", adapter_path)
if adapter_path is None:
self.b_search.props.sensitive = False
self.b_send.props.sensitive = False
else:
self.b_search.props.sensitive = True
def on_device_selected(self, dev_list, device, tree_iter):
if device is None or tree_iter is None:
self.b_bond.props.sensitive = False
self.b_remove.props.sensitive = False
self.b_trust.props.sensitive = False
self.b_setup.props.sensitive = False
else:
row = dev_list.get(tree_iter, "bonded", "trusted", "fake", "objpush")
self.b_setup.props.sensitive = True
if row["bonded"]:
self.b_bond.props.sensitive = False
else:
self.b_bond.props.sensitive = True
if row["trusted"]:
self.b_trust.props.sensitive = True
self.b_trust.props.icon_name = "blueman-untrust"
self.b_trust.props.label = _("Untrust")
else:
self.b_trust.props.sensitive = True
self.b_trust.props.icon_name = "blueman-trust"
self.b_trust.props.label = _("Trust")
if row["fake"]:
self.b_remove.props.sensitive = False
self.b_trust.props.sensitive = False
self.b_bond.props.sensitive = True
else:
self.b_remove.props.sensitive = True
if row["objpush"]:
self.b_send.props.sensitive = True
else:
self.b_send.props.sensitive = False
def on_device_propery_changed(self, dev_list, device, tree_iter, key_value):
key, value = key_value
if dev_list.compare(tree_iter, dev_list.selected()):
if key == "Trusted" or key == "Paired" or key == "UUIDs":
self.on_device_selected(dev_list, device, tree_iter)
|
yars068/blueman
|
blueman/gui/manager/ManagerToolbar.py
|
Python
|
gpl-3.0
| 4,778
|
import uuid
import requests
import json
import os
import logging


def get_conf_file():
    f = open("conf/net/int_service/giles_conf.json", "r")
    conf = json.loads(f.read())
    f.close()
    return conf


def get_giles_base_url():
    conf = get_conf_file()
    base = conf['giles_base_url']
    return base


def get_giles_api_key():
    conf = get_conf_file()
    api_key = conf['giles_api_key']
    return api_key
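# Illustrative shape of the conf file read above (values are hypothetical; the
# commented-out URLs below suggest a local Giles instance on port 8079):
#   {"giles_base_url": "http://localhost:8079", "giles_api_key": "<key>"}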
class StatArchiver:
    GILES_BASE_URL = get_giles_base_url()
    GILES_API_KEY = get_giles_api_key()
    #QUERY_URL = "http://localhost:8079/api/query"
    #ARCHIVER_URL = "http://localhost:8079/add/apikey"

    def __init__(self, collection):
        self.query_url = os.path.join(self.GILES_BASE_URL, 'api', 'query')
        self.archiver_url = os.path.join(self.GILES_BASE_URL, 'add', self.GILES_API_KEY)
        self.collection = collection

    # NOTE: When we insert an entry to the Archiver, we associate that entry with a stream UUID,
    # which is a function of the stat and user_uuid.
    # No mapping needs to be maintained since this UUID can be reconstructed as long as the above
    # data is found.
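    # For illustration (the stat name here is hypothetical): uuid.uuid5 is
    # deterministic, so uuid.uuid5(uuid.NAMESPACE_DNS, "battery_level," + user_uuid)
    # returns the same stream UUID on every call, which is why the stream can be
    # re-derived at query time instead of being stored in a mapping table.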
    def insert(self, entry):
        assert type(entry) == dict
        stat = entry['stat']
        user_uuid = str(entry['user'])
        # TODO: Support more precise timestamps
        # Giles has some problem unmarshalling floats
        client_ts = int(entry['ts'])
        reading = entry['reading']
        # UUID is a function of things which are stored already, so we don't need to maintain the mapping.
        stream_uuid = str(uuid.uuid5(uuid.NAMESPACE_DNS, stat + ',' + user_uuid))
        path = '/' + user_uuid
        smapMsg = {
            path: {
                "Metadata": {
                    "SourceName": stat,
                    "Collection": self.collection,
                },
                "Properties": {
                    "Timezone": "America/Los_Angeles",
                    "ReadingType": "double",
                    "UnitofTime": "s",
                    "StreamType": "numeric",
                    "UnitofMeasure": "Ambiguous"
                },
                "Readings": [
                    [
                        client_ts,
                        reading
                    ]
                ],
                "uuid": stream_uuid
            }
        }
        for key in entry:
            if key != "reading" and key != 'ts':
                smapMsg[path]["Metadata"][key] = entry[key]
        try:
            json.dumps(smapMsg)
        except Exception as e:
            logging.debug("Error storing entry for user %s, stat %s at timestamp %s, with reading %f: entry is not JSON serializable" % (user_uuid, stat, client_ts, reading))
            metadataString = ''.join(['(' + str(metakey) + ',' + str(metaval) + '), '
                                      for metakey, metaval in smapMsg[path]["Metadata"].items()])
            # if the string is not empty, truncate the trailing ", "
            if len(metadataString) > 2:
                metadataString = metadataString[0:-2]
            logging.debug("Metadata values: %s" % (metadataString))
            logging.debug("Exception: " + str(e))
            return None
        # @TODO: Do some error-checking on the response to make sure it actually
        # really did work
        response = requests.post(self.archiver_url, data=json.dumps(smapMsg))
        success = response.content == ''
        return success
    def remove(self):
        queryMsg = 'delete where Metadata/Collection="' + self.collection + '"'
        response = requests.post(self.query_url, data=queryMsg)
        return response

    # Tags contain metadata associated with an entry
    # Return all tags for all streams stored in archiver's current path
    def query_tags(self):
        queryMsg = 'select * where Metadata/Collection="' + self.collection + '"'
        #print(queryMsg)
        #print(self.query_url)
        response = requests.post(self.query_url, data=queryMsg)
        try:
            return json.loads(response.content)
        except Exception as e:
            return response.content

    # Return all readings for all streams stored in archiver's current path
    def query_readings(self):
        queryMsg = 'select data before now where Metadata/Collection="' + self.collection + '"'
        response = requests.post(self.query_url, data=queryMsg)
        try:
            return json.loads(response.content)
        except Exception as e:
            return response.content
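# Hedged usage sketch (illustrative only; assumes a reachable Giles instance and
# a valid conf file, and the stat/collection names here are made up):
#   archiver = StatArchiver("test_collection")
#   ok = archiver.insert({'stat': 'battery_level', 'user': some_user_uuid,
#                         'ts': 1500000000, 'reading': 0.87})
#   tags = archiver.query_tags()
#   readings = archiver.query_readings()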
|
yw374cornell/e-mission-server
|
emission/net/int_service/giles/archiver.py
|
Python
|
bsd-3-clause
| 4,541
|
"""Module defining how to handle component settings"""
import asyncio
import json
import locale
import logging
import os
import time
class SettingsHandler(object):
"""Settings handler class"""
def __init__(self, component):
self.component = component
self.key = os.path.join("/config", self.component.name)
self.global_key = "/config/global"
# TODO use only one client !!
self.etcd_wrapper = component.etcd_wrapper
self.logger = logging.getLogger(name="tep").getChild(component.name).getChild('settings')
self.language = None
self.nlu_engine = None
self.params = {}
self._wait_config = True
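    # Illustrative etcd key layout (the component name is hypothetical): a
    # component named "speech" reads its own settings from /config/speech and
    # the shared settings from /config/global.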
    def save(self, value, key=None):
        """Serialize (json) value and save it in etcd"""
        if key is not None:
            key = os.path.join("/config", key)
        else:
            key = self.key
        # self.delete(key)
        self.etcd_wrapper.write(key, value)

    def delete(self, key=None):
        """Delete settings from etcd"""
        if key is not None:
            key = os.path.join("/config", key)
        else:
            key = self.key
        self.etcd_wrapper.delete(key, recursive=True)

    def stop(self):
        """Stop waiting for settings"""
        self.logger.info("Stopping settings")
        self._wait_config = False
    def read(self):
        """Read settings from etcd and update component config"""
        raw_data = None
        while not raw_data:
            raw_data = self.etcd_wrapper.read(self.key)
            if not raw_data:
                self.logger.info("Component settings not found, waiting")
                time.sleep(3)
                continue
            self.logger.info("Component settings received")
            self.params = json.loads(raw_data.value)  # pylint: disable=E1101
            self.component.set_config(config=self.params)

    def read_global(self):
        """Read global settings from etcd and update component global config"""
        raw_data = None
        while not raw_data:
            raw_data = self.etcd_wrapper.read(self.global_key)
            if not raw_data:
                self.logger.info("Global settings not found, waiting")
                time.sleep(3)
                continue
            self.logger.info("Global settings received")
            data = json.loads(raw_data.value)
            if data.get('language') != self.language:
                self.language = data['language']
                self.logger.info("Language %s set", self.language)
                # Set locale
                encoding = locale.getlocale()[1]
                locale.setlocale(locale.LC_ALL, (self.language, encoding))
                # TODO Implement reload or not ???
                self._reload_needed = True
                self.logger.info("Reloading")
            if data.get('nlu_engine') != self.nlu_engine:
                self.nlu_engine = data['nlu_engine']
                self.logger.info("NLU engine `%s` set", self.nlu_engine)
                # TODO Implement reload or not ???
                self._reload_needed = True
                self.logger.info("Reloading")
    async def async_read(self, watch=False):
        """Watch for component settings change in etcd

        This is done by the subtaskers
        """
        while self._wait_config:
            raw_data = await self.etcd_wrapper.async_read(self.key, wait=watch)
            if not raw_data:
                self.logger.info("Component settings not found, waiting")
                await asyncio.sleep(3)
                continue
            self.logger.info("Component settings received")
            self.params = json.loads(raw_data.value)  # pylint: disable=E1101
            self.component.set_config(config=self.params)
            # TODO Implement reload or not ???
            # self.logger.info("Reloading")
            # self._reload_needed = True
            # TODO improve this
            if not watch:
                break

    async def async_read_global(self, watch=False):
        """Watch for global settings change in etcd

        This is done by the subtaskers
        """
        while self._wait_config:
            raw_data = await self.etcd_wrapper.async_read(self.global_key, wait=watch)
            if not raw_data:
                self.logger.info("Global settings not found, waiting")
                await asyncio.sleep(3)
                continue
            self.logger.info("Global settings received")
            data = json.loads(raw_data.value)
            if data.get('language') != self.language:
                self.language = data['language']
                self.logger.info("Language %s set", self.language)
                # Set locale
                encoding = locale.getlocale()[1]
                locale.setlocale(locale.LC_ALL, (self.language, encoding))
                # TODO Implement reload or not ???
                self._reload_needed = True
                self.logger.info("Reloading")
            if data.get('nlu_engine') != self.nlu_engine:
                self.nlu_engine = data['nlu_engine']
                self.logger.info("NLU engine `%s` set", self.nlu_engine)
                # TODO Implement reload or not ???
                self._reload_needed = True
                self.logger.info("Reloading")
            # TODO improve this
            if not watch:
                break
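# Hedged usage sketch (illustrative; `component` and its etcd_wrapper are assumed
# to exist with the interface used above):
#   settings = SettingsHandler(component)
#   settings.read()          # block until the component config appears in etcd
#   settings.read_global()   # pick up language / NLU engine settings
#   # inside an event loop, keep watching for changes:
#   #   await settings.async_read(watch=True)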
|
TuxEatPi/common
|
tuxeatpi_common/settings.py
|
Python
|
apache-2.0
| 5,416
|