| code (string, 2-1.05M chars) | repo_name (string, 5-104 chars) | path (string, 4-251 chars) | language (1 class) | license (15 classes) | size (int32, 2-1.05M) |
|---|---|---|---|---|---|
"""
A script to walk through all commits of a repository.
Works with `number_of_strategies.py` to get the number of strategies in the
library at each commit.
"""
from git import Repo
from tqdm import tqdm
import os
import subprocess
path_to_repo = "~/src/Axelrod"
repo = Repo(path_to_repo)
all_commits = [c for c in repo.iter_commits()]
git = repo.git
number_of_strategies = []
dates = []
git.checkout('master')
try:
os.remove('data')
except OSError:
pass
for c in tqdm(sorted(all_commits, key=lambda x:x.committed_date)):
for rubbish in [".DS_Store",
"axelrod/.DS_Store",
"axelrod/tests/.DS_Store",
"axelrod/strategies/.DS_Store"]: # Having to delete some files that were not in gitignore at the time of the commit
try:
os.remove(path_to_repo + rubbish)
except OSError:
pass
git.checkout(c)
try:
subprocess.call(['python2', '-B', 'number_of_strategies.py',
str(c.committed_date), c.hexsha, str(c.author)])
dates.append(c.committed_date)
except ImportError:
pass
git.checkout('master')
| Axelrod-Python/An-open-reproducible-framework-for-the-study-of-the-iterated-prisoners-dilemma | scripts/scrape_repo.py | Python | mit | 1,161 |
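The scraper above shells out to `number_of_strategies.py`, which is not included in this row. A minimal, hypothetical sketch of what that companion script could look like follows; it assumes the Axelrod library exposes its strategy list as `axelrod.strategies` and appends one CSV line per commit to the `data` file that the scraper deletes at the start. The real script may differ.

```python
# Hypothetical companion script (number_of_strategies.py); the real one may differ.
import csv
import sys


def main():
    timestamp, sha, author = sys.argv[1:4]
    try:
        import axelrod  # may not import cleanly at very old commits
    except ImportError:
        return
    count = len(axelrod.strategies)  # assumed attribute holding the strategy classes
    with open('data', 'a') as handle:
        csv.writer(handle).writerow([timestamp, sha, author, count])


if __name__ == '__main__':
    main()
```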
'''
Tests for nrg mapping procedures.
'''
from numpy import *
from numpy.testing import dec,assert_,assert_raises,assert_almost_equal,assert_allclose
from matplotlib.pyplot import *
from scipy import sparse as sps
from scipy.linalg import qr,eigvalsh,norm
import time,pdb,sys
from ..utils import *
from ..discretization import get_wlist,quick_map
from ..chain import *
from ..hybri_sc import *
from ..chainmapper import *
def get_chain(z=0.5,method='qr'):
'''
Run this sample; the visual check is quite slow!
'''
#generate the hybridization function.
nband=4
Gamma=0.5/pi
Lambda=1.8
D=[-1.,0.5] #the energy window.
wlist=get_wlist(w0=1e-12,Nw=10000,mesh_type='log',Gap=0,D=D)
#rhofunc=lambda w:identity(4)+0.3*w*Gmat[0]+0.3*w**2*Gmat[2] #the case with degeneracy
rhofunc=lambda w:identity(4)+0.3*w*Gmat[0]+0.3*w**2*Gmat[2]+0.1*kron(sz,sz) #the case without degeneracy.
#create the discretized model
N=33 #the chain length
tick_type='adaptive'
discmodel=quick_map(rhofunc=rhofunc,wlist=wlist,N=N,z=z,Nx=200000,tick_params={'tick_type':tick_type,'Lambda':Lambda},autofix=1e-5)[1]
#map to a chain
chains=map2chain(discmodel,nsite=2*N,normalize_method=method)
return chains[0]
def get_chain_sc(z=0.5,method='qr'):
'''
Run this sample; the visual check is quite slow!
'''
#generate the hybridization function.
nband=2
Gamma=0.5/pi
Lambda=1.6
D0=2.
Gap=0.3
D=sqrt(D0**2+Gap**2)
wlist=get_wlist(w0=1e-12,Nw=10000,mesh_type='sclog',Gap=Gap,D=D)
rhofunc=get_hybri_skew(Gap,Gamma,D=D,eta=1e-12,skew=0.3)
rholist=array([rhofunc(w) for w in wlist])
#create the discretized model
N=23 #the chain length
tick_type='adaptive'
discmodel=quick_map(rhofunc=rhofunc,wlist=wlist,N=N,z=z,Nx=200000,tick_params={'tick_type':tick_type,'Lambda':Lambda},autofix=1e-5)[1]
#map to a chain
chains=map2chain(discmodel,normalize_method=method)
return chains[0]
def test_checkscale():
print('Test scaling of a random 4-band model.')
ion()
methods=['qr','mpqr','sqrtm']
for method in methods:
chain1=get_chain(z=0.5,method=method)
show_scaling(chain1)
legend(methods)
pdb.set_trace()
def test_checkscale_sc():
print('Test scaling of the superconducting chain; the off-diagonal part does not obey scaling.')
ion()
methods=['qr','mpqr','sqrtm']
for method in methods:
chain1=get_chain_sc(z=0.5,method=method)
show_scaling(chain1)
legend(methods)
pdb.set_trace()
if __name__=='__main__':
test_checkscale()
test_checkscale_sc()
| GiggleLiu/nrg_mapping | nrgmap/tests/test_checkscale.py | Python | mit | 2,657 |
import nose
import unittest
from soap.datatype import auto_type, int_type, float_type, IntegerArrayType
from soap.expression import expression_factory, operators, Variable, Subscript
from soap.semantics import IntegerInterval, ErrorSemantics
from soap.program.flow import (
AssignFlow, IfFlow, WhileFlow, ForFlow, CompositionalFlow,
PragmaInputFlow, PragmaOutputFlow, ProgramFlow
)
from soap.parser import stmt_parse, expr_parse, parse
class Base(unittest.TestCase):
def setUp(self):
self.a = Variable('a', IntegerArrayType([10]))
self.w = Variable('w', int_type)
self.x = Variable('x', auto_type)
self.y = Variable('y', auto_type)
self.z = Variable('z', auto_type)
self.i1 = IntegerInterval(1)
self.i2 = IntegerInterval(2)
self.i3 = IntegerInterval(3)
self.decl = {var.name: auto_type for var in (self.x, self.y, self.z)}
self.expr_parse = lambda expr: expr_parse(expr, self.decl)
class TestExpressionParser(Base):
def test_compound_boolean_expression(self):
bool_expr_1 = expression_factory(
operators.UNARY_NEGATION_OP, expression_factory(
operators.LESS_OP, self.x, self.i3))
bool_expr_2 = expression_factory(
operators.NOT_EQUAL_OP, self.y, self.i1)
bool_expr = expression_factory(
operators.AND_OP, bool_expr_1, bool_expr_2)
self.assertEqual(expr_parse('!(x < 3) && y != 1'), bool_expr)
def test_operator_precedence(self):
neg_y = expression_factory(operators.UNARY_SUBTRACT_OP, self.y)
expr = expression_factory(
operators.ADD_OP, self.x,
expression_factory(operators.MULTIPLY_OP, neg_y, self.z))
self.assertEqual(expr_parse('x + -y * z'), expr)
expr = expression_factory(
operators.MULTIPLY_OP,
expression_factory(operators.ADD_OP, self.x, neg_y),
self.z)
self.assertEqual(expr_parse('(x + -y) * z'), expr)
def test_special_unary_arithmetic_expression(self):
xpy = expression_factory(operators.ADD_OP, self.x, self.y)
expr = expression_factory(operators.EXPONENTIATE_OP, xpy)
self.assertEqual(expr_parse('exp(x + y)'), expr)
def test_select_expression(self):
expr = expression_factory(
operators.TERNARY_SELECT_OP,
expression_factory(operators.LESS_OP, self.x, self.i3),
expression_factory(operators.ADD_OP, self.y, self.i1),
expression_factory(operators.MULTIPLY_OP, self.y, self.i2))
self.assertEqual(expr_parse('x < 3 ? y + 1 : y * 2'), expr)
def test_variable_subscript(self):
expr = expression_factory(
operators.INDEX_ACCESS_OP, self.x, Subscript(self.i1))
self.assertEqual(expr_parse('x[1]'), expr)
expr = expression_factory(
operators.INDEX_ACCESS_OP, self.x,
Subscript(expression_factory(operators.ADD_OP, self.y, self.i1)))
self.assertEqual(expr_parse('x[y + 1]'), expr)
expr = expression_factory(
operators.INDEX_ACCESS_OP, self.x, Subscript(self.y, self.i1))
self.assertEqual(expr_parse('x[y][1]'), expr)
expr = expression_factory(
operators.INDEX_ACCESS_OP, self.x,
Subscript(expression_factory(
operators.INDEX_ACCESS_OP, self.y, Subscript(self.i1))))
self.assertEqual(expr_parse('x[y[1]]'), expr)
class TestStatementParser(Base):
def setUp(self):
super().setUp()
self.stmt_parse = lambda prog: stmt_parse(prog, self.decl)
def test_assign_statement(self):
expr = expression_factory(
operators.ADD_OP, self.y, self.i1)
flow = AssignFlow(self.x, expr)
self.assertEqual(self.stmt_parse('x = y + 1;'), flow)
def test_boolean_assign_statement(self):
raise nose.SkipTest  # can't be bothered with this now
expr = expression_factory(
operators.LESS_EQUAL_OP, self.y, self.i1)
flow = AssignFlow(self.x, expr)
self.assertEqual(self.stmt_parse('x = y < 1;'), flow)
def test_declaration_assign_statement(self):
flow = AssignFlow(self.w, self.i1)
self.assertEqual(self.stmt_parse('int w = 1;'), flow)
def test_declaration_statement(self):
self.stmt_parse('int w;')
self.stmt_parse('float a[10][10];')
def test_operator_assign_statement(self):
expr = expression_factory(
operators.ADD_OP, self.x, self.i1)
flow = AssignFlow(self.x, expr)
self.assertEqual(self.stmt_parse('x += 1;'), flow)
def test_increment_statement(self):
expr = expression_factory(
operators.ADD_OP, self.x, self.i1)
flow = AssignFlow(self.x, expr)
self.assertEqual(self.stmt_parse('x++;'), flow)
def test_if_statement(self):
bool_expr = expression_factory(operators.LESS_OP, self.x, self.i3)
assign_flow_1 = AssignFlow(self.y, self.x)
assign_flow_2 = AssignFlow(self.x, self.y)
flow = IfFlow(bool_expr, assign_flow_1)
self.assertEqual(self.stmt_parse('if (x < 3) {y = x;}'), flow)
flow = IfFlow(bool_expr, assign_flow_1, assign_flow_2)
self.assertEqual(
self.stmt_parse('if (x < 3) {y = x;} else {x = y;}'), flow)
def test_single_line_if_statement(self):
bool_expr = expression_factory(operators.LESS_OP, self.x, self.i3)
assign_flow_1 = AssignFlow(self.y, self.x)
assign_flow_2 = AssignFlow(self.x, self.y)
flow = IfFlow(bool_expr, assign_flow_1)
self.assertEqual(self.stmt_parse('if (x < 3) y = x;'), flow)
flow = IfFlow(bool_expr, assign_flow_1, assign_flow_2)
self.assertEqual(
self.stmt_parse('if (x < 3) y = x; else x = y;'), flow)
def test_while_statement(self):
bool_expr = expression_factory(operators.LESS_OP, self.x, self.i3)
assign_flow = AssignFlow(self.y, self.x)
flow = WhileFlow(bool_expr, assign_flow)
self.assertEqual(self.stmt_parse('while (x < 3) {y = x;}'), flow)
def test_single_line_while_statement(self):
bool_expr = expression_factory(operators.LESS_OP, self.x, self.i3)
assign_flow = AssignFlow(self.y, self.x)
flow = WhileFlow(bool_expr, assign_flow)
self.assertEqual(self.stmt_parse('while (x < 3) y = x;'), flow)
def test_for_statement(self):
bool_expr = expression_factory(operators.LESS_OP, self.x, self.i3)
init_flow = AssignFlow(self.x, self.i1)
incr_flow = AssignFlow(self.x, expression_factory(
operators.ADD_OP, self.x, self.i1))
assign_flow = AssignFlow(self.y, self.x)
flow = ForFlow(init_flow, bool_expr, incr_flow, assign_flow)
parsed_flow = self.stmt_parse('for (x = 1; x < 3; x = x + 1) {y = x;}')
self.assertEqual(parsed_flow, flow)
def test_single_line_for_statement(self):
bool_expr = expression_factory(operators.LESS_OP, self.x, self.i3)
init_flow = AssignFlow(self.x, self.i1)
incr_flow = AssignFlow(self.x, expression_factory(
operators.ADD_OP, self.x, self.i1))
assign_flow = AssignFlow(self.y, self.x)
flow = ForFlow(init_flow, bool_expr, incr_flow, assign_flow)
parsed_flow = self.stmt_parse('for (x = 1; x < 3; x = x + 1) y = x;')
self.assertEqual(parsed_flow, flow)
def test_compound_statement(self):
flow = CompositionalFlow(
[AssignFlow(self.y, self.x), AssignFlow(self.x, self.y)])
self.assertEqual(self.stmt_parse('y = x; x = y;'), flow)
class TestProgramParser(Base):
def setUp(self):
super().setUp()
self.w = Variable('w', float_type)
self.x = Variable('x', int_type)
self.y = Variable('y', float_type)
self.z = Variable('z', float_type)
self.decl = {
'x': int_type,
'y': float_type,
'z': float_type,
}
def test_full(self):
expr = expression_factory(
operators.ADD_OP, expression_factory(
operators.ADD_OP, self.x, self.y), self.z)
inputs = [
(self.x, self.i1),
(self.y, ErrorSemantics([3.0, 4.0], [0, 0])),
(self.z, ErrorSemantics([5, 6], [0, 0])),
]
outputs = [self.w]
body = CompositionalFlow([
PragmaInputFlow(inputs),
PragmaOutputFlow(outputs),
AssignFlow(self.w, expr),
])
flow = ProgramFlow(body)
prog = """
#pragma soap input \
int x=1, float y=[3.0, 4.0], float z=[5.0, 6.0][0, 0]
#pragma soap output w
float w = x + y + z;
"""
parsed_flow = parse(prog)
self.assertListEqual(list(parsed_flow.inputs.items()), inputs)
self.assertListEqual(parsed_flow.outputs, outputs)
self.assertEqual(parsed_flow, flow)
| admk/soap | tests/test_parser.py | Python | mit | 8,996 |
#!/usr/bin/env python3
import time
from multiprocessing import Process
import selfdrive.crash as crash
from common.params import Params
from selfdrive.launcher import launcher
from selfdrive.swaglog import cloudlog
from selfdrive.version import version, dirty
ATHENA_MGR_PID_PARAM = "AthenadPid"
def main():
params = Params()
dongle_id = params.get("DongleId").decode('utf-8')
cloudlog.bind_global(dongle_id=dongle_id, version=version, dirty=dirty, is_eon=True)
crash.bind_user(id=dongle_id)
crash.bind_extra(version=version, dirty=dirty, is_eon=True)
crash.install()
try:
while 1:
cloudlog.info("starting athena daemon")
proc = Process(name='athenad', target=launcher, args=('selfdrive.athena.athenad',))
proc.start()
proc.join()
cloudlog.event("athenad exited", exitcode=proc.exitcode)
time.sleep(5)
except Exception:
cloudlog.exception("manage_athenad.exception")
finally:
params.delete(ATHENA_MGR_PID_PARAM)
if __name__ == '__main__':
main()
| vntarasov/openpilot | selfdrive/athena/manage_athenad.py | Python | mit | 1,020 |
from rx.disposable import CompositeDisposable
from rx.observable import Producer
import rx.linq.sink
from threading import RLock
class TakeCount(Producer):
def __init__(self, source, count):
self.source = source
self.count = count
def omega(self, count):
if self.count <= count:
return self
else:
return TakeCount(self.source, count)
def run(self, observer, cancel, setSink):
sink = self.Sink(self, observer, cancel)
setSink(sink)
return self.source.subscribeSafe(sink)
class Sink(rx.linq.sink.Sink):
def __init__(self, parent, observer, cancel):
super(TakeCount.Sink, self).__init__(observer, cancel)
self.parent = parent
self.remaining = self.parent.count
def onNext(self, value):
if self.remaining > 0:
self.remaining -= 1
self.observer.onNext(value)
if self.remaining == 0:
self.observer.onCompleted()
self.dispose()
def onError(self, exception):
self.observer.onError(exception)
self.dispose()
def onCompleted(self):
self.observer.onCompleted()
self.dispose()
class TakeTime(Producer):
def __init__(self, source, duration, scheduler):
self.source = source
self.duration = duration
self.scheduler = scheduler
def omega(self, duration):
if self.duration <= duration:
return self
else:
return TakeTime(self.source, duration, self.scheduler)
def run(self, observer, cancel, setSink):
sink = self.Sink(self, observer, cancel)
setSink(sink)
return self.source.subscribeSafe(sink)
class Sink(rx.linq.sink.Sink):
def __init__(self, parent, observer, cancel):
super(TakeTime.Sink, self).__init__(observer, cancel)
self.parent = parent
def run(self):
self.gate = RLock()
t = self.parent.scheduler.scheduleWithRelative(self.parent.duration, self.tick)
d = self.parent.source.subscribeSafe(self)
return CompositeDisposable(t, d)
def tick(self):
with self.gate:
self.observer.onCompleted()
self.dispose()
def onNext(self, value):
with self.gate:
self.observer.onNext(value)
def onError(self, exception):
with self.gate:
self.observer.onError(exception)
self.dispose()
def onCompleted(self):
with self.gate:
self.observer.onCompleted()
self.dispose()
| akuendig/RxPython | rx/linq/take.py | Python | mit | 2,452 |
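The `omega` method in both producers above implements a small fusion rule: chaining two takes keeps only the more restrictive one. A tiny illustration of that rule in isolation (hypothetical usage; `source=None` is a placeholder and the subscription machinery is not exercised):

```python
inner = TakeCount(source=None, count=10)

# take(10) followed by take(3) collapses to take(3)
outer = inner.omega(3)
assert outer.count == 3

# take(10) followed by take(20) stays take(10)
assert inner.omega(20) is inner
```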
__author__ = 'isparks'
import unittest
from rwslib.builders import *
from xml.etree import cElementTree as ET
def obj_to_doc(obj,*args, **kwargs):
"""Convert an object to am XML document object"""
builder = ET.TreeBuilder()
obj.build(builder, *args, **kwargs)
return builder.close()
class TestInheritance(unittest.TestCase):
"""The things we do for 100% coverage."""
def test_inheritance_warning(self):
class NewObj(ODMElement):
"""We do not override the __lshift__ method"""
pass
with self.assertRaises(ValueError):
# Exercise __lshift__
NewObj() << object()
class TestAttributeSetters(unittest.TestCase):
class TestElem(ODMElement):
"""Test class with a bad __lshift__ implementation"""
def __init__(self):
self.user = None
self.locations = []
def __lshift__(self, other):
self.set_single_attribute(other, UserRef, "xxxuser") #Incorrect spelling of user attribute
self.set_list_attribute(other, LocationRef, "xxxlocations") #Incorrect spelling of location attribute
def test_single_attribute_misspelling(self):
tested = TestAttributeSetters.TestElem()
with self.assertRaises(AttributeError):
tested << UserRef("Fred")
def test_list_attribute_misspelling(self):
tested = TestAttributeSetters.TestElem()
with self.assertRaises(AttributeError):
tested << LocationRef("Site 22")
class TestUserRef(unittest.TestCase):
def test_accepts_no_children(self):
with self.assertRaises(ValueError):
UserRef("Gertrude") << object()
def test_builder(self):
"""Test building XML"""
tested = UserRef('Fred')
doc = obj_to_doc(tested)
self.assertEqual(doc.attrib['UserOID'],"Fred")
self.assertEqual(doc.tag,"UserRef")
class TestLocationRef(unittest.TestCase):
def test_accepts_no_children(self):
with self.assertRaises(ValueError):
LocationRef("Nowhereville") << object()
def test_builder(self):
"""Test building XML"""
tested = LocationRef('Gainesville')
doc = obj_to_doc(tested)
self.assertEqual(doc.attrib['LocationOID'], "Gainesville")
self.assertEqual(doc.tag, "LocationRef")
class TestReasonForChange(unittest.TestCase):
def test_accepts_no_children(self):
with self.assertRaises(ValueError):
ReasonForChange("Because I wanted to") << object()
def test_builder(self):
"""Test building XML"""
tested = ReasonForChange("Testing 1..2..3")
doc = obj_to_doc(tested)
self.assertEqual("Testing 1..2..3", doc.text)
self.assertEqual(doc.tag, "ReasonForChange")
class TestDateTimeStamp(unittest.TestCase):
def test_accepts_no_children(self):
with self.assertRaises(ValueError):
DateTimeStamp(datetime.now()) << object()
def test_builder_with_datetime(self):
dt = datetime(2015, 9, 11, 10, 15, 22, 80)
tested = DateTimeStamp(dt)
doc = obj_to_doc(tested)
self.assertEqual(dt_to_iso8601(dt), doc.text)
self.assertEqual(doc.tag, "DateTimeStamp")
def test_builder_with_string(self):
dt = "2009-02-04T14:10:32-05:00"
tested = DateTimeStamp(dt)
doc = obj_to_doc(tested)
self.assertEqual(dt, doc.text)
self.assertEqual(doc.tag, "DateTimeStamp")
class TestAuditRecord(unittest.TestCase):
def setUp(self):
self.tested = AuditRecord(edit_point=AuditRecord.EDIT_DATA_MANAGEMENT,
used_imputation_method= False,
identifier='X2011',
include_file_oid=False)
self.tested << UserRef("Fred")
self.tested << LocationRef("Site102")
self.tested << ReasonForChange("Data Entry Error")
self.tested << DateTimeStamp(datetime(2015, 9, 11, 10, 15, 22, 80))
def test_identifier_must_not_start_digit(self):
with self.assertRaises(AttributeError):
AuditRecord(identifier='2011')
with self.assertRaises(AttributeError):
AuditRecord(identifier='*Hello')
# Underscore OK
ar = AuditRecord(identifier='_Hello')
self.assertEqual('_Hello', ar.id)
# Letter OK
ar = AuditRecord(identifier='Hello')
self.assertEqual('Hello', ar.id)
def test_accepts_no_invalid_children(self):
with self.assertRaises(ValueError):
AuditRecord() << object()
def test_invalid_edit_point(self):
with self.assertRaises(AttributeError):
AuditRecord(edit_point='Blah')
def test_builder(self):
doc = obj_to_doc(self.tested)
self.assertEqual(doc.tag, "AuditRecord")
self.assertEqual(AuditRecord.EDIT_DATA_MANAGEMENT, doc.attrib["EditPoint"])
self.assertEqual("No", doc.attrib["UsedImputationMethod"])
self.assertEqual("No", doc.attrib["mdsol:IncludeFileOID"])
self.assertEqual("UserRef", doc.getchildren()[0].tag)
self.assertEqual("LocationRef", doc.getchildren()[1].tag)
self.assertEqual("DateTimeStamp", doc.getchildren()[2].tag)
self.assertEqual("ReasonForChange", doc.getchildren()[3].tag)
def test_no_user_ref(self):
"""Test with no user ref should fail on build with a ValueError"""
self.tested.user_ref = None
with self.assertRaises(ValueError) as err:
doc = obj_to_doc(self.tested)
self.assertIn("UserRef", err.exception.message)
def test_no_location_ref(self):
"""Test with no location ref should fail on build with a ValueError"""
self.tested.location_ref = None
with self.assertRaises(ValueError) as err:
doc = obj_to_doc(self.tested)
self.assertIn("LocationRef", err.exception.message)
def test_no_datetime_stamp(self):
"""Test with no datetimestamp should fail on build with a ValueError"""
self.tested.date_time_stamp = None
with self.assertRaises(ValueError) as err:
doc = obj_to_doc(self.tested)
self.assertIn("DateTimeStamp", err.exception.message)
class TestMdsolQuery(unittest.TestCase):
"""Test extension MdsolQuery"""
def get_tested(self):
return MdsolQuery(status=QueryStatusType.Open, value="Data missing", query_repeat_key=123,
recipient="Site from System", requires_response=True)
def test_basic(self):
tested = self.get_tested()
self.assertEqual("Data missing",tested.value)
self.assertEqual(123,tested.query_repeat_key)
self.assertEqual(QueryStatusType.Open,tested.status)
self.assertEqual("Site from System",tested.recipient)
self.assertEqual(True,tested.requires_response)
def test_builder(self):
tested = self.get_tested()
tested.response = "Done"
doc = obj_to_doc(tested)
self.assertEqual("mdsol:Query", doc.tag)
self.assertEqual("Yes", doc.attrib['RequiresResponse'])
self.assertEqual("Site from System", doc.attrib['Recipient'])
self.assertEqual("123", doc.attrib['QueryRepeatKey'])
self.assertEqual("Data missing", doc.attrib['Value'])
self.assertEqual("Done", doc.attrib['Response'])
def test_invalid_status_value(self):
"""Status must come from QueryStatusType"""
with self.assertRaises(AttributeError):
MdsolQuery(status='A test')
class TestItemData(unittest.TestCase):
"""Test ItemData classes"""
def setUp(self):
self.tested = ItemData('FIELDA',"TEST")
def test_basic(self):
tested = self.tested
self.assertEqual(tested.itemoid, "FIELDA")
self.assertEqual(tested.value, "TEST")
self.assertEqual(tested.lock, None)
self.assertEqual(tested.freeze, None)
self.assertEqual(tested.verify, None)
def test_only_accepts_itemdata(self):
"""Test that an ItemData will not accept any old object"""
with self.assertRaises(ValueError):
self.tested << {"Field1" : "ValueC"}
def test_accepts_query(self):
"""Test that an ItemData will accept a query"""
query = MdsolQuery()
self.tested << query
self.assertEqual(query, self.tested.queries[0])
def test_accepts_measurement_unit_ref(self):
"""Test that an ItemData will accept a measurement unit ref"""
mur = MeasurementUnitRef("Celsius")
self.tested << mur
self.assertEqual(mur, self.tested.measurement_unit_ref)
def test_isnull_not_set(self):
"""Isnull should not be set where we have a value not in '', None"""
doc = obj_to_doc(self.tested)
# Check IsNull attribute is missing
def do():
doc.attrib['IsNull']
self.assertRaises(KeyError,do)
def test_specify(self):
"""Test specify"""
specify_value = 'A Specify'
self.tested.specify_value = specify_value
doc = obj_to_doc(self.tested)
self.assertEqual(doc.attrib['mdsol:SpecifyValue'],specify_value)
def test_freeze_lock_verify(self):
tested = ItemData('FIELDA',"TEST", lock=True, verify=True, freeze=False)
self.assertEqual(tested.lock, True)
self.assertEqual(tested.freeze, False)
self.assertEqual(tested.verify, True)
def test_builder(self):
"""Test building XML"""
tested = ItemData('FIELDA',"TEST", lock=True, verify=True, freeze=False)
tested << AuditRecord(edit_point=AuditRecord.EDIT_DATA_MANAGEMENT,
used_imputation_method= False,
identifier="x2011",
include_file_oid=False)(
UserRef("Fred"),
LocationRef("Site102"),
ReasonForChange("Data Entry Error"),
DateTimeStamp(datetime(2015, 9, 11, 10, 15, 22, 80))
)
tested << MdsolQuery()
tested << MeasurementUnitRef("Celsius")
doc = obj_to_doc(tested)
self.assertEqual(doc.attrib['ItemOID'],"FIELDA")
self.assertEqual(doc.attrib['Value'],"TEST")
self.assertEqual(doc.attrib['mdsol:Verify'],"Yes")
self.assertEqual(doc.attrib['mdsol:Lock'],"Yes")
self.assertEqual(doc.attrib['mdsol:Freeze'],"No")
self.assertEqual(doc.tag,"ItemData")
self.assertEqual("AuditRecord",doc.getchildren()[0].tag)
self.assertEqual("MeasurementUnitRef",doc.getchildren()[1].tag)
self.assertEqual("mdsol:Query",doc.getchildren()[2].tag)
def test_transaction_type(self):
tested = self.tested
tested.transaction_type = 'Update'
doc = obj_to_doc(tested)
self.assertEqual(doc.attrib['TransactionType'],"Update")
def test_null_value(self):
"""Null or empty string values are treated specially with IsNull property and no value"""
tested = self.tested
tested.value = ''
doc = obj_to_doc(tested)
self.assertEqual(doc.attrib['IsNull'],"Yes")
#Check Value attribute is also missing
def do():
doc.attrib["Value"]
self.assertRaises(KeyError,do)
def test_invalid_transaction_type(self):
def do():
ItemData("A","val",transaction_type='invalid')
self.assertRaises(AttributeError, do )
class TestItemGroupData(unittest.TestCase):
"""Test ItemGroupData classes"""
def setUp(self):
self.tested = ItemGroupData()(
ItemData("Field1","ValueA"),
ItemData("Field2","ValueB")
)
def test_children(self):
"""Test there are 2 children"""
self.assertEqual(2, len(self.tested.items))
def test_two_same_invalid(self):
"""Test adding a duplicate field causes error"""
def do():
self.tested << ItemData("Field1","ValueC")
self.assertRaises(ValueError,do)
def test_only_accepts_itemdata(self):
"""Test that an ItemGroupData will only accept an ItemData element"""
def do():
self.tested << {"Field1" : "ValueC"}
self.assertRaises(ValueError,do)
def test_invalid_transaction_type(self):
def do():
ItemGroupData(transaction_type='invalid')
self.assertRaises(AttributeError, do )
def test_builders_basic(self):
doc = obj_to_doc(self.tested,"TESTFORM")
self.assertEqual(doc.attrib["ItemGroupOID"],"TESTFORM")
self.assertEqual(len(doc),2)
self.assertEqual(doc.tag,"ItemGroupData")
def test_transaction_type(self):
"""Test transaction type inserted if set"""
self.tested.transaction_type = 'Context'
doc = obj_to_doc(self.tested,"TESTFORM")
self.assertEqual(doc.attrib["TransactionType"],"Context")
def test_whole_item_group(self):
"""mdsol:Submission should be wholeitemgroup or SpecifiedItemsOnly"""
doc = obj_to_doc(self.tested,"TESTFORM")
self.assertEqual(doc.attrib["mdsol:Submission"],"SpecifiedItemsOnly")
self.tested.whole_item_group = True
doc = obj_to_doc(self.tested,"TESTFORM")
self.assertEqual(doc.attrib["mdsol:Submission"],"WholeItemGroup")
class TestFormData(unittest.TestCase):
"""Test FormData classes"""
def setUp(self):
self.tested = FormData("TESTFORM_A") (
ItemGroupData()(
ItemData("Field1","ValueA"),
ItemData("Field2","ValueB")
),
ItemGroupData()(
ItemData("Field3","ValueC"),
),
ItemGroupData()(
ItemData("Field4","ValueD"),
),
)
def test_children(self):
"""Test there are 3 children"""
self.assertEqual(3, len(self.tested.itemgroups))
def test_invalid_transaction_type(self):
"""Can only be insert, update, upsert not context"""
def do():
FormData("MYFORM",transaction_type='context')
self.assertRaises(AttributeError, do )
def test_only_accepts_itemgroupdata(self):
"""Test that only ItemGroupData can be inserted"""
def do():
# Bzzzt. Should be ItemGroupData
self.tested << ItemData("Field1","ValueC")
self.assertRaises(ValueError,do)
def test_only_add_itemgroup_once(self):
"""Test that an ItemGroupData can only be added once"""
igd = ItemGroupData()
self.tested << igd
def do():
self.tested << igd
self.assertRaises(ValueError,do)
def test_builders_basic(self):
doc = obj_to_doc(self.tested)
self.assertEqual(doc.attrib["FormOID"], "TESTFORM_A")
self.assertEqual(len(doc), 3)
self.assertEqual(doc.tag, "FormData")
def test_transaction_type(self):
"""Test transaction type inserted if set"""
self.tested.transaction_type = 'Update'
doc = obj_to_doc(self.tested)
self.assertEqual(doc.attrib["TransactionType"], self.tested.transaction_type)
def test_invalid_transaction_type_direct_assign(self):
"""Test transaction type will not allow you to set to invalid choice"""
def do():
self.tested.transaction_type = 'invalid'
self.assertRaises(AttributeError,do)
def test_form_repeat_key(self):
"""Test transaction type inserted if set"""
tested = FormData("TESTFORM_A", form_repeat_key=9) (
ItemGroupData()(
ItemData("Field1", "ValueA"),
ItemData("Field2", "ValueB")
)
)
doc = obj_to_doc(tested)
self.assertEqual(doc.attrib["FormRepeatKey"],"9")
class TestStudyEventData(unittest.TestCase):
"""Test StudyEventData classes"""
def setUp(self):
self.tested = StudyEventData('VISIT_1') (
FormData("TESTFORM_A") (
ItemGroupData()(
ItemData("Field1", "ValueA"),
ItemData("Field2", "ValueB")
),
ItemGroupData(item_group_repeat_key=2)(
ItemData("Field3", "ValueC"),
),
)
)
def test_transaction_type(self):
"""Test transaction type inserted if set"""
self.tested.transaction_type = 'Update'
doc = obj_to_doc(self.tested)
self.assertEqual(doc.attrib["TransactionType"],self.tested.transaction_type)
def test_builders_basic(self):
doc = obj_to_doc(self.tested)
self.assertEqual(doc.attrib["StudyEventOID"],"VISIT_1")
self.assertIsNone(doc.attrib.get("StudyEventRepeatKey"))
self.assertEqual(len(doc),1)
self.assertEqual(doc.tag,"StudyEventData")
def test_only_add_formdata_once(self):
"""Test that an FormData object can only be added once"""
fd = FormData("FORM1")
self.tested << fd
def do():
self.tested << fd
self.assertRaises(ValueError,do)
def test_invalid_transaction_type_direct_assign(self):
"""Test transaction type will not allow you to set to invalid choice"""
def do():
self.tested.transaction_type = 'upsert'
self.assertRaises(AttributeError,do)
def test_invalid_transaction_type(self):
"""According to docs does not permit upserts"""
def do():
StudyEventData("V2",transaction_type='upsert')
self.assertRaises(AttributeError, do )
def test_only_accepts_formdata(self):
"""Test that only FormData can be inserted"""
def do():
# Bzzzt. Should be FormData
self.tested << ItemData("Field1", "ValueC")
self.assertRaises(ValueError,do)
class TestSubjectData(unittest.TestCase):
"""Test SubjectData classes"""
def setUp(self):
self.tested = SubjectData("SITE1","SUBJECT1")(
StudyEventData('VISIT_1')(
FormData("TESTFORM_A")(
ItemGroupData()(
ItemData("Field1", "ValueA"),
ItemData("Field2", "ValueB")
),
ItemGroupData(item_group_repeat_key=2)(
ItemData("Field3", "ValueC"),
),
)
)
)
def test_basic(self):
"""Test there are 3 children"""
self.assertEqual("SITE1", self.tested.sitelocationoid)
self.assertEqual("SUBJECT1", self.tested.subject_key)
# Default transaction type
self.assertEqual("Update", self.tested.transaction_type)
def test_invalid_transaction_type_direct_assign(self):
"""Test transaction type will not allow you to set to invalid choice"""
def do():
self.tested.transaction_type = 'UpDateSert'
self.assertRaises(AttributeError, do)
def test_children(self):
"""Test there is 1 child"""
self.assertEqual(1, len(self.tested.study_events))
def test_invalid_transaction_type(self):
"""According to docs does not permit upserts"""
def do():
SubjectData("SITEA", "SUB1", transaction_type='upsert')
self.assertRaises(AttributeError, do )
def test_builder(self):
"""XML produced"""
doc = obj_to_doc(self.tested)
# Test default transaction type
self.assertEqual(doc.attrib["TransactionType"], "Update")
self.assertEqual(doc.tag, "SubjectData")
def test_only_add_studyeventdata_once(self):
"""Test that a StudyEventData object can only be added once"""
sed = StudyEventData("V1")
self.tested << sed
def do():
self.tested << sed
self.assertRaises(ValueError,do)
def test_does_not_accept_all_elements(self):
"""Test that,for example, ItemData cannot be accepted"""
def do():
self.tested << ItemData("Field1", "ValueC")
self.assertRaises(ValueError, do)
def test_accepts_auditrecord(self):
"""Test that AuditRecord can be inserted"""
ar = AuditRecord(used_imputation_method=False,
identifier='ABC1',
include_file_oid=False)(
UserRef('test_user'),
LocationRef('test_site'),
ReasonForChange("Testing"),
DateTimeStamp(datetime.now())
)
self.tested << ar
self.assertEqual(self.tested.audit_record, ar)
class TestClinicalData(unittest.TestCase):
"""Test ClinicalData classes"""
def setUp(self):
self.tested = ClinicalData("STUDY1", "DEV")(
SubjectData("SITE1","SUBJECT1")(
StudyEventData('VISIT_1')(
FormData("TESTFORM_A")(
ItemGroupData()(
ItemData("Field1", "ValueA"),
ItemData("Field2", "ValueB")
),
ItemGroupData(item_group_repeat_key=2)(
ItemData("Field3", "ValueC"),
),
)
)
)
)
def test_basic(self):
"""Test there are 3 children"""
self.assertEqual("STUDY1", self.tested.projectname)
self.assertEqual("DEV", self.tested.environment)
# Test default MetadataVersionOID
self.assertEqual("1", self.tested.metadata_version_oid)
def test_metadata_version_oid(self):
self.tested.metadata_version_oid = '2'
doc = obj_to_doc(self.tested)
self.assertEqual(doc.attrib["MetaDataVersionOID"],self.tested.metadata_version_oid)
def test_only_accepts_subjectdata(self):
"""Test that only SubjectData can be inserted"""
tested = ClinicalData("STUDY1", "DEV")
def do():
tested << object()
self.assertRaises(ValueError,do)
def test_only_accepts_one_subject(self):
"""Test that only one SubjectData can be inserted"""
def do():
self.tested << SubjectData("SITE2", "SUBJECT2")
self.assertRaises(ValueError,do)
def test_builder(self):
"""XML produced"""
doc = obj_to_doc(self.tested)
self.assertEqual(doc.tag,"ClinicalData")
class TestODM(unittest.TestCase):
"""Test ODM wrapper class"""
def setUp(self):
self.tested = ODM("MY TEST SYSTEM", description="My test message")(
ClinicalData("STUDY1","DEV")(
SubjectData("SITE1","SUBJECT1")(
StudyEventData('VISIT_1')(
FormData("TESTFORM_A")(
ItemGroupData()(
ItemData("Field1", "ValueA"),
ItemData("Field2", "ValueB")
),
ItemGroupData(item_group_repeat_key=2)(
ItemData("Field3", "ValueC"),
),
)
)
)
)
)
def test_basic(self):
"""Basic tests"""
# If no fileoid is given, a unique id is generated
self.assertEqual(True,self.tested.fileoid is not None)
self.assertEqual("My test message", self.tested.description)
def test_assign_fileoid(self):
"""Test if you assign a fileoid it keeps it"""
tested = ODM("MY TEST SYSTEM", fileoid="F1")
self.assertEqual("F1", tested.fileoid)
def test_only_accepts_valid_children(self):
"""Test that only ClinicalData or Study can be inserted"""
def do():
self.tested << ItemData("Field1", "ValueC")
self.assertRaises(ValueError,do)
def test_accepts_clinicaldata_and_study(self):
"""Test that accepts clinicaldata"""
tested = ODM("MY TEST SYSTEM", fileoid="F1")
cd = ClinicalData("Project1","DEV")
study = Study("PROJ1",project_type=Study.PROJECT)
tested << cd
tested << study
self.assertEqual(study,tested.study)
self.assertEqual(cd, tested.clinical_data)
def test_getroot(self):
"""XML produced"""
doc = self.tested.getroot()
self.assertEqual(doc.tag,"ODM")
self.assertEqual(doc.attrib["Originator"], "MY TEST SYSTEM")
self.assertEqual(doc.attrib["Description"], self.tested.description)
self.assertEqual("ClinicalData", doc.getchildren()[0].tag)
def test_getroot_study(self):
"""XML produced with a study child"""
tested = ODM("MY TEST SYSTEM", fileoid="F1")
study = Study("PROJ1",project_type=Study.PROJECT)
tested << study
doc = tested.getroot()
self.assertEqual(doc.tag,"ODM")
self.assertEqual("Study", doc.getchildren()[0].tag)
def test_str_well_formed(self):
"""Make an XML string from the object, parse it to ensure it's well formed"""
doc = ET.fromstring(str(self.tested))
NS_ODM = '{http://www.cdisc.org/ns/odm/v1.3}'
self.assertEqual(doc.tag,NS_ODM + "ODM")
self.assertEqual(doc.attrib["Originator"], "MY TEST SYSTEM")
self.assertEqual(doc.attrib["Description"], self.tested.description)
if __name__ == '__main__':
unittest.main()
| Oli76/rwslib | rwslib/tests/test_builders.py | Python | mit | 25,505 |
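The tests above exercise the rwslib builder classes piece by piece. For orientation, a compact end-to-end sketch of the same builder chain (values are illustrative; `str()` on the ODM object serialises it to ODM XML, as the well-formedness test above relies on):

```python
from rwslib.builders import (ODM, ClinicalData, SubjectData, StudyEventData,
                             FormData, ItemGroupData, ItemData)

# Build a minimal ODM message: one subject, one visit, one form, one item group.
odm = ODM("MY SYSTEM")(
    ClinicalData("STUDY1", "DEV")(
        SubjectData("SITE1", "SUBJECT1")(
            StudyEventData("VISIT_1")(
                FormData("TESTFORM_A")(
                    ItemGroupData()(ItemData("Field1", "ValueA"))
                )
            )
        )
    )
)
print(str(odm))  # serialised ODM XML
```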
class Solution(object):
    def simplifyPath(self, path):
        """
        :type path: str
        :rtype: str
        """
        parts = path.strip().strip("/").split("/")
        stack = []
        for p in parts:
            if p == "." or not p:
                continue
            elif p == "..":
                if stack:
                    stack.pop()
            else:
                stack.append(p)
        return "/" + "/".join(stack)
| scaugrated/leetcode | algorithm/Simplify_Path.py | Python | mit | 403 |
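A few illustrative calls to the solution above, following the usual Unix path-simplification rules the problem assumes:

```python
s = Solution()
assert s.simplifyPath("/home/") == "/home"
assert s.simplifyPath("/a/./b/../../c/") == "/c"  # '..' pops, '.' and '' are skipped
assert s.simplifyPath("/../") == "/"              # cannot go above the root
```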
import numbers # noqa: E402
try:
basestring # basestring was removed in Python 3
except NameError:
basestring = str
def test_trade(exchange, trade, symbol, now):
assert trade
sampleTrade = {
'info': {'a': 1, 'b': 2, 'c': 3}, # the original decoded JSON as is
'id': '12345-67890:09876/54321', # string trade id
'timestamp': 1502962946216, # Unix timestamp in milliseconds
'datetime': '2017-08-17 12:42:48.000', # ISO8601 datetime with milliseconds
'symbol': 'ETH/BTC', # symbol
'order': '12345-67890:09876/54321', # string order id or None
'type': 'limit', # order type, 'market', 'limit' or None
'side': 'buy', # direction of the trade, 'buy' or 'sell'
'takerOrMaker': 'taker', # string, 'taker' or 'maker'
'price': 0.06917684, # float price in quote currency
'amount': 1.5, # amount of base currency
'cost': 0.10376526, # total cost (including fees), `price * amount`
}
keys = list(sampleTrade.keys())
for i in range(0, len(keys)):
key = keys[i]
assert key in trade
fee = trade['fee'] if ('fee' in trade) else None
fees = trade['fees'] if ('fees' in trade) else None
# logical XOR
# doesn't work when both fee is defined and fees is defined
# if fee or fees:
# assert not (fee and fees)
# }
if fee:
assert('cost' in fee) and ('currency' in fee)
if fees:
assert isinstance(fees, list)
for i in range(0, len(fees)):
fee = fees[i]
assert('cost' in fee) and ('currency' in fee)
id = trade['id']
assert(id is None) or (isinstance(id, basestring))
timestamp = trade['timestamp']
assert isinstance(timestamp, numbers.Real) or timestamp is None
if timestamp:
assert timestamp > 1230940800000 # 03 Jan 2009 - first block
assert timestamp < 2147483648000 # 19 Jan 2038 - int32 overflows
adjustedNow = now + 60000
assert timestamp < adjustedNow, 'trade.timestamp is greater than or equal to current time: trade: ' + exchange.iso8601(timestamp) + ' now: ' + exchange.iso8601(now)
assert trade['datetime'] == exchange.iso8601(timestamp)
assert trade['symbol'] == symbol, 'trade symbol is not equal to requested symbol: trade: ' + trade['symbol'] + ' requested: ' + symbol
assert trade['type'] is None or isinstance(trade['type'], basestring)
assert trade['side'] is None or trade['side'] == 'buy' or trade['side'] == 'sell', 'unexpected trade side ' + trade['side']
assert trade['order'] is None or isinstance(trade['order'], basestring)
assert isinstance(trade['price'], numbers.Real), 'trade.price is not a number'
assert trade['price'] > 0
assert isinstance(trade['amount'], numbers.Real), 'trade.amount is not a number'
assert trade['amount'] >= 0
assert trade['cost'] is None or isinstance(trade['cost'], numbers.Real), 'trade.cost is not a number'
assert trade['cost'] is None or trade['cost'] >= 0
takerOrMaker = trade['takerOrMaker']
assert takerOrMaker is None or takerOrMaker == 'taker' or takerOrMaker == 'maker'
| ccxt/ccxt | python/ccxt/test/test_trade.py | Python | mit | 3,336 |
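A hedged sketch of driving `test_trade` by hand with a stub trade dict. It assumes ccxt is installed and that the function above is importable from wherever this module lives; `ccxt.Exchange` does provide `iso8601()` and `milliseconds()` on the base class:

```python
import ccxt

exchange = ccxt.Exchange()
now = exchange.milliseconds()
ts = 1502962946216
trade = {
    'info': {}, 'id': '1', 'order': None, 'type': None,
    'timestamp': ts, 'datetime': exchange.iso8601(ts),
    'symbol': 'ETH/BTC', 'side': 'buy', 'takerOrMaker': 'taker',
    'price': 0.069, 'amount': 1.5, 'cost': 0.1035,
}
test_trade(exchange, trade, 'ETH/BTC', now)  # raises AssertionError if the shape is wrong
```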
from queue import Queue
from threading import Event, Thread
from usb import USBError, ENDPOINT_OUT, ENDPOINT_IN
from usb.control import get_interface
from usb.core import find
from usb.util import find_descriptor, endpoint_direction, claim_interface, dispose_resources
from libAnt.drivers.driver import Driver, DriverException
from libAnt.loggers.logger import Logger
class USBDriver(Driver):
"""
An implementation of a USB ANT+ device driver
"""
def __init__(self, vid, pid, logger: Logger = None):
super().__init__(logger=logger)
self._idVendor = vid
self._idProduct = pid
self._dev = None
self._epOut = None
self._epIn = None
self._interfaceNumber = None
self._packetSize = 0x20
self._queue = None
self._loop = None
self._driver_open = False
def __str__(self):
if self.isOpen():
return str(self._dev)
return "Closed"
class USBLoop(Thread):
def __init__(self, ep, packetSize: int, queue: Queue):
super().__init__()
self._stopper = Event()
self._ep = ep
self._packetSize = packetSize
self._queue = queue
def stop(self) -> None:
self._stopper.set()
def run(self) -> None:
while not self._stopper.is_set():
try:
data = self._ep.read(self._packetSize, timeout=1000)
for d in data:
self._queue.put(d)
except USBError as e:
if e.errno not in (60, 110) and e.backend_error_code != -116:  # Timeout errors
self._stopper.set()
# Put None into the queue so reading threads realize the device has stopped
self._queue.put(None)
def _isOpen(self) -> bool:
return self._driver_open
def _open(self) -> None:
print('USB OPEN START')
try:
# find the first USB device that matches the filter
self._dev = find(idVendor=self._idVendor, idProduct=self._idProduct)
if self._dev is None:
raise DriverException("Could not open specified device")
# Detach kernel driver
try:
if self._dev.is_kernel_driver_active(0):
try:
self._dev.detach_kernel_driver(0)
except USBError as e:
raise DriverException("Could not detach kernel driver")
except NotImplementedError:
pass  # for non-Unix systems
# set the active configuration. With no arguments, the first
# configuration will be the active one
self._dev.set_configuration()
# get an endpoint instance
cfg = self._dev.get_active_configuration()
self._interfaceNumber = cfg[(0, 0)].bInterfaceNumber
interface = find_descriptor(cfg, bInterfaceNumber=self._interfaceNumber,
bAlternateSetting=get_interface(self._dev,
self._interfaceNumber))
claim_interface(self._dev, self._interfaceNumber)
self._epOut = find_descriptor(interface, custom_match=lambda e: endpoint_direction(
e.bEndpointAddress) == ENDPOINT_OUT)
self._epIn = find_descriptor(interface, custom_match=lambda e: endpoint_direction(
e.bEndpointAddress) == ENDPOINT_IN)
if self._epOut is None or self._epIn is None:
raise DriverException("Could not initialize USB endpoint")
self._queue = Queue()
self._loop = self.USBLoop(self._epIn, self._packetSize, self._queue)
self._loop.start()
self._driver_open = True
print('USB OPEN SUCCESS')
except IOError as e:
self._close()
raise DriverException(str(e))
def _close(self) -> None:
print('USB CLOSE START')
if self._loop is not None:
if self._loop.is_alive():
self._loop.stop()
self._loop.join()
self._loop = None
try:
self._dev.reset()
dispose_resources(self._dev)
except:
pass
self._dev = self._epOut = self._epIn = None
self._driver_open = False
print('USB CLOSE END')
def _read(self, count: int, timeout=None) -> bytes:
data = bytearray()
for i in range(0, count):
b = self._queue.get(timeout=timeout)
if b is None:
self._close()
raise DriverException("Device is closed!")
data.append(b)
return bytes(data)
def _write(self, data: bytes) -> None:
return self._epOut.write(data)
def _abort(self) -> None:
pass # not implemented for USB driver, use timeouts instead
| half2me/libant | libAnt/drivers/usb.py | Python | mit | 5,017 |
# This file was automatically generated by SWIG (http://www.swig.org).
# Version 1.3.38
#
# Do not make changes to this file unless you know what you are doing--modify
# the SWIG interface file instead.
# This file is compatible with both classic and new-style classes.
from sys import version_info
if version_info >= (2,6,0):
def swig_import_helper():
from os.path import dirname
import imp
try:
fp, pathname, description = imp.find_module('_pdq', [dirname(__file__)])
_mod = imp.load_module('_pdq', fp, pathname, description)
finally:
if fp is not None: fp.close()
return _mod
_pdq = swig_import_helper()
del swig_import_helper
else:
import _pdq
del version_info
try:
_swig_property = property
except NameError:
pass # Python < 2.2 doesn't have 'property'.
def _swig_setattr_nondynamic(self,class_type,name,value,static=1):
if (name == "thisown"): return self.this.own(value)
if (name == "this"):
if type(value).__name__ == 'SwigPyObject':
self.__dict__[name] = value
return
method = class_type.__swig_setmethods__.get(name,None)
if method: return method(self,value)
if (not static) or hasattr(self,name):
self.__dict__[name] = value
else:
raise AttributeError("You cannot add attributes to %s" % self)
def _swig_setattr(self,class_type,name,value):
return _swig_setattr_nondynamic(self,class_type,name,value,0)
def _swig_getattr(self,class_type,name):
if (name == "thisown"): return self.this.own()
method = class_type.__swig_getmethods__.get(name,None)
if method: return method(self)
raise AttributeError(name)
def _swig_repr(self):
try: strthis = "proxy of " + self.this.__repr__()
except: strthis = ""
return "<%s.%s; %s >" % (self.__class__.__module__, self.__class__.__name__, strthis,)
try:
_object = object
_newclass = 1
except AttributeError:
class _object : pass
_newclass = 0
TRUE = _pdq.TRUE
FALSE = _pdq.FALSE
MAXNODES = _pdq.MAXNODES
MAXBUF = _pdq.MAXBUF
MAXSTREAMS = _pdq.MAXSTREAMS
MAXCHARS = _pdq.MAXCHARS
VOID = _pdq.VOID
OPEN = _pdq.OPEN
CLOSED = _pdq.CLOSED
MEM = _pdq.MEM
CEN = _pdq.CEN
DLY = _pdq.DLY
MSQ = _pdq.MSQ
ISRV = _pdq.ISRV
FCFS = _pdq.FCFS
PSHR = _pdq.PSHR
LCFS = _pdq.LCFS
TERM = _pdq.TERM
TRANS = _pdq.TRANS
BATCH = _pdq.BATCH
EXACT = _pdq.EXACT
APPROX = _pdq.APPROX
CANON = _pdq.CANON
VISITS = _pdq.VISITS
DEMAND = _pdq.DEMAND
PDQ_SP = _pdq.PDQ_SP
PDQ_MP = _pdq.PDQ_MP
TOL = _pdq.TOL
class SYSTAT_TYPE(_object):
__swig_setmethods__ = {}
__setattr__ = lambda self, name, value: _swig_setattr(self, SYSTAT_TYPE, name, value)
__swig_getmethods__ = {}
__getattr__ = lambda self, name: _swig_getattr(self, SYSTAT_TYPE, name)
__repr__ = _swig_repr
__swig_setmethods__["response"] = _pdq.SYSTAT_TYPE_response_set
__swig_getmethods__["response"] = _pdq.SYSTAT_TYPE_response_get
if _newclass:response = _swig_property(_pdq.SYSTAT_TYPE_response_get, _pdq.SYSTAT_TYPE_response_set)
__swig_setmethods__["thruput"] = _pdq.SYSTAT_TYPE_thruput_set
__swig_getmethods__["thruput"] = _pdq.SYSTAT_TYPE_thruput_get
if _newclass:thruput = _swig_property(_pdq.SYSTAT_TYPE_thruput_get, _pdq.SYSTAT_TYPE_thruput_set)
__swig_setmethods__["residency"] = _pdq.SYSTAT_TYPE_residency_set
__swig_getmethods__["residency"] = _pdq.SYSTAT_TYPE_residency_get
if _newclass:residency = _swig_property(_pdq.SYSTAT_TYPE_residency_get, _pdq.SYSTAT_TYPE_residency_set)
__swig_setmethods__["physmem"] = _pdq.SYSTAT_TYPE_physmem_set
__swig_getmethods__["physmem"] = _pdq.SYSTAT_TYPE_physmem_get
if _newclass:physmem = _swig_property(_pdq.SYSTAT_TYPE_physmem_get, _pdq.SYSTAT_TYPE_physmem_set)
__swig_setmethods__["highwater"] = _pdq.SYSTAT_TYPE_highwater_set
__swig_getmethods__["highwater"] = _pdq.SYSTAT_TYPE_highwater_get
if _newclass:highwater = _swig_property(_pdq.SYSTAT_TYPE_highwater_get, _pdq.SYSTAT_TYPE_highwater_set)
__swig_setmethods__["malloc"] = _pdq.SYSTAT_TYPE_malloc_set
__swig_getmethods__["malloc"] = _pdq.SYSTAT_TYPE_malloc_get
if _newclass:malloc = _swig_property(_pdq.SYSTAT_TYPE_malloc_get, _pdq.SYSTAT_TYPE_malloc_set)
__swig_setmethods__["mpl"] = _pdq.SYSTAT_TYPE_mpl_set
__swig_getmethods__["mpl"] = _pdq.SYSTAT_TYPE_mpl_get
if _newclass:mpl = _swig_property(_pdq.SYSTAT_TYPE_mpl_get, _pdq.SYSTAT_TYPE_mpl_set)
__swig_setmethods__["maxN"] = _pdq.SYSTAT_TYPE_maxN_set
__swig_getmethods__["maxN"] = _pdq.SYSTAT_TYPE_maxN_get
if _newclass:maxN = _swig_property(_pdq.SYSTAT_TYPE_maxN_get, _pdq.SYSTAT_TYPE_maxN_set)
__swig_setmethods__["maxTP"] = _pdq.SYSTAT_TYPE_maxTP_set
__swig_getmethods__["maxTP"] = _pdq.SYSTAT_TYPE_maxTP_get
if _newclass:maxTP = _swig_property(_pdq.SYSTAT_TYPE_maxTP_get, _pdq.SYSTAT_TYPE_maxTP_set)
__swig_setmethods__["minRT"] = _pdq.SYSTAT_TYPE_minRT_set
__swig_getmethods__["minRT"] = _pdq.SYSTAT_TYPE_minRT_get
if _newclass:minRT = _swig_property(_pdq.SYSTAT_TYPE_minRT_get, _pdq.SYSTAT_TYPE_minRT_set)
def __init__(self):
this = _pdq.new_SYSTAT_TYPE()
try: self.this.append(this)
except: self.this = this
__swig_destroy__ = _pdq.delete_SYSTAT_TYPE
__del__ = lambda self : None;
SYSTAT_TYPE_swigregister = _pdq.SYSTAT_TYPE_swigregister
SYSTAT_TYPE_swigregister(SYSTAT_TYPE)
cvar = _pdq.cvar
class TERMINAL_TYPE(_object):
__swig_setmethods__ = {}
__setattr__ = lambda self, name, value: _swig_setattr(self, TERMINAL_TYPE, name, value)
__swig_getmethods__ = {}
__getattr__ = lambda self, name: _swig_getattr(self, TERMINAL_TYPE, name)
__repr__ = _swig_repr
__swig_setmethods__["name"] = _pdq.TERMINAL_TYPE_name_set
__swig_getmethods__["name"] = _pdq.TERMINAL_TYPE_name_get
if _newclass:name = _swig_property(_pdq.TERMINAL_TYPE_name_get, _pdq.TERMINAL_TYPE_name_set)
__swig_setmethods__["pop"] = _pdq.TERMINAL_TYPE_pop_set
__swig_getmethods__["pop"] = _pdq.TERMINAL_TYPE_pop_get
if _newclass:pop = _swig_property(_pdq.TERMINAL_TYPE_pop_get, _pdq.TERMINAL_TYPE_pop_set)
__swig_setmethods__["think"] = _pdq.TERMINAL_TYPE_think_set
__swig_getmethods__["think"] = _pdq.TERMINAL_TYPE_think_get
if _newclass:think = _swig_property(_pdq.TERMINAL_TYPE_think_get, _pdq.TERMINAL_TYPE_think_set)
__swig_setmethods__["sys"] = _pdq.TERMINAL_TYPE_sys_set
__swig_getmethods__["sys"] = _pdq.TERMINAL_TYPE_sys_get
if _newclass:sys = _swig_property(_pdq.TERMINAL_TYPE_sys_get, _pdq.TERMINAL_TYPE_sys_set)
def __init__(self):
this = _pdq.new_TERMINAL_TYPE()
try: self.this.append(this)
except: self.this = this
__swig_destroy__ = _pdq.delete_TERMINAL_TYPE
__del__ = lambda self : None;
TERMINAL_TYPE_swigregister = _pdq.TERMINAL_TYPE_swigregister
TERMINAL_TYPE_swigregister(TERMINAL_TYPE)
class BATCH_TYPE(_object):
__swig_setmethods__ = {}
__setattr__ = lambda self, name, value: _swig_setattr(self, BATCH_TYPE, name, value)
__swig_getmethods__ = {}
__getattr__ = lambda self, name: _swig_getattr(self, BATCH_TYPE, name)
__repr__ = _swig_repr
__swig_setmethods__["name"] = _pdq.BATCH_TYPE_name_set
__swig_getmethods__["name"] = _pdq.BATCH_TYPE_name_get
if _newclass:name = _swig_property(_pdq.BATCH_TYPE_name_get, _pdq.BATCH_TYPE_name_set)
__swig_setmethods__["pop"] = _pdq.BATCH_TYPE_pop_set
__swig_getmethods__["pop"] = _pdq.BATCH_TYPE_pop_get
if _newclass:pop = _swig_property(_pdq.BATCH_TYPE_pop_get, _pdq.BATCH_TYPE_pop_set)
__swig_setmethods__["sys"] = _pdq.BATCH_TYPE_sys_set
__swig_getmethods__["sys"] = _pdq.BATCH_TYPE_sys_get
if _newclass:sys = _swig_property(_pdq.BATCH_TYPE_sys_get, _pdq.BATCH_TYPE_sys_set)
def __init__(self):
this = _pdq.new_BATCH_TYPE()
try: self.this.append(this)
except: self.this = this
__swig_destroy__ = _pdq.delete_BATCH_TYPE
__del__ = lambda self : None;
BATCH_TYPE_swigregister = _pdq.BATCH_TYPE_swigregister
BATCH_TYPE_swigregister(BATCH_TYPE)
class TRANSACTION_TYPE(_object):
__swig_setmethods__ = {}
__setattr__ = lambda self, name, value: _swig_setattr(self, TRANSACTION_TYPE, name, value)
__swig_getmethods__ = {}
__getattr__ = lambda self, name: _swig_getattr(self, TRANSACTION_TYPE, name)
__repr__ = _swig_repr
__swig_setmethods__["name"] = _pdq.TRANSACTION_TYPE_name_set
__swig_getmethods__["name"] = _pdq.TRANSACTION_TYPE_name_get
if _newclass:name = _swig_property(_pdq.TRANSACTION_TYPE_name_get, _pdq.TRANSACTION_TYPE_name_set)
__swig_setmethods__["arrival_rate"] = _pdq.TRANSACTION_TYPE_arrival_rate_set
__swig_getmethods__["arrival_rate"] = _pdq.TRANSACTION_TYPE_arrival_rate_get
if _newclass:arrival_rate = _swig_property(_pdq.TRANSACTION_TYPE_arrival_rate_get, _pdq.TRANSACTION_TYPE_arrival_rate_set)
__swig_setmethods__["saturation_rate"] = _pdq.TRANSACTION_TYPE_saturation_rate_set
__swig_getmethods__["saturation_rate"] = _pdq.TRANSACTION_TYPE_saturation_rate_get
if _newclass:saturation_rate = _swig_property(_pdq.TRANSACTION_TYPE_saturation_rate_get, _pdq.TRANSACTION_TYPE_saturation_rate_set)
__swig_setmethods__["sys"] = _pdq.TRANSACTION_TYPE_sys_set
__swig_getmethods__["sys"] = _pdq.TRANSACTION_TYPE_sys_get
if _newclass:sys = _swig_property(_pdq.TRANSACTION_TYPE_sys_get, _pdq.TRANSACTION_TYPE_sys_set)
def __init__(self):
this = _pdq.new_TRANSACTION_TYPE()
try: self.this.append(this)
except: self.this = this
__swig_destroy__ = _pdq.delete_TRANSACTION_TYPE
__del__ = lambda self : None;
TRANSACTION_TYPE_swigregister = _pdq.TRANSACTION_TYPE_swigregister
TRANSACTION_TYPE_swigregister(TRANSACTION_TYPE)
class JOB_TYPE(_object):
__swig_setmethods__ = {}
__setattr__ = lambda self, name, value: _swig_setattr(self, JOB_TYPE, name, value)
__swig_getmethods__ = {}
__getattr__ = lambda self, name: _swig_getattr(self, JOB_TYPE, name)
__repr__ = _swig_repr
__swig_setmethods__["should_be_class"] = _pdq.JOB_TYPE_should_be_class_set
__swig_getmethods__["should_be_class"] = _pdq.JOB_TYPE_should_be_class_get
if _newclass:should_be_class = _swig_property(_pdq.JOB_TYPE_should_be_class_get, _pdq.JOB_TYPE_should_be_class_set)
__swig_setmethods__["network"] = _pdq.JOB_TYPE_network_set
__swig_getmethods__["network"] = _pdq.JOB_TYPE_network_get
if _newclass:network = _swig_property(_pdq.JOB_TYPE_network_get, _pdq.JOB_TYPE_network_set)
__swig_setmethods__["term"] = _pdq.JOB_TYPE_term_set
__swig_getmethods__["term"] = _pdq.JOB_TYPE_term_get
if _newclass:term = _swig_property(_pdq.JOB_TYPE_term_get, _pdq.JOB_TYPE_term_set)
__swig_setmethods__["batch"] = _pdq.JOB_TYPE_batch_set
__swig_getmethods__["batch"] = _pdq.JOB_TYPE_batch_get
if _newclass:batch = _swig_property(_pdq.JOB_TYPE_batch_get, _pdq.JOB_TYPE_batch_set)
__swig_setmethods__["trans"] = _pdq.JOB_TYPE_trans_set
__swig_getmethods__["trans"] = _pdq.JOB_TYPE_trans_get
if _newclass:trans = _swig_property(_pdq.JOB_TYPE_trans_get, _pdq.JOB_TYPE_trans_set)
def __init__(self):
this = _pdq.new_JOB_TYPE()
try: self.this.append(this)
except: self.this = this
__swig_destroy__ = _pdq.delete_JOB_TYPE
__del__ = lambda self : None;
JOB_TYPE_swigregister = _pdq.JOB_TYPE_swigregister
JOB_TYPE_swigregister(JOB_TYPE)
class NODE_TYPE(_object):
__swig_setmethods__ = {}
__setattr__ = lambda self, name, value: _swig_setattr(self, NODE_TYPE, name, value)
__swig_getmethods__ = {}
__getattr__ = lambda self, name: _swig_getattr(self, NODE_TYPE, name)
__repr__ = _swig_repr
__swig_setmethods__["devtype"] = _pdq.NODE_TYPE_devtype_set
__swig_getmethods__["devtype"] = _pdq.NODE_TYPE_devtype_get
if _newclass:devtype = _swig_property(_pdq.NODE_TYPE_devtype_get, _pdq.NODE_TYPE_devtype_set)
__swig_setmethods__["sched"] = _pdq.NODE_TYPE_sched_set
__swig_getmethods__["sched"] = _pdq.NODE_TYPE_sched_get
if _newclass:sched = _swig_property(_pdq.NODE_TYPE_sched_get, _pdq.NODE_TYPE_sched_set)
__swig_setmethods__["devname"] = _pdq.NODE_TYPE_devname_set
__swig_getmethods__["devname"] = _pdq.NODE_TYPE_devname_get
if _newclass:devname = _swig_property(_pdq.NODE_TYPE_devname_get, _pdq.NODE_TYPE_devname_set)
__swig_setmethods__["visits"] = _pdq.NODE_TYPE_visits_set
__swig_getmethods__["visits"] = _pdq.NODE_TYPE_visits_get
if _newclass:visits = _swig_property(_pdq.NODE_TYPE_visits_get, _pdq.NODE_TYPE_visits_set)
__swig_setmethods__["service"] = _pdq.NODE_TYPE_service_set
__swig_getmethods__["service"] = _pdq.NODE_TYPE_service_get
if _newclass:service = _swig_property(_pdq.NODE_TYPE_service_get, _pdq.NODE_TYPE_service_set)
__swig_setmethods__["demand"] = _pdq.NODE_TYPE_demand_set
__swig_getmethods__["demand"] = _pdq.NODE_TYPE_demand_get
if _newclass:demand = _swig_property(_pdq.NODE_TYPE_demand_get, _pdq.NODE_TYPE_demand_set)
__swig_setmethods__["resit"] = _pdq.NODE_TYPE_resit_set
__swig_getmethods__["resit"] = _pdq.NODE_TYPE_resit_get
if _newclass:resit = _swig_property(_pdq.NODE_TYPE_resit_get, _pdq.NODE_TYPE_resit_set)
__swig_setmethods__["utiliz"] = _pdq.NODE_TYPE_utiliz_set
__swig_getmethods__["utiliz"] = _pdq.NODE_TYPE_utiliz_get
if _newclass:utiliz = _swig_property(_pdq.NODE_TYPE_utiliz_get, _pdq.NODE_TYPE_utiliz_set)
__swig_setmethods__["qsize"] = _pdq.NODE_TYPE_qsize_set
__swig_getmethods__["qsize"] = _pdq.NODE_TYPE_qsize_get
if _newclass:qsize = _swig_property(_pdq.NODE_TYPE_qsize_get, _pdq.NODE_TYPE_qsize_set)
__swig_setmethods__["avqsize"] = _pdq.NODE_TYPE_avqsize_set
__swig_getmethods__["avqsize"] = _pdq.NODE_TYPE_avqsize_get
if _newclass:avqsize = _swig_property(_pdq.NODE_TYPE_avqsize_get, _pdq.NODE_TYPE_avqsize_set)
def __init__(self):
this = _pdq.new_NODE_TYPE()
try: self.this.append(this)
except: self.this = this
__swig_destroy__ = _pdq.delete_NODE_TYPE
__del__ = lambda self : None;
NODE_TYPE_swigregister = _pdq.NODE_TYPE_swigregister
NODE_TYPE_swigregister(NODE_TYPE)
def CreateClosed(*args):
return _pdq.CreateClosed(*args)
CreateClosed = _pdq.CreateClosed
def CreateClosed_p(*args):
return _pdq.CreateClosed_p(*args)
CreateClosed_p = _pdq.CreateClosed_p
def CreateOpen(*args):
return _pdq.CreateOpen(*args)
CreateOpen = _pdq.CreateOpen
def CreateOpen_p(*args):
return _pdq.CreateOpen_p(*args)
CreateOpen_p = _pdq.CreateOpen_p
def CreateNode(*args):
return _pdq.CreateNode(*args)
CreateNode = _pdq.CreateNode
def CreateMultiNode(*args):
return _pdq.CreateMultiNode(*args)
CreateMultiNode = _pdq.CreateMultiNode
def GetStreamsCount():
return _pdq.GetStreamsCount()
GetStreamsCount = _pdq.GetStreamsCount
def GetNodesCount():
return _pdq.GetNodesCount()
GetNodesCount = _pdq.GetNodesCount
def GetResponse(*args):
return _pdq.GetResponse(*args)
GetResponse = _pdq.GetResponse
def GetResidenceTime(*args):
return _pdq.GetResidenceTime(*args)
GetResidenceTime = _pdq.GetResidenceTime
def GetThruput(*args):
return _pdq.GetThruput(*args)
GetThruput = _pdq.GetThruput
def GetLoadOpt(*args):
return _pdq.GetLoadOpt(*args)
GetLoadOpt = _pdq.GetLoadOpt
def GetUtilization(*args):
return _pdq.GetUtilization(*args)
GetUtilization = _pdq.GetUtilization
def GetQueueLength(*args):
return _pdq.GetQueueLength(*args)
GetQueueLength = _pdq.GetQueueLength
def PDQ_GetThruMax(*args):
return _pdq.PDQ_GetThruMax(*args)
PDQ_GetThruMax = _pdq.PDQ_GetThruMax
def Init(*args):
return _pdq.Init(*args)
Init = _pdq.Init
def Report():
return _pdq.Report()
Report = _pdq.Report
def SetDebug(*args):
return _pdq.SetDebug(*args)
SetDebug = _pdq.SetDebug
def SetDemand(*args):
return _pdq.SetDemand(*args)
SetDemand = _pdq.SetDemand
def SetDemand_p(*args):
return _pdq.SetDemand_p(*args)
SetDemand_p = _pdq.SetDemand_p
def SetVisits(*args):
return _pdq.SetVisits(*args)
SetVisits = _pdq.SetVisits
def SetVisits_p(*args):
return _pdq.SetVisits_p(*args)
SetVisits_p = _pdq.SetVisits_p
def Solve(*args):
return _pdq.Solve(*args)
Solve = _pdq.Solve
def SetWUnit(*args):
return _pdq.SetWUnit(*args)
SetWUnit = _pdq.SetWUnit
def SetTUnit(*args):
return _pdq.SetTUnit(*args)
SetTUnit = _pdq.SetTUnit
def SetComment(*args):
return _pdq.SetComment(*args)
SetComment = _pdq.SetComment
def GetComment():
return _pdq.GetComment()
GetComment = _pdq.GetComment
def PrintNodes():
return _pdq.PrintNodes()
PrintNodes = _pdq.PrintNodes
def GetNode(*args):
return _pdq.GetNode(*args)
GetNode = _pdq.GetNode
def getjob(*args):
return _pdq.getjob(*args)
getjob = _pdq.getjob
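# --- Hedged usage sketch (not part of the generated bindings above) ---
# A minimal closed queueing model exercising the wrappers defined in this
# module. The symbolic constants (TERM, CEN, FCFS, EXACT) are assumed to be
# exported elsewhere by this SWIG module; treat this as an illustration only.
#
#   import pdq
#   pdq.Init("Example model")
#   pdq.CreateClosed("users", pdq.TERM, 10.0, 5.0)   # 10 users, 5 s think time
#   pdq.CreateNode("cpu", pdq.CEN, pdq.FCFS)
#   pdq.SetDemand("cpu", "users", 0.2)               # 0.2 s of service per visit
#   pdq.Solve(pdq.EXACT)
#   pdq.Report()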
|
peterlharding/PDQ
|
python/pdq.py
|
Python
|
mit
| 16,835
|
"""
###############################################################################
Controller: Overall controller class
###############################################################################
"""
import pickle as _pickle
import copy as _copy
import time
import random
import string
import OpenPNM
from OpenPNM.Base import logging
logger = logging.getLogger()
class Controller(dict):
# The following __instance__ class variable and subclassed __new__ method
# makes the Controller class a 'Singleton'. This way, any instantiation
# of a controller object anywhere in the code will return the same object.
__instance__ = None
def __new__(cls, *args, **kwargs):
if Controller.__instance__ is None:
Controller.__instance__ = dict.__new__(cls)
return Controller.__instance__
def __init__(self):
self.comments = 'Using OpenPNM ' + OpenPNM.__version__
def __str__(self):
lines = []
horizontal_rule = 60 * '-'
for net in self.networks():
lines.append(horizontal_rule)
lines.append('{0:<15} {1:<20} ({2})'.format('Object:',
'Name',
'Class'))
lines.append(horizontal_rule)
lines.append('{0:<15} {1:<20} ({2})'.format('Network:',
net.name,
net.__class__.__name__))
for geom in net._geometries:
str = '++ {0:<12} {1:<20} ({2})'
if geom in self.values():
lines.append(str.format('Geometry: ',
geom.name,
geom.__class__.__name__))
else:
lines.append(str.format('ERROR: ',
geom.name,
'Object Not in Controller'))
for phase in net._phases:
if len(phase._phases) == 0:
str = '+ {0:<13} {1:<20} ({2})'
lines.append(str.format('Pure Phase: ',
phase.name,
phase.__class__.__name__))
if len(phase._phases) > 1:
str = '+ {0:<13} {1:<20} ({2})'
lines.append(str.format('Mixture Phase: ',
phase.name,
phase.__class__.__name__))
comps = phase.phases()
for compname in comps:
str = '++ {0:<12} {1:<20} ({2})'
lines.append(str.format('Component Phase: ',
compname,
phase.__class__.__name__))
for phys in phase._physics:
str = '++ {0:<12} {1:<20} ({2})'
if phys in self.values():
lines.append(str.format('Physics: ',
phys.name,
phys.__class__.__name__))
else:
lines.append(str.format('ERROR: ',
phys.name,
'Object Not in Controller'))
return '\n'.join(lines)
def _setloglevel(self, level):
logger.setLevel(level)
def _getloglevel(self):
return 'Log level is currently set to: ' + str(logger.level)
loglevel = property(fget=_getloglevel, fset=_setloglevel)
def networks(self):
r"""
Returns a list of all Network objects
"""
return self._get_objects(obj_type='GenericNetwork')
def geometries(self):
r"""
Returns a list of all Geometry objects
"""
return self._get_objects(obj_type='GenericGeometry')
def phases(self):
r"""
Returns a list of all Phase objects
"""
return self._get_objects(obj_type='GenericPhase')
def physics(self):
r"""
Returns a list of all Physics objects
"""
return self._get_objects(obj_type='GenericPhysics')
def algorithms(self):
r"""
Returns a list of all Algorithm objects
"""
return self._get_objects(obj_type='GenericAlgorithm')
def _get_objects(self, obj_type):
temp = []
for obj in list(self.keys()):
mro = [item.__name__ for item in self[obj].__class__.__mro__]
if obj_type in mro:
temp.append(self[obj])
return temp
def purge_object(self, obj, mode='single'):
r"""
Remove an object, including all traces of it in its associated objects
Parameters
----------
obj : OpenPNM Object
The object to be removed. This method removes all traces of the
object from everywhere, including all the object tracking lists and
label dictionaries of every object.
mode : string
            Dictates the type of purge to be performed. Options are:
- 'single': Only purges the specified object
- 'complete': Purges the specified object AND all of its associated
objects
Notes
-----
        To only remove an object from the Controller object use the dictionary's
native ``pop`` method.
Examples
--------
>>> import OpenPNM
>>> ctrl = OpenPNM.Base.Controller()
>>> pn = OpenPNM.Network.TestNet()
>>> geom = OpenPNM.Geometry.GenericGeometry(network=pn,
... pores=pn.Ps,
... throats=pn.Ts)
# Label entries are added to the Network where geom is defined
>>> 'pore.'+geom.name in pn.keys()
True
>>> ctrl.purge_object(geom)
# geom is removed from Controller object
>>> geom.name in ctrl.keys()
False
# geom's labels are removed from the Network too
>>> 'pore.' + geom.name in pn.keys()
False
"""
if mode == 'complete':
if obj._net is None:
net = obj
else:
net = obj._net
for item in net.geometries() + net.phases() + net.physics():
                blank = self.pop(item.name, None)
del self[net.name]
elif mode == 'single':
name = obj.name
for item in list(self.keys()):
# Remove label arrays from all other objects
self[item].pop('pore.' + name, None)
self[item].pop('throat.' + name, None)
# Remove associations on other objects
self[item]._geometries[:] = \
[x for x in self[item]._geometries if x is not obj]
self[item]._phases[:] = \
[x for x in self[item]._phases if x is not obj]
self[item]._physics[:] = \
[x for x in self[item]._physics if x is not obj]
# Remove object from Controller dict
del self[name]
def ghost_object(self, obj):
r"""
Create a ghost OpenPNM Object containing all the data, methods and
associations of the original object, but without registering the ghost
anywhere. This ghost is intended as a disposable object, for
instance, to receive data without overwriting existing data.
Parameters
----------
obj : OpenPNM Object
The object to be cloned can be any OpenPNM Object
Returns
-------
A clone of the specified object is returned, but it retains all its links
to the objects associated with the original object. The cloned object is
not associated with the Network.
Examples
--------
>>> import OpenPNM
>>> ctrl = OpenPNM.Base.Controller()
>>> pn = OpenPNM.Network.TestNet()
>>> pn2 = ctrl.ghost_object(pn)
>>> pn is pn2 # A copy of pn is created
False
>>> pn2.keys() == pn.keys() # They have otherwise identical data
True
>>> pn2 in ctrl.values() # pn2 is not associated with existing Controller
False
It can also be used to create ghosts of other object types:
>>> geom = OpenPNM.Geometry.TestGeometry(network=pn,
... pores=pn.Ps,
... throats=pn.Ts)
>>> geo2 = ctrl.ghost_object(geom)
>>> geom is geo2
False
# Ghost has same name as ancestor
>>> geom.name == geo2.name
True
# But they are not the same object
>>> geo2 is ctrl[geo2.name]
False
# The ghost is not registered with the Controller
>>> geo2 in ctrl.values()
False
# The following comparisons look at some 'behind the scenes' information
        # The ghost and ancestor are associated with the same Network
>>> geo2._net == geom._net
True
# But the Network remains aware of the ancestor only
>>> geo2 in pn._geometries
False
"""
obj_new = _copy.copy(obj)
obj_new.__dict__ = _copy.copy(obj.__dict__)
self.update({obj.name: obj})
return obj_new
def save_simulation(self, network, filename=''):
r"""
Save a single Network simulation to a 'net' file, including all of its
associated objects, but not Algorithms
Parameters
----------
network : OpenPNM Network object
The Network to save
filename : string, optional
If no filename is given the name of the Network is used
"""
if filename == '':
filename = network.name
else:
filename = filename.rstrip('.net')
# Save nested dictionary pickle
_pickle.dump(network, open(filename + '.net', 'wb'))
def load_simulation(self, filename):
r"""
        Loads a Network simulation from the specified 'net' file and adds it
to the Controller
Parameters
----------
filename : string
The name of the file containing the Network simulation to load
"""
filename = filename.rstrip('.net')
net = _pickle.load(open(filename + '.net', 'rb'))
self[net.name] = net
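    # Hedged usage sketch: round-tripping a single Network through the two
    # methods above, reusing the TestNet class from the docstring examples.
    #
    #   ctrl = Controller()
    #   pn = OpenPNM.Network.TestNet()
    #   ctrl.save_simulation(pn, filename='demo')   # writes demo.net
    #   ctrl.purge_object(pn, mode='complete')
    #   ctrl.load_simulation('demo')                # the Network is back in ctrl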
def save(self, filename=''):
r"""
Save the entire state of the Controller to a 'pnm' file.
Parameters
----------
filename : string, optional
The file name to save as. If no filename is provided the current
date and time is used.
Examples
--------
.. code-block:: python
import OpenPNM
ctrl = OpenPNM.Base.Controller()
ctrl.clear() # Ensure no previous objects are present
pn = OpenPNM.Network.TestNet()
ctrl.save('test.pnm')
pn.name in ctrl.keys()
#=> True
ctrl.clear()
ctrl.keys()
dict_keys([])
ctrl.load('test.pnm')
pn.name in ctrl.keys()
#=> True
"""
if filename == '':
from datetime import datetime
i = datetime.now()
filename = i.strftime('%Y-%m-%d_%H-%M-%S')
else:
filename = filename.rstrip('.pnm')
# Save nested dictionary pickle
_pickle.dump(self, open(filename + '.pnm', 'wb'))
def load(self, filename):
r"""
Load an entire Controller from a 'pnm' file.
Parameters
----------
filename : string
The file name of the Controller to load.
Notes
-----
This calls the ``clear`` method of the Controller object, so it will
remove all existing objects in the current workspace.
"""
filename = filename.strip('.pnm')
if self != {}:
            print('Warning: Loading data onto a non-empty Controller object; ' +
                  'existing data will be lost')
self.clear()
        # Re-populate this Controller in place; rebinding ``self`` would have no effect
        self.update(_pickle.load(open(filename + '.pnm', 'rb')))
def export(self, network=None, filename='', fileformat='VTK'):
r"""
Export data to the specified file format.
Parameters
----------
network : OpenPNM Network Object
This Network and all of its phases will be written to the specified
            file. If no Network is given it will check to ensure that only one
Network exists on the Controller and use that. If there is more
than one Network an error is thrown.
filename : string, optional
The file name to save as. If no name is given then the name of
            supplied object is used. If no object is given, the name of the
Network is used.
fileformat : string
The type of file to create. Options are:
1. VTK: Suitable for visualizing in VTK capable software such as Paraview
2. MAT: Suitable for loading data into Matlab for post-processing
"""
if network is None:
if len(self.networks()) == 1:
network = self.networks()[0]
else:
raise Exception('Multiple Networks found, please specify \
which to export')
import OpenPNM.Utilities.IO as io
if fileformat == 'VTK':
phases = network._phases
io.VTK.save(filename=filename, network=network, phases=phases)
return
if fileformat == 'MAT':
phases = network._phases
io.MAT.save(filename=filename, network=network, phases=phases)
return
    def _script(self, filename, mode='archive'):
r"""
Save or reload the script files used for the modeling
Parameters
----------
filename : string
The name of the file to read or write
mode : string
Whether to 'archive' the given script file on the object or to
'retrieve' it from the object and create a new file with it. The
default is 'archive'.
"""
filename = filename.split('.')[0]+'.py'
if mode == 'archive':
with open(filename, 'rb') as read_file:
contents = read_file.read()
self._script = contents
if mode == 'retrieve':
with open(filename, 'wb') as write_file:
write_file.write(self._script)
def _set_comments(self, string):
if hasattr(self, '_comments') is False:
self._comments = {}
self._comments[time.strftime('%c')] = string
def _get_comments(self):
if hasattr(self, '_comments') is False:
print('No comments found')
else:
for key in list(self._comments.keys()):
print(key, ': ', self._comments[key])
comments = property(fget=_get_comments, fset=_set_comments)
def clone_simulation(self, network, name=None):
r"""
Accepts a Network object and creates a complete clone including all
associated objects. All objects in the cloned simulation are
registered with the Controller object and are fully functional.
Parameters
----------
network : OpenPNM Network Object
The Network object that is to be cloned. Because a Network has
handles to ALL associated objects it acts as the representative
for the entire simulation.
name : string
This string will be appended to the name of all cloned objects.
Returns
-------
A handle to the new Network object, which will include handles to
clones of all associated objects.
See Also
--------
ghost_object
Notes
-----
One useful application of this method is to create a cloned simulation
that can be trimmed to a smaller size. This smaller simulation will
result in much faster Algorithms calculations.
Examples
--------
>>> import OpenPNM
>>> ctrl = OpenPNM.Base.Controller()
>>> pn = OpenPNM.Network.TestNet()
>>> pn2 = ctrl.clone_simulation(pn, name='cloned')
>>> pn2 is pn
False
"""
if network._parent is not None:
logger.error('Cannot clone a network that is already a clone')
return
if name is None:
name = ''.join(random.choice(string.ascii_uppercase +
string.ascii_lowercase +
string.digits) for _ in range(5))
if self._validate_name(network.name + '_' + name) is False:
logger.error('The provided name is already in use')
return
net = _copy.deepcopy(network) # Make clone
# Add supplied name suffix to all cloned objects
for item in net._simulation():
item._parent = network
item.name = item.name + '_' + name
# Add parent Network numbering to clone
net['pore.' + network.name] = network.Ps
net['throat.' + network.name] = network.Ts
return net
def _validate_name(self, name):
valid_name = True
for item_name in list(self.keys()):
# Check object names for conflict
if name == item_name:
return False
# Also check array names on all objects
for array_name in list(self[item_name].keys()):
if name == array_name.split('.')[-1]:
return False
return valid_name
def _insert_simulation(self, network):
for item in network._simulation():
if item.name in self.keys():
raise Exception('An object named '+item.name+' is already present')
if network.name not in self.keys():
self[network.name] = network
for item in network._simulation():
self[item.name] = item
else:
print('Duplicate name found in Controller')
|
amdouglas/OpenPNM
|
OpenPNM/Base/__Controller__.py
|
Python
|
mit
| 18,622
|
import _plotly_utils.basevalidators
class CautoValidator(_plotly_utils.basevalidators.BooleanValidator):
def __init__(self, plotly_name="cauto", parent_name="bar.marker.line", **kwargs):
super(CautoValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop("edit_type", "calc"),
implied_edits=kwargs.pop("implied_edits", {}),
**kwargs
)
|
plotly/plotly.py
|
packages/python/plotly/plotly/validators/bar/marker/line/_cauto.py
|
Python
|
mit
| 458
|
# -*- coding: utf-8 -*-
import psycopg2
from psycopg2 import errorcodes as codes
class PgClientError(Exception):
""" Common pgclient exception class"""
CLASS_CODE = None
def __init__(self, **kwargs):
for k, v in kwargs.items():
setattr(self, k, v)
def __str__(self):
return '{}({}, {}, {})'.format(
self.__class__.__name__,
getattr(self, 'message'),
getattr(self, 'pgcode'),
getattr(self, 'diag'))
class ErrorsRegistry(object):
"""Registry for all-related postgres errors.
The idea is to translate psycopg2.Error to more meaningful classes"""
ERRORS = {}
@classmethod
def register(cls, code):
"""Register decorator
:param code: postgres class error code, for example '08'
"""
def wrapper(klass):
klass.CLASS_CODE = code
cls.ERRORS[code] = klass
return klass
return wrapper
@classmethod
def get_error_class(cls, pg_code):
"""Get error class from registry by pg code (internal postgresql code)
:param pg_code: str
:return:
"""
if not pg_code:
return PgClientError
return cls.ERRORS.get(pg_code[:2])
@classmethod
def get_error(cls, pg_error):
"""Get error instance by psycopg2.Error
:param pg_error: psycopg2.Error instance
:return: PgClientError instance
"""
error_cls = cls.get_error_class(pg_error.pgcode)
return error_cls(
message=getattr(pg_error, 'message', pg_error.pgerror),
pgcode=pg_error.pgcode,
pgerror=pg_error.pgerror,
diag=pg_error.diag)
registry = ErrorsRegistry
@registry.register(code=codes.CLASS_SUCCESSFUL_COMPLETION)
class SuccessfulCompletion(PgClientError):
pass
@registry.register(code=codes.CLASS_WARNING)
class PgWarning(PgClientError):
pass
@registry.register(code=codes.CLASS_NO_DATA)
class NoDataWarning(PgClientError):
pass
@registry.register(code=codes.CLASS_SQL_STATEMENT_NOT_YET_COMPLETE)
class SQLStatementNotYetComplete(PgClientError):
pass
@registry.register(code=codes.CLASS_CONNECTION_EXCEPTION)
class ConnectionException(PgClientError):
pass
@registry.register(code=codes.CLASS_TRIGGERED_ACTION_EXCEPTION)
class TriggeredActionException(PgClientError):
pass
@registry.register(code=codes.CLASS_FEATURE_NOT_SUPPORTED)
class FeatureNotSupported(PgClientError):
pass
@registry.register(code=codes.CLASS_INVALID_TRANSACTION_INITIATION)
class InvalidTransactionInitiation(PgClientError):
pass
@registry.register(code=codes.CLASS_LOCATOR_EXCEPTION)
class LocatorException(PgClientError):
pass
@registry.register(code=codes.CLASS_INVALID_GRANTOR)
class InvalidGrantor(PgClientError):
pass
@registry.register(code=codes.CLASS_INVALID_ROLE_SPECIFICATION)
class InvalidRoleSpecification(PgClientError):
pass
@registry.register(code=codes.CLASS_DIAGNOSTICS_EXCEPTION)
class DiagnosticsException(PgClientError):
pass
@registry.register(code=codes.CLASS_CASE_NOT_FOUND)
class CaseNotFound(PgClientError):
pass
@registry.register(code=codes.CLASS_CARDINALITY_VIOLATION)
class CardinalityViolation(PgClientError):
pass
@registry.register(code=codes.CLASS_DATA_EXCEPTION)
class DataException(PgClientError):
pass
@registry.register(code=codes.CLASS_INTEGRITY_CONSTRAINT_VIOLATION)
class IntegrityConstraintViolation(PgClientError):
pass
@registry.register(code=codes.CLASS_INVALID_CURSOR_STATE)
class InvalidCursorState(PgClientError):
pass
@registry.register(code=codes.CLASS_INVALID_TRANSACTION_STATE)
class InvalidTransactionState(PgClientError):
pass
@registry.register(code=codes.CLASS_INVALID_SQL_STATEMENT_NAME)
class InvalidSQLStatementName(PgClientError):
pass
@registry.register(code=codes.CLASS_TRIGGERED_DATA_CHANGE_VIOLATION)
class TriggeredDataChangeViolation(PgClientError):
pass
@registry.register(code=codes.CLASS_INVALID_AUTHORIZATION_SPECIFICATION)
class InvalidAuthorizationSpecification(PgClientError):
pass
@registry.register(
code=codes.CLASS_DEPENDENT_PRIVILEGE_DESCRIPTORS_STILL_EXIST)
class DependentPrivilegeDescriptorsStillExist(PgClientError):
pass
@registry.register(code=codes.CLASS_INVALID_TRANSACTION_TERMINATION)
class InvalidTransactionTermination(PgClientError):
pass
@registry.register(code=codes.CLASS_SQL_ROUTINE_EXCEPTION)
class SQLRoutineException(PgClientError):
pass
@registry.register(code=codes.CLASS_INVALID_CURSOR_NAME)
class InvalidCursorName(PgClientError):
pass
@registry.register(code=codes.CLASS_EXTERNAL_ROUTINE_EXCEPTION)
class ExternalRoutineException(PgClientError):
pass
@registry.register(code=codes.CLASS_EXTERNAL_ROUTINE_INVOCATION_EXCEPTION)
class ExternalRoutineInvocationException(PgClientError):
pass
@registry.register(code=codes.CLASS_SAVEPOINT_EXCEPTION)
class SavepointException(PgClientError):
pass
@registry.register(code=codes.CLASS_INVALID_CATALOG_NAME)
class InvalidCatalogName(PgClientError):
pass
@registry.register(code=codes.CLASS_INVALID_SCHEMA_NAME)
class InvalidSchemaName(PgClientError):
pass
@registry.register(code=codes.CLASS_TRANSACTION_ROLLBACK)
class TransactionRollback(PgClientError):
pass
@registry.register(code=codes.CLASS_SYNTAX_ERROR_OR_ACCESS_RULE_VIOLATION)
class SyntaxErrorOrAccessRuleViolation(PgClientError):
pass
@registry.register(code=codes.CLASS_WITH_CHECK_OPTION_VIOLATION)
class WithCheckOptionViolation(PgClientError):
pass
@registry.register(code=codes.CLASS_INSUFFICIENT_RESOURCES)
class InsufficientResources(PgClientError):
pass
@registry.register(code=codes.CLASS_PROGRAM_LIMIT_EXCEEDED)
class ProgramLimitExceeded(PgClientError):
pass
@registry.register(code=codes.CLASS_OBJECT_NOT_IN_PREREQUISITE_STATE)
class ObjectNotInPrerequisiteState(PgClientError):
pass
@registry.register(code=codes.CLASS_OPERATOR_INTERVENTION)
class OperatorIntervention(PgClientError):
pass
@registry.register(code=codes.CLASS_SYSTEM_ERROR)
class PgSystemError(PgClientError):
pass
@registry.register(code=codes.CLASS_CONFIGURATION_FILE_ERROR)
class ConfigurationFileError(PgClientError):
pass
@registry.register(code=codes.CLASS_FOREIGN_DATA_WRAPPER_ERROR)
class ForeignDataWrapperError(PgClientError):
pass
@registry.register(code=codes.CLASS_PL_PGSQL_ERROR)
class PLPgSQLError(PgClientError):
pass
@registry.register(code=codes.CLASS_INTERNAL_ERROR)
class InternalError(PgClientError):
pass
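# --- Hedged usage sketch (illustration only) ---
# A caller would typically let psycopg2 raise, then translate the error via
# the registry defined above. The connection string and query are placeholders.
#
#   try:
#       conn = psycopg2.connect('dbname=test')
#       cur = conn.cursor()
#       cur.execute('SELECT * FROM missing_table')
#   except psycopg2.Error as exc:
#       raise registry.get_error(exc)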
|
prawn-cake/pgclient
|
pgclient/exceptions.py
|
Python
|
mit
| 6,638
|
from django import template
from froide.foirequestfollower.forms import FollowRequestForm
register = template.Library()
def followrequest(context, foirequest, user, name):
form = FollowRequestForm(foirequest, user)
following = False
if user.is_authenticated:
if foirequest.followed_by(user):
following = True
form.following = following
context[name] = form
return ""
register.simple_tag(takes_context=True)(followrequest)
|
CodeforHawaii/froide
|
froide/foirequestfollower/templatetags/follower_tags.py
|
Python
|
mit
| 471
|
#!/usr/bin/env python2.6
import numpy
m = numpy.array([[1, 2, 3], [2, 4, 6], [3, 5, 2]])
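# Boolean-mask indexing: (m == 2) marks the three entries equal to 2,
# so the next line prints "[2 2 2]".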
print m[m == 2]
|
rik0/rk-exempla
|
algorithms/python/matrix_substitution.py
|
Python
|
mit
| 107
|
#note: using this for user input
import sys
from time import sleep
class Input():
def __init__(self, pygame):
self.pygame = pygame
self.left = 0
self.right = 0
self.paused = 0
def checkInput(self):
#needs to run at least once each game loop
for event in self.pygame.event.get():
#quit game
if event.type == self.pygame.QUIT:
sys.exit()
if event.type == self.pygame.KEYDOWN:
if event.key == self.pygame.K_LEFT:
self.leftKeyDown()
if event.key == self.pygame.K_RIGHT:
self.rightKeyDown()
if event.type == self.pygame.KEYUP:
if event.key == self.pygame.K_LEFT:
self.leftKeyUp()
if event.key == self.pygame.K_RIGHT:
self.rightKeyUp()
if event.key == self.pygame.K_p:
self.pause()
def leftKeyDown(self):
#what left key does
self.left = 1
def rightKeyDown(self):
#what right key does
self.right = 1
def leftKeyUp(self):
self.left = 0
def rightKeyUp(self):
self.right = 0
def pause(self):
if self.paused == 0:
self.paused = 1
sleep(1)
elif self.paused == 1:
self.paused = 0
|
golddiamonds/BreakOuttaHere
|
game/input.py
|
Python
|
mit
| 1,395
|
import math
previous = 1
iterator = 0
total = 0
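# Sums the numbers on the diagonals of an n-by-n number spiral (Project
# Euler 28). With range(9) the loop covers the centre plus the corners of
# the 3x3 and 5x5 rings, so this prints 101, the 5x5 example from the
# problem statement.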
for x in range(9):
iterator=iterator + 2 if x % 4 == 0 else iterator
total += previous
previous += iterator
print total
|
DavidOStewart/ProjectEuler
|
28.py
|
Python
|
mit
| 185
|
import sys
import os
import stat
import re
import copy
import shutil
from pwd import getpwnam
if __name__ == "__main__":
import docassemble.base.config
docassemble.base.config.load(arguments=sys.argv)
from docassemble.base.config import daconfig, S3_ENABLED, s3_config, AZURE_ENABLED, azure_config
import docassemble.base.amazon
import docassemble.base.microsoft
def main():
certs_location = daconfig.get('certs', None)
cloud = None
prefix = None
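    # Hedged note: judging from the regexes below, a non-default 'certs'
    # location is expected to look like 's3://bucket/path/to/certs' or
    # 'blob://account/container/path/to/certs'; otherwise the default
    # 'certs/' prefix in the configured cloud store is used.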
if S3_ENABLED:
my_config = copy.deepcopy(s3_config)
if certs_location is None:
cloud = docassemble.base.amazon.s3object(my_config)
prefix = 'certs/'
else:
m = re.search(r'^s3://([^/]+)/(.*)', certs_location)
if m:
prefix = m.group(2)
my_config['bucket'] = m.group(1)
cloud = docassemble.base.amazon.s3object(my_config)
elif AZURE_ENABLED:
my_config = copy.deepcopy(azure_config)
if certs_location is None:
prefix = 'certs/'
cloud = docassemble.base.microsoft.azureobject(my_config)
else:
m = re.search(r'^blob://([^/]+)/([^/]+)/(.*)', certs_location)
if m:
my_config['account name'] = m.group(1)
my_config['container'] = m.group(2)
prefix = m.group(3)
cloud = docassemble.base.microsoft.azureobject(my_config)
if cloud is not None and prefix is not None:
success = False
if not re.search(r'/$', prefix):
prefix = prefix + '/'
dest = daconfig.get('cert install directory', '/etc/ssl/docassemble')
if dest:
if not os.path.isdir(dest):
os.makedirs(dest)
for key in cloud.list_keys(prefix=prefix):
filename = re.sub(r'.*/', '', key.name)
fullpath = os.path.join(dest, filename)
sys.stderr.write("install_certs: saving " + str(key.name) + " to " + str(fullpath) + "\n")
key.get_contents_to_filename(fullpath)
os.chmod(fullpath, stat.S_IRUSR)
success = True
else:
sys.stderr.write("SSL destination directory not known\n")
sys.exit(1)
if success:
return
if certs_location is None:
if os.path.isdir('/usr/share/docassemble/certs'):
certs_location = '/usr/share/docassemble/certs'
else:
return
if not os.path.isdir(certs_location):
sys.stderr.write("certs directory " + str(certs_location) + " does not exist")
sys.exit(1)
dest = daconfig.get('cert install directory', '/etc/ssl/docassemble')
if dest:
if os.path.isdir(dest):
shutil.rmtree(dest)
shutil.copytree(certs_location, dest)
for root, dirs, files in os.walk(dest):
for the_file in files:
os.chmod(os.path.join(root, the_file), stat.S_IRUSR)
else:
sys.stderr.write("SSL destination directory not known")
sys.exit(1)
www_install = daconfig.get('web server certificate directory', '/var/www/.certs')
if www_install:
www_username = daconfig.get('web server user', 'www-data')
www_uid = getpwnam(www_username)[2]
www_gid = getpwnam(www_username)[3]
if os.path.isdir(www_install):
shutil.rmtree(www_install)
shutil.copytree(certs_location, www_install)
os.chown(www_install, www_uid, www_gid)
for root, dirs, files in os.walk(www_install):
for the_file in files:
os.chown(os.path.join(root, the_file), www_uid, www_gid)
os.chmod(os.path.join(root, the_file), stat.S_IRUSR)
return
if __name__ == "__main__":
main()
|
jhpyle/docassemble
|
docassemble_webapp/docassemble/webapp/install_certs.py
|
Python
|
mit
| 3,811
|
from django.test import RequestFactory
from test_plus.test import TestCase
from ..views import (
EventUpdate,
EventCreate
)
class BaseUserTestCase(TestCase):
def setUp(self):
self.user = self.make_user()
self.factory = RequestFactory()
class TestEventUpdateView(BaseUserTestCase):
def setUp(self):
# call BaseUserTestCase.setUp()
super(TestEventUpdateView, self).setUp()
# Instantiate the view directly. Never do this outside a test!
self.view = EventUpdate()
# Generate a fake request
request = self.factory.get('/fake-url')
# Attach the user to the request
request.user = self.user
# Attach the request to the view
self.view.request = request
def test_get_success_url(self):
# Expect: '/users/testuser/', as that is the default username for
# self.make_user()
self.assertEqual(
self.view.get_success_url(),
'/events/'
)
class TestEventCreateView(BaseUserTestCase):
def setUp(self):
# call BaseUserTestCase.setUp()
super(TestEventCreateView, self).setUp()
# Instantiate the view directly. Never do this outside a test!
self.view = EventCreate()
# Generate a fake request
request = self.factory.get('/fake-url')
# Attach the user to the request
request.user = self.user
# Attach the request to the view
self.view.request = request
def test_get_success_url(self):
        # Expect: '/events/', the success URL configured on the view
self.assertEqual(
self.view.get_success_url(),
'/events/'
)
def test_get_templates_name(self):
pass
|
mansonul/events
|
events/tests/test_views.py
|
Python
|
mit
| 1,794
|
# -*- coding: utf-8 -*-
import random, time, sys, dht, bootstrap
random.seed(time.time())
class MyNetwork(dht.DHT):
def __init__(self, *args, **kwargs):
self._my_db = {}
super(MyNetwork, self).__init__(*args, **kwargs)
def handle_save(self, key, value):
self._my_db[key] = value
return True
def handle_load(self, key):
return self._my_db.get(key)
def handle_delete(self, key):
del self._my_db[key]
return True
def handle_has_key(self, key):
return key in self._my_db
def main():
# Uses port as first argument for communication (TCP+UDP)
my_id = random.randint(0, dht.MAX_ID)
port = random.randint(5000, 10000)
n = MyNetwork(node_id=my_id, port=port)
bootstrapper = bootstrap.Bootstrapper(network_id="test", node_id=my_id,
dht_port=port)
bootstrapper.start_network(n)
try:
print "My ID = %d" % my_id
print
# Hash your data (160-bit integer), for this example we'll get a random int
data_id = random.randint(0, dht.MAX_ID)
# Returns True on success, gets automatically replicated
print "Saving:", n.save(data_id, "Hallo!", replicas=20)
# Returns "Hallo!" (received from one of the nodes available in the network having this key)
print "Loading:", n.load(data_id)
# Is the key available in the network? Returns the number of replicas.
print "How many nodes have this key?", n.has_key(data_id)
# Removes the key+data from all nodes in the network
print "Removing:", n.delete(data_id)
print "How many nodes have this key now?", n.has_key(data_id)
raw_input("Enter to exit.")
finally:
# Make sure network is always shutting down
bootstrapper.stop_network(n)
if __name__ == "__main__":
main()
|
flosch/libdht
|
example.py
|
Python
|
mit
| 1,929
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.5 on 2016-05-03 20:21
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('pxl', '0001_initial'),
]
operations = [
migrations.RemoveField(
model_name='usermodel',
name='pxlboard_1',
),
migrations.RemoveField(
model_name='usermodel',
name='pxlboard_2',
),
migrations.RemoveField(
model_name='usermodel',
name='pxlboard_3',
),
migrations.AddField(
model_name='pxlboardmodel_1',
name='owner',
field=models.OneToOneField(default=None, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
),
migrations.AddField(
model_name='pxlboardmodel_2',
name='owner',
field=models.OneToOneField(default=None, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
),
migrations.AddField(
model_name='pxlboardmodel_3',
name='owner',
field=models.OneToOneField(default=None, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
),
]
|
PXL-CF2016/pxl-master-server
|
pxl/migrations/0002_auto_20160503_2021.py
|
Python
|
mit
| 1,453
|
__author__ = 'nightfade'
from example.echo_service_pb2 import IEchoService, IEchoClient_Stub
import logger
class EchoService(IEchoService):
def echo(self, rpc_controller, echo_string, callback):
""" called by RpcChannel.receive when a complete request reached.
"""
logger.get_logger('EchoService').info('echo service is called')
echo_string.message = echo_string.message
client_stub = IEchoClient_Stub(rpc_controller.rpc_channel)
client_stub.respond(rpc_controller, echo_string, callback=None)
if callback:
callback()
|
nightfade/protobuf-RPC
|
example/echo_service.py
|
Python
|
mit
| 594
|
import math
import pyglet
def clamp(x, min_x, max_x):
return min(max(x, min_x), max_x)
def float_to_ubyte(f):
return clamp(int(f * 256.0), 0, 255)
def float_to_byte(f):
return clamp(int(math.floor(f * 128.0)), -128, 127)
def char_to_float(c):
assert isinstance(c, str) and len(c) == 1
return float(ord(c)) / 255.0
def get_box_vertices(half_width=1.0, half_height=1.0):
return [(-half_width, -half_height), (half_width, -half_height),
(half_width, half_height), (-half_width, half_height)]
def get_circle_vertices(center=(0.0, 0.0), radius=1.0, angle=0.0, count=8):
vertices = []
x, y = center
for i in xrange(count):
a = angle + 2.0 * math.pi * float(i) / float(count)
vertex = x + radius * math.cos(a), y + radius * math.sin(a)
vertices.append(vertex)
return vertices
def get_circle_triangles(center=(0.0, 0.0), radius=1.0, angle=0.0, count=8):
vertices = get_circle_vertices(center, radius, angle, count)
triangles = []
for i in xrange(count):
j = (i + 1) % count
triangle = center, vertices[i], vertices[j]
triangles.append(triangle)
return triangles
def get_polygon_triangles(vertices):
centroid = get_polygon_centroid(vertices)
triangles = []
for i in xrange(len(vertices)):
j = (i + 1) % len(vertices)
triangle = centroid, vertices[i], vertices[j]
triangles.append(triangle)
return triangles
# TODO: calculate actual centroid
def get_polygon_centroid(vertices):
total_x = sum(x for x, y in vertices)
total_y = sum(y for x, y in vertices)
return total_x / float(len(vertices)), total_y / float(len(vertices))
def get_vertex_normals(p1, p2, p3):
x1, y1 = p1
x2, y2 = p2
x3, y3 = p3
n1 = 0.0, 0.0, 1.0
n2 = normalize((x2 - x1, y2 - y1, 0.0))
n3 = normalize((x3 - x1, y3 - y1, 0.0))
return [n1, n2, n3]
def get_face_normal(p1, p2, p3):
x1, y1 = p1
x2, y2 = p2
x3, y3 = p3
x = (x2 + x3) / 2.0
y = (y2 + y3) / 2.0
nx, ny = x - x1, y - y1
nz = abs((nx + ny) / 2.0)
return normalize((nx, ny, nz))
def normalize(v):
x, y, z = v
if x or y or z:
length = math.sqrt(x * x + y * y + z * z)
assert length
return x / length, y / length, z / length
else:
return 0.0, 0.0, 0.0
# TODO: linear interpolation for float coordinates
def sample_image(image_data, x, y):
assert isinstance(image_data, pyglet.image.ImageData)
assert image_data.format in ('RGB', 'RGBA')
i = int(y) * image_data.pitch + int(x) * len(image_data.format)
pixel = image_data.data[i:i + len(image_data.format)]
return tuple(map(char_to_float, pixel))
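# Hedged example of the helpers above (values rounded):
#   get_circle_vertices(radius=1.0, count=4)
#   -> [(1.0, 0.0), (0.0, 1.0), (-1.0, 0.0), (0.0, -1.0)]
# i.e. four points evenly spaced around the unit circle, starting at angle 0.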
|
elemel/boxlet
|
lib/boxlet/utils.py
|
Python
|
mit
| 2,714
|
# -*- coding: utf-8 -*-
# Create the blueprint in __init__.py
from flask import Blueprint
auth = Blueprint('auth', __name__)  # the argument is the blueprint's name
from . import views
|
wangxiaoyangwz/WANG
|
app/auth/__init__.py
|
Python
|
mit
| 157
|
# -*- coding: utf-8 -*-
from django.contrib.auth.models import User
from allauth.socialaccount.models import SocialAccount
from rest_framework import serializers
class UserSerializer(serializers.ModelSerializer):
username = serializers.CharField(source='username', read_only=True)
avatar_url = serializers.URLField(source='socialaccount_set.get.get_avatar_url', read_only=True)
class Meta:
model = User
fields = ('username', 'avatar_url')
|
woojing/pairgramming
|
dj_backend/api/serializers.py
|
Python
|
mit
| 468
|
# This file is part of Indico.
# Copyright (C) 2002 - 2021 CERN
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the MIT License; see the
# LICENSE file for more details.
from blinker import Namespace
_signals = Namespace()
print_badge_template = _signals.signal('print-badge-template', '''
Called when printing a badge template.
The registration form is passed in the `regform` kwarg. The list of registration
objects is passed in the `registrations` kwarg and may be modified.
''')
|
ThiefMaster/indico
|
indico/core/signals/event/designer.py
|
Python
|
mit
| 534
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# matrixscreener documentation build configuration file, created by
# sphinx-quickstart on Fri Nov 21 17:05:46 2014.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
from unittest.mock import MagicMock
# mock dependencies
class Mock(MagicMock):
@classmethod
def __getattr__(cls, name):
return Mock()
#sys.modules.update((mod_name, Mock()) for mod_name in MOCK_MODULES)
# show __init__ docstrings
autoclass_content = 'both'
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath('..'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.viewcode',
'sphinxcontrib.napoleon', # use numpy docstrings
]
napoleon_use_ivar = True
napoleon_use_rtype = False
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'matrixscreener'
copyright = '2014, Arve Seljebu'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.6.1'
# The full version, including alpha/beta/rc tags.
release = '0.6.1'
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = False
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
html_show_sourcelink = False
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'matrixscreenerdoc'
|
arve0/matrixscreener
|
doc/conf.py
|
Python
|
mit
| 6,271
|
from django.apps import AppConfig
class FeedsConfig(AppConfig):
name = 'feeds'
def ready(self):
import feeds.signals # noqa
|
drgarcia1986/pbb
|
pbb/feeds/apps.py
|
Python
|
mit
| 144
|
def initialize(time):
pass
def apply_rate(time):
return [(0, 1)] # Rate 0 (1 Mbps), 1 attempt
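# Hedged note: each (rate_index, attempts) pair asks the simulator to try that
# rate the given number of times before moving on, so e.g. [(2, 3), (0, 1)]
# would mean "rate 2 three times, then rate 0 once" under that assumption.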
def process_feedback(status, timestamp, delay, tries):
pass
|
pavpanchekha/bitrate-lab
|
pysim/minimal.py
|
Python
|
mit
| 168
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# vim: ai ts=4 sts=4 et sw=4 nu
from __future__ import (unicode_literals, absolute_import,
division, print_function)
import logging
from django.contrib import admin
from ivr.models import Report
logger = logging.getLogger(__name__)
admin.site.register(Report)
|
yeleman/cline
|
ivr/admin.py
|
Python
|
mit
| 336
|
import sys, os
def _component_backend_module():
module_name = os.environ.get('AKUNA_COMPONENT_BACKEND') or 'akuna.component.backends.basic'
__import__(module_name)
return sys.modules[module_name]
def register_comp(*args, **kwargs):
component_mod = _component_backend_module()
component_mod.register_comp(*args, **kwargs)
def register_component(*args, **kwargs):
if args:
# register_comp does not take args (only kwargs) so args will only
# contain <component> being registered if
# - @register_component decorator without arguments OR
# - direct call to register_component(<comp>, ...)
register_comp(*args, **kwargs)
return
def wrapped_component(component):
register_comp(component, *args, **kwargs)
return component
return wrapped_component
def query_component(*args, **kwargs):
component_mod = _component_backend_module()
return component_mod.query_component(*args, **kwargs)
def get_component(*args, **kwargs):
component_mod = _component_backend_module()
return component_mod.get_component(*args, **kwargs)
def filter_components(*args, **kwargs):
component_mod = _component_backend_module()
return component_mod.filter_components(*args, **kwargs)
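# --- Hedged usage sketch (illustration only) ---
# The decorator form without arguments, as described in register_component's
# comments; how components are later looked up depends on the configured
# backend, so query_component's arguments are not shown here.
#
#   @register_component
#   class CSVReader(object):
#       pass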
|
stana/akuna-component
|
akuna/component/_api.py
|
Python
|
mit
| 1,284
|
"""Base Resource classes for Tastypie-based API"""
# Significant portions of this code are based on Tastypie
# (http://tastypieapi.org)
#
# This is mostly because I had to patch methods in the Tastypie API
# to provide additional hooks or workarounds.
#
# Tastypie's license is as follows:
#
# Copyright (c) 2010, Daniel Lindsley
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the tastypie nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL tastypie BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from cStringIO import StringIO
import base64
import os
import re
from django.conf import settings
from django.core.exceptions import ObjectDoesNotExist, MultipleObjectsReturned
from django.core.files.uploadedfile import InMemoryUploadedFile
from django.http import HttpResponse
from tastypie import fields, http
from tastypie.bundle import Bundle
from tastypie.exceptions import ImmediateHttpResponse, NotFound
from tastypie.resources import (ModelResource,
convert_post_to_patch)
from tastypie.utils import dict_strip_unicode_keys
from tastypie.utils.mime import build_content_type
class MultipartFileUploadModelResource(ModelResource):
"""
A version of ModelResource that accepts file uploads via
multipart forms.
Based on Work by Michael Wu and Philip Smith.
See https://github.com/toastdriven/django-tastypie/pull/606
This resource class also supports wrapping a serialized response in
a TEXTAREA element for use with the jQuery IFRAME Transport. See
http://cmlenz.github.com/jquery-iframe-transport/
"""
def deserialize(self, request, data, format='application/json'):
"""
Given a request, data and a format, deserializes the given data.
It relies on the request properly sending a ``CONTENT_TYPE`` header,
falling back to ``application/json`` if not provided.
Mostly a hook, this uses the ``Serializer`` from ``Resource._meta``.
"""
        # If the format of the request is multipart (e.g. multipart/form-data),
        # ignore the data attribute and just grab the data to deserialize
        # directly from the request
if format.startswith('multipart'):
deserialized = request.POST.copy()
deserialized.update(request.FILES)
else:
deserialized = self._meta.serializer.deserialize(data, format=request.META.get('CONTENT_TYPE', 'application/json'))
return deserialized
def put_detail(self, request, **kwargs):
"""
Either updates an existing resource or creates a new one with the
provided data.
Calls ``obj_update`` with the provided data first, but falls back to
``obj_create`` if the object does not already exist.
If a new resource is created, return ``HttpCreated`` (201 Created).
If ``Meta.always_return_data = True``, there will be a populated body
of serialized data.
If an existing resource is modified and
``Meta.always_return_data = False`` (default), return ``HttpNoContent``
(204 No Content).
If an existing resource is modified and
``Meta.always_return_data = True``, return ``HttpAccepted`` (202
Accepted).
"""
fmt = request.META.get('CONTENT_TYPE', 'application/json')
if fmt.startswith('multipart'):
body = None
else:
body = request.body
deserialized = self.deserialize(request, body, format=fmt)
deserialized = self.alter_deserialized_detail_data(request, deserialized)
bundle = self.build_bundle(data=dict_strip_unicode_keys(deserialized), request=request)
try:
updated_bundle = self.obj_update(bundle=bundle, **self.remove_api_resource_names(kwargs))
if not self._meta.always_return_data:
return http.HttpNoContent()
else:
updated_bundle = self.full_dehydrate(updated_bundle)
updated_bundle = self.alter_detail_data_to_serialize(request, updated_bundle)
return self.create_response(request, updated_bundle, response_class=http.HttpAccepted)
except (NotFound, MultipleObjectsReturned):
updated_bundle = self.obj_create(bundle=bundle, **self.remove_api_resource_names(kwargs))
location = self.get_resource_uri(updated_bundle)
if not self._meta.always_return_data:
return http.HttpCreated(location=location)
else:
updated_bundle = self.full_dehydrate(updated_bundle)
updated_bundle = self.alter_detail_data_to_serialize(request, updated_bundle)
return self.create_response(request, updated_bundle, response_class=http.HttpCreated, location=location)
def iframed_request(self, request):
"""
Checks if the request was issued from an IFRAME
When being called from an IFRAME, an ``iframe`` parameter should be
added to the querystring.
"""
if not request:
return False
return 'iframe' in request.GET
def wrap_in_textarea(self, format, body):
"""
Wrap response text in a textarea
This allows the jQuery Iframe transport to detect the content type.
"""
return '<textarea data-type="%s">%s</textarea>' % (format, body)
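        # For example, wrap_in_textarea('application/json', '{"id": 1}') returns
        # '<textarea data-type="application/json">{"id": 1}</textarea>'.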
def serialize(self, request, data, format, options=None):
serialized = super(MultipartFileUploadModelResource, self).serialize(
request, data, format, options)
if not self.iframed_request(request):
return serialized
else:
return self.wrap_in_textarea(format, serialized)
def build_content_type(self, request, desired_format):
"""Always return 'text/html' when the request is from an IFRAME"""
if self.iframed_request(request):
return 'text/html'
return build_content_type(desired_format)
def create_response(self, request, data, response_class=HttpResponse, **response_kwargs):
"""
Extracts the common "which-format/serialize/return-response" cycle.
This version overrides the content type header to be 'text/html' if
the request originates from an IFRAME.
"""
desired_format = self.determine_format(request)
serialized = self.serialize(request, data, desired_format)
content_type = self.build_content_type(request, desired_format)
return response_class(content=serialized, content_type=content_type, **response_kwargs)
class HookedModelResource(MultipartFileUploadModelResource):
"""
A version of ModelResource with extra actions at various points in
the pipeline
This allows for doing things like creating related translation model
instances or doing row-level authorization checks in a DRY way since
most of the logic for the core logic of the request/response cycle
remains the same as ModelResource.
"""
def bundle_obj_setattr(self, bundle, key, value):
"""Hook for setting attributes of the bundle's object
This is useful if additional bundle objects also need to be modified
in addition to the core object.
"""
setattr(bundle.obj, key, value)
def get_object_class(self, bundle=None, **kwargs):
"""Get the resource's object class dynamically
By default just returns ``object_class`` as defined in the resource
declaration, but this can be overridden in subclasses to do something
more interesting.
"""
return self._meta.object_class
def post_bundle_obj_construct(self, bundle, **kwargs):
"""Hook executed after the object is constructed, but not saved"""
pass
def pre_bundle_obj_hydrate(self, bundle, **kwargs):
"""Hook executed before the bundle is hydrated"""
pass
def post_bundle_obj_hydrate(self, bundle):
"""Hook executed after the bundle is hydrated"""
pass
def post_bundle_obj_save(self, bundle, **kwargs):
"""Hook executed after the bundle is saved"""
pass
def post_bundle_obj_get(self, bundle):
"""Hook executed after the object is retrieved"""
pass
def post_obj_get(self, obj, **kwargs):
pass
def post_full_hydrate(self, obj, **kwargs):
pass
def full_hydrate(self, bundle):
"""
Given a populated bundle, distill it and turn it back into
a full-fledged object instance.
"""
if bundle.obj is None:
bundle.obj = self._meta.object_class()
self.post_bundle_obj_construct(bundle)
bundle = self.hydrate(bundle)
self.post_bundle_obj_hydrate(bundle)
for field_name, field_object in self.fields.items():
if field_object.readonly is True:
continue
# Check for an optional method to do further hydration.
method = getattr(self, "hydrate_%s" % field_name, None)
if method:
bundle = method(bundle)
if field_object.attribute:
value = field_object.hydrate(bundle)
# NOTE: We only get back a bundle when it is related field.
if isinstance(value, Bundle) and value.errors.get(field_name):
bundle.errors[field_name] = value.errors[field_name]
if value is not None or field_object.null:
# We need to avoid populating M2M data here as that will
# cause things to blow up.
if not getattr(field_object, 'is_related', False):
self.bundle_obj_setattr(bundle, field_object.attribute, value)
elif not getattr(field_object, 'is_m2m', False):
if value is not None:
setattr(bundle.obj, field_object.attribute, value.obj)
elif field_object.blank:
continue
elif field_object.null:
setattr(bundle.obj, field_object.attribute, value)
return bundle
def save(self, bundle, skip_errors=False, **kwargs):
self.is_valid(bundle)
if bundle.errors and not skip_errors:
raise ImmediateHttpResponse(response=self.error_response(bundle.request, bundle.errors))
# Check if they're authorized.
if bundle.obj.pk:
self.authorized_update_detail(self.get_object_list(bundle.request), bundle)
else:
self.authorized_create_detail(self.get_object_list(bundle.request), bundle)
# Save FKs just in case.
self.save_related(bundle)
# Save the main object.
bundle.obj.save()
self.post_bundle_obj_save(bundle, **kwargs)
bundle.objects_saved.add(self.create_identifier(bundle.obj))
# Now pick up the M2M bits.
m2m_bundle = self.hydrate_m2m(bundle)
self.save_m2m(m2m_bundle)
return bundle
def obj_create(self, bundle, **kwargs):
"""
        An ORM-specific implementation of ``obj_create``.
"""
object_class = self.get_object_class(bundle, **kwargs)
bundle.obj = object_class()
self.post_bundle_obj_construct(bundle, **kwargs)
for key, value in kwargs.items():
self.bundle_obj_setattr(bundle, key, value)
self.authorized_create_detail(self.get_object_list(bundle.request), bundle)
self.pre_bundle_obj_hydrate(bundle, **kwargs)
bundle = self.full_hydrate(bundle)
self.post_full_hydrate(bundle, **kwargs)
return self.save(bundle)
def lookup_kwargs_with_identifiers(self, bundle, kwargs):
"""
Kwargs here represent uri identifiers Ex: /repos/<user_id>/<repo_name>/
We need to turn those identifiers into Python objects for generating
lookup parameters that can find them in the DB
"""
lookup_kwargs = {}
bundle.obj = self.get_object_list(bundle.request).model()
self.post_bundle_obj_construct(bundle, **kwargs)
# Override data values, we rely on uri identifiers
bundle.data.update(kwargs)
# We're going to manually hydrate, as opposed to calling
# ``full_hydrate``, to ensure we don't try to flesh out related
# resources & keep things speedy.
bundle = self.hydrate(bundle)
for identifier in kwargs:
if identifier == self._meta.detail_uri_name:
lookup_kwargs[identifier] = kwargs[identifier]
continue
field_object = self.fields[identifier]
# Skip readonly or related fields.
if field_object.readonly is True or getattr(field_object, 'is_related', False):
continue
# Check for an optional method to do further hydration.
method = getattr(self, "hydrate_%s" % identifier, None)
if method:
bundle = method(bundle)
if field_object.attribute:
value = field_object.hydrate(bundle)
lookup_kwargs[identifier] = value
return lookup_kwargs
def obj_update(self, bundle, skip_errors=False, **kwargs):
"""
        An ORM-specific implementation of ``obj_update``.
"""
if not bundle.obj or not self.get_bundle_detail_data(bundle):
try:
lookup_kwargs = self.lookup_kwargs_with_identifiers(bundle, kwargs)
except:
# if there is trouble hydrating the data, fall back to just
# using kwargs by itself (usually it only contains a "pk" key
# and this will work fine.
lookup_kwargs = kwargs
try:
bundle.obj = self.obj_get(bundle, **lookup_kwargs)
self.post_obj_get(bundle.obj)
except ObjectDoesNotExist:
raise NotFound("A model instance matching the provided arguments could not be found.")
self.authorized_update_detail(self.get_object_list(bundle.request), bundle)
bundle = self.full_hydrate(bundle)
self.post_full_hydrate(bundle, **kwargs)
return self.save(bundle, skip_errors=skip_errors)
def obj_delete(self, bundle, **kwargs):
"""
        An ORM-specific implementation of ``obj_delete``.
Takes optional ``kwargs``, which are used to narrow the query to find
the instance.
"""
if not hasattr(bundle.obj, 'delete'):
try:
bundle.obj = self.obj_get(bundle=bundle, **kwargs)
self.post_obj_get(bundle.obj)
except ObjectDoesNotExist:
raise NotFound("A model instance matching the provided arguments could not be found.")
self.authorized_delete_detail(self.get_object_list(bundle.request), bundle)
bundle.obj.delete()
def patch_detail(self, request, **kwargs):
"""
Updates a resource in-place.
Calls ``obj_update``.
If the resource is updated, return ``HttpAccepted`` (202 Accepted).
If the resource did not exist, return ``HttpNotFound`` (404 Not Found).
"""
request = convert_post_to_patch(request)
basic_bundle = self.build_bundle(request=request)
# We want to be able to validate the update, but we can't just pass
# the partial data into the validator since all data needs to be
# present. Instead, we basically simulate a PUT by pulling out the
# original data and updating it in-place.
# So first pull out the original object. This is essentially
# ``get_detail``.
try:
obj = self.cached_obj_get(bundle=basic_bundle, **self.remove_api_resource_names(kwargs))
except ObjectDoesNotExist:
return http.HttpNotFound()
except MultipleObjectsReturned:
return http.HttpMultipleChoices("More than one resource is found at this URI.")
self.post_obj_get(obj)
bundle = self.build_bundle(obj=obj, request=request)
bundle = self.full_dehydrate(bundle)
bundle = self.alter_detail_data_to_serialize(request, bundle)
# Now update the bundle in-place.
deserialized = self.deserialize(request, request.body, format=request.META.get('CONTENT_TYPE', 'application/json'))
self.update_in_place(request, bundle, deserialized)
        # TODO: Check if this try/except is necessary
#try:
# self.update_in_place(request, bundle, deserialized)
#except ObjectDoesNotExist:
# return http.HttpNotFound()
#
if not self._meta.always_return_data:
return http.HttpAccepted()
else:
bundle = self.full_dehydrate(bundle)
bundle = self.alter_detail_data_to_serialize(request, bundle)
return self.create_response(request, bundle, response_class=http.HttpAccepted)
def apply_request_kwargs(self, obj_list, bundle, **kwargs):
"""
Hook for altering the default object list based on keyword
arguments
"""
return obj_list
def obj_get_list(self, bundle, **kwargs):
"""
Modify the default queryset based on keyword arguments
"""
obj_list = super(HookedModelResource, self).obj_get_list(bundle, **kwargs)
return self.apply_request_kwargs(obj_list, bundle, **kwargs)
class TranslatedModelResource(HookedModelResource):
"""A version of ModelResource that handles our translation implementation"""
language = fields.CharField(attribute='language', default=settings.LANGUAGE_CODE)
languages = fields.ListField(readonly=True)
def dehydrate_languages(self, bundle):
return bundle.obj.get_language_info()
def post_bundle_obj_construct(self, bundle, **kwargs):
"""
Create a translation object and add it to the bundle
"""
object_class = self.get_object_class(bundle, **kwargs)
translation_class = object_class.translation_class
bundle.translation_obj = translation_class()
def post_bundle_obj_save(self, bundle, **kwargs):
"""
Associate the translation object with its parent and save
"""
object_class = self._meta.object_class
fk_field_name = object_class.get_translation_fk_field_name()
setattr(bundle.translation_obj, fk_field_name, bundle.obj)
bundle.translation_obj.save()
# Update the translation object cache in the bundle object
# so further steps will get our fresh data
bundle.obj.set_translation_cache_item(bundle.translation_obj.language, bundle.translation_obj)
def post_bundle_obj_hydrate(self, bundle):
"""
Get the associated translation model instance
"""
if bundle.obj.pk:
language = bundle.data.get('language', self.fields['language'].default)
translation_set = getattr(bundle.obj, bundle.obj.translation_set)
bundle.translation_obj = translation_set.get(language=language)
def _get_translation_fields(self, bundle):
object_class = self.get_object_class(bundle)
return object_class.translated_fields + ['language']
def bundle_obj_setattr(self, bundle, key, value):
if not hasattr(bundle, 'translation_fields'):
bundle.translation_fields = self._get_translation_fields(bundle)
if key in bundle.translation_fields:
setattr(bundle.translation_obj, key, value)
else:
setattr(bundle.obj, key, value)
def put_detail(self, request, **kwargs):
try:
return super(TranslatedModelResource, self).put_detail(request, **kwargs)
except ObjectDoesNotExist:
return http.HttpNotFound()
def patch_detail(self, request, **kwargs):
try:
return super(TranslatedModelResource, self).patch_detail(request, **kwargs)
except ObjectDoesNotExist:
return http.HttpNotFound()
class DataUriResourceMixin(object):
def parse_data_uri(self, data_uri):
"""
Parse a data URI string
Returns a tuple of (mime_type, encoding, data) represented in the URI
See http://tools.ietf.org/html/rfc2397
"""
pattern = r"data:(?P<mime>[\w/]+);(?P<encoding>\w+),(?P<data>.*)"
m = re.search(pattern, data_uri)
return (m.group('mime'), m.group('encoding'), m.group('data'))
def _hydrate_file(self, bundle, file_model_class, file_field,
filename_field='filename'):
"""Decode the base-64 encoded file"""
def file_size(f):
f.seek(0, os.SEEK_END)
return f.tell()
file_uri = bundle.data.get(file_field, None)
if file_uri:
(content_type, encoding, data) = self.parse_data_uri(
file_uri)
filename = bundle.data.get(filename_field)
f = StringIO()
f.write(base64.b64decode(data))
size = file_size(f)
file = InMemoryUploadedFile(file=f, field_name=None,
name=filename,
content_type=content_type,
size=size, charset=None)
file_model = file_model_class.objects.create(file=file)
bundle.data[file_field] = file_model
f.close()
return bundle
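# Hedged usage sketch (not part of the original module): what parse_data_uri()
# returns for a small base64 data URI (RFC 2397). The FileResource class below
# is a hypothetical subclass used only for illustration.
#
# class FileResource(DataUriResourceMixin, HookedModelResource):
#     pass
#
# mime, encoding, data = FileResource().parse_data_uri(
#     "data:text/plain;base64,aGVsbG8gd29ybGQ=")
# # mime == 'text/plain', encoding == 'base64'
# # base64.b64decode(data) == 'hello world'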
|
denverfoundation/storybase
|
apps/storybase/api/resources.py
|
Python
|
mit
| 22,939
|
# -*- coding:utf-8 -*-
import sys
reload(sys)
sys.setdefaultencoding('utf-8')
from functools import wraps
from flask import abort
from flask.ext.login import current_user
from .models import Permission
def permission_required(permission):
def decorator(f):
@wraps(f)
def decorated_function(*args, **kwargs):
if not current_user.can(permission):
abort(403)
return f(*args, **kwargs)
return decorated_function
return decorator
def admin_required(f):
return permission_required(Permission.ADMINISTER)(f)
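# Hedged usage sketch (not part of the original module): how the decorators
# above are typically applied to Flask views. The blueprint and the
# Permission.MODERATE_COMMENTS constant are hypothetical names used only for
# illustration.
#
# from flask import Blueprint
# from .decorators import admin_required, permission_required
# from .models import Permission
#
# main = Blueprint('main', __name__)
#
# @main.route('/moderate')
# @permission_required(Permission.MODERATE_COMMENTS)
# def moderate():
#     return "For users with moderation permission only"
#
# @main.route('/admin')
# @admin_required
# def admin_dashboard():
#     return "For administrators only"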
|
Kopei/manualscore
|
app/decorators.py
|
Python
|
mit
| 581
|
#!venv/bin/python
from FlaskMedia import app
app.run(debug=True, port=5001)
|
samcheck/PyMedia
|
runFlaskMedia.py
|
Python
|
mit
| 76
|
# -*- coding: utf-8 -*-
import re
from .constants import (
PUNCT_SYMBOLS,
ABBR,
MONTHS,
UNDECIDED,
SHOULD_SPLIT,
SHOULD_NOT_SPLIT
)
from .regular_expressions import (
word_with_period,
no_punctuation,
numerical_expression,
repeated_dash_converter,
dash_converter,
pure_whitespace,
left_quote_shifter,
left_quote_converter,
one_letter_long_or_repeating,
left_single_quote_converter,
remaining_quote_converter,
english_nots,
english_contractions,
english_specific_appendages,
french_appendages,
right_single_quote_converter,
simple_dash_finder,
advanced_dash_finder,
url_file_finder,
shifted_ellipses,
shifted_standard_punctuation,
multi_single_quote_finder
)
def protect_shorthand(text, split_locations):
"""
Annotate locations in a string that contain
periods as being true periods or periods
that are a part of shorthand (and thus should
not be treated as punctuation marks).
Arguments:
----------
text : str
split_locations : list<int>, same length as text.
"""
word_matches = list(re.finditer(word_with_period, text))
total_words = len(word_matches)
for i, match in enumerate(word_matches):
match_start = match.start()
match_end = match.end()
for char_pos in range(match_start, match_end):
if split_locations[char_pos] == SHOULD_SPLIT and match_end - char_pos > 1:
match_start = char_pos
word = text[match_start:match_end]
if not word.endswith('.'):
# ensure that words contained within other words:
# e.g. 'chocolate.Mountains of' -> 'chocolate. Mountains of'
if (not word[0].isdigit() and
split_locations[match_start] == UNDECIDED):
split_locations[match_start] = SHOULD_SPLIT
continue
period_pos = match_end - 1
# this is not the last word, abbreviation
# is not the final period of the sentence,
# moreover:
word_is_in_abbr = word[:-1].lower() in ABBR
is_abbr_like = (
word_is_in_abbr or
one_letter_long_or_repeating.match(word[:-1]) is not None
)
is_digit = False if is_abbr_like else word[:-1].isdigit()
is_last_word = i == (total_words - 1)
is_ending = is_last_word and (match_end == len(text) or text[match_end:].isspace())
is_not_ending = not is_ending
abbreviation_and_not_end = (
len(word) > 1 and
is_abbr_like and
is_not_ending
)
if abbreviation_and_not_end and (
(not is_last_word and word_matches[i+1].group(0)[0].islower()) or
(not is_last_word and word_matches[i+1].group(0) in PUNCT_SYMBOLS) or
word[0].isupper() or
word_is_in_abbr or
len(word) == 2):
# next word is lowercase (e.g. not a new sentence?), or next word
# is punctuation or next word is totally uppercase (e.g. 'Mister.
# ABAGNALE called to the stand')
if split_locations[period_pos] == SHOULD_SPLIT and period_pos + 1 < len(split_locations):
split_locations[period_pos + 1] = SHOULD_SPLIT
split_locations[period_pos] = SHOULD_NOT_SPLIT
elif (is_digit and
len(word[:-1]) <= 2 and
not is_last_word and
word_matches[i+1].group(0).lower() in MONTHS):
# a date or weird number with a period:
if split_locations[period_pos] == SHOULD_SPLIT and period_pos + 1 < len(split_locations):
split_locations[period_pos + 1] = SHOULD_SPLIT
split_locations[period_pos] = SHOULD_NOT_SPLIT
elif split_locations[period_pos] == UNDECIDED:
# split this period into its own segment:
split_locations[period_pos] = SHOULD_SPLIT
def split_with_locations(text, locations):
"""
Use an integer list to split the string
contained in `text`.
Arguments:
----------
text : str, same length as locations.
locations : list<int>, contains values
'SHOULD_SPLIT', 'UNDECIDED', and
'SHOULD_NOT_SPLIT'. Will create
strings between each 'SHOULD_SPLIT'
locations.
Returns:
--------
Generator<str> : the substrings of text
corresponding to the slices given
in locations.
"""
start = 0
for pos, decision in enumerate(locations):
if decision == SHOULD_SPLIT:
if start != pos:
yield text[start:pos]
start = pos
if start != len(text):
yield text[start:]
def mark_regex(regex, text, split_locations):
"""
Regex that adds a 'SHOULD_SPLIT' marker at the end
location of each matching group of the given regex.
Arguments
---------
regex : re.Expression
text : str, same length as split_locations
split_locations : list<int>, split decisions.
"""
for match in regex.finditer(text):
end_match = match.end()
if end_match < len(split_locations):
split_locations[end_match] = SHOULD_SPLIT
def mark_begin_end_regex(regex, text, split_locations):
"""
Regex that adds a 'SHOULD_SPLIT' marker at the end
location of each matching group of the given regex,
and adds a 'SHOULD_SPLIT' at the beginning of the
matching group. Each character within the matching
group will be marked as 'SHOULD_NOT_SPLIT'.
Arguments
---------
regex : re.Expression
text : str, same length as split_locations
split_locations : list<int>, split decisions.
"""
for match in regex.finditer(text):
end_match = match.end()
begin_match = match.start()
for i in range(begin_match+1, end_match):
split_locations[i] = SHOULD_NOT_SPLIT
if end_match < len(split_locations):
if split_locations[end_match] == UNDECIDED:
split_locations[end_match] = SHOULD_SPLIT
if split_locations[begin_match] == UNDECIDED:
split_locations[begin_match] = SHOULD_SPLIT
def tokenize(text, normalize_ascii=True):
"""
Convert a single string into a list of substrings
split along punctuation and word boundaries. Keep
whitespace intact by always attaching it to the
previous token.
Arguments:
----------
text : str
normalize_ascii : bool, perform some replacements
on non-ascii characters to canonicalize the
string (defaults to True).
Returns:
--------
list<str>, list of substring tokens.
"""
# 1. If there's no punctuation, return immediately
if no_punctuation.match(text):
return [text]
# 2. let's standardize the input text to ascii (if desired)
# Note: this will no longer respect input-to-output character positions
if normalize_ascii:
# normalize these greco-roman characters to ascii:
text = text.replace(u"œ", "oe").replace(u"æ", "ae")
# normalize dashes:
text = repeated_dash_converter.sub("-", text)
# 3. let's construct an integer array of the possible split locations:
split_locations = [UNDECIDED] * len(text)
regexes = (
pure_whitespace,
left_quote_shifter,
left_quote_converter,
left_single_quote_converter,
remaining_quote_converter,
# regex can't fix this -> regex ca n't fix this
english_nots,
# you'll dig this -> you 'll dig this
english_contractions,
# the rhino's horns -> the rhino 's horns
english_specific_appendages,
# qu'a tu fais au rhino -> qu ' a tu fais au rhino,
french_appendages
)
# 4. Mark end locations for specific regular expressions:
for regex in regexes:
mark_regex(regex, text, split_locations)
begin_end_regexes = (
multi_single_quote_finder,
right_single_quote_converter,
# use dashes as the breakpoint:
# the rhino--truck -> the rhino -- truck
simple_dash_finder if normalize_ascii else advanced_dash_finder,
numerical_expression,
url_file_finder,
shifted_ellipses,
# the #rhino! -> the # rhino ! ;
# the rino[sic] -> the rino [ sic ]
shifted_standard_punctuation
)
# 5. Mark begin and end locations for other regular expressions:
for regex in begin_end_regexes:
mark_begin_end_regex(regex, text, split_locations)
# 6. Remove splitting on exceptional uses of periods:
# I'm with Mr. -> I 'm with Mr. , I'm with Mister. -> I 'm with Mister .
protect_shorthand(text, split_locations)
if normalize_ascii:
text = dash_converter.sub("-", text)
# 7. Return the split string using the integer list:
return list(split_with_locations(text, split_locations))
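# Hedged usage sketch (not part of the original module; the relative imports
# above mean this file only runs inside its package): whitespace stays attached
# to the preceding token, so joining the tokens reproduces the (possibly
# ASCII-normalized) input string.
#
# from ciseau.word_tokenizer import tokenize
# tokens = tokenize("Mr. Smith isn't here, he left for the U.S. yesterday.")
# print(tokens)
# print("".join(tokens))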
|
JonathanRaiman/xml_cleaner
|
ciseau/word_tokenizer.py
|
Python
|
mit
| 9,036
|
import pytest
from tadman import path_tools
def test_ending_slash_removal():
tests = ['/home/squid', '/path/to/something/', '/var/lib/foo/bar']
results = ['/home/squid', '/path/to/something', '/var/lib/foo/bar']
for x in range(len(tests)):
assert path_tools.last_slash_check(tests[x]) == results[x]
def test_name_version_split():
test_names = ['openbox-3.6.1', 'htop']
results = [('openbox', '3.6.1'), ('htop', '')]
for y in range(len(test_names)):
assert path_tools.name_version_split(test_names[y]) == results[y]
|
KeepPositive/Tadman
|
tests/test_path.py
|
Python
|
mit
| 570
|
# flake8: noqa
import sys
import codecs
import array
from functools import reduce
import numpy as np
def ensure_ndarray(buf):
"""Convenience function to coerce `buf` to a numpy array, if it is not already a
numpy array.
Parameters
----------
buf : array-like or bytes-like
A numpy array or any object exporting a buffer interface.
Returns
-------
arr : ndarray
A numpy array, sharing memory with `buf`.
Notes
-----
This function will not create a copy under any circumstances, it is guaranteed to
return a view on memory exported by `buf`.
"""
if isinstance(buf, np.ndarray):
# already a numpy array
arr = buf
elif isinstance(buf, array.array) and buf.typecode in 'cu':
# Guard condition, do not support array.array with unicode type, this is
# problematic because numpy does not support it on all platforms. Also do not
# support char as it was removed in Python 3.
raise TypeError('array.array with char or unicode type is not supported')
else:
# N.B., first take a memoryview to make sure that we subsequently create a
# numpy array from a memory buffer with no copy
mem = memoryview(buf)
# instantiate array from memoryview, ensures no copy
arr = np.array(mem, copy=False)
return arr
def ensure_contiguous_ndarray(buf, max_buffer_size=None):
"""Convenience function to coerce `buf` to a numpy array, if it is not already a
numpy array. Also ensures that the returned value exports fully contiguous memory,
and supports the new-style buffer interface. If the optional max_buffer_size is
provided, raise a ValueError if the number of bytes consumed by the returned
array exceeds this value.
Parameters
----------
buf : array-like or bytes-like
A numpy array or any object exporting a buffer interface.
max_buffer_size : int
If specified, the largest allowable value of arr.nbytes, where arr
is the returned array.
Returns
-------
arr : ndarray
A numpy array, sharing memory with `buf`.
Notes
-----
This function will not create a copy under any circumstances, it is guaranteed to
return a view on memory exported by `buf`.
"""
# ensure input is a numpy array
arr = ensure_ndarray(buf)
# check for object arrays, these are just memory pointers, actual memory holding
# item data is scattered elsewhere
if arr.dtype == object:
raise TypeError('object arrays are not supported')
# check for datetime or timedelta ndarray, the buffer interface doesn't support those
if arr.dtype.kind in 'Mm':
arr = arr.view(np.int64)
# check memory is contiguous, if so flatten
if arr.flags.c_contiguous or arr.flags.f_contiguous:
# can flatten without copy
arr = arr.reshape(-1, order='A')
else:
raise ValueError('an array with contiguous memory is required')
if max_buffer_size is not None and arr.nbytes > max_buffer_size:
msg = "Codec does not support buffers of > {} bytes".format(max_buffer_size)
raise ValueError(msg)
return arr
def ensure_bytes(buf):
"""Obtain a bytes object from memory exposed by `buf`."""
if not isinstance(buf, bytes):
# go via numpy, for convenience
arr = ensure_ndarray(buf)
# check for object arrays, these are just memory pointers,
# actual memory holding item data is scattered elsewhere
if arr.dtype == object:
raise TypeError('object arrays are not supported')
# create bytes
buf = arr.tobytes(order='A')
return buf
def ensure_text(s, encoding='utf-8'):
if not isinstance(s, str):
s = ensure_contiguous_ndarray(s)
s = codecs.decode(s, encoding)
return s
def ndarray_copy(src, dst):
"""Copy the contents of the array from `src` to `dst`."""
if dst is None:
# no-op
return src
# ensure ndarrays
src = ensure_ndarray(src)
dst = ensure_ndarray(dst)
# flatten source array
src = src.reshape(-1, order='A')
# ensure same data type
if dst.dtype != object:
src = src.view(dst.dtype)
# reshape source to match destination
if src.shape != dst.shape:
if dst.flags.f_contiguous:
order = 'F'
else:
order = 'C'
src = src.reshape(dst.shape, order=order)
# copy via numpy
np.copyto(dst, src)
return dst
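# Hedged usage sketch (not part of the original module): a small round trip
# through the helpers above -- take a no-copy view, serialize it to bytes, then
# copy the bytes back into a destination array of the original dtype.
if __name__ == '__main__':
    data = np.arange(10, dtype='<i4')
    view = ensure_contiguous_ndarray(data)   # flattened view, no copy
    raw = ensure_bytes(view)                 # 40 raw bytes
    out = np.empty_like(data)
    ndarray_copy(raw, out)                   # bytes reinterpreted as int32
    print(np.array_equal(data, out))         # True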
|
zarr-developers/numcodecs
|
numcodecs/compat.py
|
Python
|
mit
| 4,557
|
# coding: utf-8
# (c) 2015-2020 Teruhisa Okada
from add_masklines import add_masklines
from basemap import basemap
from cmap import cmap
from dataset import Dataset
from edit_nc_var import edit_nc_var
from get_time import get_time
from get_vnames import get_vnames
from initialize import initialize
from levels import levels
from O2_saturation import O2_saturation, DOsat_mol, DOsat_g, DOp2mol, DOp2g
from plot_cost import plot_cost
from pickup import pickup, pickup_line
from parsers import tide_parser, line_parser, date_parser
from read_mp import read_mp
from run_time import run_time
from savefig import savefig
from unit2cff import unit2cff
GMT = 'seconds since 1968-05-23 00:00:00 GMT'
JST = 'seconds since 1968-05-23 09:00:00 GMT'
GMT_days = 'days since 1968-05-23 00:00:00 GMT'
JST_days = 'days since 1968-05-23 09:00:00 GMT'
g2mol_C = 1000.0 / 12.00
g2mol_N = 1000.0 / 14.01
g2mol_P = 1000.0 / 30.97
mol2g_C = 12.00 / 1000.0
mol2g_N = 14.01 / 1000.0
mol2g_P = 30.97 / 1000.0
g2mol_O2 = 44.66 / 1.42903
mol2g_O2 = 1.42903 / 44.66
|
okadate/romspy
|
romspy/__init__.py
|
Python
|
mit
| 1,042
|
#!/usr/bin/env python
from math import sqrt, cos, pi, sin
from .trajectory import Trajectory
class LemniscateTrajectory(Trajectory):
    def __init__(self, radius, period):
        Trajectory.__init__(self)
        self.radius = radius
        self.period = period
def get_position_at(self, t):
super(LemniscateTrajectory, self).get_position_at(t)
self.position.x = 2 * cos(2 * pi* t / self.period) / (sin(2 * pi * t / self.period) ** 2 + 1)
self.position.y = 2 * sin(2 * pi* t / self.period) * cos(2 * pi* t / self.period) / (sin(2 * pi * t / self.period) ** 2 + 1)
return self.position
def get_name(self):
return str(LemniscateTrajectory.__name__).replace('Trajectory', '').lower()
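# Hedged usage sketch (not part of the original module): sampling the curve at
# a few times. Assumes the base Trajectory class provides a `position` object
# with `x` and `y` attributes, as implied by get_position_at() above.
#
# trajectory = LemniscateTrajectory(radius=2.0, period=8.0)
# for t in range(8):
#     point = trajectory.get_position_at(t)
#     print(t, point.x, point.y)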
|
bit0001/trajectory_tracking
|
src/trajectory/lemniscate_trajectory.py
|
Python
|
mit
| 746
|
from django.shortcuts import render_to_response
from django.template import RequestContext
from django.http import HttpResponseRedirect, Http404
from django.core.urlresolvers import reverse
from tweets.forms import TweetForm
import cass
NUM_PER_PAGE = 40
def timeline(request):
form = TweetForm(request.POST or None)
if request.user['is_authenticated'] and form.is_valid():
cass.save_tweet(request.session['username'], form.cleaned_data['body'])
return HttpResponseRedirect(reverse('timeline'))
start = request.GET.get('start')
if request.user['is_authenticated']:
tweets,next = cass.get_timeline(request.session['username'],
start=start, limit=NUM_PER_PAGE)
else:
tweets,next = cass.get_timeline(cass.PUBLIC_TIMELINE_KEY, start=start,
limit=NUM_PER_PAGE)
context = {
'form': form,
'tweets': tweets,
'next': next,
}
return render_to_response('tweets/timeline.html', context,
context_instance=RequestContext(request))
def publicline(request):
start = request.GET.get('start')
tweets,next = cass.get_timeline(cass.PUBLIC_TIMELINE_KEY, start=start,
limit=NUM_PER_PAGE)
context = {
'tweets': tweets,
'next': next,
}
return render_to_response('tweets/publicline.html', context,
context_instance=RequestContext(request))
def userline(request, username=None):
try:
user = cass.get_user_by_username(username)
except cass.DatabaseError:
raise Http404
# Query for the friend ids
friend_usernames = []
if request.user['is_authenticated']:
friend_usernames = cass.get_friend_usernames(username) + [username]
# Add a property on the user to indicate whether the currently logged-in
# user is friends with the user
user['friend'] = username in friend_usernames
start = request.GET.get('start')
tweets,next = cass.get_userline(username, start=start, limit=NUM_PER_PAGE)
context = {
'user': user,
'username': username,
'tweets': tweets,
'next': next,
'friend_usernames': friend_usernames,
}
return render_to_response('tweets/userline.html', context,
context_instance=RequestContext(request))
|
adhish20/TwitterWithCassandra
|
tweets/views.py
|
Python
|
mit
| 2,293
|
# -------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
PAGE_SIZE = 100
KEY_UUID = "test_key_a6af8952-54a6-11e9-b600-2816a84d0309"
LABEL_UUID = "1d7b2b28-549e-11e9-b51c-2816a84d0309"
KEY = "PYTHON_UNIT_" + KEY_UUID
LABEL = "test_label1_" + LABEL_UUID
LABEL_RESERVED_CHARS = "test_label2_*, \\" + LABEL_UUID # contains reserved chars *,\
TEST_CONTENT_TYPE = "test content type"
TEST_VALUE = "test value"
|
Azure/azure-sdk-for-python
|
sdk/appconfiguration/azure-appconfiguration/tests/consts.py
|
Python
|
mit
| 658
|
import datetime
from sqlalchemy.inspection import inspect
def to_dict(model):
d = {'entity': model.__tablename__}
for column in model.__table__.columns:
d[column.name] = getattr(model, column.name)
for relation in inspect(model.__class__).relationships:
try:
d[relation.key] = to_dict(getattr(model, relation.key))
except AttributeError:
d[relation.key] = map(to_dict, getattr(model, relation.key))
return d
def to_model(data, module):
model = getattr(module, data['entity'])()
for key, value in data.iteritems():
if key == 'entity':
continue
elif type(value) in [bool, str, unicode, int, float, datetime.date]:
setattr(model, key, value)
else:
try:
value.get('entity')
setattr(model, key, to_model(value, module))
except AttributeError:
setattr(model, key, map(lambda m: to_model(m, module), value))
return model
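# Hedged usage sketch (not part of the original module): round-tripping a
# SQLAlchemy model through its dictionary form. `planner.model`, the `Task`
# class, and the session query are hypothetical names used only for
# illustration.
#
# from planner import model
# task = session.query(model.Task).first()
# data = to_dict(task)            # e.g. {'entity': 'task', 'id': 1, ...}
# clone = to_model(data, model)   # rebuilds a model.Task from the dictionary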
|
saltpy/planner
|
planner/model/translate.py
|
Python
|
mit
| 1,017
|
# Copyright (c) 2011-2013 Simplistix Ltd
# See license.txt for license details.
from nose.plugins.skip import SkipTest
try:
from testfixtures.components import TestComponents
except ImportError: # pragma: no cover
raise SkipTest('zope.component is not available')
from mock import Mock, call
from testfixtures import Replacer, compare
from testfixtures.compat import PY3
from unittest import TestCase
from .compat import catch_warnings
class ComponentsTests(TestCase):
def test_atexit(self):
m = Mock()
with Replacer() as r:
r.replace('atexit.register', m.register)
c = TestComponents()
expected = [call.register(c.atexit)]
compare(expected, m.mock_calls)
with catch_warnings(record=True) as w:
c.atexit()
            self.assertEqual(len(w), 1)
compare(str(w[0].message), ( # pragma: no branch
"TestComponents instances not uninstalled by shutdown!"
))
c.uninstall()
compare(expected, m.mock_calls)
# check re-running has no ill effects
c.atexit()
compare(expected, m.mock_calls)
|
beblount/Steer-Clear-Backend-Web
|
env/Lib/site-packages/testfixtures/tests/test_components.py
|
Python
|
mit
| 1,275
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class StartTaskInformation(Model):
"""Information about a start task running on a compute node.
:param state: The state of the start task on the compute node. running -
The start task is currently running. completed - The start task has exited
    with exit code 0, or the start task has failed and the retry limit has been
    reached, or the start task process did not run due to scheduling errors.
Possible values include: 'running', 'completed'
:type state: str or :class:`StartTaskState
<azure.batch.models.StartTaskState>`
:param start_time: The time at which the start task started running. This
value is reset every time the task is restarted or retried (that is, this
is the most recent time at which the start task started running).
:type start_time: datetime
:param end_time: The time at which the start task stopped running. This is
the end time of the most recent run of the start task, if that run has
completed (even if that run failed and a retry is pending). This element
is not present if the start task is currently running.
:type end_time: datetime
:param exit_code: The exit code of the program specified on the start task
command line. This property is set only if the start task is in the
completed state. In general, the exit code for a process reflects the
specific convention implemented by the application developer for that
process. If you use the exit code value to make decisions in your code, be
sure that you know the exit code convention used by the application
process. However, if the Batch service terminates the start task (due to
timeout, or user termination via the API) you may see an operating
system-defined exit code.
:type exit_code: int
:param scheduling_error: Any error encountered scheduling the start task.
:type scheduling_error: :class:`TaskSchedulingError
<azure.batch.models.TaskSchedulingError>`
:param retry_count: The number of times the task has been retried by the
Batch service. The task is retried if it exits with a nonzero exit code,
up to the specified MaxTaskRetryCount.
:type retry_count: int
:param last_retry_time: The most recent time at which a retry of the task
started running. This element is present only if the task was retried
(i.e. retryCount is nonzero). If present, this is typically the same as
startTime, but may be different if the task has been restarted for reasons
other than retry; for example, if the compute node was rebooted during a
retry, then the startTime is updated but the lastRetryTime is not.
:type last_retry_time: datetime
"""
_validation = {
'state': {'required': True},
'start_time': {'required': True},
'retry_count': {'required': True},
}
_attribute_map = {
'state': {'key': 'state', 'type': 'StartTaskState'},
'start_time': {'key': 'startTime', 'type': 'iso-8601'},
'end_time': {'key': 'endTime', 'type': 'iso-8601'},
'exit_code': {'key': 'exitCode', 'type': 'int'},
'scheduling_error': {'key': 'schedulingError', 'type': 'TaskSchedulingError'},
'retry_count': {'key': 'retryCount', 'type': 'int'},
'last_retry_time': {'key': 'lastRetryTime', 'type': 'iso-8601'},
}
def __init__(self, state, start_time, retry_count, end_time=None, exit_code=None, scheduling_error=None, last_retry_time=None):
self.state = state
self.start_time = start_time
self.end_time = end_time
self.exit_code = exit_code
self.scheduling_error = scheduling_error
self.retry_count = retry_count
self.last_retry_time = last_retry_time
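# Hedged usage sketch (not part of the generated model): constructing the model
# directly with its three required parameters; the values are illustrative.
if __name__ == '__main__':
    import datetime
    info = StartTaskInformation(
        state='running',
        start_time=datetime.datetime(2017, 4, 1, 12, 0, 0),
        retry_count=0,
    )
    print(info.state, info.start_time, info.retry_count)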
|
rjschwei/azure-sdk-for-python
|
azure-batch/azure/batch/models/start_task_information.py
|
Python
|
mit
| 4,264
|
"""
Builds out an argument parser based on function signatures in various modules.
Each module is mapped to a sub-command name space, and each function of that
module is mapped to an operation of that sub command. Parameters to that
function are made into command line arguments. Invocation looks like:
command sub-command operation REQUIRED_ARG [...] [--OPTIONAL-ARG VAL]
"""
import argparse
import inspect
import logging
import sys
def _coerce_bool(some_str):
"""Stupid little method to try to assist casting command line args to
booleans
"""
if some_str.lower().strip() in ['n', 'no', 'off', 'f', 'false', '0']:
return False
return bool(some_str)
class Newman(object):
'''Container class to hold a bunch of customized (sub)parsers
'''
# TODO: Move this to some kind of optional plugin? Don't want to require
# Raven for folks who aren't using sentry.
def register_sentry_handler(self, sentry_dns, log_level=logging.ERROR):
from raven.handlers.logging import SentryHandler
sentry_handler = SentryHandler(sentry_dns)
sentry_handler.setLevel(log_level)
self.logger.addHandler(sentry_handler)
def __init__(self, description="A parser nobody bothered to customize",
sentry_dns=None, top_level_args=None):
"""Build an argument parser from module definitions and run the
function we were asked for
`top_level_args` should be a dictionary of argument name: default value
that will be handled by the function that instantiates Newman instead
of the operation that is ultimately called.
Use case: global config options/paths
"""
self.logger = logging.getLogger()
self.parser = argparse.ArgumentParser(
description=description,
add_help=True,
formatter_class=argparse.ArgumentDefaultsHelpFormatter
)
self.sub_parsers = self.parser.add_subparsers(
title='task modules',
description='The following modules were loaded as task namespaces',
dest='module'
)
if sentry_dns:
self.register_sentry_handler(sentry_dns)
self.default_top_level_args = top_level_args or {}
        for targ, default in self.default_top_level_args.items():
arg_type = type(default)
if isinstance(default, bool):
arg_type = _coerce_bool
self.parser.add_argument('--' + targ.replace('_', '-'),
type=arg_type, default=default)
self._parsed_args = None
@property
def func(self):
if not self._parsed_args:
self.parse_args()
return self._parsed_args['func']
@property
def real_args(self):
if not self._parsed_args:
self.parse_args()
return self._parsed_args['real_args']
@property
def top_level_args(self):
if not self._parsed_args:
self.parse_args()
return self._parsed_args['top_level_args']
def parse_args(self):
"""Generates a dictionary of parsed arguments.
`func` is the operation to be run.
`top_level_args` is a dict of any arguments that are used in the
        calling process.
        `real_args` are the arguments that the operation will be invoked with.
"""
args = self.parser.parse_args() # oh the possibilities...
func = args.func # this gets plumbed through by load_module
real_args = [] # actual positional args we'll be sending to func
top_level_args = {} # args to be used by caller process, not operation
# yay, even more weird signature hacking. Try to turn the argparse
# arguments we got (if any) back into regular function arguments
fargs, varargs, null, fdefaults = inspect.getargspec(func)
for targ in self.default_top_level_args:
if hasattr(args, targ):
top_level_args[targ] = getattr(args, targ)
for farg in fargs:
if hasattr(args, farg):
# this function cares about this passed in arg
real_args.append(getattr(args, farg))
if varargs:
# this func takes varags
real_args += getattr(args, varargs)
self._parsed_args = {
'func': func,
'top_level_args': top_level_args,
'real_args': real_args
}
def go(self):
"""Call this in your CLI entry point once you've loaded all your tasks
(via load_module()). It will parse any command line args, choose the
correct function to call, and call it with your arguments, then exit.
If the arguments specify an unknown command, the usage help will be
printed and the program will exit with code 1
"""
real_args = self.real_args
func = self.func
exit_code = 2
if func:
try:
exit_code = func(*real_args)
except Exception as e:
self.logger.exception("%s (in loaded task)", e)
raise
sys.exit(exit_code)
def load_module(self, module, sub_command):
"""Load tasks from the given module, and makes them available under the
given subcommand.
Build the argument parser for the collected tasks. The sub-parsers get
attached to the passed in top level parser under the previously
registered sub-commands.
:param str module_name: python style module name - foo.bar.baz
:param str sub_command: the command name to associate with this module
:param top_level: The configured top level command parser
:type top_level: argparse.ArgumentParser
"""
# Add a sub-parser for this sub-command
mod_parser = self.sub_parsers.add_parser(
sub_command,
description=module.__doc__,
help=module.__doc__
)
mod_sub_parsers = mod_parser.add_subparsers(
title='tasks under %s' % sub_command,
help='The following are valid task commands',
dest='cmd'
)
for func_name, func_obj in inspect.getmembers(module,
inspect.isfunction):
# skip if we are looking at a private function
if func_name.startswith('_'):
continue
# TODO: Not sure what to do about this
if (not inspect.getmodule(func_obj).__name__.endswith(
module.__name__)):
# this check tries to avoid functions at the module level that
# were imported and not defined in that module
continue
            # give each function its own sub parser under its parent module
# and try to provide options based on the function signature
func_parser = mod_sub_parsers.add_parser(
func_name,
help=func_obj.__doc__,
formatter_class=argparse.ArgumentDefaultsHelpFormatter
)
func_parser.set_defaults(func=func_obj)
# get the signature of the method we're setting up
args, varargs, _, defaults = inspect.getargspec(func_obj)
if varargs:
# used if a function accepts *args
func_parser.add_argument(varargs, nargs='*')
if defaults:
# defaults arrives as a tuple of argument defaults, but it's
# indexed from the furthest right argument. So it's possible
# you may get ['arg1', 'arg2'] as the args and (10,) as the
# defaults, where 10 is the default value for arg2. Confusing
# and weird, yes.
defaults = list(defaults)
defaults.reverse()
# now for each argument we found, go backwards (see above for why)
positionals = []
for cnt, arg in enumerate(reversed(args)):
if defaults and cnt < len(defaults):
# we're basically going backwards, but the arg parser
# doesn't care so this works. The signature made this
# optional, so try to make an educated guess as to the type
# of variable
kwargs = {
'help': 'taken from signature',
'default': defaults[cnt],
}
if isinstance(defaults[cnt], bool):
kwargs['type'] = _coerce_bool
elif defaults[cnt] is None:
pass
else:
kwargs['type'] = type(defaults[cnt])
func_parser.add_argument("--%s" % arg.replace("_", "-"),
**kwargs)
else:
# this is a positional arg, that we know pretty much
# nothing about
positionals.append(arg)
# Finally reverse the positional args again, so they're in the
# right order
for arg in reversed(positionals):
func_parser.add_argument(arg, help='taken from signature')
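# Hedged usage sketch (not part of the original module): a minimal entry point
# wiring one task module into the CLI. `mytasks.deploy` is a hypothetical
# module used only for illustration.
#
# import mytasks.deploy
#
# def main():
#     newman = Newman(description="Example task runner",
#                     top_level_args={'config_path': '/etc/example.conf'})
#     newman.load_module(mytasks.deploy, 'deploy')
#     newman.go()   # parse argv, run the selected task, then sys.exit()
#
# if __name__ == '__main__':
#     main()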
|
saltmine/newman-cli
|
newman/newman.py
|
Python
|
mit
| 9,343
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# This file is part of rdlm-py released under the MIT license.
# See the LICENSE file for more information.
import argparse
import sys
from rdlmpy import RDLMClient
from urlparse import urlparse
parser = argparse.ArgumentParser(description='Release a lock')
parser.add_argument("lock_url", type=str, help="Lock Url (returned by lock-acquire.py)")
args = parser.parse_args()
url_parsed = urlparse(args.lock_url)
netloc = url_parsed.netloc
netloc_splitted = netloc.split(":")
hostname = netloc_splitted[0]
if len(netloc_splitted) == 1:
port = 80
else:
port = int(netloc_splitted[1])
client = RDLMClient(server=hostname, port=port)
return_code = 3
if client.lock_release(args.lock_url):
return_code = 0
else:
sys.stderr.write("Can't release the lock\n")
return_code = 1
sys.exit(return_code)
|
thefab/rdlm-py
|
lock-release.py
|
Python
|
mit
| 862
|
# -*- coding: utf-8 -*-
"""
irc.server
This server has basic support for:
* Connecting
* Channels
* Nicknames
* Public/private messages
It is MISSING support for notably:
* Server linking
* Modes (user and channel)
* Proper error reporting
* Basically everything else
It is mostly useful as a testing tool or perhaps for building something like a
private proxy on. Do NOT use it in any kind of production code or anything that
will ever be connected to by the public.
"""
#
# Very simple hacky ugly IRC server.
#
# Todo:
# - Encode format for each message and reply with
# events.codes['needmoreparams']
# - starting server when already started doesn't work properly. PID
# file is not changed, no error message is displayed.
# - Delete channel if last user leaves.
# - [ERROR] <socket.error instance at 0x7f9f203dfb90>
# (better error msg required)
# - Empty channels are left behind
# - No Op assigned when new channel is created.
# - User can /join multiple times (doesn't add more to channel,
# does say 'joined')
# - PING timeouts
# - Allow all numerical commands.
# - Users can send commands to channels they are not in (PART)
# Not Todo (Won't be supported)
# - Server linking.
import argparse
import errno
import logging
import socket
import select
import re
import socketserver
import typing
import jaraco.logging
from jaraco.stream import buffer
import irc.client
from . import events
log = logging.getLogger(__name__)
class IRCError(Exception):
"""
Exception thrown by IRC command handlers to notify client of a
server/client error.
"""
def __init__(self, code, value):
self.code = code
self.value = value
def __str__(self):
return repr(self.value)
@classmethod
def from_name(cls, name, value):
return cls(events.codes[name], value)
class IRCChannel:
"""
An IRC channel.
"""
def __init__(self, name, topic='No topic'):
self.name = name
self.topic_by = 'Unknown'
self.topic = topic
self.clients = set()
class IRCClient(socketserver.BaseRequestHandler):
"""
IRC client connect and command handling. Client connection is handled by
the ``handle`` method which sets up a two-way communication
with the client.
It then handles commands sent by the client by dispatching them to the
``handle_`` methods.
"""
class Disconnect(BaseException):
pass
def __init__(self, request, client_address, server):
self.user = None
self.host = client_address # Client's hostname / ip.
self.realname = None # Client's real name
self.nick = None # Client's currently registered nickname
self.send_queue = [] # Messages to send to client (strings)
self.channels = {} # Channels the client is in
super().__init__(request, client_address, server)
def handle(self):
log.info('Client connected: %s', self.client_ident())
self.buffer = buffer.LineBuffer()
try:
while True:
self._handle_one()
except self.Disconnect:
self.request.close()
def _handle_one(self):
"""
Handle one read/write cycle.
"""
ready_to_read, ready_to_write, in_error = select.select(
[self.request], [self.request], [self.request], 0.1
)
if in_error:
raise self.Disconnect()
# Write any commands to the client
while self.send_queue and ready_to_write:
msg = self.send_queue.pop(0)
self._send(msg)
# See if the client has any commands for us.
if ready_to_read:
self._handle_incoming()
def _handle_incoming(self):
try:
data = self.request.recv(1024)
except Exception:
raise self.Disconnect()
if not data:
raise self.Disconnect()
self.buffer.feed(data)
for line in self.buffer:
line = line.decode('utf-8')
self._handle_line(line)
def _handle_line(self, line):
response = None
try:
log.debug('from %s: %s' % (self.client_ident(), line))
command, sep, params = line.partition(' ')
handler = getattr(self, 'handle_%s' % command.lower(), None)
if not handler:
_tmpl = 'No handler for command: %s. Full line: %s'
log.info(_tmpl % (command, line))
raise IRCError.from_name(
'unknowncommand', '%s :Unknown command' % command
)
response = handler(params)
except AttributeError as e:
log.error(str(e))
raise
except IRCError as e:
response = ':%s %s %s' % (self.server.servername, e.code, e.value)
log.warning(response)
except Exception as e:
response = ':%s ERROR %r' % (self.server.servername, e)
log.error(response)
raise
if response:
self._send(response)
def _send(self, msg):
log.debug('to %s: %s', self.client_ident(), msg)
try:
self.request.send(msg.encode('utf-8') + b'\r\n')
except socket.error as e:
if e.errno == errno.EPIPE:
raise self.Disconnect()
else:
raise
def handle_nick(self, params):
"""
Handle the initial setting of the user's nickname and nick changes.
"""
nick = params
# Valid nickname?
if re.search(r'[^a-zA-Z0-9\-\[\]\'`^{}_]', nick):
raise IRCError.from_name('erroneusnickname', ':%s' % nick)
if self.server.clients.get(nick, None) == self:
# Already registered to user
return
if nick in self.server.clients:
# Someone else is using the nick
raise IRCError.from_name('nicknameinuse', 'NICK :%s' % (nick))
if not self.nick:
# New connection and nick is available; register and send welcome
# and MOTD.
self.nick = nick
self.server.clients[nick] = self
msg = f"Welcome to {__name__} v{irc._get_version()}."
response = ':%s %s %s :%s' % (
self.server.servername,
events.codes['welcome'],
self.nick,
msg,
)
self.send_queue.append(response)
response = ':%s 376 %s :End of MOTD command.' % (
self.server.servername,
self.nick,
)
self.send_queue.append(response)
return
# Nick is available. Change the nick.
message = ':%s NICK :%s' % (self.client_ident(), nick)
self.server.clients.pop(self.nick)
self.nick = nick
self.server.clients[self.nick] = self
# Send a notification of the nick change to all the clients in the
# channels the client is in.
for channel in self.channels.values():
self._send_to_others(message, channel)
# Send a notification of the nick change to the client itself
return message
def handle_user(self, params):
"""
Handle the USER command which identifies the user to the server.
"""
params = params.split(' ', 3)
if len(params) != 4:
raise IRCError.from_name('needmoreparams', 'USER :Not enough parameters')
user, mode, unused, realname = params
self.user = user
self.mode = mode
self.realname = realname
return ''
def handle_ping(self, params):
"""
Handle client PING requests to keep the connection alive.
"""
response = ':{self.server.servername} PONG :{self.server.servername}'
return response.format(**locals())
def handle_join(self, params):
"""
Handle the JOINing of a user to a channel. Valid channel names start
with a # and consist of a-z, A-Z, 0-9 and/or '_'.
"""
channel_names = params.split(' ', 1)[0] # Ignore keys
for channel_name in channel_names.split(','):
r_channel_name = channel_name.strip()
# Valid channel name?
if not re.match('^#([a-zA-Z0-9_])+$', r_channel_name):
raise IRCError.from_name(
'nosuchchannel', '%s :No such channel' % r_channel_name
)
# Add user to the channel (create new channel if not exists)
channel = self.server.channels.setdefault(
r_channel_name, IRCChannel(r_channel_name)
)
channel.clients.add(self)
# Add channel to user's channel list
self.channels[channel.name] = channel
# Send the topic
response_join = ':%s TOPIC %s :%s' % (
channel.topic_by,
channel.name,
channel.topic,
)
self.send_queue.append(response_join)
# Send join message to everybody in the channel, including yourself
# and send user list of the channel back to the user.
response_join = ':%s JOIN :%s' % (self.client_ident(), r_channel_name)
for client in channel.clients:
client.send_queue.append(response_join)
nicks = [client.nick for client in channel.clients]
_vals = (self.server.servername, self.nick, channel.name, ' '.join(nicks))
response_userlist = ':%s 353 %s = %s :%s' % _vals
self.send_queue.append(response_userlist)
_vals = self.server.servername, self.nick, channel.name
response = ':%s 366 %s %s :End of /NAMES list' % _vals
self.send_queue.append(response)
def handle_privmsg(self, params):
"""
Handle sending a private message to a user or channel.
"""
self._send_msg('PRIVMSG', params)
def handle_notice(self, params):
"""
Handle sending a notice to a user or channel.
"""
self._send_msg('NOTICE', params)
def _send_msg(self, cmd, params):
"""
A generic message handler (e.g. PRIVMSG and NOTICE)
"""
target, sep, msg = params.partition(' ')
if not msg:
raise IRCError.from_name('needmoreparams', cmd + ' :Not enough parameters')
message = ':%s %s %s %s' % (self.client_ident(), cmd, target, msg)
if target.startswith('#') or target.startswith('$'):
# Message to channel. Check if the channel exists.
channel = self.server.channels.get(target)
if not channel:
raise IRCError.from_name('nosuchnick', cmd + ' :%s' % target)
if channel.name not in self.channels:
# The user isn't in the channel.
raise IRCError.from_name(
'cannotsendtochan', '%s :Cannot send to channel' % channel.name
)
self._send_to_others(message, channel)
else:
# Message to user
client = self.server.clients.get(target, None)
if not client:
raise IRCError.from_name('nosuchnick', cmd + ' :%s' % target)
client.send_queue.append(message)
def _send_to_others(self, message, channel):
"""
Send the message to all clients in the specified channel except for
self.
"""
other_clients = [client for client in channel.clients if not client == self]
for client in other_clients:
client.send_queue.append(message)
def handle_topic(self, params):
"""
Handle a topic command.
"""
channel_name, sep, topic = params.partition(' ')
channel = self.server.channels.get(channel_name)
if not channel:
raise IRCError.from_name('nosuchnick', 'PRIVMSG :%s' % channel_name)
if channel.name not in self.channels:
# The user isn't in the channel.
raise IRCError.from_name(
'cannotsendtochan', '%s :Cannot send to channel' % channel.name
)
if topic:
channel.topic = topic.lstrip(':')
channel.topic_by = self.nick
message = ':%s TOPIC %s :%s' % (
self.client_ident(),
channel_name,
channel.topic,
)
return message
def handle_part(self, params):
"""
Handle a client parting from channel(s).
"""
for pchannel in params.split(','):
if pchannel.strip() in self.server.channels:
# Send message to all clients in all channels user is in, and
# remove the user from the channels.
channel = self.server.channels.get(pchannel.strip())
response = ':%s PART :%s' % (self.client_ident(), pchannel)
if channel:
for client in channel.clients:
client.send_queue.append(response)
channel.clients.remove(self)
self.channels.pop(pchannel)
else:
_vars = self.server.servername, pchannel, pchannel
response = ':%s 403 %s :%s' % _vars
self.send_queue.append(response)
def handle_quit(self, params):
"""
Handle the client breaking off the connection with a QUIT command.
"""
response = ':%s QUIT :%s' % (self.client_ident(), params.lstrip(':'))
# Send quit message to all clients in all channels user is in, and
# remove the user from the channels.
for channel in self.channels.values():
for client in channel.clients:
client.send_queue.append(response)
channel.clients.remove(self)
def handle_dump(self, params):
"""
Dump internal server information for debugging purposes.
"""
print("Clients:", self.server.clients)
for client in self.server.clients.values():
print(" ", client)
for channel in client.channels.values():
print(" ", channel.name)
print("Channels:", self.server.channels)
for channel in self.server.channels.values():
print(" ", channel.name, channel)
for client in channel.clients:
print(" ", client.nick, client)
def handle_ison(self, params):
response = ':%s 303 %s :' % (self.server.servername, self.client_ident().nick)
if len(params) == 0 or params.isspace():
response = ':%s 461 %s ISON :Not enough parameters' % (
self.server.servername,
self.client_ident().nick,
)
return response
nickOnline = []
for nick in params.split(" "):
if nick in self.server.clients:
nickOnline.append(nick)
response += ' '.join(nickOnline)
return response
def client_ident(self):
"""
Return the client identifier as included in many command replies.
"""
return irc.client.NickMask.from_params(
self.nick, self.user, self.server.servername
)
def finish(self):
"""
        The client connection is finished. Do some cleanup to ensure that the
client doesn't linger around in any channel or the client list, in case
the client didn't properly close the connection with PART and QUIT.
"""
log.info('Client disconnected: %s', self.client_ident())
response = ':%s QUIT :EOF from client' % self.client_ident()
for channel in self.channels.values():
if self in channel.clients:
# Client is gone without properly QUITing or PARTing this
# channel.
for client in channel.clients:
client.send_queue.append(response)
channel.clients.remove(self)
if self.nick:
self.server.clients.pop(self.nick)
log.info('Connection finished: %s', self.client_ident())
def __repr__(self):
"""
Return a user-readable description of the client
"""
return '<%s %s!%s@%s (%s)>' % (
self.__class__.__name__,
self.nick,
self.user,
self.host[0],
self.realname,
)
class IRCServer(socketserver.ThreadingMixIn, socketserver.TCPServer):
daemon_threads = True
allow_reuse_address = True
channels: typing.Dict[str, IRCChannel] = {}
"Existing channels by channel name"
clients: typing.Dict[str, IRCClient] = {}
"Connected clients by nick name"
def __init__(self, *args, **kwargs):
self.servername = 'localhost'
self.channels = {}
self.clients = {}
super().__init__(*args, **kwargs)
def get_args():
parser = argparse.ArgumentParser()
parser.add_argument(
"-a",
"--address",
dest="listen_address",
default='127.0.0.1',
help="IP on which to listen",
)
parser.add_argument(
"-p",
"--port",
dest="listen_port",
default=6667,
type=int,
help="Port on which to listen",
)
jaraco.logging.add_arguments(parser)
return parser.parse_args()
def main():
options = get_args()
jaraco.logging.setup(options)
log.info("Starting irc.server")
try:
bind_address = options.listen_address, options.listen_port
ircserver = IRCServer(bind_address, IRCClient)
_tmpl = 'Listening on {listen_address}:{listen_port}'
log.info(_tmpl.format(**vars(options)))
ircserver.serve_forever()
except socket.error as e:
log.error(repr(e))
raise SystemExit(-2)
if __name__ == "__main__":
main()
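# Hedged usage sketch (not part of the original module): start the toy server
# and poke it with a raw socket. Both snippets are illustrative; per the module
# docstring, never expose this server to the public.
#
#   $ python -m irc.server --address 127.0.0.1 --port 6667
#
# import socket
# conn = socket.create_connection(('127.0.0.1', 6667))
# conn.sendall(b'NICK tester\r\nUSER tester 0 * :Test User\r\n')
# print(conn.recv(4096).decode('utf-8'))   # welcome (001) and end-of-MOTD (376)
# conn.close()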
|
jaraco/irc
|
irc/server.py
|
Python
|
mit
| 17,996
|
#!/usr/bin/python
import signal
import subprocess
import os
import sys
class Program():
def __init__(self):
self.pid = ''
def call(self):
argv = sys.argv[1]
process = subprocess.Popen(argv,shell=True)
self.pid = process.pid
process.wait()
sys.exit(process.returncode)
def kill(self):
os.kill(self.pid, signal.SIGTERM)
class Watchdog(Exception):
def __init__(self, time=5):
self.time = time
def __enter__(self):
signal.signal(signal.SIGALRM, self.handler)
signal.alarm(self.time)
def __exit__(self, type, value, traceback):
signal.alarm(0)
def handler(self, signum, frame):
raise self
program = Program()
try:
with Watchdog(420):
program.call()
except Watchdog:
print "process took too long to complete - killed!"
program.kill()
sys.exit(1)
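# Hedged usage sketch (not part of the original script): the Watchdog context
# manager on its own, giving any slow call a SIGALRM-based timeout (Unix only).
#
# import time
# try:
#     with Watchdog(2):
#         time.sleep(10)   # stand-in for long-running work
# except Watchdog:
#     print("timed out after 2 seconds")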
|
broonie89/loadify-1
|
lib/ohdevtools/remote_wrapper.py
|
Python
|
mit
| 930
|
"""
Revision ID: 0109_rem_old_noti_status
Revises: 0108_change_logo_not_nullable
Create Date: 2017-07-10 14:25:15.712055
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
revision = '0109_rem_old_noti_status'
down_revision = '0108_change_logo_not_nullable'
def upgrade():
op.drop_column('notification_history', 'status')
op.drop_column('notifications', 'status')
def downgrade():
op.add_column(
'notifications',
sa.Column(
'status',
postgresql.ENUM(
'created', 'sending', 'delivered', 'pending', 'failed', 'technical-failure',
'temporary-failure', 'permanent-failure', 'sent', name='notify_status_type'
),
autoincrement=False,
nullable=True
)
)
op.add_column(
'notification_history',
sa.Column(
'status',
postgresql.ENUM(
'created', 'sending', 'delivered', 'pending', 'failed', 'technical-failure',
'temporary-failure', 'permanent-failure', 'sent', name='notify_status_type'
),
autoincrement=False,
nullable=True
)
)
|
alphagov/notifications-api
|
migrations/versions/0109_rem_old_noti_status.py
|
Python
|
mit
| 1,224
|
# Palindrome Checker
from Deque import Deque
def palindrome_checker(alist):
p = Deque()
# deal with a null list
if len(alist) == 0:
return None
# add each element into the queue
for item in alist:
p.addRear(item)
    # initialize the flag
    flag = True
    # a sequence with a single element is trivially a palindrome, so no
    # special case is needed; the comparison loop below simply never runs
# compare the head and the rear
while len(p.items) > 1 and flag is True:
if p.removeFront() == p.removeRear():
flag = True
else:
flag = False
if flag is True:
return True
else:
return False
print(palindrome_checker('lsdkjfskf'))
print(palindrome_checker('radar'))
print(palindrome_checker('aa'))
print(palindrome_checker('a'))
print(palindrome_checker('aba'))
|
rush2catch/algorithms-leetcode
|
Basic Data Structures/queue/PalindromeChecker.py
|
Python
|
mit
| 839
|
"""
GhProject.py - (C) Copyright - 2017
This software is copyrighted to contributors listed in CONTRIBUTIONS.md.
SPDX-License-Identifier: MIT
Author(s) of this file:
J. Harding
GitHub project model.
A GitHub project has two relationships to commits: a many-to-many and a foreign key.
The intention of the dual relationship is unclear.
"""
from django.db import models as m
class GhProject(m.Model):
url = m.CharField(max_length=255, blank=True, null=True)
owner = m.ForeignKey('gh_users.GhUser', m.DO_NOTHING, blank=True, null=True)
name = m.CharField(max_length=255)
description = m.CharField(max_length=255, blank=True, null=True)
language = m.CharField(max_length=255, blank=True, null=True)
created_at = m.DateTimeField()
ext_ref_id = m.CharField(max_length=24)
forked_from = m.ForeignKey(
'self', m.DO_NOTHING, db_column='forked_from', blank=True, null=True
)
deleted = m.IntegerField()
# M2M fields added
commits_m2m = m.ManyToManyField(
'gh_commits.GhCommit',
through='gh_projects.GhProjectCommit',
)
maintainers = m.ManyToManyField(
'gh_users.GhUser',
through='gh_users.GhProjectMember',
related_name='maintain_repos'
)
watchers = m.ManyToManyField(
'gh_users.GhUser',
through='gh_users.GhWatcher',
related_name='watched_repos'
)
forks = m.ManyToManyField(
'gh_projects.GhProject',
through='gh_projects.GhFork',
related_name='parents'
)
def __str__(self):
return self.name
def is_owned_by_org(self):
return self.owner.is_org()
class Meta:
db_table = 'projects'
unique_together = (('name', 'owner'),)
verbose_name="GitHub Project"
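# Hedged usage sketch (not part of the original model): typical queries against
# the relationships declared above (requires a configured Django environment;
# field values are illustrative).
#
# python_projects = GhProject.objects.filter(language='Python', deleted=0)
# for project in python_projects[:10]:
#     print(project, project.watchers.count(), project.forks.count())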
|
jakeharding/repo-health
|
repo_health/gh_projects/models/GhProject.py
|
Python
|
mit
| 1,778
|
import os
import dj_database_url
from .base import *
INSTALLED_APPS += (
'djangosecure',
)
PRODUCTION_MIDDLEWARE_CLASSES = (
'djangosecure.middleware.SecurityMiddleware',
)
MIDDLEWARE_CLASSES = PRODUCTION_MIDDLEWARE_CLASSES + MIDDLEWARE_CLASSES
DATABASES = {'default': dj_database_url.config()}
SECRET_KEY = os.environ.get('SECRET_KEY')
DEBUG = False
TEMPLATE_DEBUG = DEBUG
ALLOWED_HOSTS = [DOMAIN]
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/{{ docs_version }}/howto/static-files/
STATIC_ROOT = os.path.normpath(os.path.join(SITE_ROOT, 'assets'))
# django-secure
SESSION_COOKIE_SECURE = True
SECURE_SSL_REDIRECT = True
SECURE_HSTS_SECONDS = 15
SECURE_HSTS_INCLUDE_SUBDOMAINS = True
SECURE_FRAME_DENY = True
SECURE_CONTENT_TYPE_NOSNIFF = True
SECURE_BROWSER_XSS_FILTER = True
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
|
jpadilla/feedleap
|
feedleap/settings/production.py
|
Python
|
mit
| 893
|
import os
from click import UsageError
from click.testing import CliRunner
import numpy as np
import pytest
import rasterio
from rasterio.enums import Compression
from rio_color.scripts.cli import color, atmos, check_jobs
def equal(r1, r2):
with rasterio.open(r1) as src1:
with rasterio.open(r2) as src2:
return np.array_equal(src1.read(), src2.read())
def test_atmos_cli(tmpdir):
output = str(tmpdir.join("atmosj1.tif"))
runner = CliRunner()
result = runner.invoke(
atmos,
["-a", "0.03", "-b", "0.5", "-c", "15", "-j", "1", "tests/rgb8.tif", output],
)
assert result.exit_code == 0
assert os.path.exists(output)
output2 = str(tmpdir.join("atmosj2.tif"))
runner = CliRunner()
result = runner.invoke(
atmos,
["-a", "0.03", "-b", "0.5", "-c", "15", "-j", "2", "tests/rgb8.tif", output2],
)
assert result.exit_code == 0
assert os.path.exists(output2)
assert equal(output, output2)
def test_color_cli(tmpdir):
output = str(tmpdir.join("colorj1.tif"))
runner = CliRunner()
result = runner.invoke(
color,
[
"-d",
"uint8",
"-j",
"1",
"tests/rgb8.tif",
output,
"gamma 3 1.85",
"gamma 1,2 1.95",
"sigmoidal 1,2,3 35 0.13",
"saturation 1.15",
],
)
assert result.exit_code == 0
assert os.path.exists(output)
output2 = str(tmpdir.join("colorj2.tif"))
result = runner.invoke(
color,
[
"-d",
"uint8",
"-j",
"2",
"tests/rgb8.tif",
output2,
"gamma 3 1.85",
"gamma 1,2 1.95",
"sigmoidal 1,2,3 35 0.13",
"saturation 1.15",
],
)
assert result.exit_code == 0
assert os.path.exists(output2)
assert equal(output, output2)
def test_bad_op(tmpdir):
output = str(tmpdir.join("noop.tif"))
runner = CliRunner()
result = runner.invoke(
color, ["-d", "uint8", "-j", "1", "tests/rgb8.tif", output, "foob 115"]
)
assert result.exit_code == 2
assert "foob is not a valid operation" in result.output
assert not os.path.exists(output)
def test_color_jobsn1(tmpdir):
output = str(tmpdir.join("colorj1.tif"))
runner = CliRunner()
result = runner.invoke(
color,
[
"-d",
"uint8",
"-j",
"-1",
"tests/rgb8.tif",
output,
"gamma 1,2,3 1.85 sigmoidal rgb 35 0.13",
],
)
assert result.exit_code == 0
assert os.path.exists(output)
def test_check_jobs():
assert 1 == check_jobs(1)
assert check_jobs(-1) > 0
with pytest.raises(UsageError):
check_jobs(0)
def test_creation_opts(tmpdir):
output = str(tmpdir.join("color_opts.tif"))
runner = CliRunner()
result = runner.invoke(
color,
[
"--co",
"compress=jpeg",
"tests/rgb8.tif",
output,
"gamma 1,2,3 1.85 sigmoidal rgb 35 0.13",
],
)
assert result.exit_code == 0
with rasterio.open(output, "r") as src:
assert src.compression == Compression.jpeg
output = str(tmpdir.join("color_opts.tif"))
runner = CliRunner()
result = runner.invoke(
color, ["--co", "compress=jpeg", "tests/rgb8.tif", output, "gamma 1,2,3 1.85"]
)
assert result.exit_code == 0
with rasterio.open(output, "r") as src:
assert src.compression == Compression.jpeg
output = str(tmpdir.join("atmos_opts.tif"))
runner = CliRunner()
result = runner.invoke(
atmos,
[
"--co",
"compress=jpeg",
"-a",
"0.03",
"-b",
"0.5",
"-c",
"15",
"-j",
"1",
"tests/rgb8.tif",
output,
],
)
assert result.exit_code == 0
with rasterio.open(output, "r") as src:
assert src.compression == Compression.jpeg
def test_color_cli_rgba(tmpdir):
output = str(tmpdir.join("colorj1.tif"))
runner = CliRunner()
result = runner.invoke(
color,
[
"-d",
"uint8",
"-j",
"1",
"tests/rgba8.tif",
output,
"gamma 3 1.85",
"gamma 1,2 1.95",
"sigmoidal 1,2,3 35 0.13",
"saturation 1.15",
],
)
assert result.exit_code == 0
with rasterio.open("tests/rgba8.tif") as src:
with rasterio.open(output) as out:
assert out.profile["count"] == 4
# Alpha band is unaltered
assert np.array_equal(src.read(4), out.read(4))
def test_color_cli_16bit_photointerp(tmpdir):
output = str(tmpdir.join("color16color.tif"))
runner = CliRunner()
result = runner.invoke(
color,
[
"-d",
"uint16",
"-j",
"1",
"tests/rgb16.tif",
output,
"gamma 3 1.85",
"gamma 1,2 1.95",
],
)
assert result.exit_code == 0
with rasterio.open("tests/rgb16.tif") as src:
with rasterio.open(output) as out:
assert out.colorinterp == src.colorinterp
def test_color_empty_operations(tmpdir):
output = str(tmpdir.join("color.tif"))
runner = CliRunner()
result = runner.invoke(color, ["tests/rgb8.tif", output])
assert result.exit_code == 2
assert not os.path.exists(output)
result = runner.invoke(color, ["tests/rgb8.tif", output, ", , ,"])
assert result.exit_code == 2
def test_as_color(tmpdir):
runner = CliRunner()
result = runner.invoke(atmos, ["-a", "0.03", "--as-color", "foo.tif", "bar.tif"])
assert result.exit_code == 0
assert not os.path.exists("bar.tif")
assert (
result.output.strip()
== "rio color foo.tif bar.tif gamma g 0.99, gamma b 0.97, sigmoidal rgb 10.0 0.15"
)
|
mapbox/rio-color
|
tests/test_cli.py
|
Python
|
mit
| 6,115
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
This module implements Trello to JIRA importer.
"""
import os
import argparse
import json
# from io import StringIO
from io import BytesIO
from jira import JIRA
from urllib import request
import res.t2jstrings as strings
import log.mylogging as mylogging
import exc.myexceptions as exceptions
now_testing = False
now_testing_field_file = False
class Trello2Jira(object):
_jira = None # jira connection object
_logger = None # logger
_args = {} # command line arguments
_config = None # user configuration
_username = '' # jira user name
_password = '' # jira user password
_url = '' # jira project url
_project = '' # jira project key
_file_list = [] # json files exported from trello
_field_list = [] # fields to be used to create jira issues
_failed_field_list = [] # failed fields while creating jira issues
def __init__(self, config):
if not config:
raise exceptions.ConfigException(strings.exc_config_read_error)
else:
self._config = config
self._logger = mylogging.get_logger(self.__class__.__name__)
def _parse_args(self):
"""
parse arguments
"""
parser = argparse.ArgumentParser(description=strings.arg_desc)
parser.add_argument(strings.arg_key, type=str, nargs='+', help=strings.arg_help)
self._args = vars(parser.parse_args())
def _parse_file_list(self):
"""
parse file list from command line arguments
"""
self._file_list = self._args[strings.arg_key]
def _read_auth(self):
"""
get user id and pwd
"""
self._username = self._config.get(strings.section_jira, strings.key_jira_username)
self._password = self._config.get(strings.section_jira, strings.key_jira_password)
def _read_url(self):
"""
get jira project url
"""
self._url = self._config.get(strings.section_jira, strings.key_jira_url)
def _read_project(self):
"""
get jira project key
"""
self._project = self._config.get(strings.section_jira, strings.key_jira_project)
def _remove_field(self, in_field_list, in_target_field):
"""
remove the target field from the given list
:param in_field_list:
:param in_target_field:
:return: none
"""
for index, field in enumerate(in_field_list):
if field[strings.order] == in_target_field[strings.order]:
in_field_list.pop(index)
def _add_field_if_not_exist(self, in_field_list, in_target_field):
"""
add the target field into the given list if the field does not exist
:param in_field_list:
:param in_target_field:
:return: none
"""
for index, field in enumerate(in_field_list):
if field[strings.order] == in_target_field[strings.order]:
return
in_field_list.append(in_target_field.copy())
def _create_issues_test(self, in_field_list):
in_field_len = len(in_field_list)
if not self._jira:
self._jira = JIRA(server=self._url, basic_auth=(self._username, self._password))
self._logger.info(strings.info_progress + ': 0 / ' + str(in_field_len))
for index, field in enumerate(in_field_list):
try:
self._logger.info('### _create_issues_test ###')
# # create issue
# issue = self._jira.create_issue(fields=field[strings.jira_field_basic])
#
# if issue:
# # add label
# issue.update(labels=field[strings.jira_field_labels])
#
# # add attachment
# for src_url in field[strings.jira_field_attachs]:
# attachment = BytesIO()
# attachment.write(request.urlopen(src_url).read())
# self._jira.add_attachment(issue=issue, attachment=attachment,
# filename=src_url[src_url.rindex('/'):])
#
# # add comment
# for comment in field[strings.jira_field_comments]:
# self._jira.add_comment(issue, comment)
# else:
# raise exceptions.JiraIssueCreationException(strings.exc_jira_issue_creation_error)
#
# self._remove_field(self._failed_field_list, field)
except Exception as err:
self._add_field_if_not_exist(self._failed_field_list, field)
self._logger.error(strings.info_create_issue_error)
self._logger.error(err)
if not os.path.exists(strings.dir_error):
os.makedirs(strings.dir_error)
fp = open('%s/%d.json' % (strings.dir_error, field[strings.order]), 'w')
json.dump(field, fp, indent=2)
self._logger.info(strings.info_progress + ': ' + str(index + 1) + ' / ' + str(in_field_len))
self._logger.info('=' * 30)
self._logger.info(strings.info_jobs_are_done)
self._logger.info(' * ' + strings.info_jobs_count + ': ' + str(len(self._field_list)))
self._logger.info(' * ' + strings.info_success_count + ': '
+ str(len(self._field_list) - len(self._failed_field_list)))
self._logger.info(' * ' + strings.info_failure_count + ': '
+ str(len(self._failed_field_list)) + ' (' + strings.info_failure_count_help + ')')
self._logger.info('=' * 30)
def _create_issues(self, in_field_list):
"""
create jira issues with extracted fields
issue = jira.create_issue(fields=issue_dict) or issue = jira.issue('JIRA-9')
issue.update(labels=[{'add': 'AAA'}, {'add': 'BBB'}])
jira.add_attachment(issue=issue, attachment=attachment, filename='content.txt')
"""
in_field_len = len(in_field_list)
if not self._jira:
self._jira = JIRA(server=self._url, basic_auth=(self._username, self._password))
self._logger.info(strings.info_progress + ': 0 / ' + str(in_field_len))
for index, field in enumerate(in_field_list):
try:
# create issue
issue = self._jira.create_issue(fields=field[strings.jira_field_basic])
if issue:
# add label
issue.update(labels=field[strings.jira_field_labels])
# add attachment
for src_url in field[strings.jira_field_attachs]:
attachment = BytesIO()
attachment.write(request.urlopen(src_url).read())
self._jira.add_attachment(issue=issue, attachment=attachment, filename=src_url[src_url.rindex('/'):])
# add comment
for comment in field[strings.jira_field_comments]:
self._jira.add_comment(issue, comment)
else:
raise exceptions.JiraIssueCreationException(strings.exc_jira_issue_creation_error)
self._remove_field(self._failed_field_list, field)
except Exception as err:
self._add_field_if_not_exist(self._failed_field_list, field)
self._logger.error(strings.info_create_issue_error)
self._logger.error(err)
if not os.path.exists(strings.dir_error):
os.makedirs(strings.dir_error)
fp = open('%s/%d.json' % (strings.dir_error, field[strings.order]), 'w')
json.dump(field, fp, indent=2)
self._logger.info(strings.info_progress + ': ' + str(index + 1) + ' / ' + str(in_field_len))
self._logger.info('=' * 30)
self._logger.info(strings.info_jobs_are_done)
self._logger.info(' * ' + strings.info_jobs_count + ': ' + str(len(self._field_list)))
self._logger.info(' * ' + strings.info_success_count + ': '
+ str(len(self._field_list) - len(self._failed_field_list)))
self._logger.info(' * ' + strings.info_failure_count + ': '
+ str(len(self._failed_field_list)) + ' (' + strings.info_failure_count_help + ')')
self._logger.info('=' * 30)
def _extract_fields(self):
"""
extract issue fields from exported trello json files
<input - trello.json>
{
"name": "name of the board",
"url": "http://...",
"lists": [...],
"checklists": [...],
"cards": [
{
"name": "name of one of cards",
"desc": "description of one of cards",
"idList": "1234567890",
"idChecklists": ["abcdefgh", "...", ...],
"url": "http://...",
"attachments": [...]
},
{...}, ...
],
"actions": [{...}, ...]
}
<output - filed lists to be sent to JIRA>
[
{
'order': 0,
'basic': {
'project': {'id': 'ABC'},
'summary': 'New issue from jira-python',
'description': 'Look into this one',
'issuetype': {'name': 'Bug'},
},
'labels': [{'add':'AAA'}, {'add':'BBB'}],
'attachments': ['http://aaa.jpg', 'http://bbb.jpg'],
'checklists': ['...', ...],
'comments': ['...', ...]
},
{...}, ...
]
"""
for file in self._file_list:
fp = open(file, 'r', encoding='utf-8')
board = json.load(fp)
fp.close()
boardlists = dict()
for lst in board[strings.trel_field_boardlists]:
boardlists[lst[strings.trel_field_listid]] = lst[strings.trel_field_listname]
for order, card in enumerate(board[strings.trel_field_boardcards]):
if card[strings.trel_field_cardclosed]:
continue
field = dict()
field[strings.order] = order
# organize basic field
field[strings.jira_field_basic] = dict()
field[strings.jira_field_basic][strings.jira_field_project] = \
{strings.jira_field_project_key: self._project}
field[strings.jira_field_basic][strings.jira_field_summary] = \
'[' + board[strings.trel_field_boardname] + ']' \
+ '[' + boardlists[card[strings.trel_field_cardidlist]] + ']' \
+ ' ' + card[strings.trel_field_cardname]
field[strings.jira_field_basic][strings.jira_field_description] = \
card[strings.trel_field_carddesc] \
+ '\n\n' + '*Board Link* - ' + board[strings.trel_field_boardurl] \
+ '\n' + '*Card Link* - ' + card[strings.trel_field_cardurl]
field[strings.jira_field_basic][strings.jira_field_issuetype] = \
{strings.jira_field_issuetype_name: strings.jira_field_issuetype_task}
# organize labels field
field[strings.jira_field_labels] = []
field[strings.jira_field_labels].append(
{'add': board[strings.trel_field_boardname].replace(' ', '_')})
field[strings.jira_field_labels].append(
{'add': board[strings.trel_field_boardurl].replace(' ', '_')})
field[strings.jira_field_labels].append(
{'add': card[strings.trel_field_cardurl].replace(' ', '_')})
field[strings.jira_field_labels].append(
{'add': boardlists[card[strings.trel_field_cardidlist]].replace(' ', '_')})
                # organize attachments field
field[strings.jira_field_attachs] = \
[attach[strings.trel_field_attachurl] for attach in card[strings.trel_field_cardattachs]]
                # organize checklists field
field[strings.jira_field_checklists] = []
for chlist_id in card[strings.trel_field_cardidchecklists]:
for chlist_trel in board[strings.trel_field_boardchecklists]:
if chlist_id == chlist_trel[strings.trel_field_boardchecklists_id]:
# make checklist title
chlist_jira = '\n' + 'h4. ' + chlist_trel[strings.trel_field_boardchecklists_name]
# make checklist items
for chlist_trel_item in chlist_trel[strings.trel_field_boardchecklists_checkitems]:
# if complete
if strings.trel_field_boardchecklists_checkitems_statecomplete \
== chlist_trel_item[strings.trel_field_boardchecklists_checkitems_state]:
chlist_jira += '\n' + '(/)'
# or incomplete
else:
chlist_jira += '\n' + '(?)'
# item name
chlist_jira += \
' ' + chlist_trel_item[strings.trel_field_boardchecklists_checkitems_name]
# add checklist
field[strings.jira_field_checklists].append(chlist_jira)
# organize comments
field[strings.jira_field_comments] = []
for action in reversed(board[strings.trel_field_boardactions]):
if action[strings.trel_field_boardactions_type] == strings.trel_field_boardactions_typecommentcard:
action_data = action[strings.trel_field_boardactions_data]
action_data_card = action_data[strings.trel_field_boardactions_data_card]
action_data_card_id = action_data_card[strings.trel_field_boardactions_data_card_id]
action_membercreator = action[strings.trel_field_boardactions_membercreator]
if action_data_card_id == card[strings.trel_field_cardid]:
# make comment
cmnt_author = action_membercreator[strings.trel_field_boardactions_membercreator_fullname]
cmnt_content = action_data[strings.trel_field_boardactions_data_text]
cmnt_item = 'h4. ' + cmnt_author + '\'s Comment:'
cmnt_item += '\n' + cmnt_content
# add new comment
field[strings.jira_field_comments].append(cmnt_item)
# manipulate issue description with checklists
field[strings.jira_field_basic][strings.jira_field_description] += \
'\n' + '\n'.join(field[strings.jira_field_checklists])
                # remove new line characters from the issue summary
                field[strings.jira_field_basic][strings.jira_field_summary] = \
                    field[strings.jira_field_basic][strings.jira_field_summary].replace('\n', ' ')
                # finally add the new field
self._field_list.append(field)
# save it to file
self._logger.info(strings.info_extract_done)
if not os.path.exists(strings.dir_extract):
os.makedirs(strings.dir_extract)
fp = open('%s/%s' % (strings.dir_extract, strings.file_extract), 'w')
json.dump(self._field_list, fp, indent=2)
def _extract_fields_from_file(self):
"""
extract fields from files which contains already converted fields
:return: none
<input>
{
'order': 0,
'basic': {
'project': {'id': 'ABC'},
'summary': 'New issue from jira-python',
'description': 'Look into this one',
'issuetype': {'name': 'Bug'},
},
'labels': [{'add':'AAA'}, {'add':'BBB'}],
'attachments': ['http://aaa.jpg', 'http://bbb.jpg'],
'checklists': ['...', ...],
'comments': ['...', ...]
}
"""
for file in self._file_list:
fp = open(file, 'r', encoding='utf-8')
field = json.load(fp)
fp.close()
# remove new line character from issue summary
field[strings.jira_field_basic][strings.jira_field_summary] = \
field[strings.jira_field_basic][strings.jira_field_summary].replace('\n', ' ')
self._field_list.append(field)
# save it to file
self._logger.info(strings.info_extract_done)
if not os.path.exists(strings.dir_extract):
os.makedirs(strings.dir_extract)
fp = open('%s/%s' % (strings.dir_extract, strings.file_extract), 'w')
json.dump(self._field_list, fp, indent=2)
def run(self):
"""
run trello2jira
"""
self._parse_args()
self._parse_file_list()
self._read_auth()
self._read_url()
self._read_project()
self._logger.debug(strings.dbg_src_files_info + ' '.join(self._file_list))
self._logger.debug(strings.dbg_user_info + self._username)
if now_testing_field_file:
self._extract_fields_from_file()
else:
self._extract_fields()
# create issues
if now_testing:
self._create_issues_test(self._field_list)
else:
self._create_issues(self._field_list)
# retry if any failed job exists
if len(self._failed_field_list) > 0:
self._logger.info(strings.info_retry_for_failure)
if now_testing:
self._create_issues_test(self._failed_field_list)
else:
self._create_issues(self._failed_field_list)
|
iamleeky/pytrello2jira
|
t2j/trello2jira.py
|
Python
|
mit
| 18,256
|
from rpython.translator.backendopt import removenoops
from rpython.translator.backendopt import inline
from rpython.translator.backendopt.malloc import remove_mallocs
from rpython.translator.backendopt.constfold import constant_fold_graph
from rpython.translator.backendopt.constfold import replace_we_are_jitted
from rpython.translator.backendopt.stat import print_statistics
from rpython.translator.backendopt.merge_if_blocks import merge_if_blocks
from rpython.translator import simplify
from rpython.translator.backendopt import mallocprediction
from rpython.translator.backendopt.removeassert import remove_asserts
from rpython.translator.backendopt.support import log
from rpython.translator.backendopt.storesink import storesink_graph
from rpython.translator.backendopt import gilanalysis
from rpython.flowspace.model import checkgraph
INLINE_THRESHOLD_FOR_TEST = 33
def get_function(dottedname):
parts = dottedname.split('.')
module = '.'.join(parts[:-1])
name = parts[-1]
try:
mod = __import__(module, {}, {}, ['__doc__'])
except ImportError as e:
raise Exception("Import error loading %s: %s" % (dottedname, e))
try:
func = getattr(mod, name)
except AttributeError:
raise Exception("Function %s not found in module" % dottedname)
return func
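# Illustrative use (hypothetical dotted name, not taken from any real config):
#   heuristic = get_function("mypkg.heuristics.size_heuristic")
# imports the module "mypkg.heuristics" and returns its "size_heuristic"
# attribute; this is how the *_heuristic config options below are resolved.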
def backend_optimizations(translator, graphs=None, secondary=False,
inline_graph_from_anywhere=False, **kwds):
# sensible keywords are
# inline_threshold, mallocs
# merge_if_blocks, constfold, heap2stack
# clever_malloc_removal, remove_asserts
# replace_we_are_jitted
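    # e.g. backend_optimizations(translator, inline_threshold=0, mallocs=False)
    # would skip the inlining and malloc-removal phases (illustrative call).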
config = translator.config.translation.backendopt.copy(as_default=True)
config.set(**kwds)
if graphs is None:
graphs = translator.graphs
for graph in graphs:
assert not hasattr(graph, '_seen_by_the_backend')
if config.print_statistics:
print "before optimizations:"
print_statistics(translator.graphs[0], translator, "per-graph.txt")
if config.replace_we_are_jitted:
for graph in graphs:
replace_we_are_jitted(graph)
if config.remove_asserts:
constfold(config, graphs)
remove_asserts(translator, graphs)
if config.really_remove_asserts:
for graph in graphs:
removenoops.remove_debug_assert(graph)
# the dead operations will be killed by the remove_obvious_noops below
# remove obvious no-ops
def remove_obvious_noops():
for graph in graphs:
removenoops.remove_same_as(graph)
simplify.eliminate_empty_blocks(graph)
simplify.transform_dead_op_vars(graph, translator)
removenoops.remove_duplicate_casts(graph, translator)
if config.print_statistics:
print "after no-op removal:"
print_statistics(translator.graphs[0], translator)
remove_obvious_noops()
if config.inline or config.mallocs:
heuristic = get_function(config.inline_heuristic)
if config.inline:
threshold = config.inline_threshold
else:
threshold = 0
inline_malloc_removal_phase(config, translator, graphs,
threshold,
inline_heuristic=heuristic,
inline_graph_from_anywhere=inline_graph_from_anywhere)
constfold(config, graphs)
if config.clever_malloc_removal:
threshold = config.clever_malloc_removal_threshold
heuristic = get_function(config.clever_malloc_removal_heuristic)
log.inlineandremove("phase with threshold factor: %s" % threshold)
log.inlineandremove("heuristic: %s.%s" % (heuristic.__module__,
heuristic.__name__))
count = mallocprediction.clever_inlining_and_malloc_removal(
translator, graphs,
threshold = threshold,
heuristic=heuristic)
log.inlineandremove("removed %d simple mallocs in total" % count)
constfold(config, graphs)
if config.print_statistics:
print "after clever inlining and malloc removal"
print_statistics(translator.graphs[0], translator)
if config.storesink:
for graph in graphs:
storesink_graph(graph)
if config.profile_based_inline and not secondary:
threshold = config.profile_based_inline_threshold
heuristic = get_function(config.profile_based_inline_heuristic)
inline.instrument_inline_candidates(graphs, threshold)
counters = translator.driver_instrument_result(
config.profile_based_inline)
n = len(counters)
def call_count_pred(label):
if label >= n:
return False
return counters[label] > 250 # xxx introduce an option for this
inline_malloc_removal_phase(config, translator, graphs,
threshold,
inline_heuristic=heuristic,
call_count_pred=call_count_pred)
constfold(config, graphs)
if config.merge_if_blocks:
log.mergeifblocks("starting to merge if blocks")
for graph in graphs:
merge_if_blocks(graph, translator.config.translation.verbose)
if config.print_statistics:
print "after if-to-switch:"
print_statistics(translator.graphs[0], translator)
remove_obvious_noops()
for graph in graphs:
checkgraph(graph)
gilanalysis.analyze(graphs, translator)
def constfold(config, graphs):
if config.constfold:
for graph in graphs:
constant_fold_graph(graph)
def inline_malloc_removal_phase(config, translator, graphs, inline_threshold,
inline_heuristic,
call_count_pred=None,
inline_graph_from_anywhere=False):
# inline functions in each other
if inline_threshold:
log.inlining("phase with threshold factor: %s" % inline_threshold)
log.inlining("heuristic: %s.%s" % (inline_heuristic.__module__,
inline_heuristic.__name__))
inline.auto_inline_graphs(translator, graphs, inline_threshold,
heuristic=inline_heuristic,
call_count_pred=call_count_pred,
inline_graph_from_anywhere=inline_graph_from_anywhere)
if config.print_statistics:
print "after inlining:"
print_statistics(translator.graphs[0], translator)
# vaporize mallocs
if config.mallocs:
log.malloc("starting malloc removal")
remove_mallocs(translator, graphs)
if config.print_statistics:
print "after malloc removal:"
print_statistics(translator.graphs[0], translator)
|
oblique-labs/pyVM
|
rpython/translator/backendopt/all.py
|
Python
|
mit
| 6,949
|
# -*- coding: utf-8 -*-
"""
Created on Tue Feb 17 21:31:48 2015
@author: andrew_woizesko
"""
###########################################################################
## Imports
###########################################################################
import dill
from conf import settings
###########################################################################
## Event Classifier
###########################################################################
class EventClassifier(object):
"""
Class that will be used to classify user input
events. The event classifier will use the model
from build.py
"""
def __init__(self, model=None):
"""
Initialize the event classifier with the model
created in build.py
"""
## Get the default model from the settings if it isn't passed in
model = model or settings.model
## Load the model from the pickle
with open(model, 'rb') as pkl:
self._classifier = dill.load(pkl)
def classify(self, instance):
"""
This is the function that will take the user
input (instance) and return the probability that the
user will encounter a crime on their trip
"""
## Use the classifier to predict the probabilities of each crime
most_likely = self._classifier.predict(instance)
return most_likely
def get_model(self):
"""
returns the model to the user to access all object functionality
"""
return self._classifier
if __name__ == "__main__":
import numpy as np
classifier = EventClassifier()
#sample data. Most of this will be generated by a form.
tests = np.asfarray([['-77.00589537', '38.90611936', '0.44015444', '0.505928854', '0.055153707', '0.944846293', '0.052631579', '0.295465729', '4.238', '7.7461', '8.412', '0', '1', '0'],
['-77.03382744', '38.93071433', '0.499459459', '0.59562212', '0.178443114', '0.821556886', '0.048104956', '0.351140991', '4.028', '4.4688', '12.755', '0', '1', '0'],
['-77.06697201', '38.90685672', '0.12755102', '0.452574526', '0.122916667', '0.877083333', '0', '1.583192993', '10.00001', '7.0388', '-999', '0', '1', '0'],
['-77.0646675', '38.94619085', '0.638018937', '0.588370314', '0.11689008', '0.88310992', '0', '0.323109894', '3.451', '7.6532', '6.505', '0', '1', '0']])
answers = ['THEFT/OTHER',
'ASSAULT W/DANGEROUS WEAPON',
'THEFT F/AUTO',
'THEFT/OTHER']
for i,test in enumerate(tests):
print "Classifier predicts {0} and the actual answer is {1}".format(classifier.classify(test)[0], answers[i])
|
georgetown-analytics/dc-crimebusters
|
crimebusters/classify.py
|
Python
|
mit
| 2,787
|
import corner as triangle
import numpy as np
from matplotlib import rcParams
run_name='model1_nax20_DE'
chain=np.load(run_name+'.npy')
nwalkers, nsteps,ndim = np.shape(chain)
burnin = nsteps // 4  # integer division so the result can be used as a slice index
# Make sample chain removing burnin
combinedUSE=chain[:,burnin:,:].reshape((-1,ndim))
# Priors, for plotting limits and binning
fmin,fmax=0.,2.
betamin,betamax=0.,1.
b0min,b0max=0.,1.5
#################################
# Plotting fonts
###############################
F1 = 20 # Axes font size
F2 = 20 # Legend font size
line = 1.5 # Line width
# Setting the font structure
rc = rcParams # Font structure is called rc now
rc['text.usetex'] = True # Tex fonts
rc['font.family'] = 'serif'
rc['font.serif'].insert(0,'cm') # Default font is computer modern for latex
rc['font.size'] = F1
rc['xtick.labelsize'] = 'small'
rc['ytick.labelsize'] = 'small'
rc['legend.fontsize'] = F2
##############################
# Binning
#############################
bins=20
# Linear binning for linear prior
fbins=np.linspace(fmin,fmax,num=bins)
b0bins=np.linspace(b0min,b0max,num=bins)
betabins=np.linspace(betamin,betamax,num=bins)
#############################################
# Triangle plot: show 1 and 2 sigma levels following triangle documentation
###########################################
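# For a 2D Gaussian, 1-exp(-0.5) ~ 0.39 and 1-exp(-2) ~ 0.86 are the probability
# masses enclosed by the 1- and 2-sigma contours, hence the `levels` values below.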
combinedCOL='#FF495C'
fig2 = triangle.corner(combinedUSE, labels=[r'$\bar{f}$', r'$\beta_{\mathcal{M}}$',r'$\sigma_{\mathcal{M}}$'],
color=combinedCOL,smooth1d=2,smooth=2.,plot_datapoints=False,
levels=(1-np.exp(-0.5),1-np.exp(-2.)),
density=True,range=[[fmin,fmax],[betamin,betamax],[b0min,b0max]],bins=[fbins,betabins,b0bins])
fig2.savefig('Plots/'+run_name+"_triangle.pdf")
fig2.savefig('Plots/'+run_name+"_triangle.png")
|
DoddyPhysics/AxionNet
|
Chains/model1_DE_triangle.py
|
Python
|
mit
| 1,718
|
# -*- coding: utf-8 -*-
"""
General utilities
"""
import http.client
import json
import socket
import time
from functools import wraps
from itertools import chain
from flask import flash, redirect, request, url_for
from flask_login import current_user
def timetag_today():
"""Return the timetag for today"""
return int(time.time() // 86400)
def get_bustimes(stopname, count=10):
"""Parses the VVO-Online API return string.
API returns in format [["line", "to", "minutes"],[__],[__]], where "__" are
up to nine more Elements.
:param stopname: Requested stop.
:param count: Limit the entries for the stop.
"""
conn = http.client.HTTPConnection('widgets.vvo-online.de', timeout=1)
stopname = stopname.replace(' ', '%20')
try:
conn.request(
'GET',
'/abfahrtsmonitor/Abfahrten.do?ort=Dresden&hst={}'.format(stopname)
)
response = conn.getresponse()
except socket.error:
return None
response_data = json.loads(response.read().decode())
return ({
'line': i[0],
'dest': i[1],
'minutes_left': int(i[2]) if i[2] else 0,
} for i in response_data)
# TODO: check whether this is the correct format
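# Illustrative use (hypothetical stop name):
#   for dep in get_bustimes('Hauptbahnhof') or []:
#       print(dep['line'], dep['dest'], dep['minutes_left'])
# get_bustimes returns None when the connection fails, hence the `or []`.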
def password_changeable(user):
"""A decorator used to disable functions (routes) if a certain feature
is not provided by the User class.
given_features has to be a callable to ensure runtime distinction
between datasources.
:param needed_feature: The feature needed
:param given_features: A callable returning the set of supported features
:return:
"""
def feature_decorator(func):
@wraps(func)
def decorated_view(*args, **kwargs):
if user.is_authenticated and user.can_change_password:
return func(*args, **kwargs)
else:
def not_supported():
flash("Diese Funktion ist nicht verfügbar.", 'error')
return redirect(redirect_url())
return not_supported()
return decorated_view
return feature_decorator
def get_user_name(user=current_user):
if user.is_authenticated:
return user.uid
if user.is_anonymous:
return 'anonymous'
return ''
def url_self(**values):
"""Generate a URL to the request's current endpoint with the same view
arguments.
Additional arguments can be specified to override or extend the current view
arguments.
:param values: Additional variable arguments for the endpoint
:return: A URL to the current endpoint
"""
if request.endpoint is None:
endpoint = 'generic.index'
else:
endpoint = request.endpoint
# if no endpoint matches the given URL, `request.view_args` is
# ``None``, not ``{}``
kw = request.view_args.copy() if request.view_args is not None else {}
kw.update(values)
return url_for(endpoint, **kw)
def redirect_url(default='generic.index'):
return request.args.get('next') or request.referrer or url_for(default)
def argstr(*args, **kwargs):
return ", ".join(chain(
("{}".format(arg) for arg in args),
("{}={!r}".format(key, val) for key, val in kwargs.items()),
))
def replace_empty_handler_callables(config, func):
"""Register func as specific handler's callable in a dict logging config.
This method looks at the elements of the 'handlers' section of the
`config`.
If an element has an unassigned handler callable, which is a dict line
`'()': None`, `None` is replaced by func.
This function is kind of a hack, but necessary, because else the
choice of the handler callable is limited to some static,
predefined method.
The specific example that lead to this: Because the callable to
create a SentryHandler can only be defined *after* the import of
the default config dict, but *before* the knowledge whether a
`SENTRY_DSN` is given, it has to be dynamically created.
:param dict config: A dict as used for logging.dictConfig()
:return: The new, modified dict
"""
if 'handlers' not in config:
return config
ret = config.copy()
ret['handlers'] = {
h_name: {param: (func
if val is None and param == '()'
else val)
for param, val in h_conf.items()}
for h_name, h_conf in ret['handlers'].items()
}
return ret
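# Illustrative example (assumed config shape, names made up):
#   cfg = {'handlers': {'sentry': {'()': None, 'level': 'ERROR'}}}
#   replace_empty_handler_callables(cfg, make_sentry_handler)
#   -> {'handlers': {'sentry': {'()': make_sentry_handler, 'level': 'ERROR'}}}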
def dict_diff(d1, d2):
"""Return a list of keys that have changed."""
for key in set(d1.keys()) | set(d2.keys()):
if key not in d1 or key not in d2 or d1[key] != d2[key]:
yield key
|
lukasjuhrich/sipa
|
sipa/utils/__init__.py
|
Python
|
mit
| 4,684
|
"""
Django settings for bibbutler project.
Generated by 'django-admin startproject' using Django 1.9.6.
For more information on this file, see
https://docs.djangoproject.com/en/1.9/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.9/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.9/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'zhkz^0pkrwu^pr$=hwm6)d-t1lr-u8vifo2@uiq-rwtb&l(zy#'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'bibbutler_web.apps.BibbutlerWebConfig',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'polymorphic',
'widget_tweaks',
]
MIDDLEWARE_CLASSES = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
# 'django.contrib.sites', (http://stackoverflow.com/questions/29635765/django-1-9-deprecation-warnings-app-label
ROOT_URLCONF = 'bibbutler.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'bibbutler.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.9/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/1.9/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.9/topics/i18n/
LANGUAGE_CODE = 'de-de'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.9/howto/static-files/
STATIC_URL = '/static/'
|
dolonnen/bibbutler
|
bibbutler/settings.py
|
Python
|
mit
| 3,374
|
#recommender.py includes a class 'Recommender' which provides
#basic functions of a certain recommender
from __future__ import division
import os
import pprint
import similarity
import cPickle as pickle
import tool
class Recommender:
def __init__(self, outputFile, similarityMeasure, pathStr, trainingSet, predictingSet):
self.outputFile = os.getcwd() + '//results/' + outputFile
self.pathStr = pathStr
self.trainingSet = trainingSet
self.predictingSet = predictingSet
self.prefs = {}
self.predictPrefs = []
self.movieTag = {}
self.similarityMeasure = similarityMeasure
def loadTrainingSet(self):
prefs = {}
#prefsOnTitle={}
try:
with open(self.pathStr + self.trainingSet) as train:
for line in train:
(userId, movieId, rating, time) = line.split('\t')
prefs.setdefault(userId, {})
prefs[userId][movieId] = float(rating)
except IOError as err:
print('File error: ' + str(err))
self.prefs = prefs
def loadPredictingSet(self):
prefs = []
try:
with open(self.pathStr + self.predictingSet) as predict:
for line in predict:
(userId, movieId, rating, time) = line.split('\t')
movieId = movieId.replace('\r\r\n', '')
prefs.append((userId, movieId))
except IOError as err:
print('File error: ' + str(err))
self.predictPrefs = prefs
def transformPrefs(self, prefs):
result = {}
for person in prefs:
for item in prefs[person]:
result.setdefault(item, {})
result[item][person] = prefs[person][item]
return result
def topMatches(self, prefs, item, similarityMeasure, n=100):
if similarityMeasure == similarity.sim_cosine_improved_tag:
scores = [(similarityMeasure(prefs, item, other, self.movieTag), other) for other in prefs if other != item]
else:
scores = [(similarityMeasure(prefs, item, other), other) for other in prefs if other != item]
scores.sort()
scores.reverse()
return scores[0:n]
def getRecommendedItems(self, user):
return None
def predictRating(self, user, movie):
return None
class ItemBasedRecommender(Recommender):
def __init__(self, outputFile, similarityMeasure):
Recommender.__init__(self, outputFile, similarityMeasure=similarity.sim_cosine_improved,
pathStr=os.getcwd() + '//ml-100k/', trainingSet='u1.base',
predictingSet='u1.test')
self.itemMatch = None
def calculateSimilarItems(self, n, resultFile):
# Create a dictionary of items showing which other items they
# are most similar to.
result = {}
c = 0
# self.loadMovieTag()
# for i in prefsOnItem:
# if i not in self.movieTag:
# self.movieTag[i] = []
prefsOnItem = self.transformPrefs(self.prefs)
for i in prefsOnItem.keys():
result.setdefault(i, [])
for item in prefsOnItem:
# Status updates for large datasets
c += 1
            if c % 5 == 0: print "%d / %d" % (c, len(prefsOnItem))
# Find the most similar items to this one
scores = self.topMatches(prefsOnItem, item, similarityMeasure=self.similarityMeasure, n=n)
result[item] = scores
tool.dumpPickle(result, resultFile)
#return result
def loadItemMatch(self, itemFile):
self.itemMatch = tool.loadPickle(itemFile)
def predictRating(self, user, movie):
totals = 0.0
simSums = 0.0
sim = 0.0
predict = 0
itemList = self.itemMatch[movie]
for other in itemList:
if other[1] == movie:
continue
sim = other[0]
if sim <= 0:
continue
if movie not in self.prefs[user] or self.prefs[user][movie] == 0:
if other[1] in self.prefs[user]:
#~ print 'test'
totals += self.prefs[user][other[1]] * sim
simSums += sim
if simSums == 0:
predict = 4.0
else:
predict = totals / simSums
return predict
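    # Illustrative calculation (made-up numbers): with neighbour similarities
    # 0.8 and 0.4, and the user's ratings 5.0 and 3.0 for those neighbours,
    # predictRating gives (0.8*5.0 + 0.4*3.0) / (0.8 + 0.4) = 5.2 / 1.2 ~ 4.33.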
def getRecommendedItems(self, user):
prefsOnUser = self.loadBaseFileOnUser()
#itemMatch=tool.loadPickle('/ItemSimOnSet1_n40_typeAdded.pkl')
userRatings = prefsOnUser[user]
scores = {}
totalSim = {}
# Loop over items rated by this user
for (item, rating) in userRatings.items():
# Loop over items similar to this one
for (similarity, item2) in self.itemMatch[item]:
if similarity <= 0: continue
# Ignore if this user has already rated this item
if item2 in userRatings: continue
# Weighted sum of rating times similarity
scores.setdefault(item2, 0)
scores[item2] += similarity * rating
# Sum of all the similarities
totalSim.setdefault(item2, 0)
totalSim[item2] += similarity
# Divide each total score by total weighting to get an average
rankings = [(round(score / totalSim[item], 7), item) for item, score in scores.items()]
# Return the rankings from highest to lowest
rankings.sort()
rankings.reverse()
return rankings
class UserBasedRecommender(Recommender):
def __init__(self, outputFile, similarityMeasure):
Recommender.__init__(self, outputFile, similarityMeasure=similarity.sim_cosine_improved,
pathStr=os.getcwd() + '//data-v/', trainingSet='training_set.txt',
predictingSet='predict.txt')
self.userMatch = None
def calculateSimilarUsers(self, n, resultFile):
result = {}
c = 0
for i in self.prefs.keys():
result.setdefault(i, [])
for user in self.prefs:
c += 1
            if c % 5 == 0:
                print "%d / %d" % (c, len(self.prefs))
scores = self.topMatches(self.prefs, user, similarityMeasure=self.similarityMeasure, n=n)
result[user] = scores
#~ print result[user]
tool.dumpPickle(result, resultFile)
def loadUserMatch(self, userFile):
self.userMatch = tool.loadPickle(userFile)
def predictRating(self, user, movie):
totals = 0.0
simSums = 0.0
sim = 0.0
predict = 0
userList = self.userMatch[user]
for other in userList:
if other[1] == user:
continue
sim = other[0]
if sim <= 0:
continue
if movie not in self.prefs[user] or self.prefs[user][movie] == 0:
if movie in self.prefs[other[1]]:
totals += self.prefs[other[1]][movie] * sim
simSums += sim
if simSums == 0:
predict = 4.0
else:
predict = totals / simSums
return predict
#~ def predictRating(self, user, movie):
#~ totals=0.0
#~ simSums=0.0
#~ sim=0.0
#~ predict=0
#~ matchlist=self.topMatches(self.prefs, user, similarityMeasure=similarity.sim_pearson_improved,n=80)
#~ for other in matchlist:
#~ if other[1]==user:
#~ continue
#~ sim=other[0]
#~ if sim<=0:
#~ continue
#~ if movie not in self.prefs[user] or self.prefs[user][movie]==0:
#~ if movie in self.prefs[other[1]]:
#~ totals+=self.prefs[other[1]][movie]*sim
#~ simSums+=sim
#~ print "simSums",simSums
#~ print "totals",totals
#~ if simSums==0:
#~ predict=4.0
#~ else:
#~ predict=totals/simSums
#~ print predict
#~ return predict
def getRecommendedItems(self, user):
        self.loadTrainingSet()  # populates self.prefs and returns None
        prefs = self.prefs
totals = {}
simSums = {}
sim = 0.0
for other in self.topMatches(prefs, user, similarityMeasure=similarity.sim_cosine, n=90):
#don't compare me to myself
if other[1] == user: continue
sim = other[0]
#ignore scores of zero or lower
if sim <= 0: continue
for item in prefs[other[1]]:
#only score movies I haven't seen yet
if item not in prefs[user] or prefs[user][item] == 0:
#similarity*score
totals.setdefault(item, 0)
totals[item] += prefs[other[1]][item] * sim
#sum of similarities
simSums.setdefault(item, 0)
simSums[item] += sim
#create the normalized list
rankings = [(total / simSums[item], item) for item, total in totals.items()]
#return the sorted list
rankings.sort()
rankings.reverse()
return rankings
|
clasnake/recommender
|
recommender.py
|
Python
|
mit
| 9,170
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import functools
import sys
from . import utils
from .helper import jinja2_env
__author__ = 'banxi'
_generators_map = {}
def _make_gkey(target, platform, lang):
return "%s:%s:%s" % (target, platform, lang)
def as_generator(target, platform="ios", lang="swift"):
def decorator(cls):
gkey = _make_gkey(target=target, platform=platform, lang=lang)
cls.target = target
cls.platform = platform
cls.lang = lang
_generators_map[gkey] = cls
return cls
return decorator
as_ios_swift_generator = functools.partial(as_generator, platform="ios", lang="swift")
as_android_kotlin_generator = functools.partial(as_generator, platform="android", lang="kotlin")
def find_generator(target, platform="ios", lang="swift"):
gkey = _make_gkey(target=target, platform=platform, lang=lang)
return _generators_map.get(gkey)
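# Illustrative registration and lookup (hypothetical class, not part of this module):
#   @as_ios_swift_generator("model")
#   class ModelGenerator(object): ...
#   find_generator("model")                      # -> ModelGenerator
#   find_generator("model", platform="android")  # -> None unless registered for android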
def generate_v2(target, **options):
from . import models
platform = options.get('platform', 'ios')
lang = options.get('lang', 'swift')
lines = utils.readlines_from_stdin()
comments = ["//"+line for line in lines]
model_class = find_generator(target=target, platform=platform, lang=lang)
if not model_class:
return "No generator for %s %s %s" % (target, platform, lang)
try:
model,fields = model_class.parse_source(lines)
template_path = model.template_path()
template_context = model.template_context()
template_context['comments'] = comments
template = jinja2_env.get_template(template_path)
text = template.render(**template_context)
return text.encode('utf-8')
except Exception as e:
import traceback
return traceback.format_exc()
generate = generate_v2
generate_kotlin = functools.partial(generate_v2, platform='android', lang='kotlin')
def json_to_fields(auto_remove_comments = False):
from . import converters
try:
lines = utils.readlines_from_stdin()
comments = [str("\n/// ") + line.encode('utf-8') for line in lines]
if auto_remove_comments:
final_lines = json_remove_comment(lines)
else:
final_lines = lines
text = '\n'.join(final_lines)
fields = converters.convert_text_to_field_list(text)
output = ';'.join([str(f) for f in fields])
sys.stdout.writelines(comments)
sys.stdout.write("\n"+output)
except Exception as e:
import traceback
sys.stdout.write(traceback.format_exc())
def json_remove_comment(lines):
# remove '//' comment in lines
final_lines = []
for line in lines:
if not line:
continue
comment_index = line.rfind('//')
if comment_index == 0:
continue # comment line
if comment_index == -1:
comment_index = line.rfind('#')
if comment_index == 0:
continue # comment line
if comment_index > 0:
new_line = line[:comment_index]
final_lines.append(new_line)
else:
final_lines.append(line)
return final_lines
def json_escape_quote(lines = None):
should_return = lines is not None
lines = lines or utils.readlines_from_stdin()
in_lines = json_remove_comment(lines)
final_lines = []
for line in in_lines:
if not line:continue
new_line = line.replace('"','\\"')
final_lines.append(new_line)
if should_return:
return final_lines
else:
        output_lines = [line.decode('utf-8') for line in final_lines]
        output_lines.insert(0, '"')
        output_lines.append('"')
        sys.stdout.writelines(output_lines)
|
banxi1988/iOSCodeGenerator
|
ios_code_generator/generators.py
|
Python
|
mit
| 3,737
|
# Doesn't work yet
from time import sleep
import os
import PIL
import scipy.misc
import math
import chi
import tensortools as tt
from tensortools import Function
import numpy as np
import gym
import tensorflow as tf
from tensorflow.contrib import layers
from chi.rl.util import pp, to_json, show_all_variables
import argparse
def conv_out_size_same(size, stride):
return int(math.ceil(float(size) / float(stride)))
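# e.g. conv_out_size_same(64, 2) -> 32 and conv_out_size_same(7, 2) -> 4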
class DCGAN:
"""
An implementation of
Unsupervised Representation Learning with Deep Convolutional Generative Adversarial Networks
https://arxiv.org/abs/1511.06434, adapted from https://github.com/carpedm20/DCGAN-tensorflow
"""
def __init__(self, generator:tt.Model, discriminator:tt.Model, sampler:tt.Model, input_height, input_width,
output_height=64, output_width=64,
batch_size=64, y_dim=None,
               z_dim=100, gf_dim=64, df_dim=64, gfc_dim=1024, dfc_dim=1024,
c_dim=3, dataset_name='default', input_fname_pattern='*.jpg',
checkpoint_dir=None, sample_dir=None):
        self.input_height = input_height
        self.input_width = input_width
        self.batch_size = batch_size
self.output_height = output_height
self.output_width = output_width
self.y_dim = y_dim
self.z_dim = z_dim
self.gf_dim = gf_dim
self.df_dim = df_dim
self.gfc_dim = gfc_dim
self.dfc_dim = dfc_dim
self.discriminator = discriminator
self.generator = generator
self.sampler = sampler
# batch normalization
self.d_bn1 = batch_norm(name='d_bn1')
self.d_bn2 = batch_norm(name='d_bn2')
if not self.y_dim:
self.d_bn3 = batch_norm(name='d_bn3')
self.g_bn0 = batch_norm(name='g_bn0')
self.g_bn1 = batch_norm(name='g_bn1')
self.g_bn2 = batch_norm(name='g_bn2')
if not self.y_dim:
self.g_bn3 = batch_norm(name='g_bn3')
self.dataset_name = dataset_name
self.input_fname_pattern = input_fname_pattern
self.checkpoint_dir = checkpoint_dir
if self.dataset_name == 'mnist':
self.data_X, self.data_y = self.load_mnist()
self.c_dim = self.data_X[0].shape[-1]
else:
self.data = glob(os.path.join("./data", self.dataset_name, self.input_fname_pattern))
imreadImg = imread(self.data[0]);
            if len(imreadImg.shape) >= 3:  # image has a channel axis, i.e. not grayscale
self.c_dim = imread(self.data[0]).shape[-1]
else:
self.c_dim = 1
self.grayscale = (self.c_dim == 1)
self.build_model()
def build_model(self):
if self.y_dim:
self.y = tf.placeholder(tf.float32, [self.batch_size, self.y_dim], name='y')
else:
self.y = None
if self.crop:
image_dims = [self.output_height, self.output_width, self.c_dim]
else:
image_dims = [self.input_height, self.input_width, self.c_dim]
self.inputs = tf.placeholder(
tf.float32, [self.batch_size] + image_dims, name='real_images')
inputs = self.inputs
self.z = tf.placeholder(
tf.float32, [None, self.z_dim], name='z')
self.z_sum = histogram_summary("z", self.z)
self.G = self.generator(self.z, self.y)
self.D, self.D_logits = self.discriminator(inputs, self.y, reuse=False)
self.sampler = self.sampler(self.z, self.y)
self.D_, self.D_logits_ = self.discriminator(self.G, self.y, reuse=True)
self.d_sum = histogram_summary("d", self.D)
self.d__sum = histogram_summary("d_", self.D_)
self.G_sum = image_summary("G", self.G)
def sigmoid_cross_entropy_with_logits(x, y):
try:
return tf.nn.sigmoid_cross_entropy_with_logits(logits=x, labels=y)
except:
return tf.nn.sigmoid_cross_entropy_with_logits(logits=x, targets=y)
self.d_loss_real = tf.reduce_mean(
sigmoid_cross_entropy_with_logits(self.D_logits, tf.ones_like(self.D)))
self.d_loss_fake = tf.reduce_mean(
sigmoid_cross_entropy_with_logits(self.D_logits_, tf.zeros_like(self.D_)))
self.g_loss = tf.reduce_mean(
sigmoid_cross_entropy_with_logits(self.D_logits_, tf.ones_like(self.D_)))
self.d_loss_real_sum = scalar_summary("d_loss_real", self.d_loss_real)
self.d_loss_fake_sum = scalar_summary("d_loss_fake", self.d_loss_fake)
self.d_loss = self.d_loss_real + self.d_loss_fake
self.g_loss_sum = scalar_summary("g_loss", self.g_loss)
self.d_loss_sum = scalar_summary("d_loss", self.d_loss)
t_vars = tf.trainable_variables()
self.d_vars = [var for var in t_vars if 'd_' in var.name]
self.g_vars = [var for var in t_vars if 'g_' in var.name]
self.saver = tf.train.Saver()
def train(self, config):
d_optim = tf.train.AdamOptimizer(config.learning_rate, beta1=config.beta1) \
.minimize(self.d_loss, var_list=self.d_vars)
g_optim = tf.train.AdamOptimizer(config.learning_rate, beta1=config.beta1) \
.minimize(self.g_loss, var_list=self.g_vars)
try:
tf.global_variables_initializer().run()
except:
tf.initialize_all_variables().run()
self.g_sum = merge_summary([self.z_sum, self.d__sum,
self.G_sum, self.d_loss_fake_sum, self.g_loss_sum])
self.d_sum = merge_summary(
[self.z_sum, self.d_sum, self.d_loss_real_sum, self.d_loss_sum])
self.writer = SummaryWriter("./logs", self.sess.graph)
sample_z = np.random.uniform(-1, 1, size=(self.sample_num , self.z_dim))
if config.dataset == 'mnist':
sample_inputs = self.data_X[0:self.sample_num]
sample_labels = self.data_y[0:self.sample_num]
else:
sample_files = self.data[0:self.sample_num]
sample = [
get_image(sample_file,
input_height=self.input_height,
input_width=self.input_width,
resize_height=self.output_height,
resize_width=self.output_width,
crop=self.crop,
grayscale=self.grayscale) for sample_file in sample_files]
if (self.grayscale):
sample_inputs = np.array(sample).astype(np.float32)[:, :, :, None]
else:
sample_inputs = np.array(sample).astype(np.float32)
counter = 1
start_time = time.time()
could_load, checkpoint_counter = self.load(self.checkpoint_dir)
if could_load:
counter = checkpoint_counter
print(" [*] Load SUCCESS")
else:
print(" [!] Load failed...")
for epoch in xrange(config.epoch):
if config.dataset == 'mnist':
batch_idxs = min(len(self.data_X), config.train_size) // config.batch_size
else:
self.data = glob(os.path.join(
"./data", config.dataset, self.input_fname_pattern))
batch_idxs = min(len(self.data), config.train_size) // config.batch_size
for idx in xrange(0, batch_idxs):
if config.dataset == 'mnist':
batch_images = self.data_X[idx*config.batch_size:(idx+1)*config.batch_size]
batch_labels = self.data_y[idx*config.batch_size:(idx+1)*config.batch_size]
else:
batch_files = self.data[idx*config.batch_size:(idx+1)*config.batch_size]
batch = [
get_image(batch_file,
input_height=self.input_height,
input_width=self.input_width,
resize_height=self.output_height,
resize_width=self.output_width,
crop=self.crop,
grayscale=self.grayscale) for batch_file in batch_files]
if self.grayscale:
batch_images = np.array(batch).astype(np.float32)[:, :, :, None]
else:
batch_images = np.array(batch).astype(np.float32)
batch_z = np.random.uniform(-1, 1, [config.batch_size, self.z_dim]) \
.astype(np.float32)
if config.dataset == 'mnist':
# Update D network
_, summary_str = self.sess.run([d_optim, self.d_sum],
feed_dict={
self.inputs: batch_images,
self.z: batch_z,
self.y:batch_labels,
})
self.writer.add_summary(summary_str, counter)
# Update G network
_, summary_str = self.sess.run([g_optim, self.g_sum],
feed_dict={
self.z: batch_z,
self.y:batch_labels,
})
self.writer.add_summary(summary_str, counter)
# Run g_optim twice to make sure that d_loss does not go to zero (different from paper)
_, summary_str = self.sess.run([g_optim, self.g_sum],
feed_dict={ self.z: batch_z, self.y:batch_labels })
self.writer.add_summary(summary_str, counter)
errD_fake = self.d_loss_fake.eval({
self.z: batch_z,
self.y:batch_labels
})
errD_real = self.d_loss_real.eval({
self.inputs: batch_images,
self.y:batch_labels
})
errG = self.g_loss.eval({
self.z: batch_z,
self.y: batch_labels
})
else:
# Update D network
_, summary_str = self.sess.run([d_optim, self.d_sum],
feed_dict={ self.inputs: batch_images, self.z: batch_z })
self.writer.add_summary(summary_str, counter)
# Update G network
_, summary_str = self.sess.run([g_optim, self.g_sum],
feed_dict={ self.z: batch_z })
self.writer.add_summary(summary_str, counter)
# Run g_optim twice to make sure that d_loss does not go to zero (different from paper)
_, summary_str = self.sess.run([g_optim, self.g_sum],
feed_dict={ self.z: batch_z })
self.writer.add_summary(summary_str, counter)
errD_fake = self.d_loss_fake.eval({ self.z: batch_z })
errD_real = self.d_loss_real.eval({ self.inputs: batch_images })
errG = self.g_loss.eval({self.z: batch_z})
counter += 1
print("Epoch: [%2d] [%4d/%4d] time: %4.4f, d_loss: %.8f, g_loss: %.8f" \
% (epoch, idx, batch_idxs,
time.time() - start_time, errD_fake+errD_real, errG))
if np.mod(counter, 100) == 1:
if config.dataset == 'mnist':
samples, d_loss, g_loss = self.sess.run(
[self.sampler, self.d_loss, self.g_loss],
feed_dict={
self.z: sample_z,
self.inputs: sample_inputs,
self.y:sample_labels,
}
)
save_images(samples, image_manifold_size(samples.shape[0]),
'./{}/train_{:02d}_{:04d}.png'.format(config.sample_dir, epoch, idx))
print("[Sample] d_loss: %.8f, g_loss: %.8f" % (d_loss, g_loss))
else:
try:
samples, d_loss, g_loss = self.sess.run(
[self.sampler, self.d_loss, self.g_loss],
feed_dict={
self.z: sample_z,
self.inputs: sample_inputs,
},
)
save_images(samples, image_manifold_size(samples.shape[0]),
'./{}/train_{:02d}_{:04d}.png'.format(config.sample_dir, epoch, idx))
print("[Sample] d_loss: %.8f, g_loss: %.8f" % (d_loss, g_loss))
except:
print("one pic error!...")
if np.mod(counter, 500) == 2:
self.save(config.checkpoint_dir, counter)
pass
def load_mnist(self, config):
data_dir = os.path.join("./data", self.dataset_name)
fd = open(os.path.join(data_dir,'train-images-idx3-ubyte'))
loaded = np.fromfile(file=fd,dtype=np.uint8)
trX = loaded[16:].reshape((60000,28,28,1)).astype(np.float)
fd = open(os.path.join(data_dir,'train-labels-idx1-ubyte'))
loaded = np.fromfile(file=fd,dtype=np.uint8)
trY = loaded[8:].reshape((60000)).astype(np.float)
fd = open(os.path.join(data_dir,'t10k-images-idx3-ubyte'))
loaded = np.fromfile(file=fd,dtype=np.uint8)
teX = loaded[16:].reshape((10000,28,28,1)).astype(np.float)
fd = open(os.path.join(data_dir,'t10k-labels-idx1-ubyte'))
loaded = np.fromfile(file=fd,dtype=np.uint8)
teY = loaded[8:].reshape((10000)).astype(np.float)
trY = np.asarray(trY)
teY = np.asarray(teY)
X = np.concatenate((trX, teX), axis=0)
y = np.concatenate((trY, teY), axis=0).astype(np.int)
seed = 547
np.random.seed(seed)
np.random.shuffle(X)
np.random.seed(seed)
np.random.shuffle(y)
y_vec = np.zeros((len(y), self.y_dim), dtype=np.float)
for i, label in enumerate(y):
y_vec[i,y[i]] = 1.0
return X/255.,y_vec
@property
def model_dir(self):
return "{}_{}_{}_{}".format(
self.dataset_name, self.batch_size,
self.output_height, self.output_width)
def save(self, checkpoint_dir, step):
model_name = "DCGAN.model"
checkpoint_dir = os.path.join(checkpoint_dir, self.model_dir)
if not os.path.exists(checkpoint_dir):
os.makedirs(checkpoint_dir)
self.saver.save(self.sess,
os.path.join(checkpoint_dir, model_name),
global_step=step)
def load(self, checkpoint_dir):
import re
print(" [*] Reading checkpoints...")
checkpoint_dir = os.path.join(checkpoint_dir, self.model_dir)
ckpt = tf.train.get_checkpoint_state(checkpoint_dir)
if ckpt and ckpt.model_checkpoint_path:
ckpt_name = os.path.basename(ckpt.model_checkpoint_path)
self.saver.restore(self.sess, os.path.join(checkpoint_dir, ckpt_name))
counter = int(next(re.finditer("(\d+)(?!.*\d)",ckpt_name)).group(0))
print(" [*] Success to read {}".format(ckpt_name))
return True, counter
else:
print(" [*] Failed to find a checkpoint")
return False, 0
def test_dcgan():
parser = argparse.ArgumentParser()
parser.add_argument("--epoch", type=int, default=25,
help="Epoch to train [25]")
parser.add_argument("--learning_rate", type=float, default=0.0002,
help="Learning rate for adam [0.0002]")
parser.add_argument("--beta1", type=float, default=0.5,
help="Momentum term of adam [0.5]")
parser.add_argument("--train_size", default=np.inf,
help="The size of train images [np.inf]")
parser.add_argument("--batch_size", default=64, type=int,
help="The size of batch images [64]")
parser.add_argument("--input_height", default=108, type=int,
help="The size of image to use (will be center cropped) [108]")
parser.add_argument("--input_width", type=int, default=None,
help="The size of image to use (will be center cropped).
If None, same value as input_height [None]")
parser.add_argument("--output_height", type=int, default=64,
help="The size of the output images to produce [64]")
parser.add_argument("--output_width", type=int, default=None,
help="The size of the output images to produce.
If None, same value as output_height [None]")
parser.add_argument("--dataset", type=str, default="celebA",
help="The name of the dataset [celebA]")
parser.add_argument("--input_fname_pattern", type=str, default="*.jpg",
help="Glob pattern of filename of input images [*.jpg]")
parser.add_argument("--checkpoint_dir", type=str, default="checkpoints",
help="Name of the directory in which to save the checkpoints [checkpoints]")
parser.add_argument("--sample_dir", type=str, default="samples",
help="Name of the directory in which to save the images samples [samples]")
parser.add_argument("--train", type=bool, default=False,
help="True for training, False for testing [False]")
parser.add_argument("--crop", type=bool, default=False,
help="True for cropping [False]")
parser.add_argument("--visualize", type=bool, default=False,
help="True for visualizing [False]")
args = parser.parse_args()
input_height = args.input_height
input_width = args.input_width if args.input_width is not None else input_height
output_height = args.output_height
output_width = args.output_width if args.output_width is not None else output_height
batch_size = args.batch_size
sample_num = args.batch_size
if args.dataset == 'mnist':
y_dim = 10
else:
y_dim = None
dataset_name = args.dataset
input_fname_pattern = args.input_fname_pattern
crop = args.crop
checkpoint_dir = args.checkpoint_dir
sample_dir = args.sample_dir
gf_dim = 64
df_dim = 64
gfc_dim = 1024
dfc_dim = 1024
c_dim = 3
z_dim = 100
if not os.path.exists(checkpoint_dir):
os.makedirs(checkpoint_dir)
if not os.path.exists(sample_dir):
os.makedirs(sample_dir)
show_all_variables()
@tt.model(optimizer=tf.train.AdamOptimizer(0.0002, beta1=0.5))
def generator(z, y=None):
with tf.variable_scope("generator") as scope:
if not y_dim:
s_h, s_w = output_height, output_width
s_h2, s_w2 = conv_out_size_same(s_h, 2), conv_out_size_same(s_w, 2)
s_h4, s_w4 = conv_out_size_same(s_h2, 2), conv_out_size_same(s_w2, 2)
s_h8, s_w8 = conv_out_size_same(s_h4, 2), conv_out_size_same(s_w4, 2)
s_h16, s_w16 = conv_out_size_same(s_h8, 2), conv_out_size_same(s_w8, 2)
# project `z` and reshape
z_, h0_w, h0_b = linear(
z, gf_dim*8*s_h16*s_w16, 'g_h0_lin', with_w=True)
h0 = tf.reshape(
z_, [-1, s_h16, s_w16, gf_dim * 8])
h0 = tf.nn.relu(g_bn0(h0))
h1, h1_w, h1_b = deconv2d(
h0, [batch_size, s_h8, s_w8, gf_dim*4], name='g_h1', with_w=True)
h1 = tf.nn.relu(g_bn1(h1))
h2, h2_w, h2_b = deconv2d(
h1, [batch_size, s_h4, s_w4, gf_dim*2], name='g_h2', with_w=True)
h2 = tf.nn.relu(g_bn2(h2))
h3, h3_w, h3_b = deconv2d(
h2, [batch_size, s_h2, s_w2, gf_dim*1], name='g_h3', with_w=True)
h3 = tf.nn.relu(g_bn3(h3))
h4, h4_w, h4_b = deconv2d(
h3, [batch_size, s_h, s_w, c_dim], name='g_h4', with_w=True)
return tf.nn.tanh(h4)
else:
s_h, s_w = output_height, output_width
s_h2, s_h4 = int(s_h/2), int(s_h/4)
s_w2, s_w4 = int(s_w/2), int(s_w/4)
yb = tf.reshape(y, [batch_size, 1, 1, y_dim])
z = concat([z, y], 1)
h0 = tf.nn.relu(
g_bn0(linear(z, gfc_dim, 'g_h0_lin')))
h0 = concat([h0, y], 1)
h1 = tf.nn.relu(g_bn1(
linear(h0, gf_dim*2*s_h4*s_w4, 'g_h1_lin')))
h1 = tf.reshape(h1, [batch_size, s_h4, s_w4, gf_dim * 2])
h1 = conv_cond_concat(h1, yb)
h2 = tf.nn.relu(g_bn2(deconv2d(h1,
[batch_size, s_h2, s_w2, gf_dim * 2], name='g_h2')))
h2 = conv_cond_concat(h2, yb)
return tf.nn.sigmoid(
deconv2d(h2, [batch_size, s_h, s_w, c_dim], name='g_h3'))
@tt.model(optimizer=tf.train.AdamOptimizer(0.0002, beta1=0.5))
def discriminator(image, y=None, reuse=False):
with tf.variable_scope("discriminator") as scope:
if reuse:
scope.reuse_variables()
if not y_dim:
h0 = lrelu(conv2d(image, df_dim, name='d_h0_conv'))
h1 = lrelu(d_bn1(conv2d(h0, df_dim*2, name='d_h1_conv')))
h2 = lrelu(d_bn2(conv2d(h1, df_dim*4, name='d_h2_conv')))
h3 = lrelu(d_bn3(conv2d(h2, df_dim*8, name='d_h3_conv')))
h4 = linear(tf.reshape(h3, [batch_size, -1]), 1, 'd_h4_lin')
return tf.nn.sigmoid(h4), h4
else:
yb = tf.reshape(y, [batch_size, 1, 1, y_dim])
x = conv_cond_concat(image, yb)
h0 = lrelu(conv2d(x, c_dim + y_dim, name='d_h0_conv'))
h0 = conv_cond_concat(h0, yb)
h1 = lrelu(d_bn1(conv2d(h0, df_dim + y_dim, name='d_h1_conv')))
h1 = tf.reshape(h1, [batch_size, -1])
h1 = concat([h1, y], 1)
h2 = lrelu(d_bn2(linear(h1, dfc_dim, 'd_h2_lin')))
h2 = concat([h2, y], 1)
h3 = linear(h2, 1, 'd_h3_lin')
return tf.nn.sigmoid(h3), h3
@tt.model
def sampler(z, y=None):
with tf.variable_scope("generator") as scope:
scope.reuse_variables()
if not y_dim:
s_h, s_w = output_height, output_width
s_h2, s_w2 = conv_out_size_same(s_h, 2), conv_out_size_same(s_w, 2)
s_h4, s_w4 = conv_out_size_same(s_h2, 2), conv_out_size_same(s_w2, 2)
s_h8, s_w8 = conv_out_size_same(s_h4, 2), conv_out_size_same(s_w4, 2)
s_h16, s_w16 = conv_out_size_same(s_h8, 2), conv_out_size_same(s_w8, 2)
# project `z` and reshape
h0 = tf.reshape(
linear(z, gf_dim*8*s_h16*s_w16, 'g_h0_lin'),
[-1, s_h16, s_w16, gf_dim * 8])
h0 = tf.nn.relu(g_bn0(h0, train=False))
h1 = deconv2d(h0, [batch_size, s_h8, s_w8, gf_dim*4], name='g_h1')
h1 = tf.nn.relu(g_bn1(h1, train=False))
h2 = deconv2d(h1, [batch_size, s_h4, s_w4, gf_dim*2], name='g_h2')
h2 = tf.nn.relu(g_bn2(h2, train=False))
h3 = deconv2d(h2, [batch_size, s_h2, s_w2, gf_dim*1], name='g_h3')
h3 = tf.nn.relu(g_bn3(h3, train=False))
h4 = deconv2d(h3, [batch_size, s_h, s_w, c_dim], name='g_h4')
return tf.nn.tanh(h4)
else:
s_h, s_w = output_height, output_width
s_h2, s_h4 = int(s_h/2), int(s_h/4)
s_w2, s_w4 = int(s_w/2), int(s_w/4)
# yb = tf.reshape(y, [-1, 1, 1, self.y_dim])
yb = tf.reshape(y, [batch_size, 1, 1, y_dim])
z = concat([z, y], 1)
h0 = tf.nn.relu(g_bn0(linear(z, gfc_dim, 'g_h0_lin'), train=False))
h0 = concat([h0, y], 1)
h1 = tf.nn.relu(g_bn1(
linear(h0, gf_dim*2*s_h4*s_w4, 'g_h1_lin'), train=False))
h1 = tf.reshape(h1, [batch_size, s_h4, s_w4, gf_dim * 2])
h1 = conv_cond_concat(h1, yb)
h2 = tf.nn.relu(g_bn2(
deconv2d(h1, [batch_size, s_h2, s_w2, gf_dim * 2], name='g_h2'), train=False))
h2 = conv_cond_concat(h2, yb)
return tf.nn.sigmoid(deconv2d(h2, [batch_size, s_h, s_w, c_dim], name='g_h3'))
agent = DCGAN(generator, discriminator, sampler, input_height, input_width, output_height, output_width, batch_size, y_dim, z_dim, gfc_dim, dfc_dim, c_dim, dataset_name, input_fname_pattern, checkpoint_dir, sample_dir)
if __name__ == "__main__":
test_dcgan()
|
rmst/chi
|
chi/rl/dcgan.py
|
Python
|
mit
| 25,425
|
from plotly.basedatatypes import BaseTraceHierarchyType as _BaseTraceHierarchyType
import copy as _copy
class Cells(_BaseTraceHierarchyType):
# class properties
# --------------------
_parent_path_str = "table"
_path_str = "table.cells"
_valid_props = {
"align",
"alignsrc",
"fill",
"font",
"format",
"formatsrc",
"height",
"line",
"prefix",
"prefixsrc",
"suffix",
"suffixsrc",
"values",
"valuessrc",
}
# align
# -----
@property
def align(self):
"""
Sets the horizontal alignment of the `text` within the box. Has
an effect only if `text` spans two or more lines (i.e. `text`
contains one or more <br> HTML tags) or if an explicit width is
set to override the text width.
The 'align' property is an enumeration that may be specified as:
- One of the following enumeration values:
['left', 'center', 'right']
- A tuple, list, or one-dimensional numpy array of the above
Returns
-------
Any|numpy.ndarray
"""
return self["align"]
@align.setter
def align(self, val):
self["align"] = val
# alignsrc
# --------
@property
def alignsrc(self):
"""
Sets the source reference on Chart Studio Cloud for `align`.
The 'alignsrc' property must be specified as a string or
as a plotly.grid_objs.Column object
Returns
-------
str
"""
return self["alignsrc"]
@alignsrc.setter
def alignsrc(self, val):
self["alignsrc"] = val
# fill
# ----
@property
def fill(self):
"""
The 'fill' property is an instance of Fill
that may be specified as:
- An instance of :class:`plotly.graph_objs.table.cells.Fill`
- A dict of string/value properties that will be passed
to the Fill constructor
Supported dict properties:
color
Sets the cell fill color. It accepts either a
specific color or an array of colors or a 2D
array of colors.
colorsrc
Sets the source reference on Chart Studio Cloud
for `color`.
Returns
-------
plotly.graph_objs.table.cells.Fill
"""
return self["fill"]
@fill.setter
def fill(self, val):
self["fill"] = val
# font
# ----
@property
def font(self):
"""
The 'font' property is an instance of Font
that may be specified as:
- An instance of :class:`plotly.graph_objs.table.cells.Font`
- A dict of string/value properties that will be passed
to the Font constructor
Supported dict properties:
color
colorsrc
Sets the source reference on Chart Studio Cloud
for `color`.
family
HTML font family - the typeface that will be
applied by the web browser. The web browser
will only be able to apply a font if it is
available on the system which it operates.
Provide multiple font families, separated by
commas, to indicate the preference in which to
apply fonts if they aren't available on the
system. The Chart Studio Cloud (at
https://chart-studio.plotly.com or on-premise)
generates images on a server, where only a
select number of fonts are installed and
supported. These include "Arial", "Balto",
"Courier New", "Droid Sans",, "Droid Serif",
"Droid Sans Mono", "Gravitas One", "Old
Standard TT", "Open Sans", "Overpass", "PT Sans
Narrow", "Raleway", "Times New Roman".
familysrc
Sets the source reference on Chart Studio Cloud
for `family`.
size
sizesrc
Sets the source reference on Chart Studio Cloud
for `size`.
Returns
-------
plotly.graph_objs.table.cells.Font
"""
return self["font"]
@font.setter
def font(self, val):
self["font"] = val
# format
# ------
@property
def format(self):
"""
Sets the cell value formatting rule using d3 formatting mini-
languages which are very similar to those in Python. For
numbers, see:
https://github.com/d3/d3-format/tree/v1.4.5#d3-format.
The 'format' property is an array that may be specified as a tuple,
list, numpy array, or pandas Series
Returns
-------
numpy.ndarray
"""
return self["format"]
@format.setter
def format(self, val):
self["format"] = val
# formatsrc
# ---------
@property
def formatsrc(self):
"""
Sets the source reference on Chart Studio Cloud for `format`.
The 'formatsrc' property must be specified as a string or
as a plotly.grid_objs.Column object
Returns
-------
str
"""
return self["formatsrc"]
@formatsrc.setter
def formatsrc(self, val):
self["formatsrc"] = val
# height
# ------
@property
def height(self):
"""
The height of cells.
The 'height' property is a number and may be specified as:
- An int or float
Returns
-------
int|float
"""
return self["height"]
@height.setter
def height(self, val):
self["height"] = val
# line
# ----
@property
def line(self):
"""
The 'line' property is an instance of Line
that may be specified as:
- An instance of :class:`plotly.graph_objs.table.cells.Line`
- A dict of string/value properties that will be passed
to the Line constructor
Supported dict properties:
color
colorsrc
Sets the source reference on Chart Studio Cloud
for `color`.
width
widthsrc
Sets the source reference on Chart Studio Cloud
for `width`.
Returns
-------
plotly.graph_objs.table.cells.Line
"""
return self["line"]
@line.setter
def line(self, val):
self["line"] = val
# prefix
# ------
@property
def prefix(self):
"""
Prefix for cell values.
The 'prefix' property is a string and must be specified as:
- A string
- A number that will be converted to a string
- A tuple, list, or one-dimensional numpy array of the above
Returns
-------
str|numpy.ndarray
"""
return self["prefix"]
@prefix.setter
def prefix(self, val):
self["prefix"] = val
# prefixsrc
# ---------
@property
def prefixsrc(self):
"""
Sets the source reference on Chart Studio Cloud for `prefix`.
The 'prefixsrc' property must be specified as a string or
as a plotly.grid_objs.Column object
Returns
-------
str
"""
return self["prefixsrc"]
@prefixsrc.setter
def prefixsrc(self, val):
self["prefixsrc"] = val
# suffix
# ------
@property
def suffix(self):
"""
Suffix for cell values.
The 'suffix' property is a string and must be specified as:
- A string
- A number that will be converted to a string
- A tuple, list, or one-dimensional numpy array of the above
Returns
-------
str|numpy.ndarray
"""
return self["suffix"]
@suffix.setter
def suffix(self, val):
self["suffix"] = val
# suffixsrc
# ---------
@property
def suffixsrc(self):
"""
Sets the source reference on Chart Studio Cloud for `suffix`.
The 'suffixsrc' property must be specified as a string or
as a plotly.grid_objs.Column object
Returns
-------
str
"""
return self["suffixsrc"]
@suffixsrc.setter
def suffixsrc(self, val):
self["suffixsrc"] = val
# values
# ------
@property
def values(self):
"""
Cell values. `values[m][n]` represents the value of the `n`th
point in column `m`, therefore the `values[m]` vector length
for all columns must be the same (longer vectors will be
truncated). Each value must be a finite number or a string.
The 'values' property is an array that may be specified as a tuple,
list, numpy array, or pandas Series
Returns
-------
numpy.ndarray
"""
return self["values"]
@values.setter
def values(self, val):
self["values"] = val
# valuessrc
# ---------
@property
def valuessrc(self):
"""
Sets the source reference on Chart Studio Cloud for `values`.
The 'valuessrc' property must be specified as a string or
as a plotly.grid_objs.Column object
Returns
-------
str
"""
return self["valuessrc"]
@valuessrc.setter
def valuessrc(self, val):
self["valuessrc"] = val
# Self properties description
# ---------------------------
@property
def _prop_descriptions(self):
return """\
align
Sets the horizontal alignment of the `text` within the
box. Has an effect only if `text` spans two or more
lines (i.e. `text` contains one or more <br> HTML tags)
or if an explicit width is set to override the text
width.
alignsrc
Sets the source reference on Chart Studio Cloud for
`align`.
fill
:class:`plotly.graph_objects.table.cells.Fill` instance
or dict with compatible properties
font
:class:`plotly.graph_objects.table.cells.Font` instance
or dict with compatible properties
format
Sets the cell value formatting rule using d3 formatting
mini-languages which are very similar to those in
Python. For numbers, see:
https://github.com/d3/d3-format/tree/v1.4.5#d3-format.
formatsrc
Sets the source reference on Chart Studio Cloud for
`format`.
height
The height of cells.
line
:class:`plotly.graph_objects.table.cells.Line` instance
or dict with compatible properties
prefix
Prefix for cell values.
prefixsrc
Sets the source reference on Chart Studio Cloud for
`prefix`.
suffix
Suffix for cell values.
suffixsrc
Sets the source reference on Chart Studio Cloud for
`suffix`.
values
Cell values. `values[m][n]` represents the value of the
`n`th point in column `m`, therefore the `values[m]`
vector length for all columns must be the same (longer
vectors will be truncated). Each value must be a finite
number or a string.
valuessrc
Sets the source reference on Chart Studio Cloud for
`values`.
"""
def __init__(
self,
arg=None,
align=None,
alignsrc=None,
fill=None,
font=None,
format=None,
formatsrc=None,
height=None,
line=None,
prefix=None,
prefixsrc=None,
suffix=None,
suffixsrc=None,
values=None,
valuessrc=None,
**kwargs
):
"""
Construct a new Cells object
Parameters
----------
arg
dict of properties compatible with this constructor or
an instance of :class:`plotly.graph_objs.table.Cells`
align
Sets the horizontal alignment of the `text` within the
box. Has an effect only if `text` spans two or more
lines (i.e. `text` contains one or more <br> HTML tags)
or if an explicit width is set to override the text
width.
alignsrc
Sets the source reference on Chart Studio Cloud for
`align`.
fill
:class:`plotly.graph_objects.table.cells.Fill` instance
or dict with compatible properties
font
:class:`plotly.graph_objects.table.cells.Font` instance
or dict with compatible properties
format
Sets the cell value formatting rule using d3 formatting
mini-languages which are very similar to those in
Python. For numbers, see:
https://github.com/d3/d3-format/tree/v1.4.5#d3-format.
formatsrc
Sets the source reference on Chart Studio Cloud for
`format`.
height
The height of cells.
line
:class:`plotly.graph_objects.table.cells.Line` instance
or dict with compatible properties
prefix
Prefix for cell values.
prefixsrc
Sets the source reference on Chart Studio Cloud for
`prefix`.
suffix
Suffix for cell values.
suffixsrc
Sets the source reference on Chart Studio Cloud for
`suffix`.
values
Cell values. `values[m][n]` represents the value of the
`n`th point in column `m`, therefore the `values[m]`
vector length for all columns must be the same (longer
vectors will be truncated). Each value must be a finite
number or a string.
valuessrc
Sets the source reference on Chart Studio Cloud for
`values`.
Returns
-------
Cells
"""
super(Cells, self).__init__("cells")
if "_parent" in kwargs:
self._parent = kwargs["_parent"]
return
# Validate arg
# ------------
if arg is None:
arg = {}
elif isinstance(arg, self.__class__):
arg = arg.to_plotly_json()
elif isinstance(arg, dict):
arg = _copy.copy(arg)
else:
raise ValueError(
"""\
The first argument to the plotly.graph_objs.table.Cells
constructor must be a dict or
an instance of :class:`plotly.graph_objs.table.Cells`"""
)
# Handle skip_invalid
# -------------------
self._skip_invalid = kwargs.pop("skip_invalid", False)
self._validate = kwargs.pop("_validate", True)
# Populate data dict with properties
# ----------------------------------
_v = arg.pop("align", None)
_v = align if align is not None else _v
if _v is not None:
self["align"] = _v
_v = arg.pop("alignsrc", None)
_v = alignsrc if alignsrc is not None else _v
if _v is not None:
self["alignsrc"] = _v
_v = arg.pop("fill", None)
_v = fill if fill is not None else _v
if _v is not None:
self["fill"] = _v
_v = arg.pop("font", None)
_v = font if font is not None else _v
if _v is not None:
self["font"] = _v
_v = arg.pop("format", None)
_v = format if format is not None else _v
if _v is not None:
self["format"] = _v
_v = arg.pop("formatsrc", None)
_v = formatsrc if formatsrc is not None else _v
if _v is not None:
self["formatsrc"] = _v
_v = arg.pop("height", None)
_v = height if height is not None else _v
if _v is not None:
self["height"] = _v
_v = arg.pop("line", None)
_v = line if line is not None else _v
if _v is not None:
self["line"] = _v
_v = arg.pop("prefix", None)
_v = prefix if prefix is not None else _v
if _v is not None:
self["prefix"] = _v
_v = arg.pop("prefixsrc", None)
_v = prefixsrc if prefixsrc is not None else _v
if _v is not None:
self["prefixsrc"] = _v
_v = arg.pop("suffix", None)
_v = suffix if suffix is not None else _v
if _v is not None:
self["suffix"] = _v
_v = arg.pop("suffixsrc", None)
_v = suffixsrc if suffixsrc is not None else _v
if _v is not None:
self["suffixsrc"] = _v
_v = arg.pop("values", None)
_v = values if values is not None else _v
if _v is not None:
self["values"] = _v
_v = arg.pop("valuessrc", None)
_v = valuessrc if valuessrc is not None else _v
if _v is not None:
self["valuessrc"] = _v
# Process unknown kwargs
# ----------------------
self._process_kwargs(**dict(arg, **kwargs))
# Reset skip_invalid
# ------------------
self._skip_invalid = False
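# --- Illustrative usage (an added sketch, not part of the generated module; the
# --- column labels and values below are made-up sample data) ---
if __name__ == "__main__":
    import plotly.graph_objects as go
    # Cells is normally built indirectly through a Table trace
    fig = go.Figure(
        go.Table(
            header=dict(values=["fruit", "count"]),
            cells=dict(values=[["apple", "pear"], [10, 4]], align="left", height=24),
        )
    )
    fig.show()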
|
plotly/plotly.py
|
packages/python/plotly/plotly/graph_objs/table/_cells.py
|
Python
|
mit
| 17,748
|
# Configurables
ADMIN_ID = 'rsk8'
DEBUG = True
# URIS
MAIN_URI = '/main'
DASHBOARD_URI = '/dashboard'
DASHBOARD_ADMIN_URI = '/dashboard/admins'
DASHBOARD_ELECTIONS_URI = '/dashboard/elections'
ERROR_URI = '/error'
ORGANIZATION_URI = '/organizations'
PETITIONS_URI = '/petitions'
PETITIONS_SIGN_URI = '/petitions/sign'
PETITIONS_UNSIGN_URI = '/petitions/unsign'
POSITIONS_POPULATE_URI = '/my/positions'
MY_URI = '/my'
|
rice-apps/petition-app
|
config.py
|
Python
|
mit
| 419
|
# -*- coding: utf-8 -*-
import riprova
# Custom error object
class MyCustomError(Exception):
pass
# Whitelist of errors that should not be retried
whitelist = riprova.ErrorWhitelist([
ReferenceError,
ImportError,
IOError,
SyntaxError,
IndexError
])
def error_evaluator(error):
"""
Used to determine if an error is legit and therefore
should be retried or not.
"""
return whitelist.isretry(error)
# In order to define a global whitelist policy that would be used
# across all retry instances, overwrite the whitelist attribute in Retrier:
riprova.Retrier.whitelist = whitelist
# Store number of function calls for error simulation
calls = 0
# Register retriable operation with a custom error evaluator
# You should pass the evaluator per retry instance.
@riprova.retry(error_evaluator=error_evaluator)
def mul2(x):
global calls
if calls < 3:
calls += 1
raise RuntimeError('simulated call error')
if calls == 3:
calls += 1
raise ReferenceError('legit error')
return x * 2
# Run task
try:
mul2(2)
except ReferenceError as err:
print('Whitelisted error: {}'.format(err))
print('Retry attempts: {}'.format(calls))
|
h2non/riprova
|
examples/whitelisting_errors.py
|
Python
|
mit
| 1,229
|
# coding: utf-8
# -------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
"""
FILE: network_traversal_samples_async.py
DESCRIPTION:
These samples demonstrate creating a user and getting a relay configuration for that user.
USAGE:
python network_traversal_samples_async.py
Set the environment variables with your own values before running the sample:
1) COMMUNICATION_SAMPLES_CONNECTION_STRING - the connection string in your ACS resource
2) AZURE_CLIENT_ID - the client ID of your active directory application
3) AZURE_CLIENT_SECRET - the secret of your active directory application
4) AZURE_TENANT_ID - the tenant ID of your active directory application
"""
import os
import asyncio
from azure.communication.networktraversal._shared.utils import parse_connection_str
class CommunicationRelayClientSamples(object):
def __init__(self):
self.connection_string = os.getenv('COMMUNICATION_SAMPLES_CONNECTION_STRING')
self.client_id = os.getenv('AZURE_CLIENT_ID')
self.client_secret = os.getenv('AZURE_CLIENT_SECRET')
self.tenant_id = os.getenv('AZURE_TENANT_ID')
async def get_relay_config(self):
from azure.communication.networktraversal.aio import CommunicationRelayClient
from azure.communication.identity.aio import CommunicationIdentityClient
if self.client_id is not None and self.client_secret is not None and self.tenant_id is not None:
from azure.identity.aio import DefaultAzureCredential
endpoint, _ = parse_connection_str(self.connection_string)
identity_client = CommunicationIdentityClient(endpoint, DefaultAzureCredential())
relay_client = CommunicationRelayClient(endpoint, DefaultAzureCredential())
else:
identity_client = CommunicationIdentityClient.from_connection_string(self.connection_string)
relay_client = CommunicationRelayClient.from_connection_string(self.connection_string)
async with identity_client:
print("Creating new user")
user = await identity_client.create_user()
print("User created with id:" + user.properties.get('id'))
async with relay_client:
print("Getting relay configuration")
relay_configuration = await relay_client.get_relay_configuration(user=user)
for iceServer in relay_configuration.ice_servers:
print("Icer server:")
print(iceServer)
async def main():
sample = CommunicationRelayClientSamples()
await sample.get_relay_config()
if __name__ == '__main__':
asyncio.run(main())
|
Azure/azure-sdk-for-python
|
sdk/communication/azure-communication-networktraversal/samples/network_traversal_samples_async.py
|
Python
|
mit
| 2,887
|
# -*- coding: utf-8 -*-
#
# Bottle documentation build configuration file, created by
# sphinx-quickstart on Thu Feb 18 18:09:50 2010.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os, time
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
bottle_dir = os.path.abspath(os.path.join(os.path.dirname(__file__),'../'))
sys.path.insert(0, bottle_dir)
import bottle
# -- General configuration -----------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx', 'sphinx.ext.viewcode']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'Bottle'
copyright = unicode('2009-%s, %s' % (time.strftime('%Y'), bottle.__author__))
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
# The short X.Y version.
version = ".".join(bottle.__version__.split(".")[:2])
# The full version, including alpha/beta/rc tags.
release = bottle.__version__
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
language = 'en'
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of documents that shouldn't be included in the build.
#unused_docs = []
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
add_module_names = False
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. Major themes that come with
# Sphinx are currently 'default' and 'sphinxdoc'.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
html_logo = "_static/logo_nav.png"
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
html_favicon = "favicon.ico"
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
html_style="bottle.css"
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
html_sidebars = {
'index': ['sidebar-intro.html', 'sourcelink.html', 'donation.html', 'searchbox.html'],
'**': ['localtoc.html', 'relations.html', 'sourcelink.html', 'donation.html', 'searchbox.html']
}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
html_use_modindex = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
html_show_sourcelink = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = ''
# Output file base name for HTML help builder.
htmlhelp_basename = 'Bottledoc'
# -- Options for LaTeX output --------------------------------------------------
# The paper size ('letter' or 'a4').
#latex_paper_size = 'letter'
# The font size ('10pt', '11pt' or '12pt').
#latex_font_size = '10pt'
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'Bottle.tex', u'Bottle Documentation',
bottle.__author__, 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
latex_logo = "_static/logo_nav.png"
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# Additional stuff for the LaTeX preamble.
#latex_preamble = ''
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_use_modindex = True
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {'http://docs.python.org/': None}
autodoc_member_order = 'bysource'
locale_dirs = ['./locale']
|
adragomir/bottle
|
docs/conf.py
|
Python
|
mit
| 7,025
|
import csv
import os
DIR = 'MTA_Subway_turnstile'
def fix_turnstile_data(filenames):
"""
Filenames is a list of MTA Subway turnstile text files. A link to an example
MTA Subway turnstile text file can be seen at the URL below:
http://web.mta.info/developers/data/nyct/turnstile/turnstile_110507.txt
As you can see, there are numerous data points included in each row of the
a MTA Subway turnstile text file.
You want to write a function that will update each row in the text
file so there is only one entry per row. A few examples below:
A002,R051,02-00-00,05-28-11,00:00:00,REGULAR,003178521,001100739
A002,R051,02-00-00,05-28-11,04:00:00,REGULAR,003178541,001100746
A002,R051,02-00-00,05-28-11,08:00:00,REGULAR,003178559,001100775
Write the updates to a different text file in the format of "updated_" + filename.
For example:
1) if you read in a text file called "turnstile_110521.txt"
2) you should write the updated data to "updated_turnstile_110521.txt"
The order of the fields should be preserved. Remember to read through the
Instructor Notes below for more details on the task.
In addition, here is a CSV reader/writer introductory tutorial:
http://goo.gl/HBbvyy
You can see a sample of the turnstile text file that's passed into this function
and the the corresponding updated file by downloading these files from the resources:
Sample input file: turnstile_110528.txt
Sample updated file: solution_turnstile_110528.txt
"""
for name in filenames:
# your code here
f_in = open(DIR + '/' + name, 'r')
f_out = open(DIR + '/' + 'updated_' + name, 'w')
reader_in = csv.reader(f_in, delimiter=',')
writer_out = csv.writer(f_out, delimiter=',')
for line in reader_in:
fixed_fields = line[0:3]
for i in range(3, len(line), 5):
row = fixed_fields + line[i:i + 5]
writer_out.writerow(row)
f_in.close()
f_out.close()
print(name, ' fixed')
return
turnstile_filenames = [f for f in os.listdir(DIR) if f.startswith("turnstile")]
fix_turnstile_data(turnstile_filenames)
|
angelmtenor/IDSFC
|
L2_Data_Wrangling/P5_fixing_turnstyle_data.py
|
Python
|
mit
| 2,224
|
import glob
import os
from components import *
MEMORY_SIZE = 500
files = glob.glob(os.path.join(
os.path.dirname(os.path.abspath(__file__)),
"example_programs", "*.txt"))
filedict = {}
for i, c in enumerate(files):
print("{}: {}".format(i, c.split(os.pathsep)[-1]))
filedict[str(i)] = c # generate dict here for ease of entering input
while True:
program = filedict.get(input("Index of program to run: "))
if program:
break
with open(program) as commandList:
commands = [i.strip("\n") for i in list(commandList)]
myprogram = Compiler(commands, MEMORY_SIZE)
mycpu = Cpu(MEMORY_SIZE, myprogram.compiled)
mycpu.execute()
|
nitros12/Cpu_emulator
|
main.py
|
Python
|
mit
| 668
|
a, b = map(int, input().split())
print(a*4+b*2)
|
knuu/competitive-programming
|
atcoder/corp/codethxfes2014a_a.py
|
Python
|
mit
| 49
|
from default import *
import theano
import theano.tensor as T
import lasagne as nn
import deep_learning_layers
import layers
import preprocess
import postprocess
import objectives
import theano_printer
import updates
cached = None
# Save and validation frequency
validate_every = 10
validate_train_set = False
save_every = 10
restart_from_save = False
dump_network_loaded_data = False
# Training (schedule) parameters
batch_size = 32
sunny_batch_size = 4
batches_per_chunk = 16
AV_SLICE_PER_PAT = 11
num_epochs_train = 300
learning_rate_schedule = {
0: 0.000010,
75: 0.000007,
150: 0.000003,
225: 0.000001,
}
build_updates = updates.build_adam_updates
preprocess_train = preprocess.preprocess_with_augmentation
preprocess_validation = preprocess.preprocess # no augmentation
preprocess_test = preprocess.preprocess_with_augmentation
test_time_augmentations = 100 * AV_SLICE_PER_PAT # More augmentations since we only use single slices
cleaning_processes = [preprocess.normalize_contrast,
preprocess.set_upside_up]
augmentation_params = {
"rotation": (-16, 16),
"shear": (0, 0),
"translation": (-8, 8),
}
postprocess = postprocess.postprocess_value
# Input sizes
image_size = 128
data_sizes = {
"sliced:data:singleslice:difference:middle": (batch_size, 29, image_size, image_size), # 30 time steps, 30 mri_slices, 100 px wide, 100 px high,
"sliced:data:singleslice:difference": (batch_size, 29, image_size, image_size), # 30 time steps, 30 mri_slices, 100 px wide, 100 px high,
"sliced:data:singleslice": (batch_size, 30, image_size, image_size), # 30 time steps, 30 mri_slices, 100 px wide, 100 px high,
"sliced:data:ax": (batch_size, 30, 15, image_size, image_size), # 30 time steps, 30 mri_slices, 100 px wide, 100 px high,
"sliced:data:shape": (batch_size, 2,),
"sunny": (sunny_batch_size, 1, image_size, image_size),
"area_per_pixel:sax": (batch_size, ),
"metadata:": (batch_size, ),
# TBC with the metadata
}
# Objective
l2_weight = 0.0005
def build_objective(interface_layers):
# l2 regu on certain layers
l2_penalty = nn.regularization.regularize_layer_params_weighted(
interface_layers["regularizable"], nn.regularization.l2)
# build objective
return objectives.RMSEObjective(interface_layers["outputs"], penalty=l2_penalty)
# Architecture
def build_model():
#################
# Regular model #
#################
input_size = data_sizes["sliced:data:singleslice"]
l0 = nn.layers.InputLayer(input_size)
l1a = nn.layers.dnn.Conv2DDNNLayer(l0 , filter_size=(3,3), num_filters=64, stride=(1,1), pad="same", nonlinearity=nn.nonlinearities.rectify)
l1b = nn.layers.dnn.Conv2DDNNLayer(l1a, filter_size=(3,3), num_filters=64, stride=(1,1), pad="same", nonlinearity=nn.nonlinearities.rectify)
l1 = nn.layers.dnn.MaxPool2DDNNLayer(l1b, pool_size=(2,2), stride=(2,2))
l2a = nn.layers.dnn.Conv2DDNNLayer(l1 , filter_size=(3,3), num_filters=128, stride=(1,1), pad="same", nonlinearity=nn.nonlinearities.rectify)
l2b = nn.layers.dnn.Conv2DDNNLayer(l2a, filter_size=(3,3), num_filters=128, stride=(1,1), pad="same", nonlinearity=nn.nonlinearities.rectify)
l2 = nn.layers.dnn.MaxPool2DDNNLayer(l2b, pool_size=(2,2), stride=(2,2))
l3a = nn.layers.dnn.Conv2DDNNLayer(l2 , filter_size=(3,3), num_filters=256, stride=(1,1), pad="same", nonlinearity=nn.nonlinearities.rectify)
l3b = nn.layers.dnn.Conv2DDNNLayer(l3a, filter_size=(3,3), num_filters=256, stride=(1,1), pad="same", nonlinearity=nn.nonlinearities.rectify)
l3c = nn.layers.dnn.Conv2DDNNLayer(l3b, filter_size=(3,3), num_filters=256, stride=(1,1), pad="same", nonlinearity=nn.nonlinearities.rectify)
l3 = nn.layers.dnn.MaxPool2DDNNLayer(l3c, pool_size=(2,2), stride=(2,2))
l4a = nn.layers.dnn.Conv2DDNNLayer(l3 , filter_size=(3,3), num_filters=512, stride=(1,1), pad="same", nonlinearity=nn.nonlinearities.rectify)
l4b = nn.layers.dnn.Conv2DDNNLayer(l4a, filter_size=(3,3), num_filters=512, stride=(1,1), pad="same", nonlinearity=nn.nonlinearities.rectify)
l4c = nn.layers.dnn.Conv2DDNNLayer(l4b, filter_size=(3,3), num_filters=512, stride=(1,1), pad="same", nonlinearity=nn.nonlinearities.rectify)
l4 = nn.layers.dnn.MaxPool2DDNNLayer(l4c, pool_size=(2,2), stride=(2,2))
l5a = nn.layers.dnn.Conv2DDNNLayer(l4 , filter_size=(3,3), num_filters=512, stride=(1,1), pad="same", nonlinearity=nn.nonlinearities.rectify)
l5b = nn.layers.dnn.Conv2DDNNLayer(l5a, filter_size=(3,3), num_filters=512, stride=(1,1), pad="same", nonlinearity=nn.nonlinearities.rectify)
l5c = nn.layers.dnn.Conv2DDNNLayer(l5b, filter_size=(3,3), num_filters=512, stride=(1,1), pad="same", nonlinearity=nn.nonlinearities.rectify)
l5 = nn.layers.dnn.MaxPool2DDNNLayer(l5c, pool_size=(2,2), stride=(2,2))
key_scale = "area_per_pixel:sax"
l_scale = nn.layers.InputLayer(data_sizes[key_scale])
# Systole Dense layers
ldsys1 = nn.layers.DenseLayer(l5, num_units=512, W=nn.init.Orthogonal("relu"), b=nn.init.Constant(0.1), nonlinearity=nn.nonlinearities.rectify)
ldsys1drop = nn.layers.dropout(ldsys1, p=0.5)
ldsys2 = nn.layers.DenseLayer(ldsys1drop, num_units=64, W=nn.init.Orthogonal("relu"),b=nn.init.Constant(0.1), nonlinearity=nn.nonlinearities.rectify)
ldsys2drop = nn.layers.dropout(ldsys2, p=0.5)
ldsys3 = nn.layers.DenseLayer(ldsys2drop, num_units=1, b=nn.init.Constant(0.1), nonlinearity=nn.nonlinearities.identity)
l_systole = layers.ScaleLayer(ldsys3, scale=l_scale)
# Diastole Dense layers
lddia1 = nn.layers.DenseLayer(l5, num_units=512, W=nn.init.Orthogonal("relu"), b=nn.init.Constant(0.1), nonlinearity=nn.nonlinearities.rectify)
lddia1drop = nn.layers.dropout(lddia1, p=0.5)
lddia2 = nn.layers.DenseLayer(lddia1drop, num_units=64, W=nn.init.Orthogonal("relu"),b=nn.init.Constant(0.1), nonlinearity=nn.nonlinearities.rectify)
lddia2drop = nn.layers.dropout(lddia2, p=0.5)
lddia3 = nn.layers.DenseLayer(lddia2drop, num_units=1, b=nn.init.Constant(0.1), nonlinearity=nn.nonlinearities.identity)
l_diastole = layers.ScaleLayer(lddia3, scale=l_scale)
return {
"inputs":{
"sliced:data:singleslice": l0,
key_scale: l_scale,
},
"outputs": {
"systole:value": l_systole,
"diastole:value": l_diastole,
"systole:sigma": deep_learning_layers.FixedConstantLayer(np.ones((batch_size, 1), dtype='float32')*20./np.sqrt(test_time_augmentations)),
"diastole:sigma": deep_learning_layers.FixedConstantLayer(np.ones((batch_size, 1), dtype='float32')*30./np.sqrt(test_time_augmentations)),
},
"regularizable": {
ldsys1: l2_weight,
ldsys2: l2_weight,
ldsys3: l2_weight,
lddia1: l2_weight,
lddia2: l2_weight,
lddia3: l2_weight,
},
}
|
317070/kaggle-heart
|
configurations/j4_iranet2.py
|
Python
|
mit
| 6,950
|
import pytest
def test_eval(workbench):
expected = [
dict(repetitions=2, level=60, fc=32e3/2),
dict(repetitions=10, level=60, fc=32e3/10),
dict(repetitions=15, level=60, fc=32e3/15),
dict(repetitions=20, level=60, fc=32e3/20),
dict(repetitions=20, level=60, fc=32e3/20),
dict(repetitions=2, level=60, fc=32e3/2),
dict(repetitions=10, level=60, fc=32e3/10),
]
context = workbench.get_plugin('psi.context')
# Ensure that we loop properly through the selector sequence
context.apply_changes()
for e in expected:
context.next_setting('default', save_prior=False)
assert e == context.get_values()
# Ensure that apply_changes restarts the selector sequence.
context.apply_changes()
for e in expected:
context.next_setting('default', save_prior=False)
assert e == context.get_values()
# Ensure that changes to expressions after apply_changes does not affect the
# result.
context.apply_changes()
context.context_items['fc'].expression = '1e3'
for e in expected:
context.next_setting('default', save_prior=False)
assert e == context.get_values()
# Now, the result should change.
context.apply_changes()
for e in expected:
context.next_setting('default', save_prior=False)
e['fc'] = 1e3
assert e == context.get_values()
def test_unique_values(workbench):
context = workbench.get_plugin('psi.context')
result = context.unique_values('repetitions')
expected = {2, 10, 15, 20}
assert result == expected
def test_update(workbench):
'''
Tests whether the change detection algorithm works as intended.
'''
context = workbench.get_plugin('psi.context')
context.apply_changes()
assert context.changes_pending == False
assert context.get_value('level') == 60
context.next_setting('default', False)
assert context.changes_pending == False
assert context.get_value('level') == 60
context.context_items['level'].expression = '32'
assert context.changes_pending == True
assert context.get_value('level') == 60
context.next_setting('default', False)
assert context.changes_pending == True
assert context.get_value('level') == 60
context.apply_changes()
context.next_setting('default', False)
assert context.changes_pending == False
assert context.get_value('level') == 32
context.context_items['level'].expression = '60'
assert context.changes_pending == True
context.revert_changes()
assert context.changes_pending == False
item = context.context_items['repetitions']
context.selectors['default'].set_value(0, item, '5')
assert context.changes_pending == True
assert context.selectors['default'].get_value(0, item) == 5
|
bburan/psiexperiment
|
tests/workbench/test_context.py
|
Python
|
mit
| 2,838
|
import os
import sys
"""
Outputs empty files when Valve removes interfaces so that people upgrading don't have old bad data.
"""
def main():
list_of_files = (
("autogen", "isteamunifiedmessages.cs"),
("types/SteamUnifiedMessages", "ClientUnifiedMessageHandle.cs"),
("types/SteamClient", "SteamAPI_PostAPIResultInProcess_t.cs"),
)
for f in list_of_files:
try:
os.makedirs(f[0])
except OSError:
pass
with open(os.path.join(f[0], f[1]), "wb") as out:
with open("templates/header.txt", "r") as f:
out.write(bytes(f.read(), "utf-8"))
out.write(bytes("#endif // !DISABLESTEAMWORKS\n", "utf-8"))
out.write(bytes("\n", "utf-8"))
out.write(bytes("// This file is no longer needed. Valve has removed the functionality.\n", "utf-8"))
out.write(bytes("// We continue to generate this file to provide a small amount of backwards compatability.\n", "utf-8"))
if __name__ == "__main__":
main()
|
rlabrecque/Steamworks.NET-CodeGen
|
output_dummy_files.py
|
Python
|
mit
| 1,047
|
from zope.interface import Interface, Attribute
class ICachedItemMapper(Interface):
"""Manage attribute mappings between ICachableItem and ICachedItem
For simple maps that contain only strings and integers, the map is simply a key/value
pair of the mapped items. If the ICachableItem contains complex attributes, such
as a date, then they should be objects adaptable to IManagedCachedItemMapperAttribute
"""
mapper = Attribute("Dictionary map of ICachedItem attribute names to equivalent ICachableItem.attribute keys.")
def key():
"""Returns string identifier of ICachedItem attribute that represents unique item entries (e.g. primary key field name)"""
def factory():
"""Returns instance of ICachedItem with unassigned attributes"""
def get(ICachableItem):
"""Returns ICachedItem representing ICachableItem"""
def check(ICachableItem):
"""True is returned if ICachableItem can be mapped into a ICachedItem"""
class IManagedCachedItemMapperAttributeKeyWrapper(Interface):
"""Key name wrapper for a managed attribute
We use this so that we can apply an interface onto what would normally be
a plain Python string. We wrap that string within this interface so that we
can use it for adapter lookups.
"""
def __call__():
"""Returns the key name string of a managed attribute"""
class IManagedCachedItemMapperAttribute(Interface):
"""An attribute whose value needs to be managed before assignment to a ICachedItem (i.e. a date field)"""
def manage(value):
"""Returns a cachable attribute value"""
class ICachableItem(Interface):
"""An item who's information can be cached."""
attributes = Attribute("Dictionary map of ICachableItem attributes and related values")
key = Attribute("String that identifies the item's unique key attribute whose value will be returned by getId()")
def getId():
"""Return item unique identifier attribute value"""
def validate():
"""Check item's validity"""
class ICachableSource(Interface):
"""A source of data that can be cached as ICachableItem in a ICacheArea."""
def key():
"""Returns string identifier key that marks unique item entries (e.g. primary key field name)"""
def items():
"""Returns an iterable of available ICachableItem in the ICachableSource"""
def getById(Id):
"""Returns ICachableItem that matches Id or None if not found"""
def first():
"""Returns the first ICachableItem available in the ICachableSource or None"""
class ICachedItem(Interface):
"""A cached item."""
def getId():
"""Return item unique identifier"""
def __eq__(instance):
"""Returns True if current object should be considered equivalent to instance"""
def __ne__(instance):
"""Returns True if current object should not be considered equivalent to instance"""
class IAgeableCachedItem(ICachedItem):
"""A cached item that has an age"""
def birth():
"""Python datetime of cached item's creation"""
def age():
"""Python timedelta of cached item's current age"""
def expiration():
"""Python datetime of when cached item should be considered invalid"""
def expiration_age():
"""Python timedelta of when cached item should be considered invalid"""
def expired():
"""True indicates the cached item is expired"""
class ICacheArea(Interface):
"""An area where cached information can be stored persistently."""
def get(ICachableItem):
"""Returns current ICachedItem for ICachableItem or None if not cached"""
def isDirty(ICachableItem):
"""True if cached information requires update for ICachableItem"""
def cache(ICachableItem):
"""Updates caches area with latest item information returning
ICachedItem if cache updates were required.
Issues ICacheObjectCreatedEvent, and ICacheObjectModifiedEvent for
ICacheArea/ICachableItem combo.
"""
def import_source(ICachableSource):
"""Updates cache area and returns number of items updated with all
available entries in ICachableSource
"""
def reset():
"""Deletes all entries in the cache area"""
def initialize():
"""Instantiates the cache area to be ready for updates"""
class ITransactionalCacheArea(ICacheArea):
"""A Cache area with tranaction capabilities"""
def commit():
"""Commits changes for transaction capable ICacheAreas"""
def rollback():
"""Rollback changes for transaction capable ICacheAreas"""
class ITrimmableCacheArea(ICacheArea):
"""An area whose contents can be trimmed
This should be used in cases where a cache area's contents should be
updated to match a source's contents exactly on successive calls
"""
def trim(source):
"""Similar to import_source, except any items found in cache that are
not found in the source will be removed from the cache.
Args:
source: either ICachableSource or a iterable of
ICachableItem instances. All entries in the cache
area not found in the source will be removed from the area.
Returns:
tuple whose first entry is the number of items added/updated in
the cache and whose second entry is how many items were removed
from the cache.
"""
class ILocatableCacheArea(ICacheArea):
"""
Same as ICacheArea except zope.location.ILocation must be provided by
ICachableItem parameters for method calls. This type of cache will store
items in a hierarchy (e.g. children have parents).
"""
|
davisd50/sparc.cache
|
sparc/cache/interfaces.py
|
Python
|
mit
| 5,906
|
# Quiz#5
# Eloy Sánchez
# Instructions: grade average
print ("Escriba el nombre del alumno")
input ("nombre")
print ("Escriba las notas del alumno")
nota1 = float(input('Nota1 '))
nota2 = float(input('Nota2 '))
nota3 = float(input('Nota3 '))
nota4 = float(input('Nota4 '))
nota5 = float(input('Nota5 '))
promedio = float (nota1+nota2+nota3+nota4+nota5)/5
print ("El promedio es de : " + str (promedio))
if promedio >= 71:
print (" El estudiante aprobo la materia ")
else:
print (" El estudiante reprobo la materia ")
archivo = open('Juan.txt', 'w')
archivo.write("Estudiante 1 : \n\n ")
archivo.write("Nota 1 : \n ")
archivo.write("Nota 2 : \n ")
archivo.write("Nota 3 : \n ")
archivo.write("Nota 4 : \n ")
archivo.write("Nota 5 : \n ")
archivo.close()
|
Eloy2918/uip-prog3
|
Laboratorios/Semana6/Quiz#5.py
|
Python
|
mit
| 792
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
from pymongo import MongoClient
class OSMDataImporter(object):
'''
OSMDataImporter.
'''
def __init__(self, db_name='osm_data_import', db_collection_name='jakarta'):
self.client = MongoClient()
self.db = self.client[db_name]
self.collection = self.db[db_collection_name]
def import_data(self, filename):
with open(filename) as data:
data = json.load(data)
for item in data:
self.collection.insert(item)
|
joashxu/JakartaOSMData
|
osm_dataimporter.py
|
Python
|
mit
| 559
|
# You are given an array of integers prices, for which the i-th element is the price of a given stock on day i; and a non-negative integer fee representing a transaction fee.
# You may complete as many transactions as you like, but you need to pay the transaction fee for each transaction. You may not buy more than 1 share of a stock at a time (ie. you must sell the stock share before you buy again.)
# Return the maximum profit you can make.
# Example 1:
# Input: prices = [1, 3, 2, 8, 4, 9], fee = 2
# Output: 8
# Explanation: The maximum profit can be achieved by:
# Buying at prices[0] = 1
# Selling at prices[3] = 8
# Buying at prices[4] = 4
# Selling at prices[5] = 9
# The total profit is ((8 - 1) - 2) + ((9 - 4) - 2) = 8.
# Note:
# 0 < prices.length <= 50000.
# 0 < prices[i] < 50000.
# 0 <= fee < 50000.
## DP
# This problem is just like the other stock problems.
# On a given day, we can do 1 of 4 things:
# buy stock
# hold stock
# do nothing with empty portfolio
# sell stock
# We keep 4 arrays, one entry per day, recording the max profit on that day if we perform the given operation.
class Solution(object):
def maxProfit(self, prices, fee):
"""
:type prices: List[int]
:type fee: int
:rtype: int
"""
if len(prices) < 2:
return 0
# DP
buy = [0 for _ in range(len(prices))]
sell = [0 for _ in range(len(prices))]
# not sell
hold = [0 for _ in range(len(prices))]
# not buy after sell
skip = [0 for _ in range(len(prices))]
# IMP
# init
buy[0] = - prices[0]
hold[0] = - prices[0]
for i in range(1, len(prices)):
buy[i] = max(skip[i-1], sell[i-1]) - prices[i]
hold[i] = max(hold[i-1], buy[i-1])
skip[i] = max(skip[i-1], sell[i-1])
sell[i] = max(buy[i-1], hold[i-1]) + prices[i] - fee
return max(buy[-1], hold[-1], skip[-1], sell[-1],0)
# But we really do not need that many DP arrays
class Solution(object):
def maxProfit(self, prices, fee):
# only consider the buy and sell
N = len(prices)
if not N:
return 0
buy = [0] * N
sell = [0] * N
buy[0] = -prices[0]
for i in range(1, N):
buy[i] = max(buy[i - 1], sell[i - 1] - prices[i])
sell[i] = max(sell[i - 1], buy[i - 1] + prices[i] - fee)
return max(sell[N - 1], 0)
# Since only the previous state feeds into the current state, we can drop the arrays and keep O(1) space
class Solution(object):
def maxProfit(self, prices, fee):
# only consider the buy and sell
N = len(prices)
if not N:
return 0
sell = 0
buy = -prices[0]
for i in range(1, N):
buy = max(buy, sell - prices[i])
sell = max(sell, buy + prices[i] - fee)
return max(sell, 0)
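# Quick sanity check of the O(1)-space solution above, using the example from the
# problem statement (an illustrative addition): prices = [1, 3, 2, 8, 4, 9], fee = 2 -> 8
if __name__ == '__main__':
    assert Solution().maxProfit([1, 3, 2, 8, 4, 9], 2) == 8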
# 122. Best Time to Buy and Sell Stock II
# which is really the same as the 714
class Solution(object):
def maxProfit(self, prices):
# only consider the buy and sell
N = len(prices)
if not N:
return 0
sell = 0
buy = -prices[0]
for i in range(1, N):
buy = max(buy, sell - prices[i])
sell = max(sell, buy + prices[i])
return max(sell, 0)
# 309. Best Time to Buy and Sell Stock with Cooldown
# buy[i] = max(buy[i-1], sell[i-2] - prices[i])
# sell[i] = max(sell[i-1], buy[i-1] + prices[i])
class Solution(object):
def maxProfit(self, prices):
"""
:type prices: List[int]
:rtype: int
"""
if not prices or len(prices) <= 1:
return 0
# buy[i] = max(buy[i-1], sell[i-2] - prices[i])
# sell[i] = max(sell[i-1], buy[i-1] + prices[i])
b0 = -prices[0]
b1 = b0
s0 = 0
s1 = 0
s2 = 0
for i in range(1,len(prices)):
b1 = max(b0, s0 - prices[i])
s2 = max(s1, b0 + prices[i])
b0 = b1
s0 = s1
s1 = s2
return s2
class Solution(object):
def maxProfit(self, prices):
if len(prices) < 2:
return 0
sell, buy, prev_sell, prev_buy = 0, -prices[0], 0, 0
for price in prices:
prev_buy = buy
buy = max(prev_sell - price, prev_buy)
prev_sell = sell
sell = max(prev_buy + price, prev_sell)
return sell
|
youhusky/Facebook_Prepare
|
714. Best Time to Buy and Sell Stock with Transaction Fee.py
|
Python
|
mit
| 4,579
|
# :) :| :(
|
connorsempek/splice
|
splice/__init__.py
|
Python
|
mit
| 11
|
from functools import wraps
from bson.objectid import ObjectId
from flask import request, abort
from location.api.errors import bad_request
def ensure_json_content_type(f):
"""Ensure the 'Content-Type' header is 'application/json'.
Wraps views. If the check fails, returns a 400 bad request.
"""
@wraps(f)
def decorated(*args, **kwargs):
ctype = request.headers['Content-Type']
if ctype != 'application/json':
return bad_request("Expected 'application/json', got '%s'" % ctype)
return f(*args, **kwargs)
return decorated
def get_or_404(objects, id):
"""Get the object with id from objects, or abort with a 404."""
if not ObjectId.is_valid(id):
abort(404)
oid = ObjectId(id)
obj = objects.find_one(oid)
if not obj:
abort(404)
return obj
|
zackdever/location
|
location/api/utils.py
|
Python
|
mit
| 844
|
#! /usr/bin/python
# -*- coding: utf-8 -*-
from BeautifulSoup import BeautifulSoup
import errno
import HTMLParser
import re
import socket
import sys
import urllib
import urllib2
import db_wrapper
RESUME_ID = 0
PAGE_ID = 0
US_STATES = {
'AK': 'Alaska',
'AL': 'Alabama',
'AR': 'Arkansas',
'AS': 'American Samoa',
'AZ': 'Arizona',
'CA': 'California',
'CO': 'Colorado',
'CT': 'Connecticut',
'DC': 'District of Columbia',
'DE': 'Delaware',
'FL': 'Florida',
'GA': 'Georgia',
'GU': 'Guam',
'HI': 'Hawaii',
'IA': 'Iowa',
'ID': 'Idaho',
'IL': 'Illinois',
'IN': 'Indiana',
'KS': 'Kansas',
'KY': 'Kentucky',
'LA': 'Louisiana',
'MA': 'Massachusetts',
'MD': 'Maryland',
'ME': 'Maine',
'MI': 'Michigan',
'MN': 'Minnesota',
'MO': 'Missouri',
'MP': 'Northern Mariana Islands',
'MS': 'Mississippi',
'MT': 'Montana',
'NA': 'National',
'NC': 'North Carolina',
'ND': 'North Dakota',
'NE': 'Nebraska',
'NH': 'New Hampshire',
'NJ': 'New Jersey',
'NM': 'New Mexico',
'NV': 'Nevada',
'NY': 'New York',
'OH': 'Ohio',
'OK': 'Oklahoma',
'OR': 'Oregon',
'PA': 'Pennsylvania',
'PR': 'Puerto Rico',
'RI': 'Rhode Island',
'SC': 'South Carolina',
'SD': 'South Dakota',
'TN': 'Tennessee',
'TX': 'Texas',
'UT': 'Utah',
'VA': 'Virginia',
'VI': 'Virgin Islands',
'VT': 'Vermont',
'WA': 'Washington',
'WI': 'Wisconsin',
'WV': 'West Virginia',
'WY': 'Wyoming'
}
COUNTRIES = [
"Internet Only",
"Anguilla",
"Saint Kitts-Nevis",
"Antigua",
"Aruba",
"Dominica",
"Grenada",
"Montserrat",
"Cuba",
"Virgin Islands (US)",
"Virgin Islands (UK)",
"Martinique",
"Cayman Islands",
"Trinidad Tobago",
"Jamaica",
"Barbados",
"Haiti",
"Saint Vincent",
"Afghanistan",
"Puerto Rico",
"Albania",
"Algeria",
"American Samoa",
"Andorra",
"Angola",
"Argentina",
"Armenia",
"Australia",
"Austria",
"Azerbaijan",
"Bahrain",
"Bahamas",
"Bangladesh",
"Belarus",
"Belgium",
"Belize",
"Benin",
"Bermuda",
"Bhutan",
"Bolivia",
"Bosnia and Herzegovina",
"Bosnia",
"Botswana",
"Brazil",
"Brunei Darussalam",
"Bulgaria",
"Burkina Faso",
"Burundi",
"Cambodia",
"Cameroon",
"Canada",
"Canary Islands",
"Cape Verde",
"Caribbean Islands",
"Central African Republic",
"Chad",
"Chile",
"China",
"Christmas Island",
"Cocos Islands",
"Colombia",
"Comoros",
"Congo",
"Cook Islands",
"Costa Rica",
"Ivory Coast",
"Saint Lucia",
"Croatia",
"Cyprus",
"Czech Republic",
"Denmark",
"Djibouti",
"DR Congo",
"East Timor",
"Ecuador",
"Egypt",
"El Salvador",
"Equatorial Guinea",
"Eritrea",
"Estonia",
"Ethiopia",
"Falkland Islands",
"Faroe Islands",
"Federated States of Micronesia",
"Fiji",
"Finland",
"France",
"French Guiana",
"French Polynesia",
"Gabon",
"Gambia",
"Georgia",
"Germany",
"Ghana",
"Gibraltar",
"Greece",
"Greenland",
"Guam",
"Guadeloupe",
"Guatemala",
"Guinea",
"Guinea-Bissau",
"Guyana",
"Honduras",
"Hong Kong",
"Hungary",
"Iceland",
"India",
"Indonesia",
"Iran",
"Iraq",
"Ireland",
"Israel",
"Italy",
"Japan",
"Jordan",
"Kazakhstan",
"Kenya",
"Kiribati",
"Kuwait",
"Kyrgyzstan",
"Laos",
"Latvia",
"Lebanon",
"Lesotho",
"Liberia",
"Libya",
"Liechtenstein",
"Lithuania",
"Luxembourg",
"Macau",
"Macedonia",
"Madagascar",
"Malawi",
"Malaysia",
"Maldives",
"Mali",
"Malta",
"Marshall Islands",
"Mauritania",
"Mauritius",
"Mayotte",
"Mexico",
"Moldova",
"Monaco",
"Mongolia",
"Montenegro",
"Morocco",
"Mozambique",
"Myanmar (Burma)",
"Namibia",
"Nauru",
"Nepal",
"Netherlands",
"Netherlands Antilles",
"New Caledonia",
"New Zealand",
"Nicaragua",
"Niger",
"Nigeria",
"Norfolk Island",
"Northern Mariana Islands",
"North Korea",
"Norway",
"Oman",
"Pakistan",
"Palau",
"Palestine",
"Panama",
"Papua New Guinea",
"Paraguay",
"Peru",
"Philippines",
"Poland",
"Portugal",
"Qatar",
"Reunion",
"Romania",
"Russia",
"Rwanda",
"Samoa",
"San Marino",
"Sao Tome-Principe",
"Saudi Arabia",
"Senegal",
"Serbia",
"Seychelles",
"Sierra Leone",
"Singapore",
"Slovakia",
"Slovenia",
"Solomon Islands",
"Somalia",
"South Africa",
"South Korea",
"Spain",
"Sri Lanka",
"St. Helena",
"St. Pierre-Miquelon",
"Sudan",
"Suriname",
"Swaziland",
"Sweden",
"Switzerland",
"Syria",
"Taiwan",
"Tajikistan",
"Tanzania",
"Thailand",
"Togo",
"Tonga",
"Tunisia",
"Turkey",
"Tuvalu",
"Uganda",
"Ukraine",
"United Arab Emirates",
"United Kingdom",
"United States",
"Uruguay",
"Uzbekistan",
"Vanuatu",
"Vatican City",
"Venezuela",
"Vietnam",
"Wallis-Futuna Islands",
"Western Sahara",
"Yemen",
"Zambia",
"Zimbabwe"
]
def get_page(url):
try:
soup = BeautifulSoup(get_data(url))
except:
return None
return soup
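# get_data returns one of three things: the final URL when the response looks
# like a raw audio stream or an ASX playlist, the raw "[Reference]" playlist
# text so the caller can extract every stream URL, or otherwise the page body.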
def get_data(url, limit=False):
try:
temp = urllib2.urlopen(url)
except urllib2.HTTPError as e:
if e.code == 302:
return e.geturl()
else:
raise e
if limit:
data = temp.read(8192)
        # If we read the full limit, we are looking at the raw stream itself,
        # so return the URL rather than the data.
if len(data) == 8192:
return url
        # If there are multiple lines in the data we just got, we should return
        # the URL; we probably got a metadata file containing the stream URL.
if data.count('\n') > 2:
# If it's an ASF containing only multiple stream for the same
# radio, it will be handled.
if "[Reference]" in data:
return data
# Other than that, we don't take care of it here.
return temp.geturl()
elif '<asxversion="3.0"' in data.lower().replace(' ', ''):
return url
return data
return temp.read()
def find_nb_page(soup):
paging_link = soup.findAll('a', {'class':"paging"})
max = 1
for link in paging_link:
nb = int(link.text)
if nb > max:
max = nb
return max
def scrape_da_page(link):
print "Scraping: %s" % paging_link
soup = get_page(link)
if soup is None:
return
sections = soup.findAll("tr", {'align':"left", "bgcolor":"#FFFFFF"})
nb_sections = 0
for section in sections:
print "Doing sections %d/%d" % (nb_sections, len(sections))
nb_sections += 1
name = section.findAll('td')[1].findAll('a')[0].text
try:
name = unicode(name).encode('utf-8')
except UnicodeDecodeError:
name = unicode(name.decode('utf-8')).encode('utf-8')
stream = section.findAll('td')[1].findAll('a')[0]['href']
stream = "http://vtuner.com/setupapp/guide/asp/"+stream[3:]
stream = urllib.quote(stream, ":/?=&#" )
try:
stream = get_data(stream, limit=True)
except urllib2.URLError as e:
print e
continue
except urllib2.HTTPError as e:
print e
if e.code in [404, 400]:
continue
except socket.error as e:
if e.errno == errno.ECONNRESET:
print "Connection reset by peer."
continue
except:
print "No stream URL. Moving on."
print name
print stream
sys.exit(0)
if not stream:
continue
stream_list = [stream]
if "[Reference]" in stream:
stream_list = []
streams = stream.split('\n')
for s in streams[1:]:
if 'Ref' in s:
stream_list.append("http"+s.split('http')[1].strip())
location = section.findAll('td')[2].text
country = None
print "Country: %s" % location
if "State" in paging_link:
for st, state in US_STATES.iteritems():
if st in location:
country = "United States - %s" % state
location = location.replace(st, '')
elif state in location:
country = "United States - %s" % state
location = location.replace(state, '')
if country is None and "Internet Only" in location:
                state = link.split('State=')[1].split('&i')[0]
country = "United States - %s" % state
elif "Slovak Republic" in location:
country = "Slovakia"
location = location.replace("Slovak Republic", '')
elif "Micronesia"in location:
country = "Federated States of Micronesia"
location = location.replace("Micronesia", '')
elif "Brunei" in location:
country = "Brunei Darussalam"
location = location.replace("Brunei", '')
else:
for cnt in COUNTRIES:
if cnt in location:
country = cnt
location = location.replace(cnt, '')
# Some radios are misplaced in United States
if country is None and "State" in paging_link:
for cnt in COUNTRIES:
if cnt in location:
country = cnt
location = location.replace(cnt, '')
if country is None:
# This happen only one time but, still.
if "GA" in location:
country = "Georgia"
location = location.replace("Georgia", '')
if country is None:
print "No country found. Moving on."
sys.exit(0)
city = location.strip()
country = country.strip()
if city == "":
city = country
categ = []
if len(section.findAll('td')) > 3:
categ = section.findAll('td')[3].text
categ = re.split(', |/', categ)
        # We only have one piece of information about quality, so assume it is the same for all streams.
quality = section.findAll('td')[4].text
quality = html_parser.unescape(quality)
quality = quality.replace("MP3", '')
quality = unicode(quality).encode('utf-8')
try:
quality = int(non_decimal.sub('', quality))
except ValueError:
quality = 0
streams = []
for st in stream_list:
streams.append([st, quality])
# Finally, the whole radio
db_wrapper.add_radio(name.strip(), city, country, streams, categ, homepage=url.strip())
print '-'*80
print name
print stream_list
print country
print city
print categ
print quality
db_wrapper.connect()
non_decimal = re.compile(r'[^\d.]+')
html_parser = HTMLParser.HTMLParser()
soup = get_page("http://vtuner.com/setupapp/guide/asp/BrowseStations/StartPage.asp?sBrowseType=Location")
if soup is None:
print "Could not start."
sys.exit(0)
link_countries = soup.findAll('a')
link_url = []
for link_country in link_countries:
href = link_country['href']
if "BrowseStations" not in href or "Category" not in href:
continue
url = "http://vtuner.com/setupapp/guide/asp/"+link_country['href'][3:]
link_url.append(urllib.quote(url, ":/?=&#" ))
resume = 0
for link in link_url:
resume += 1
print "%d - %s" % (resume, link)
if resume < RESUME_ID:
continue
try:
soup = get_page(link)
if soup is None:
raise Exception
except:
print link
do_scrape = True
nb_page = 1
max_page = 2
if PAGE_ID != 0:
nb_page = PAGE_ID
        # So we don't fall in here again
PAGE_ID = 0
# This is seriously broken and ugly.
# We assume there will be a next page, but maybe there won't be.
max_page = nb_page + 1
while do_scrape:
paging_link = link+"&iCurrPage=%d" % nb_page
if nb_page > max_page:
do_scrape = False
continue
scrape_da_page(paging_link)
max_page = find_nb_page(get_page(paging_link))
nb_page += 1
db_wrapper.disconnect()
|
maximeh/spiderwave
|
scrappers/scrape_vtuner.py
|
Python
|
mit
| 12,090
|
import pandas as pd
import lxml.html
import re
def removeNonAscii(data):
return "".join(i for i in data if ord(i)<128)
def lxmlProcess(document):
page = lxml.html.document_fromstring(document)
page = page.cssselect('body')[0].text_content()
return " ".join(page.replace('\n', ' ').replace('\r', ' ').lower().split())
def sentenceConversion(dataframe, column, splitter):
dataframe[column] = [str(b) for b in dataframe[column]]
dataframe[column] = dataframe[column].apply(lxmlProcess)
if splitter == "None":
return dataframe.reset_index().rename(columns={'index':'id'})
else:
sentences = (' '.join(dataframe[column])).lower().split(splitter)
sentences = filter(None, sentences)
sentences = list(set(sentences))
sentences = [b.lstrip() for b in sentences]
sentences = [str(removeNonAscii(b)) for b in sentences]
sentences = [b.replace('-', ' ') for b in sentences]
return pd.DataFrame({column:sentences}).reset_index().rename(columns={'index':'id'})
|
dtsukiyama/suits
|
named-entity-recognizer/helper.py
|
Python
|
mit
| 1,052
|
def shift_people(danger, safety, is_lantern_safe, clock):
if len(danger) == 0:
return (clock, [])
min_time = None
min_chain = [ ]
if not is_lantern_safe:
for i in range(len(danger)):
for j in range(i + 1, len(danger)):
i_time = danger[i]
j_time = danger[j]
                travel_time = max(i_time, j_time)  # the pair moves at the slower person's pace
sub_danger = danger[:]
sub_danger.remove(i_time)
sub_danger.remove(j_time)
sub_safety = safety + [i_time, j_time]
print("Subs", sub_danger, sub_safety)
total_time, sub_chain = shift_people(sub_danger,
sub_safety,
not is_lantern_safe,
clock + travel_time)
print('subchain', sub_chain)
if min_time is None or total_time < min_time:
min_time = total_time
min_chain = [(danger, safety)] + sub_chain
else:
for i in range(len(safety)):
i_time = safety[i]
sub_safety = safety[:]
sub_safety.remove(i_time)
sub_danger = danger + [i_time]
print("Subs", sub_danger, sub_safety)
total_time, sub_chain = shift_people(sub_danger,
sub_safety,
not is_lantern_safe,
clock + i_time)
print('subchain', sub_chain)
            if min_time is None or total_time < min_time:
                min_time = total_time
                min_chain = [(danger, safety)] + sub_chain
return (min_time, min_chain)
danger_side = [1, 2, 5, 10]
safe_side = []
is_lantern_safe = False
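# For the classic [1, 2, 5, 10] puzzle the optimum is 17 minutes:
# 1&2 cross, 1 returns, 5&10 cross, 2 returns, 1&2 cross.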
print("Finally", shift_people(danger_side, safe_side, is_lantern_safe, 0))
|
ssangervasi/python-playground
|
riddle/reddit/bridge_riddle.py
|
Python
|
mit
| 1,593
|
#!/usr/bin/python
# -*- coding: UTF-8 -*-
import operator
import re
import csv
from unicodedata import normalize
import os
from io import open
import xml.etree.ElementTree as ET
import sys
reload(sys) # Reload does the trick!
sys.setdefaultencoding('UTF8')
import getopt
import time
import pickle
import argparse
import codecs
import math
from sys import stdin
import unicodedata
from importlib import import_module
# Import common module for shared operations
common = import_module("common")
# Accents word with n-gram solution using local context
def accentWithNgram(buffer, deaccented, padding_char, diff, N, accents,words_dictionary):
# Remove first unnecessary element
buffer.pop(0)
# Append the new one
buffer.append(deaccented)
# Create local context
prevText = padding_char.join(buffer[0:diff])
follText = padding_char.join(buffer[diff+1:N])
word = buffer[diff]
# Invoke the shared NGram accent method
word = common.ngramAccent(word,words_dictionary, diff, accents,prevText, follText, padding_char)
return word
def main():
# Parse command line arguments
parser = argparse.ArgumentParser()
parser.add_argument("-n", "--ngram", help="N value for N-gram, such as 1,2,3,4,5..",type=int, default=2)
parser.add_argument('--timer', dest='timer', help="Timer enabled", action='store_true')
parser.add_argument('-d', '--dict', dest='dict', help="Dictionary file name", default="../Resources/HU_2gram_dict")
parser.add_argument('-s', '--dsize', dest='dsize', help="Dictionary size in lines")
parser.add_argument('-a', '--accents', type=str, default='áaéeíióoöoőoúuüuűu',
help='accent mapping')
parser.set_defaults(feature=False)
args = parser.parse_args()
timer_enabled = args.timer
accents = args.accents
dictionary_size = int(args.dsize)
# N-gram parameter
N = (args.ngram*2)+1
diff = args.ngram
# Start timer if enabled
if (timer_enabled):
start = time.time()
# Get the dictionary for the ngrams
dictionary_filename = args.dict
# Declare the dictionary
words_dictionary = {}
#dictionary_temp = list(csv.reader(open(dictionary_filename,'r',encoding='utf8'), delimiter='\t'))
# Build dictionary
common.buildDict(words_dictionary, dictionary_filename, dictionary_size)
# Get the shared padding char
padding_char = common.getPaddingChar()
word_buffer = []
for i in range(0,N):
word_buffer.append("")
initCounter = 0
# read every line of the input
for l in stdin:
#TEXT = l.translate(None, '()?,.:{}[]')
TEXT = l.decode("utf-8")
TEXT = TEXT.rstrip('\n') # strip newline from the end of the line
if (common.isAccentable(TEXT, accents)):
TEXT = common.replace(TEXT)
deaccented = common.remove_accents(unicode(TEXT))
if (initCounter < diff):
initCounter += 1
word_buffer.pop(0)
word_buffer.append(deaccented)
else:
# Invoke the shared NGram accent method
word = accentWithNgram(word_buffer, deaccented, padding_char, diff,N,
accents, words_dictionary)
print (word)
# Last ngram_diff iterations
for i in range(0,diff):
#Invoke the shared NGram accent method
word = accentWithNgram(word_buffer, "", padding_char, diff,N,
accents, words_dictionary)
print (word)
# Print timer info
if (timer_enabled):
end = time.time()
print ("Finished in " + str(end-start)+" seconds.")
if __name__ == '__main__':
main()
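# Example invocation (hypothetical file paths), reading text on stdin and
# writing accented text to stdout:
#   cat input.txt | python accent_ngram.py -n 2 -d ../Resources/HU_2gram_dict -s 100000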
|
jozsinakhivnak/diacriticrestoration
|
accent_ngram.py
|
Python
|
mit
| 3,832
|
"""
Author: Marusa Zerjal, 2019 - 08 - 20
Compare two sets of components for a given association: one where all stellar radial velocities are known, and one where
some of their radial velocities are broken.
"""
from astropy.table import Table
todo=True
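# A minimal sketch of the intended comparison, not part of the original stub.
# It assumes both fits are stored as astropy Tables sharing a component id
# column; the file paths and column name below are hypothetical.
def compare_component_tables(path_good_rv, path_missing_rv, key='comp_id'):
    """Print each component's parameters from both fits side by side."""
    good = Table.read(path_good_rv)
    broken = Table.read(path_missing_rv)
    shared = [c for c in good.colnames if c in broken.colnames and c != key]
    for row in good:
        match = broken[broken[key] == row[key]]
        if len(match):
            print([(c, row[c], match[0][c]) for c in shared])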
|
mikeireland/chronostar
|
projects/fit_comps_stars_with_missing_RV/compare_components.py
|
Python
|
mit
| 255
|
from talent_match import db
class Category(db.Model):
__tablename__ = 'category'
id = db.Column(
db.INTEGER, primary_key=True, autoincrement=True, nullable=False, index=True)
name = db.Column(db.String(80), nullable=False, index=True, unique=True)
description = db.Column(db.String(256), nullable=True)
# Project 3: Steve - adding relationships and navigation
skillList = db.relationship('Skill', lazy='joined')
# Project 4: Steve/Nick - adding deleted flag.
deleted = db.Column(db.Boolean, default=False, nullable=False)
def __init__(self, name, description):
self.name = name
self.description = description
def __repr__(self):
if (self.skillList) and (len(self.skillList) > 0):
return 'Category: ' + self.name + ', description=' + self.description + ', count=' + str(len(self.skillList))
else:
return 'Category: ' + self.name + ', description=' + self.description
# Project 3: adapting the serialize method from Jordan for the category
# list, too.
@property
def serialize(self):
"""Return object data in easily serializeable format"""
return {
'id': self.id,
'name': self.name,
'description': self.description
}
class Skill(db.Model):
__tablename__ = 'skill'
id = db.Column(
db.INTEGER, primary_key=True, autoincrement=True, nullable=False, index=True)
categoryID = db.Column(db.INTEGER, db.ForeignKey('category.id'))
name = db.Column(db.String(80), nullable=False, index=True, unique=True)
description = db.Column(db.String(256), nullable=True)
# Project 3: Steve - adding relationships and navigation
category = db.relationship(
'Category', backref='skill', uselist=False, lazy='joined')
# Project 4: Steve/Nick - adding deleted flag.
deleted = db.Column(db.Boolean, default=False, nullable=False)
def __init__(self, categoryID, name, description):
self.categoryID = categoryID
self.name = name
self.description = description
def __repr__(self):
if (self.category):
return 'Skill: ' + self.name + ', description=' + self.description + ', category=' + self.category.name
else:
return 'Skill: ' + self.name + ', description=' + self.description
@property
def serialize(self):
"""Return object data in easily serializeable format"""
return {
'id': self.id,
'categoryID': self.categoryID,
'name': self.name,
'description': self.description
}
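# Hypothetical usage sketch (not part of the original module): the serialize
# properties make these models easy to return as JSON from a Flask view, e.g.
#   jsonify(categories=[c.serialize for c in Category.query.filter_by(deleted=False)])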
|
jordan-wright/talent-match
|
talent_match/models/talentInfo.py
|
Python
|
mit
| 2,643
|
#!/usr/bin/env python2
# -*- coding: utf-8-*-
import os
import wave
import json
import tempfile
import logging
import urllib
import urlparse
import re
import subprocess
from abc import ABCMeta, abstractmethod
import requests
import yaml
import jasperpath
import diagnose
import vocabcompiler
class AbstractSTTEngine(object):
"""
Generic parent class for all STT engines
"""
__metaclass__ = ABCMeta
VOCABULARY_TYPE = None
@classmethod
def get_config(cls):
return {}
@classmethod
def get_instance(cls, vocabulary_name, phrases):
config = cls.get_config()
if cls.VOCABULARY_TYPE:
vocabulary = cls.VOCABULARY_TYPE(vocabulary_name,
path=jasperpath.config(
'vocabularies'))
if not vocabulary.matches_phrases(phrases):
vocabulary.compile(phrases)
config['vocabulary'] = vocabulary
instance = cls(**config)
return instance
@classmethod
def get_passive_instance(cls):
phrases = vocabcompiler.get_keyword_phrases()
return cls.get_instance('keyword', phrases)
@classmethod
def get_active_instance(cls):
phrases = vocabcompiler.get_all_phrases()
return cls.get_instance('default', phrases)
@classmethod
@abstractmethod
def is_available(cls):
return True
@abstractmethod
def transcribe(self, fp):
pass
class PocketSphinxSTT(AbstractSTTEngine):
"""
The default Speech-to-Text implementation which relies on PocketSphinx.
"""
SLUG = 'sphinx'
VOCABULARY_TYPE = vocabcompiler.PocketsphinxVocabulary
def __init__(self, vocabulary, hmm_dir="/usr/local/share/" +
"pocketsphinx/model/hmm/en_US/hub4wsj_sc_8k"):
"""
Initiates the pocketsphinx instance.
Arguments:
vocabulary -- a PocketsphinxVocabulary instance
hmm_dir -- the path of the Hidden Markov Model (HMM)
"""
self._logger = logging.getLogger(__name__)
# quirky bug where first import doesn't work
try:
import pocketsphinx as ps
except:
import pocketsphinx as ps
with tempfile.NamedTemporaryFile(prefix='psdecoder_',
suffix='.log', delete=False) as f:
self._logfile = f.name
self._logger.debug("Initializing PocketSphinx Decoder with hmm_dir " +
"'%s'", hmm_dir)
# Perform some checks on the hmm_dir so that we can display more
        # meaningful error messages if necessary
if not os.path.exists(hmm_dir):
msg = ("hmm_dir '%s' does not exist! Please make sure that you " +
"have set the correct hmm_dir in your profile.") % hmm_dir
self._logger.error(msg)
raise RuntimeError(msg)
# Lets check if all required files are there. Refer to:
# http://cmusphinx.sourceforge.net/wiki/acousticmodelformat
# for details
missing_hmm_files = []
for fname in ('mdef', 'feat.params', 'means', 'noisedict',
'transition_matrices', 'variances'):
if not os.path.exists(os.path.join(hmm_dir, fname)):
missing_hmm_files.append(fname)
mixweights = os.path.exists(os.path.join(hmm_dir, 'mixture_weights'))
sendump = os.path.exists(os.path.join(hmm_dir, 'sendump'))
if not mixweights and not sendump:
# We only need mixture_weights OR sendump
missing_hmm_files.append('mixture_weights or sendump')
if missing_hmm_files:
self._logger.warning("hmm_dir '%s' is missing files: %s. Please " +
"make sure that you have set the correct " +
"hmm_dir in your profile.",
hmm_dir, ', '.join(missing_hmm_files))
self._decoder = ps.Decoder(hmm=hmm_dir, logfn=self._logfile,
**vocabulary.decoder_kwargs)
def __del__(self):
os.remove(self._logfile)
@classmethod
def get_config(cls):
# FIXME: Replace this as soon as we have a config module
config = {}
# HMM dir
# Try to get hmm_dir from config
profile_path = jasperpath.config('profile.yml')
if os.path.exists(profile_path):
with open(profile_path, 'r') as f:
profile = yaml.safe_load(f)
try:
config['hmm_dir'] = profile['pocketsphinx']['hmm_dir']
except KeyError:
pass
return config
def transcribe(self, fp):
"""
Performs STT, transcribing an audio file and returning the result.
Arguments:
fp -- a file object containing audio data
"""
fp.seek(44)
# FIXME: Can't use the Decoder.decode_raw() here, because
# pocketsphinx segfaults with tempfile.SpooledTemporaryFile()
data = fp.read()
self._decoder.start_utt()
self._decoder.process_raw(data, False, True)
self._decoder.end_utt()
result = self._decoder.get_hyp()
with open(self._logfile, 'r+') as f:
for line in f:
self._logger.debug(line.strip())
f.truncate()
transcribed = [result[0]]
self._logger.info('Transcribed: %r', transcribed)
return transcribed
@classmethod
def is_available(cls):
return diagnose.check_python_import('pocketsphinx')
class JuliusSTT(AbstractSTTEngine):
"""
A very basic Speech-to-Text engine using Julius.
"""
SLUG = 'julius'
VOCABULARY_TYPE = vocabcompiler.JuliusVocabulary
def __init__(self, vocabulary=None, hmmdefs="/usr/share/voxforge/julius/" +
"acoustic_model_files/hmmdefs", tiedlist="/usr/share/" +
"voxforge/julius/acoustic_model_files/tiedlist"):
self._logger = logging.getLogger(__name__)
self._vocabulary = vocabulary
self._hmmdefs = hmmdefs
self._tiedlist = tiedlist
self._pattern = re.compile(r'sentence(\d+): <s> (.+) </s>')
        # Initial test run: we run this command once to log errors/warnings
cmd = ['julius',
'-input', 'stdin',
'-dfa', self._vocabulary.dfa_file,
'-v', self._vocabulary.dict_file,
'-h', self._hmmdefs,
'-hlist', self._tiedlist,
'-forcedict']
cmd = [str(x) for x in cmd]
self._logger.debug('Executing: %r', cmd)
with tempfile.SpooledTemporaryFile() as out_f:
with tempfile.SpooledTemporaryFile() as f:
with tempfile.SpooledTemporaryFile() as err_f:
subprocess.call(cmd, stdin=f, stdout=out_f, stderr=err_f)
out_f.seek(0)
for line in out_f.read().splitlines():
line = line.strip()
if len(line) > 7 and line[:7].upper() == 'ERROR: ':
if not line[7:].startswith('adin_'):
self._logger.error(line[7:])
elif len(line) > 9 and line[:9].upper() == 'WARNING: ':
self._logger.warning(line[9:])
elif len(line) > 6 and line[:6].upper() == 'STAT: ':
self._logger.debug(line[6:])
@classmethod
def get_config(cls):
# FIXME: Replace this as soon as we have a config module
config = {}
        # Try to get the hmmdefs/tiedlist paths from config
profile_path = jasperpath.config('profile.yml')
if os.path.exists(profile_path):
with open(profile_path, 'r') as f:
profile = yaml.safe_load(f)
if 'julius' in profile:
if 'hmmdefs' in profile['julius']:
config['hmmdefs'] = profile['julius']['hmmdefs']
if 'tiedlist' in profile['julius']:
config['tiedlist'] = profile['julius']['tiedlist']
return config
def transcribe(self, fp, mode=None):
cmd = ['julius',
'-quiet',
'-nolog',
'-input', 'stdin',
'-dfa', self._vocabulary.dfa_file,
'-v', self._vocabulary.dict_file,
'-h', self._hmmdefs,
'-hlist', self._tiedlist,
'-forcedict']
cmd = [str(x) for x in cmd]
self._logger.debug('Executing: %r', cmd)
with tempfile.SpooledTemporaryFile() as out_f:
with tempfile.SpooledTemporaryFile() as err_f:
subprocess.call(cmd, stdin=fp, stdout=out_f, stderr=err_f)
out_f.seek(0)
results = [(int(i), text) for i, text in
self._pattern.findall(out_f.read())]
transcribed = [text for i, text in
sorted(results, key=lambda x: x[0])
if text]
if not transcribed:
transcribed.append('')
self._logger.info('Transcribed: %r', transcribed)
return transcribed
@classmethod
def is_available(cls):
return diagnose.check_executable('julius')
class GoogleSTT(AbstractSTTEngine):
"""
Speech-To-Text implementation which relies on the Google Speech API.
This implementation requires a Google API key to be present in profile.yml
To obtain an API key:
1. Join the Chromium Dev group:
https://groups.google.com/a/chromium.org/forum/?fromgroups#!forum/chromium-dev
2. Create a project through the Google Developers console:
https://console.developers.google.com/project
3. Select your project. In the sidebar, navigate to "APIs & Auth." Activate
the Speech API.
4. Under "APIs & Auth," navigate to "Credentials." Create a new key for
public API access.
5. Add your credentials to your profile.yml. Add an entry to the 'keys'
section using the key name 'GOOGLE_SPEECH.' Sample configuration:
6. Set the value of the 'stt_engine' key in your profile.yml to 'google'
Excerpt from sample profile.yml:
...
timezone: US/Pacific
stt_engine: google
keys:
GOOGLE_SPEECH: $YOUR_KEY_HERE
"""
SLUG = 'google'
def __init__(self, api_key=None, language='en-us'):
# FIXME: get init args from config
"""
Arguments:
api_key - the public api key which allows access to Google APIs
"""
self._logger = logging.getLogger(__name__)
self._request_url = None
self._language = None
self._api_key = None
self._http = requests.Session()
self.language = language
self.api_key = api_key
@property
def request_url(self):
return self._request_url
@property
def language(self):
return self._language
@language.setter
def language(self, value):
self._language = value
self._regenerate_request_url()
@property
def api_key(self):
return self._api_key
@api_key.setter
def api_key(self, value):
self._api_key = value
self._regenerate_request_url()
def _regenerate_request_url(self):
if self.api_key and self.language:
query = urllib.urlencode({'output': 'json',
'client': 'chromium',
'key': self.api_key,
                                      'lang': self.language,
'maxresults': 6,
'pfilter': 2})
self._request_url = urlparse.urlunparse(
('https', 'www.google.com', '/speech-api/v2/recognize', '',
query, ''))
else:
self._request_url = None
@classmethod
def get_config(cls):
# FIXME: Replace this as soon as we have a config module
config = {}
        # Try to get the Google Speech API key from config
profile_path = jasperpath.config('profile.yml')
if os.path.exists(profile_path):
with open(profile_path, 'r') as f:
profile = yaml.safe_load(f)
if 'keys' in profile and 'GOOGLE_SPEECH' in profile['keys']:
config['api_key'] = profile['keys']['GOOGLE_SPEECH']
return config
def transcribe(self, fp):
"""
Performs STT via the Google Speech API, transcribing an audio file and
returning an English string.
Arguments:
audio_file_path -- the path to the .wav file to be transcribed
"""
if not self.api_key:
self._logger.critical('API key missing, transcription request ' +
'aborted.')
return []
elif not self.language:
self._logger.critical('Language info missing, transcription ' +
'request aborted.')
return []
wav = wave.open(fp, 'rb')
frame_rate = wav.getframerate()
wav.close()
data = fp.read()
headers = {'content-type': 'audio/l16; rate=%s' % frame_rate}
r = self._http.post(self.request_url, data=data, headers=headers)
try:
r.raise_for_status()
except requests.exceptions.HTTPError:
self._logger.critical('Request failed with http status %d',
r.status_code)
if r.status_code == requests.codes['forbidden']:
self._logger.warning('Status 403 is probably caused by an ' +
'invalid Google API key.')
return []
r.encoding = 'utf-8'
try:
# We cannot simply use r.json() because Google sends invalid json
            # (i.e. multiple json objects, separated by newlines. We only want
# the last one).
response = json.loads(list(r.text.strip().split('\n', 1))[-1])
if len(response['result']) == 0:
# Response result is empty
raise ValueError('Nothing has been transcribed.')
results = [alt['transcript'] for alt
in response['result'][0]['alternative']]
except ValueError as e:
self._logger.warning('Empty response: %s', e.args[0])
results = []
except (KeyError, IndexError):
self._logger.warning('Cannot parse response.', exc_info=True)
results = []
else:
# Convert all results to uppercase
results = tuple(result.upper() for result in results)
self._logger.info('Transcribed: %r', results)
return results
@classmethod
def is_available(cls):
return diagnose.check_network_connection()
class AttSTT(AbstractSTTEngine):
"""
Speech-To-Text implementation which relies on the AT&T Speech API.
This implementation requires an AT&T app_key/app_secret to be present in
profile.yml. Please sign up at http://developer.att.com/apis/speech and
create a new app. You can then take the app_key/app_secret and put it into
your profile.yml:
...
stt_engine: att
att-stt:
app_key: 4xxzd6abcdefghijklmnopqrstuvwxyz
app_secret: 6o5jgiabcdefghijklmnopqrstuvwxyz
"""
SLUG = "att"
def __init__(self, app_key, app_secret):
self._logger = logging.getLogger(__name__)
self._token = None
self.app_key = app_key
self.app_secret = app_secret
@classmethod
def get_config(cls):
# FIXME: Replace this as soon as we have a config module
config = {}
# Try to get AT&T app_key/app_secret from config
profile_path = jasperpath.config('profile.yml')
if os.path.exists(profile_path):
with open(profile_path, 'r') as f:
profile = yaml.safe_load(f)
if 'att-stt' in profile:
if 'app_key' in profile['att-stt']:
config['app_key'] = profile['att-stt']['app_key']
if 'app_secret' in profile['att-stt']:
config['app_secret'] = profile['att-stt']['app_secret']
return config
@property
def token(self):
if not self._token:
headers = {'content-type': 'application/x-www-form-urlencoded',
'accept': 'application/json'}
payload = {'client_id': self.app_key,
'client_secret': self.app_secret,
'scope': 'SPEECH',
'grant_type': 'client_credentials'}
r = requests.post('https://api.att.com/oauth/v4/token',
data=payload,
headers=headers)
self._token = r.json()['access_token']
return self._token
def transcribe(self, fp):
data = fp.read()
r = self._get_response(data)
if r.status_code == requests.codes['unauthorized']:
# Request token invalid, retry once with a new token
self._logger.warning('OAuth access token invalid, generating a ' +
'new one and retrying...')
self._token = None
r = self._get_response(data)
try:
r.raise_for_status()
except requests.exceptions.HTTPError:
self._logger.critical('Request failed with response: %r',
r.text,
exc_info=True)
return []
except requests.exceptions.RequestException:
self._logger.critical('Request failed.', exc_info=True)
return []
else:
try:
recognition = r.json()['Recognition']
if recognition['Status'] != 'OK':
raise ValueError(recognition['Status'])
results = [(x['Hypothesis'], x['Confidence'])
for x in recognition['NBest']]
except ValueError as e:
self._logger.debug('Recognition failed with status: %s',
e.args[0])
return []
except KeyError:
self._logger.critical('Cannot parse response.',
exc_info=True)
return []
else:
transcribed = [x[0].upper() for x in sorted(results,
key=lambda x: x[1],
reverse=True)]
self._logger.info('Transcribed: %r', transcribed)
return transcribed
def _get_response(self, data):
headers = {'authorization': 'Bearer %s' % self.token,
'accept': 'application/json',
'content-type': 'audio/wav'}
return requests.post('https://api.att.com/speech/v3/speechToText',
data=data,
headers=headers)
@classmethod
def is_available(cls):
return diagnose.check_network_connection()
class WitAiSTT(AbstractSTTEngine):
"""
Speech-To-Text implementation which relies on the Wit.ai Speech API.
This implementation requires an Wit.ai Access Token to be present in
profile.yml. Please sign up at https://wit.ai and copy your instance
token, which can be found under Settings in the Wit console to your
profile.yml:
...
stt_engine: witai
witai-stt:
access_token: ERJKGE86SOMERANDOMTOKEN23471AB
"""
SLUG = "witai"
def __init__(self, access_token):
self._logger = logging.getLogger(__name__)
self.token = access_token
@classmethod
def get_config(cls):
# FIXME: Replace this as soon as we have a config module
config = {}
# Try to get wit.ai Auth token from config
profile_path = jasperpath.config('profile.yml')
if os.path.exists(profile_path):
with open(profile_path, 'r') as f:
profile = yaml.safe_load(f)
if 'witai-stt' in profile:
if 'access_token' in profile['witai-stt']:
config['access_token'] = \
profile['witai-stt']['access_token']
return config
@property
def token(self):
return self._token
@token.setter
def token(self, value):
self._token = value
self._headers = {'Authorization': 'Bearer %s' % self.token,
'accept': 'application/json',
'Content-Type': 'audio/wav'}
@property
def headers(self):
return self._headers
def transcribe(self, fp):
data = fp.read()
r = requests.post('https://api.wit.ai/speech?v=20150101',
data=data,
headers=self.headers)
try:
r.raise_for_status()
text = r.json()['_text']
except requests.exceptions.HTTPError:
self._logger.critical('Request failed with response: %r',
r.text,
exc_info=True)
return []
except requests.exceptions.RequestException:
self._logger.critical('Request failed.', exc_info=True)
return []
except ValueError as e:
self._logger.critical('Cannot parse response: %s',
e.args[0])
return []
except KeyError:
self._logger.critical('Cannot parse response.',
exc_info=True)
return []
else:
transcribed = []
if text:
transcribed.append(text.upper())
self._logger.info('Transcribed: %r', transcribed)
return transcribed
@classmethod
def is_available(cls):
return diagnose.check_network_connection()
def get_engine_by_slug(slug=None):
"""
Returns:
An STT Engine implementation available on the current platform
Raises:
        ValueError if no STT engine implementation is supported on this platform
"""
if not slug or type(slug) is not str:
raise TypeError("Invalid slug '%s'", slug)
selected_engines = filter(lambda engine: hasattr(engine, "SLUG") and
engine.SLUG == slug, get_engines())
if len(selected_engines) == 0:
raise ValueError("No STT engine found for slug '%s'" % slug)
else:
if len(selected_engines) > 1:
print(("WARNING: Multiple STT engines found for slug '%s'. " +
"This is most certainly a bug.") % slug)
engine = selected_engines[0]
if not engine.is_available():
raise ValueError(("STT engine '%s' is not available (due to " +
"missing dependencies, missing " +
"dependencies, etc.)") % slug)
return engine
def get_engines():
def get_subclasses(cls):
subclasses = set()
for subclass in cls.__subclasses__():
subclasses.add(subclass)
subclasses.update(get_subclasses(subclass))
return subclasses
    return [stt_engine for stt_engine in
            list(get_subclasses(AbstractSTTEngine))
            if hasattr(stt_engine, 'SLUG') and stt_engine.SLUG]
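# Hypothetical usage sketch (not part of the original module): pick an engine by
# its SLUG and transcribe a recorded WAV file; 'recording.wav' is a placeholder.
#   engine_class = get_engine_by_slug('sphinx')
#   stt_engine = engine_class.get_active_instance()
#   with open('recording.wav', 'rb') as fp:
#       print(stt_engine.transcribe(fp))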
|
JeremieSamson/jasper
|
client/stt.py
|
Python
|
mit
| 23,750
|
# ------------------------------------
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
# ------------------------------------
from ._models import DecryptResult, EncryptResult, SignResult, WrapResult, VerifyResult, UnwrapResult
from ._enums import EncryptionAlgorithm, KeyWrapAlgorithm, SignatureAlgorithm
from ._client import CryptographyClient
__all__ = [
"CryptographyClient",
"DecryptResult",
"EncryptionAlgorithm",
"EncryptResult",
"KeyWrapAlgorithm",
"SignatureAlgorithm",
"SignResult",
"WrapResult",
"VerifyResult",
"UnwrapResult",
]
|
Azure/azure-sdk-for-python
|
sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/crypto/__init__.py
|
Python
|
mit
| 608
|
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Tests for google.appengine.tools.devappserver2.dispatcher."""
import logging
import socket
import unittest
import google
import mox
from google.appengine.api import appinfo
from google.appengine.api import dispatchinfo
from google.appengine.api import request_info
from google.appengine.tools.devappserver2 import api_server
from google.appengine.tools.devappserver2 import dispatcher
from google.appengine.tools.devappserver2 import module
from google.appengine.tools.devappserver2 import scheduled_executor
# This file uses pep8 naming.
# pylint: disable=invalid-name
class ApplicationConfigurationStub(object):
def __init__(self, modules):
self.modules = modules
self.dispatch = None
class ModuleConfigurationStub(object):
def __init__(self, application, module_name, version, manual_scaling):
self.application_root = '/'
self.application = application
self.application_external_name = 'app'
self.module_name = module_name
self.major_version = version
self.version_id = '%s:%s.%s' % (module_name, version, '12345')
self.runtime = 'python27'
self.threadsafe = False
self.handlers = []
self.skip_files = []
self.normalized_libraries = []
self.env_variables = []
if manual_scaling:
self.automatic_scaling = appinfo.AutomaticScaling()
self.manual_scaling = None
else:
self.automatic_scaling = None
self.manual_scaling = appinfo.ManualScaling(instances=1)
self.inbound_services = None
def add_change_callback(self, fn):
pass
class DispatchConfigurationStub(object):
def __init__(self):
self.dispatch = []
MODULE_CONFIGURATIONS = [
ModuleConfigurationStub(application='app',
module_name='default',
version='version',
manual_scaling=False),
ModuleConfigurationStub(application='app',
module_name='other',
version='version2',
manual_scaling=True),
ModuleConfigurationStub(application='app',
module_name='another',
version='version3',
manual_scaling=True),
]
class AutoScalingModuleFacade(module.AutoScalingModule):
def __init__(self,
module_configuration,
host='fakehost',
balanced_port=0):
super(AutoScalingModuleFacade, self).__init__(
module_configuration=module_configuration,
host=host,
balanced_port=balanced_port,
api_host='localhost',
api_port=8080,
auth_domain='gmail.com',
runtime_stderr_loglevel=1,
php_config=None,
python_config=None,
java_config=None,
cloud_sql_config=None,
vm_config=None,
default_version_port=8080,
port_registry=None,
request_data=None,
dispatcher=None,
max_instances=None,
use_mtime_file_watcher=False,
automatic_restarts=True,
allow_skipped_files=False,
threadsafe_override=None)
def start(self):
pass
def quit(self):
pass
@property
def balanced_address(self):
return '%s:%s' % (self._host, self._balanced_port)
@property
def balanced_port(self):
return self._balanced_port
class ManualScalingModuleFacade(module.ManualScalingModule):
def __init__(self,
module_configuration,
host='fakehost',
balanced_port=0):
super(ManualScalingModuleFacade, self).__init__(
module_configuration=module_configuration,
host=host,
balanced_port=balanced_port,
api_host='localhost',
api_port=8080,
auth_domain='gmail.com',
runtime_stderr_loglevel=1,
php_config=None,
python_config=None,
java_config=None,
cloud_sql_config=None,
vm_config=None,
default_version_port=8080,
port_registry=None,
request_data=None,
dispatcher=None,
max_instances=None,
use_mtime_file_watcher=False,
automatic_restarts=True,
allow_skipped_files=False,
threadsafe_override=None)
def start(self):
pass
def quit(self):
pass
@property
def balanced_address(self):
return '%s:%s' % (self._host, self._balanced_port)
@property
def balanced_port(self):
return self._balanced_port
def get_instance_address(self, instance):
if instance == 'invalid':
raise request_info.InvalidInstanceIdError()
return '%s:%s' % (self._host, int(instance) + 1000)
def _make_dispatcher(app_config):
"""Make a new dispatcher with the given ApplicationConfigurationStub."""
return dispatcher.Dispatcher(
app_config,
'localhost',
1,
'gmail.com',
1,
php_config=None,
python_config=None,
java_config=None,
cloud_sql_config=None,
vm_config=None,
module_to_max_instances={},
use_mtime_file_watcher=False,
automatic_restart=True,
allow_skipped_files=False,
module_to_threadsafe_override={},
external_port=None)
class DispatcherQuitWithoutStartTest(unittest.TestCase):
def test_quit_without_start(self):
"""Test that calling quit on a dispatcher without calling start is safe."""
app_config = ApplicationConfigurationStub(MODULE_CONFIGURATIONS)
unstarted_dispatcher = _make_dispatcher(app_config)
unstarted_dispatcher.quit()
class DispatcherTest(unittest.TestCase):
def setUp(self):
self.mox = mox.Mox()
api_server.test_setup_stubs()
self.dispatch_config = DispatchConfigurationStub()
app_config = ApplicationConfigurationStub(MODULE_CONFIGURATIONS)
self.dispatcher = _make_dispatcher(app_config)
self.module1 = AutoScalingModuleFacade(app_config.modules[0],
balanced_port=1,
host='localhost')
self.module2 = ManualScalingModuleFacade(app_config.modules[0],
balanced_port=2,
host='localhost')
self.module3 = ManualScalingModuleFacade(app_config.modules[0],
balanced_port=3,
host='0.0.0.0')
self.mox.StubOutWithMock(self.dispatcher, '_create_module')
self.dispatcher._create_module(app_config.modules[0], 1).AndReturn(
(self.module1, 2))
self.dispatcher._create_module(app_config.modules[1], 2).AndReturn(
(self.module2, 3))
self.dispatcher._create_module(app_config.modules[2], 3).AndReturn(
(self.module3, 4))
self.mox.ReplayAll()
self.dispatcher.start('localhost', 12345, object())
app_config.dispatch = self.dispatch_config
self.mox.VerifyAll()
self.mox.StubOutWithMock(module.Module, 'build_request_environ')
def tearDown(self):
self.dispatcher.quit()
self.mox.UnsetStubs()
def test_get_module_names(self):
self.assertItemsEqual(['default', 'other', 'another'],
self.dispatcher.get_module_names())
def test_get_hostname(self):
self.assertEqual('localhost:1',
self.dispatcher.get_hostname('default', 'version'))
self.assertEqual('localhost:2',
self.dispatcher.get_hostname('other', 'version2'))
self.assertRaises(request_info.VersionDoesNotExistError,
self.dispatcher.get_hostname, 'default', 'fake')
self.assertRaises(request_info.NotSupportedWithAutoScalingError,
self.dispatcher.get_hostname, 'default', 'version', '0')
self.assertEqual('localhost:1000',
self.dispatcher.get_hostname('other', 'version2', '0'))
self.assertRaises(request_info.InvalidInstanceIdError,
self.dispatcher.get_hostname, 'other', 'version2',
'invalid')
self.assertRaises(request_info.ModuleDoesNotExistError,
self.dispatcher.get_hostname,
'nomodule',
'version2',
None)
self.assertEqual('%s:3' % socket.gethostname(),
self.dispatcher.get_hostname('another', 'version3'))
self.assertEqual(
'%s:1000' % socket.gethostname(),
self.dispatcher.get_hostname('another', 'version3', '0'))
def test_get_module_by_name(self):
self.assertEqual(self.module1,
self.dispatcher.get_module_by_name('default'))
self.assertEqual(self.module2,
self.dispatcher.get_module_by_name('other'))
self.assertRaises(request_info.ModuleDoesNotExistError,
self.dispatcher.get_module_by_name, 'fake')
def test_get_versions(self):
self.assertEqual(['version'], self.dispatcher.get_versions('default'))
self.assertEqual(['version2'], self.dispatcher.get_versions('other'))
self.assertRaises(request_info.ModuleDoesNotExistError,
self.dispatcher.get_versions, 'fake')
def test_get_default_version(self):
self.assertEqual('version', self.dispatcher.get_default_version('default'))
self.assertEqual('version2', self.dispatcher.get_default_version('other'))
self.assertRaises(request_info.ModuleDoesNotExistError,
self.dispatcher.get_default_version, 'fake')
def test_add_event(self):
self.mox.StubOutWithMock(scheduled_executor.ScheduledExecutor, 'add_event')
runnable = object()
scheduled_executor.ScheduledExecutor.add_event(runnable, 123, ('foo',
'bar'))
scheduled_executor.ScheduledExecutor.add_event(runnable, 124, None)
self.mox.ReplayAll()
self.dispatcher.add_event(runnable, 123, 'foo', 'bar')
self.dispatcher.add_event(runnable, 124)
self.mox.VerifyAll()
def test_update_event(self):
self.mox.StubOutWithMock(scheduled_executor.ScheduledExecutor,
'update_event')
scheduled_executor.ScheduledExecutor.update_event(123, ('foo', 'bar'))
self.mox.ReplayAll()
self.dispatcher.update_event(123, 'foo', 'bar')
self.mox.VerifyAll()
def test_add_async_request(self):
dummy_environ = object()
self.mox.StubOutWithMock(dispatcher._THREAD_POOL, 'submit')
self.dispatcher._module_name_to_module['default'].build_request_environ(
'PUT', '/foo?bar=baz', [('Header', 'Value'), ('Other', 'Values')],
'body', '1.2.3.4', 1).AndReturn(
dummy_environ)
dispatcher._THREAD_POOL.submit(
self.dispatcher._handle_request, dummy_environ, mox.IgnoreArg(),
self.dispatcher._module_name_to_module['default'],
None, catch_and_log_exceptions=True)
self.mox.ReplayAll()
self.dispatcher.add_async_request(
'PUT', '/foo?bar=baz', [('Header', 'Value'), ('Other', 'Values')],
'body', '1.2.3.4')
self.mox.VerifyAll()
def test_add_async_request_specific_module(self):
dummy_environ = object()
self.mox.StubOutWithMock(dispatcher._THREAD_POOL, 'submit')
self.dispatcher._module_name_to_module['other'].build_request_environ(
'PUT', '/foo?bar=baz', [('Header', 'Value'), ('Other', 'Values')],
'body', '1.2.3.4', 2).AndReturn(
dummy_environ)
dispatcher._THREAD_POOL.submit(
self.dispatcher._handle_request, dummy_environ, mox.IgnoreArg(),
self.dispatcher._module_name_to_module['other'],
None, catch_and_log_exceptions=True)
self.mox.ReplayAll()
self.dispatcher.add_async_request(
'PUT', '/foo?bar=baz', [('Header', 'Value'), ('Other', 'Values')],
'body', '1.2.3.4', module_name='other')
self.mox.VerifyAll()
def test_add_async_request_soft_routing(self):
"""Tests add_async_request with soft routing."""
dummy_environ = object()
self.mox.StubOutWithMock(dispatcher._THREAD_POOL, 'submit')
self.dispatcher._module_name_to_module['default'].build_request_environ(
'PUT', '/foo?bar=baz', [('Header', 'Value'), ('Other', 'Values')],
'body', '1.2.3.4', 1).AndReturn(
dummy_environ)
dispatcher._THREAD_POOL.submit(
self.dispatcher._handle_request, dummy_environ, mox.IgnoreArg(),
self.dispatcher._module_name_to_module['default'],
None, catch_and_log_exceptions=True)
self.mox.ReplayAll()
self.dispatcher.add_async_request(
'PUT', '/foo?bar=baz', [('Header', 'Value'), ('Other', 'Values')],
'body', '1.2.3.4', module_name='nomodule')
self.mox.VerifyAll()
def test_add_request(self):
dummy_environ = object()
self.mox.StubOutWithMock(self.dispatcher, '_resolve_target')
self.mox.StubOutWithMock(self.dispatcher, '_handle_request')
self.dispatcher._resolve_target(None, '/foo').AndReturn(
(self.dispatcher._module_name_to_module['default'], None))
self.dispatcher._module_name_to_module['default'].build_request_environ(
'PUT', '/foo?bar=baz', [('Header', 'Value'), ('Other', 'Values')],
'body', '1.2.3.4', 1, fake_login=True).AndReturn(
dummy_environ)
self.dispatcher._handle_request(
dummy_environ, mox.IgnoreArg(),
self.dispatcher._module_name_to_module['default'],
None).AndReturn(['Hello World'])
self.mox.ReplayAll()
response = self.dispatcher.add_request(
'PUT', '/foo?bar=baz', [('Header', 'Value'), ('Other', 'Values')],
'body', '1.2.3.4', fake_login=True)
self.mox.VerifyAll()
self.assertEqual('Hello World', response.content)
def test_add_request_soft_routing(self):
"""Tests soft routing to the default module."""
dummy_environ = object()
self.mox.StubOutWithMock(self.dispatcher, '_handle_request')
self.dispatcher._module_name_to_module['default'].build_request_environ(
'PUT', '/foo?bar=baz', [('Header', 'Value'), ('Other', 'Values')],
'body', '1.2.3.4', 1, fake_login=True).AndReturn(
dummy_environ)
self.dispatcher._handle_request(
dummy_environ, mox.IgnoreArg(),
self.dispatcher._module_name_to_module['default'],
None).AndReturn(['Hello World'])
self.mox.ReplayAll()
response = self.dispatcher.add_request(
'PUT', '/foo?bar=baz', [('Header', 'Value'), ('Other', 'Values')],
'body', '1.2.3.4', fake_login=True, module_name='nomodule')
self.mox.VerifyAll()
self.assertEqual('Hello World', response.content)
def test_add_request_merged_response(self):
"""Tests handlers which return side-effcting generators."""
dummy_environ = object()
self.mox.StubOutWithMock(self.dispatcher, '_handle_request')
self.dispatcher._module_name_to_module['default'].build_request_environ(
'PUT', '/foo?bar=baz', [('Header', 'Value'), ('Other', 'Values')],
'body', '1.2.3.4', 1, fake_login=True).AndReturn(
dummy_environ)
start_response_ref = []
def capture_start_response(unused_env, start_response, unused_module,
unused_inst):
start_response_ref.append(start_response)
def side_effecting_handler():
start_response_ref[0]('200 OK', [('Content-Type', 'text/plain')])
yield 'Hello World'
mock = self.dispatcher._handle_request(
dummy_environ, mox.IgnoreArg(),
self.dispatcher._module_name_to_module['default'],
None)
mock = mock.WithSideEffects(capture_start_response)
mock = mock.AndReturn(side_effecting_handler())
self.mox.ReplayAll()
response = self.dispatcher.add_request(
'PUT', '/foo?bar=baz', [('Header', 'Value'), ('Other', 'Values')],
'body', '1.2.3.4', fake_login=True, module_name='nomodule')
self.mox.VerifyAll()
self.assertEqual('200 OK', response.status)
self.assertEqual([('Content-Type', 'text/plain')], response.headers)
self.assertEqual('Hello World', response.content)
def test_handle_request(self):
start_response = object()
servr = self.dispatcher._module_name_to_module['other']
self.mox.StubOutWithMock(servr, '_handle_request')
servr._handle_request({'foo': 'bar'}, start_response, inst=None,
request_type=3).AndReturn(['body'])
self.mox.ReplayAll()
self.dispatcher._handle_request({'foo': 'bar'}, start_response, servr, None,
request_type=3)
self.mox.VerifyAll()
def test_handle_request_reraise_exception(self):
start_response = object()
servr = self.dispatcher._module_name_to_module['other']
self.mox.StubOutWithMock(servr, '_handle_request')
servr._handle_request({'foo': 'bar'}, start_response).AndRaise(Exception)
self.mox.ReplayAll()
self.assertRaises(Exception, self.dispatcher._handle_request,
{'foo': 'bar'}, start_response, servr, None)
self.mox.VerifyAll()
def test_handle_request_log_exception(self):
start_response = object()
servr = self.dispatcher._module_name_to_module['other']
self.mox.StubOutWithMock(servr, '_handle_request')
self.mox.StubOutWithMock(logging, 'exception')
servr._handle_request({'foo': 'bar'}, start_response).AndRaise(Exception)
logging.exception('Internal error while handling request.')
self.mox.ReplayAll()
self.dispatcher._handle_request({'foo': 'bar'}, start_response, servr, None,
catch_and_log_exceptions=True)
self.mox.VerifyAll()
def test_call(self):
self.mox.StubOutWithMock(self.dispatcher, '_module_for_request')
self.mox.StubOutWithMock(self.dispatcher, '_handle_request')
servr = object()
environ = {'PATH_INFO': '/foo', 'QUERY_STRING': 'bar=baz'}
start_response = object()
self.dispatcher._module_for_request('/foo').AndReturn(servr)
self.dispatcher._handle_request(environ, start_response, servr)
self.mox.ReplayAll()
self.dispatcher(environ, start_response)
self.mox.VerifyAll()
def test_module_for_request(self):
class FakeDict(dict):
def __contains__(self, key):
return True
def __getitem__(self, key):
return key
self.dispatcher._module_name_to_module = FakeDict()
self.dispatch_config.dispatch = [
(dispatchinfo.ParsedURL('*/path'), '1'),
(dispatchinfo.ParsedURL('*/other_path/*'), '2'),
(dispatchinfo.ParsedURL('*/other_path/'), '3'),
(dispatchinfo.ParsedURL('*/other_path'), '3'),
]
self.assertEqual('1', self.dispatcher._module_for_request('/path'))
self.assertEqual('2', self.dispatcher._module_for_request('/other_path/'))
self.assertEqual('2', self.dispatcher._module_for_request('/other_path/a'))
self.assertEqual('3',
self.dispatcher._module_for_request('/other_path'))
self.assertEqual('default',
self.dispatcher._module_for_request('/undispatched'))
def test_should_use_dispatch_config(self):
"""Tests the _should_use_dispatch_config method."""
self.assertTrue(self.dispatcher._should_use_dispatch_config('/'))
self.assertTrue(self.dispatcher._should_use_dispatch_config('/foo/'))
self.assertTrue(self.dispatcher._should_use_dispatch_config(
'/_ah/queue/deferred'))
self.assertTrue(self.dispatcher._should_use_dispatch_config(
'/_ah/queue/deferred/blah'))
self.assertFalse(self.dispatcher._should_use_dispatch_config('/_ah/'))
self.assertFalse(self.dispatcher._should_use_dispatch_config('/_ah/foo/'))
self.assertFalse(self.dispatcher._should_use_dispatch_config(
'/_ah/foo/bar/'))
self.assertFalse(self.dispatcher._should_use_dispatch_config(
'/_ah/queue/'))
def test_resolve_target(self):
servr = object()
inst = object()
self.dispatcher._port_registry.add(8080, servr, inst)
self.mox.StubOutWithMock(self.dispatcher, '_module_for_request')
self.mox.ReplayAll()
self.assertEqual((servr, inst),
self.dispatcher._resolve_target('localhost:8080', '/foo'))
self.mox.VerifyAll()
def test_resolve_target_no_hostname(self):
self.mox.StubOutWithMock(self.dispatcher, '_module_for_request')
servr = object()
self.dispatcher._module_for_request('/foo').AndReturn(servr)
self.mox.ReplayAll()
self.assertEqual((servr, None),
self.dispatcher._resolve_target(None, '/foo'))
self.mox.VerifyAll()
def test_resolve_target_dispatcher_port(self):
self.dispatcher._port_registry.add(80, None, None)
self.mox.StubOutWithMock(self.dispatcher, '_module_for_request')
servr = object()
self.dispatcher._module_for_request('/foo').AndReturn(servr)
self.mox.ReplayAll()
self.assertEqual((servr, None),
self.dispatcher._resolve_target('localhost', '/foo'))
self.mox.VerifyAll()
def test_resolve_target_unknown_port(self):
self.mox.StubOutWithMock(self.dispatcher, '_module_for_request')
self.mox.ReplayAll()
self.assertRaises(request_info.ModuleDoesNotExistError,
self.dispatcher._resolve_target, 'localhost:100', '/foo')
self.mox.VerifyAll()
def test_resolve_target_module_prefix(self):
self.mox.StubOutWithMock(self.dispatcher, '_module_for_request')
self.mox.StubOutWithMock(self.dispatcher, '_get_module_with_soft_routing')
servr = object()
self.dispatcher._get_module_with_soft_routing('backend', None).AndReturn(
servr)
self.mox.ReplayAll()
self.assertEqual((servr, None),
self.dispatcher._resolve_target('backend.localhost:1',
'/foo'))
self.mox.VerifyAll()
def test_resolve_target_instance_module_prefix(self):
self.mox.StubOutWithMock(self.dispatcher, '_module_for_request')
self.mox.StubOutWithMock(self.dispatcher, '_get_module_with_soft_routing')
servr = object()
self.dispatcher._get_module_with_soft_routing('backend', None).AndReturn(
servr)
self.mox.ReplayAll()
self.assertEqual((servr, None),
self.dispatcher._resolve_target('1.backend.localhost:1',
'/foo'))
self.mox.VerifyAll()
def test_resolve_target_instance_version_module_prefix(self):
self.mox.StubOutWithMock(self.dispatcher, '_module_for_request')
self.mox.StubOutWithMock(self.dispatcher, '_get_module_with_soft_routing')
servr = object()
self.dispatcher._get_module_with_soft_routing('backend', None).AndReturn(
servr)
self.mox.ReplayAll()
self.assertEqual((servr, None),
self.dispatcher._resolve_target('1.v1.backend.localhost:1',
'/foo'))
self.mox.VerifyAll()
def test_get_module_no_modules(self):
"""Tests the _get_module method with no modules."""
self.dispatcher._module_name_to_module = {}
self.assertRaises(request_info.ModuleDoesNotExistError,
self.dispatcher._get_module,
None,
None)
def test_get_module_default_module(self):
"""Tests the _get_module method with a default module."""
    # Test default module is returned for an empty query.
self.dispatcher._module_name_to_module = {'default': self.module1}
self.assertEqual(self.dispatcher._get_module(None, None), self.module1)
self.dispatcher._module_name_to_module['nondefault'] = self.module2
self.assertEqual(self.dispatcher._get_module(None, None), self.module1)
self.dispatcher._module_name_to_module = {'default': self.module1}
self.assertRaises(request_info.ModuleDoesNotExistError,
self.dispatcher._get_module,
'nondefault',
None)
# Test version handling.
self.dispatcher._module_configurations['default'] = MODULE_CONFIGURATIONS[0]
self.assertEqual(self.dispatcher._get_module('default', 'version'),
self.module1)
self.assertRaises(request_info.VersionDoesNotExistError,
self.dispatcher._get_module,
'default',
'version2')
def test_get_module_non_default(self):
"""Tests the _get_module method with a non-default module."""
self.dispatcher._module_name_to_module = {'default': self.module1,
'other': self.module2}
self.assertEqual(self.dispatcher._get_module('other', None),
self.module2)
# Test version handling.
self.dispatcher._module_configurations['default'] = MODULE_CONFIGURATIONS[0]
self.dispatcher._module_configurations['other'] = MODULE_CONFIGURATIONS[1]
self.assertEqual(self.dispatcher._get_module('other', 'version2'),
self.module2)
self.assertRaises(request_info.VersionDoesNotExistError,
self.dispatcher._get_module,
'other',
'version3')
def test_get_module_no_default(self):
"""Tests the _get_module method with no default module."""
self.dispatcher._module_name_to_module = {'other': self.module1}
self.assertEqual(self.dispatcher._get_module('other', None),
self.module1)
self.assertRaises(request_info.ModuleDoesNotExistError,
self.dispatcher._get_module,
None,
None)
# Test version handling.
self.dispatcher._module_configurations['other'] = MODULE_CONFIGURATIONS[0]
self.assertEqual(self.dispatcher._get_module('other', 'version'),
self.module1)
self.assertRaises(request_info.VersionDoesNotExistError,
self.dispatcher._get_module,
'other',
'version2')
def test_get_module_soft_routing_no_modules(self):
"""Tests the _get_module_soft_routing method with no modules."""
self.dispatcher._module_name_to_module = {}
self.assertRaises(request_info.ModuleDoesNotExistError,
self.dispatcher._get_module_with_soft_routing,
None,
None)
def test_get_module_soft_routing_default_module(self):
"""Tests the _get_module_soft_routing method with a default module."""
    # Test default module is returned for an empty query.
self.dispatcher._module_name_to_module = {'default': self.module1}
self.assertEqual(self.dispatcher._get_module_with_soft_routing(None, None),
self.module1)
self.dispatcher._module_name_to_module['other'] = self.module2
self.assertEqual(self.dispatcher._get_module_with_soft_routing(None, None),
self.module1)
# Test soft-routing. Querying for a non-existing module should return
# default.
self.dispatcher._module_name_to_module = {'default': self.module1}
self.assertEqual(self.dispatcher._get_module_with_soft_routing('other',
None),
self.module1)
# Test version handling.
self.dispatcher._module_configurations['default'] = MODULE_CONFIGURATIONS[0]
self.assertEqual(self.dispatcher._get_module_with_soft_routing('other',
'version'),
self.module1)
self.assertEqual(self.dispatcher._get_module_with_soft_routing('default',
'version'),
self.module1)
self.assertRaises(request_info.VersionDoesNotExistError,
self.dispatcher._get_module_with_soft_routing,
'default',
'version2')
self.assertRaises(request_info.VersionDoesNotExistError,
self.dispatcher._get_module_with_soft_routing,
'other',
'version2')
def test_get_module_soft_routing_non_default(self):
"""Tests the _get_module_soft_routing method with a non-default module."""
self.dispatcher._module_name_to_module = {'default': self.module1,
'other': self.module2}
self.assertEqual(self.dispatcher._get_module_with_soft_routing('other',
None),
self.module2)
# Test version handling.
self.dispatcher._module_configurations['default'] = MODULE_CONFIGURATIONS[0]
self.dispatcher._module_configurations['other'] = MODULE_CONFIGURATIONS[1]
self.assertEqual(self.dispatcher._get_module_with_soft_routing('other',
'version2'),
self.module2)
self.assertRaises(request_info.VersionDoesNotExistError,
self.dispatcher._get_module_with_soft_routing,
'other',
'version3')
def test_get_module_soft_routing_no_default(self):
"""Tests the _get_module_soft_routing method with no default module."""
self.dispatcher._module_name_to_module = {'other': self.module1}
self.assertEqual(self.dispatcher._get_module_with_soft_routing('other',
None),
self.module1)
self.assertEqual(self.dispatcher._get_module_with_soft_routing('other',
None),
self.module1)
# Test version handling.
self.dispatcher._module_configurations['other'] = MODULE_CONFIGURATIONS[0]
self.assertEqual(self.dispatcher._get_module_with_soft_routing('other',
'version'),
self.module1)
self.assertRaises(request_info.VersionDoesNotExistError,
self.dispatcher._get_module_with_soft_routing,
'other',
'version2')
if __name__ == '__main__':
unittest.main()
|
Serag8/Bachelor
|
google_appengine/google/appengine/tools/devappserver2/dispatcher_test.py
|
Python
|
mit
| 30,824
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# A Solution to "Cubic permutations" – Project Euler Problem No. 62
# by Florian Buetow
#
# Sourcecode: https://github.com/fbcom/project-euler
# Problem statement: https://projecteuler.net/problem=62
permutations = {}
n = 0
while True:
n = n + 1
cube = str(n**3)
digits = "".join(sorted(cube)) # use the sorted list of digits as key
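    # For example, 345**3 = 41063625 and 384**3 = 56623104 both reduce to the same
    # key '01234566', so they are counted as permutations of one another.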
if digits in permutations:
permutations[digits].append(n)
else:
permutations[digits] = [n]
if len(permutations[digits]) == 5:
print "The digits of these numbers cubed are permutations of each other:", permutations[digits]
print "Solution:", permutations[digits][0]**3
break
|
fbcom/project-euler
|
062_cubic_permutations.py
|
Python
|
mit
| 724
|
# pinspect_support_module_9.py
# Copyright (c) 2013-2019 Pablo Acosta-Serafini
# See LICENSE for details
# pylint: disable=C0103,C0111,R0201,R0903,W0212,W0621
def simple_property_generator(): # noqa: D202
"""Test if properties done via enclosed functions are properly detected."""
def fget(self):
"""Actual getter function."""
return self._value
return property(fget)
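# Illustrative sketch (not part of the original support module): a class using the
# generator above reads the property from ``self._value``, e.g.
#
#     class Foo(object):
#         def __init__(self):
#             self._value = 10
#         value = simple_property_generator()
#
#     Foo().value  # -> 10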
|
pmacosta/pexdoc
|
tests/support/pinspect_support_module_9.py
|
Python
|
mit
| 401
|
import numpy as np
import glob, pickle
import os,inspect,sys
try:
os.environ['SESNPATH']
os.environ['SESNCFAlib']
except KeyError:
print "must set environmental variable SESNPATH and SESNCfAlib"
sys.exit()
RIri = False
cmd_folder = os.getenv("SESNCFAlib")
if cmd_folder not in sys.path:
sys.path.insert(0, cmd_folder)
cmd_folder = os.getenv("SESNCFAlib")+"/templates"
if cmd_folder not in sys.path:
sys.path.insert(0, cmd_folder)
from snclasses import *
from templutils import *
import optparse
import readinfofile as ri
def doit(args,options):
locs={'04gq':4,'04gt':2,'05az':2,'05bf':2,'05eo':4,'06jc':3, '04gk':3, '07i':4, '07d':4, '05bf':2}
yoffsets={'01gd':1,'04aw':1.0,'04fe':1.3,'04gk':1.3,'04gq':0.8,'04gt':1.7,'05az':1.8,'05bf':1.5,'05eo':1.0, '05hg':0.7,'05mf':0.5,'06el':0.7,'06f':1.3,'06t':1.0,'06jc':2.1,'07bg':1.0,'06fo':1.4,'07ce':0.8,'07gr':1.1,'07uy':1.2,'07d':1.35,'08cw':1.0, '05nb':1.0,'05kz':0.8, '08d':0, '07i':1.5, '09er':1.5, '09iz':1.5,'07ke':0.7}
photcodes = {'U':('01','06'),'B':('02','07'),'V':('03','08'),'R':('04','09'),'I':('05','0a'),'r':('13','0b'),'i':('14','0c'), 'H':('H','H'), 'J':('J','J'),'K':('K','K'), 'u':('15','15')}
boffsets={'U':2,'u':2,'B':1,'V':0,'R':-1,'I':-2,'r':-1,'i':-2, 'J':-3,'H':-4,'K':-5}
#mybands = ['V']
su=setupvars()
typecount={}
for k in su.mytypecolors.keys():
typecount[k]=0
PHOTTABLE=False
#PHOTTABLE=True
COLOR=False
#COLOR=True
ALLCOLOR=False
#ALLCOLOR=True
lit=False
#lit=True
addlit = True
addlitphot = True
#Vmax,sntype=ri.readinfofile(verbose=False)
if COLOR:
colorfigs={}
# for ckey in su.cs.iterkeys():
# cf=open(ckey+".dat","w")##.upper()+".dat", "w")
# cf.close()
ticklabelsz = options['ticklabelsz']
thisyoff=options['yoff']
noylabel=options['noylabel']
for i,f in enumerate(args):
if '05eo' in f: continue
myebmv=0
legendloc=1
if isinstance (f, basestring):
thissn=mysn(f, addlit=addlit, quiet=True)
else:
thissn=mysn(f[0], addlit=addlit, quiet=True)
fnir = True
#print "optical files: ", thissn.optfiles
lc,flux,dflux, snname = thissn.loadsn2(verbose=False)
# lc,flux,dflux, snname = thissn.loadsn(thissn.optfiles, fnir, lit=lit,
# verbose=True, addlit=addlit,
# photlit=True)
#else:
# sntype[snname.lower()]='Ia'
# Vmax[snname.lower()]b='<0.000'
Vmax={snname.lower():'<0.000'}
#sntype={snname.lower():}
#print "printing sn info"
#thissn.printsn()
thissn.readinfofileall(bigfile=False, verbose=False, earliest=False, loose=True)
#thissn.setsn(sntype[snname.lower()],Vmax[snname.lower()])
if np.isnan(thissn.Vmax):
return -1
thissn.setphot()
thissn.getphot(RIri=RIri)
thissn.setphase()
thissn.printsn()
if options['hostebmv']:
if thissn.snnameshort not in su.ebmvhost and\
thissn.snnameshort not in su.ebmvcfa:
continue
myebmv = su.ebmvs[thissn.snnameshort]
try:
thisebmv = su.ebmvs[thissn.snnameshort] + \
su.ebmvhost[thissn.snnameshort]
except KeyError:
thisebmv = su.ebmvs[thissn.snnameshort] + \
su.ebmvcfa[thissn.snnameshort]
thissn.cleanphot()
thissn.printsn()
thissn.getphot(myebmv,RIri=RIri)
thissn.setphase()
if options['abs']:
try:
distpc=float(thissn.metadata['distance Mpc'])*1e6
except:
print "failed on distance:", snname#, thissn.metadata['distance Mpc']
continue
dm= 5.0*(np.log10(distpc)-1)
thissn.photometry['V']['mag']-=dm
if thissn.filters['K']>0:
#print "cheap hack"
thisyoff+=0.7
if PHOTTABLE:
thissn.printsn_fitstable()
# thissn.printsn_textable(photometry=True, fout=snname+".phot.tex")
continue
maxmag = -25
minmag = 200
mint=25555555.0
maxt=-2555555.0
ylabel=""
params = {'legend.fontsize': 12,
#'legend.linewidth': 1,
'legend.numpoints':1,
'legend.handletextpad':0.001,
'xtick.labelsize' : ticklabelsz,
'ytick.labelsize' : ticklabelsz
}
pl.rcParams.update(params)
if COLOR:
# for snoff in ebmvs.iterkeys():
# if thissn.name.endswith((snoff.strip()).lower()):
# myebmv=ebmvs[snoff]
if not options['hostebmv']:
myebmv=su.ebmvs[thissn.snnameshort]
thissn.cleanphot()
thissn.getphot(myebmv,RIri=RIri)
thissn.getcolors()
for ci,ckey in enumerate(su.cs.iterkeys()):
cf=open(ckey+".dat","a")#ckey.upper()+".dat", "a")
if thissn.getmaxcolors(ckey) == -1:
continue
print >>cf,snname, ckey, thissn.maxcolors[ckey]['epoch'],thissn.maxcolors[ckey]['color'],thissn.maxcolors[ckey]['dcolor'] ,
tmp=thissn.getepochcolors(ckey,50)
print >>cf, tmp[0],tmp[1],tmp[2],
tmp=thissn.getepochcolors(ckey,100)
print >>cf, tmp[0],tmp[1],tmp[2]
cf.close()
if isinstance (f, basestring):
fout = f.split("/")[-1].replace("slc.","").split('.')[0]+"."+ckey+".dat"
else:
fout = f[0].split("/")[-1].replace("slc.","").split('.')[0]+"."+ckey+".dat"
fileout = open(fout,"w")
thissn.printsn(photometry=False, cband = ckey,color=True, fout=fout)
fig=figure(su.cs[ckey]+1000)
pl.ylabel(ckey)
pl.errorbar(thissn.colors[ckey]['mjd'],thissn.colors[ckey]['mag'],fmt='o',yerr=thissn.colors[ckey]['dmag'], label=snname)
pl.xlabel("phase (days)")
if '06jc' in thissn.name:
# thissn.plotsn(photometry=False,color=True, save=True,fig=i, ylim=(maxmag,minmag), xlim=(mint-10,maxt+10), relim=False, offsets=True, ylabel=ylabel, aspect=0.5, nbins=options.bin, singleplot=True, noylabel=noylabel)
_ = thissn.plotsn(photometry=False,color=True, save=True,fig=i, ylim=(maxmag,minmag), xlim=(mint-10,maxt+10+(maxt-mint+20)*0.2), relim=False, offsets=True, ylabel=ylabel, aspect=0.5, nbins=options['bin'], singleplot=False, noylabel=noylabel, ticklabelsz=ticklabelsz)
else:
_ = thissn.plotsn(photometry=False,color=True, save=True,fig=i, ylim=(maxmag,minmag), xlim=(mint-10,maxt+10+(maxt-mint+20)*0.2), relim=False, offsets=True, ylabel=ylabel, aspect=0.5, nbins=options['bin'], singleplot=False,noylabel=noylabel, ticklabelsz=ticklabelsz)
if options['showme'] : pl.show()
else : pl.savefig(f.split("/")[-1].replace("slc.","").split('.')[0]+"_color.png", bbox_inches="tight", dpi=350)
continue
else:
#thissn.printsn(photometry=True)
for b in photcodes.iterkeys():
if thissn.filters[b] == 0: continue
myphotcode=su.photcodes[b]
if isinstance (f, basestring):
fout = f.split("/")[-1].replace("slc.","").split('.')[0]+"."+b+".dat"
else:
if not isinstance (f[0], basestring):
f = f[0]
fout = f[0].split("/")[-1].replace("slc.","").split('.')[0]+"."+b+".dat"
fout = fout.replace('.i.','.ip.').replace(".r.",".rp.").replace(".u.",".up.")
fileout = open(fout,"w")
#thissn.printsn(photometry=True, band=b, fout=fout)
#if b=='U':
#thissn.printsn(photometry=True, band = b)
if not thissn.stats[b].maglim[1] == 0:
maxmag = max(maxmag, thissn.stats[b].maglim[1]+boffsets[b]+1)
if not thissn.stats[b].tlim[0] == 0:
mint = min(thissn.stats[b].tlim[0], mint)
maxt = max(thissn.stats[b].tlim[1], maxt)
#print "maxt", maxt
if options['offdb'] :
for snoff in yoffsets.iterkeys():
if thissn.name.endswith(snoff.strip()):
thisyoff=yoffsets[snoff]
#print yoffsets[snoff]
if options['locdb'] :
for snoff in locs.iterkeys():
if thissn.name.endswith(snoff.strip()):
legendloc=locs[snoff]
if thissn.stats['V'].maglim[0] >0:
minmag= thissn.stats['V'].maglim[0]-thisyoff
nopt=0
for b in ['U','u','B','V','R','r','I','i']:
nopt+=thissn.filters[b]
for b in ['U','u','B','V','R','r','I','i','J','H','K']:
#print "####\n\n####", thissn.name, b, thissn.filters[b], "####"
if not thissn.stats[b].maglim[0] == 0:
minmag= min(minmag, thissn.stats[b].maglim[0]+boffsets[b]-thisyoff)
if thissn.filters[b]>0:
ylabel+=b+"+%d"%boffsets[b]+", "
'''
ax = pl.figure().add_subplot(111)
ax.errorbar(thissn.photometry[b]['phase'],
thissn.photometry[b]['mag'],
yerr = thissn.photometry[b]['dmag'],
color = 'k', fmt = '.')
ax.set_ylim(ax.get_ylim()[1], ax.get_ylim()[0])
thissn.gpphot(b, ax=ax)
'''
if nopt == 0:
maxmag +=1
ylabel=ylabel[:-2].replace("+-","-").replace('r','r\'').\
replace('i','i\'').replace('u','u\'') + " [mag]"
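        # e.g. with U, B, V, R and I data present this yields the label
        # "U+2, B+1, V+0, R-1, I-2 [mag]" (lowercase r/i/u bands are primed).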
#print "maxs:", mint, maxt, maxmag, minmag
if minmag == 0 or minmag < 0:
minmag = 7
fig=pl.figure(i, figsize=(8,16))
# individual SN plot setups
if '06ep' in thissn.name:
thissn.plotsn(photometry=True, show=False, fig=i,
ylim=(maxmag,minmag), xlim=(mint-10,maxt+3),
relim=False, offsets=True, ylabel=ylabel,
aspect=0.5, Vmax=False, legendloc=legendloc,
noylabel=noylabel, save=True, ticklabelsz=ticklabelsz)
elif '07ke' in thissn.name:
thissn.plotsn(photometry=True, show=False, fig=i,
ylim=(maxmag,minmag), xlim=(mint-8,maxt+10),
relim=False, offsets=True, ylabel=ylabel,
aspect=0.5, Vmax=False, legendloc=legendloc,
noylabel=noylabel, save=True, ticklabelsz=ticklabelsz)
elif '05az' in thissn.name:
thissn.plotsn(photometry=True, show=False, fig=i,
ylim=(maxmag,minmag), xlim=(mint-10,maxt),
relim=False, offsets=True, ylabel=ylabel,
aspect=0.5, Vmax=False, legendloc=legendloc,
noylabel=noylabel, save=True, ticklabelsz=ticklabelsz)
#all other SN plots
else:
#print thissn.fnir
thissn.plotsn(photometry=True, fig=i,
ylim=(maxmag,minmag), show=True,
#xlim=(mint-10,maxt+10+(maxt-mint+20)*0.4),
xlim=(thissn.Vmax-22-2400000, thissn.Vmax+17-2400000),
relim=False, offsets=True, ylabel=ylabel,
aspect=0.5, Vmax=False, legendloc=legendloc,
noylabel=noylabel, save=True, ticklabelsz=ticklabelsz)
# extent=fig.get_window_extent().transformed(fig.dpi_scale_trans.inverted())
#pl.savefig(f.split("/")[-1].replace("slc.","").split('.')[0]+"_UBVRIriHJK.pdf")#, bbox_inches="tight")
try:
            os.system("convert -resize 50% " + fout + " " + fout.replace(".png", "_thumb.png"))
except UnboundLocalError: pass
if options['showme']: pl.show()
if ALLCOLOR:
for ci,ckey in enumerate(su.cs.keys()[:-1]):
fig=figure(su.cs[ckey]+1000)
if options['showme']: pl.show()
else: pl.savefig(ckey+"_allsn.png")#, bbox_inches='tight', dpi=150)
return thissn
#pl.ion()
if __name__ == '__main__':
parser = optparse.OptionParser(usage="readlcvV.py snname --yoff yoffset", conflict_handler="resolve")
parser.add_option('--yoff', default=0.5, type="float",
help='y offset')
parser.add_option('--offdb', default=False, action="store_true",
help='offset from database')
parser.add_option('--locdb', default=False, action="store_true",
help='location from database')
parser.add_option('--bin', default=None, type="int",
help='bin size for step plot')
parser.add_option('--hostebmv', default=False, action="store_true",
help='host ebmv correction using cfa ebmv values')
parser.add_option('--abs', default=False, action="store_true",
help='abs mag')
parser.add_option('--noylabel', default=False, action="store_true",
help='')
parser.add_option('--ticklabelsz', default=13, type="float",
help='tick size')
parser.add_option('--showme', default=False, action="store_true",
help='')
options, args = parser.parse_args()
doit (args, options)
|
fedhere/SESNCfAlib
|
readlcv_func.py
|
Python
|
mit
| 15,303
|
from vanilla import *
from defconAppKit.windows.baseWindow import BaseWindowController
from mojo.events import addObserver, removeObserver
from mojo.UI import UpdateCurrentGlyphView
from mojo.drawingTools import *
class GlobalMaks(BaseWindowController):
def __init__(self, font):
# create a window
self.w = Window((170, 300), "The Mask", minSize=(170, 100))
# add a UI list
self.w.list = List((0, 0, -0, -0), [], selectionCallback=self.listSelectionCallback)
# set the font
self.setFont(font)
# add observers needed
addObserver(self, "drawBackground", "drawBackground")
addObserver(self, "drawBackground", "drawInactive")
addObserver(self, "fontBecameCurrent", "fontBecameCurrent")
addObserver(self, "fontResignCurrent", "fontResignCurrent")
self.setUpBaseWindowBehavior()
self.w.open()
def setFont(self, font):
# set all the possible glyph of the font in the UI list
self._font = font
self._glyphs = []
glyphs = []
if font is not None:
glyphs = font.glyphOrder
self.w.list.set(glyphs)
# ui callbacks
def listSelectionCallback(self, sender):
# called when an item is selected in the UI list
sel = sender.getSelection()
self._glyphs = []
for i in sel:
glyphName = sender[i]
self._glyphs.append(self._font[glyphName])
self.updateGlyphView()
def updateGlyphView(self):
# update the current glyph view
UpdateCurrentGlyphView()
# notifications
def fontBecameCurrent(self, notification):
# called when a font became the current font
font = notification["font"]
# set the font
self.setFont(font)
# update the glyph view
self.updateGlyphView()
def fontResignCurrent(self, notification):
# called when a font resigns being the current font
self.setFont(None)
self.updateGlyphView()
def drawBackground(self, notification):
# draw the glyph in the background of the glyph view
if not self._glyphs:
return
stroke(1, 0, 0)
strokeWidth(notification["scale"])
fill(None)
for glyph in self._glyphs:
drawGlyph(glyph)
def windowCloseCallback(self, sender):
        # when the window closes, remove all the added observers
removeObserver(self, "drawBackground")
removeObserver(self,"drawInactive")
removeObserver(self, "fontBecameCurrent")
removeObserver(self, "fontResignCurrent")
super(GlobalMaks, self).windowCloseCallback(sender)
# go
GlobalMaks(CurrentFont())
|
typemytype/RoboFontExamples
|
observers/theMask.py
|
Python
|
mit
| 2,844
|
#!/usr/bin/python
# encoding: utf-8
"""
schema.py
Functions for working with database schemas.
The MySQLSchema class requires sqlfairy. To install this on Ubuntu, run:
sudo apt-get install sqlfairy
Created by Shane O'Connor 2013
"""
import sys
import os
import re
import subprocess
import getpass
import time
import shlex
sys.path.insert(0, '../..')
import klab.colortext as colortext
from .mysql import DatabaseInterface as MySQLDatabaseInterface
from klab.fs.fsio import read_file, write_file, open_temp_file
class EmptyDiagramException(Exception): pass
class MySQLSchema(object):
def __init__(self, settings = {}, isInnoDB=True, numTries=32, host=None, db=None, user=None, passwd=None, port=3306, unix_socket="/var/lib/mysql/mysql.sock", passwdfile=None, use_utf=False):
self.db = db
self.host = host
self.original_schema = []
if not(os.path.exists(unix_socket)):
unix_socket = '/var/run/mysqld/mysqld.sock' # Ubuntu hack
if not passwd and passwdfile:
if os.path.exists(passwdfile):
passwd = read_file(passwdfile).strip()
else:
passwd = getpass.getpass("Enter password to connect to MySQL database:")
dbinterface = MySQLDatabaseInterface(settings, isInnoDB = isInnoDB, numTries = numTries, host = host, db = db, user = user, passwd = passwd, port = port, unix_socket = unix_socket, use_locking = False)
# Get the DB schema, normalizing for sqlt-diagram
db_schema = []
self.num_tables = 0
try:
for t in sorted(dbinterface.TableNames):
creation_string = dbinterface.execute_select('SHOW CREATE TABLE `%s`' % t)
assert(len(creation_string) == 1)
if creation_string[0].get('Create Table') == None: # e.g. for views
continue
self.num_tables += 1
creation_string = '%s;' % creation_string[0]['Create Table'].strip()
self.original_schema.append(creation_string)
# Fix input for sqlt-diagram (it is fussy)
creation_string = creation_string.replace("default ''", "")
creation_string = creation_string.replace("DEFAULT ''", "")
creation_string = creation_string.replace("DEFERRABLE INITIALLY DEFERRED", "") # sqlt-diagram doesn't like this syntax for MySQL
creation_string = creation_string.replace("AUTOINCREMENT", "") # sqlt-diagram doesn't like this syntax for MySQL
creation_string = creation_string.replace("auto_increment", "") # sqlt-diagram doesn't like this syntax for MySQL
                creation_string = re.sub(r"COMMENT.*'.*'", "", creation_string) # sqlt-diagram doesn't like this syntax for MySQL
                creation_string = re.sub(r"CONSTRAINT.*?CHECK.*?,", "", creation_string) # sqlt-diagram doesn't like this syntax for MySQL
                creation_string = re.sub(r"CONSTRAINT.*?CHECK.*?[)][)]", ")", creation_string) # sqlt-diagram doesn't like this syntax for MySQL
                creation_string = re.sub(r" AUTO_INCREMENT=\d+", "", creation_string)
creation_string = creation_string.replace("''", "")
creation_string = creation_string.replace('tg_', 'auth_')
db_schema.append(creation_string)
except: raise
db_schema = '\n\n'.join(db_schema)
self.db_schema = db_schema
self.mysqldump_schema = self.get_schema(host, user, passwd, db)
def print_schema(self):
c = 1
for x in self.sanitize_schema().split('\n'):
colortext.warning('%04d: %s' % (c, x))
c += 1
def sanitize_schema(self):
# Fix input for sqlt-diagram (it is fussy)
creation_string = self.mysqldump_schema
creation_string = creation_string.replace("default ''", "")
creation_string = creation_string.replace("DEFAULT ''", "")
creation_string = creation_string.replace("DEFERRABLE INITIALLY DEFERRED", "") # sqlt-diagram doesn't like this syntax for MySQL
creation_string = creation_string.replace("AUTOINCREMENT", "") # sqlt-diagram doesn't like this syntax for MySQL
creation_string = creation_string.replace("auto_increment", "") # sqlt-diagram doesn't like this syntax for MySQL
        creation_string = re.sub(r"COMMENT.*'.*'", "", creation_string) # sqlt-diagram doesn't like this syntax for MySQL
        creation_string = re.sub(r"CONSTRAINT.*?CHECK.*?,", "", creation_string) # sqlt-diagram doesn't like this syntax for MySQL
        creation_string = re.sub(r"CONSTRAINT.*?CHECK.*?[)][)]", ")", creation_string) # sqlt-diagram doesn't like this syntax for MySQL
        creation_string = re.sub(r" AUTO_INCREMENT=\d+", "", creation_string)
creation_string = creation_string.replace("''' ,", "' ,")
creation_string = creation_string.replace("''',", "',")
creation_string = creation_string.replace("'' ,", "")
creation_string = creation_string.replace("'',", "")
creation_string = creation_string.replace("''", "")
#write_file('/tmp/failed_schema.sql', creation_string)
return creation_string
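    # Rough illustration (the column definition below is hypothetical): a dumped line such as
    #     `name` varchar(64) DEFAULT '' COMMENT 'display name',
    # leaves sanitize_schema() with its DEFAULT '' and COMMENT clauses stripped,
    # which is the form sqlt-diagram tolerates.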
def get_schema(self, host, username, passwd, database_name):
try:
outfile, outfilename = open_temp_file('/tmp', "w")
p = subprocess.Popen(shlex.split("mysqldump -h %s -u %s -p%s --skip-add-drop-table --no-data %s" % (host, username, passwd, database_name)), stdout=outfile)
p.wait()
outfile.close()
contents = read_file(outfilename)
os.remove(outfilename)
return contents
except Exception as e:
if os.path.exists(outfilename):
os.remove(outfilename)
raise
def get_full_schema(self):
# todo: rename this to get_definition as this is more appropriate
return '\n\n'.join(self.original_schema)
def generate_schema_diagram(self, output_filepath = None, show_fk_only = False):
if self.num_tables == 0:
raise EmptyDiagramException('No tables in schema.')
tempfiles = self._generate_schema_diagram(show_fk_only)
self.schema_diagram = read_file(tempfiles[1])
for fname in tempfiles:
if os.path.exists(fname):
os.remove(fname)
if output_filepath:
write_file(output_filepath, self.schema_diagram)
def _generate_schema_diagram(self, show_fk_only):
tempfiles = []
output_handle, sql_schema_filepath = open_temp_file('/tmp', ftype = 'w')
tempfiles.append(sql_schema_filepath)
try:
#output_handle.write('%s\n\n' % self.db_schema)
output_handle.write('%s\n\n' % self.sanitize_schema())#mysqldump_schema)
output_handle.close()
except:
output_handle.close()
try:
png_handle, png_filepath = open_temp_file('/tmp', ftype = 'w')
png_handle.close()
tempfiles.append(png_filepath)
c = [
"sqlt-diagram",
"-d=MySQL",
"-i=png",
"-t=%s database on %s" % (self.db, self.host),
"-o=%s" % png_filepath,
"--color",
sql_schema_filepath,
]
if show_fk_only:
# Useful to print a smaller schema of just the primary/foreign keys
c.append("--show-fk-only")
p = subprocess.Popen(c, stdout=subprocess.PIPE)
stdout, stderr = p.communicate()
if not p.returncode == 0:
if stderr:
raise colortext.Exception("Error - sqlt-diagram exited with %d: '%s'." % (p.returncode, stderr))
else:
raise colortext.Exception("Error - sqlt-diagram exited with %d." % (p.returncode))
except Exception as e:
colortext.error('Failed!')
print((str(e)))
return tempfiles
if __name__ == '__main__':
s = MySQLSchema(host = "kortemmelab.ucsf.edu", db = "ddG", user = "kortemmelab", passwdfile = 'pw')
s.generate_schema_diagram(output_filepath = "mytest-ddG.png")
s = MySQLSchema(host = "kortemmelab.ucsf.edu", db = "KortemmeLab", user = "root", passwdfile = 'mpw')
s.generate_schema_diagram(output_filepath = "mytest-klab.png")
#s = MySQLSchema(host = "localhost", db = "DesignCollection", user = "root", passwd = '...')
#s.generate_schema_diagram(output_filepath = "DesignCollection_schema.png")
#print(s.get_full_schema())
|
Kortemme-Lab/klab
|
klab/db/schema.py
|
Python
|
mit
| 8,698
|
from django.conf.urls.i18n import i18n_patterns
from django.conf.urls.static import static
from django.conf.urls import include, url
from django.contrib import admin
from django.views.static import serve
from django.contrib.auth.decorators import login_required
from landing.views import home, team_page
from medical_prescription import settings
from django.template.response import TemplateResponse
@login_required
def protected_serve(request, path, document_root=None, show_indexes=False):
return serve(request, path, document_root, show_indexes)
urlpatterns = [
url(r'^user/', include('user.urls')),
url(r'^dashboard_health_professional/', include('dashboardHealthProfessional.urls')),
url(r'^medicine/', include('medicine.urls')),
url(r'^dashboard_patient/', include('dashboardPatient.urls')),
url(r'^exam/', include('exam.urls')),
url(r'^admin/', admin.site.urls),
url(r'^disease/', include('disease.urls')),
url(r'^$', home, name='landing_page'),
url(r'^team/', team_page, name='team_page'),
url(r'^i18n/', include('django.conf.urls.i18n')),
url(r'^prescription/', include('prescription.urls')),
url(r'^chat/', include('chat.urls')),
url(r'^recommendation/', include('recommendation.urls')),
url(r'^%s(?P<path>.*)$' % settings.MEDIA_URL[1:], protected_serve, {'document_root': settings.MEDIA_ROOT}),
] + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
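# For illustration: assuming settings.MEDIA_URL == '/media/', the protected_serve
# pattern above expands to r'^media/(?P<path>.*)$', so media files are only served
# to logged-in users.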
urlpatterns += i18n_patterns(
url(r'^user/', include('user.urls')),
url(r'^dashboard_health_professional/', include('dashboardHealthProfessional.urls')),
url(r'^medicine/', include('medicine.urls')),
url(r'^dashboard_patient/', include('dashboardPatient.urls')),
url(r'^exam/', include('exam.urls')),
url(r'^admin/', admin.site.urls),
url(r'^disease/', include('disease.urls')),
url(r'^prescription/', include('prescription.urls')),
url(r'^chat/', include('chat.urls')),
url(r'^$', home, name='landing_page'),
    url(r'^team/', team_page, name='team_page'),
)
# TODO(Joao) Uncomment these lines when DEBUG=False.
# if settings.DEBUG:
# urlpatterns += [
# (r'^media/(?P<path>.*)$', 'django.views.static.serve',
# {'document_root': settings.MEDIA_ROOT, 'show_indexes': True,})
# ]
|
fga-gpp-mds/2017.2-Receituario-Medico
|
medical_prescription/medical_prescription/urls.py
|
Python
|
mit
| 2,354
|
from time import time
start = time()
ROW = 0
COL = 1
def read_in_triangle(file_name):
triangle = []
with open(file_name) as f:
for line in f:
mini_list = line.split()
triangle.append(mini_list)
triangle = turn_into_numbers(triangle)
return triangle
def turn_into_numbers(str_list):
num_list = []
for mini_str_list in str_list:
mini_num_list = []
for str_num in mini_str_list:
mini_num_list.append(int(str_num))
num_list.append(mini_num_list)
return num_list
def find_all_paths(current_path, last_position, triangle, all_paths):
if last_position[ROW] == len(triangle) - 1:
all_paths.append(current_path)
return current_path
else:
current_path0 = list(current_path)
current_path1 = list(current_path)
next_position0 = [last_position[ROW] + 1, last_position[COL]]
next_position1 = [last_position[ROW] + 1, last_position[COL] + 1]
current_path0.append(triangle[next_position0[ROW]][next_position0[COL]])
current_path1.append(triangle[next_position1[ROW]][next_position1[COL]])
find_all_paths(current_path0, next_position0, triangle, all_paths)
find_all_paths(current_path1, next_position1, triangle, all_paths)
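# Worked example with the small triangle from the problem statement:
#       3
#      7 4
#     2 4 6
#    8 5 9 3
# find_all_paths enumerates the 2**(rows-1) = 8 root-to-base paths; the best is
# 3 + 7 + 4 + 9 = 23.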
#get triangle
triangle = read_in_triangle("prob18_triangle.txt")
#initialize variables
all_paths = []
current_path = []
#starting position and first partial current_path (made of starting position in triangle)
position = [0, 0]
current_path.append(triangle[0][0])
#find all paths in triangle (recursive function)
find_all_paths(current_path, position, triangle, all_paths)
#find path whose members have the highest sum
sum = 0
max = 0
winner = []
for path in all_paths:
sum = 0
for step in path:
sum += step
if sum > max:
max = sum
winner = path
end = time()
#report back
print "winning path: " + str(winner)
print "maximum sum of path steps: " + str(max)
print "This program took %s seconds to run" %(end-start)
|
ctlewitt/Project-Euler
|
prob18_maximum-path-sum-i.py
|
Python
|
mit
| 2,059
|
from will.plugin import WillPlugin
from will.decorators import respond_to, periodic, hear, randomly, route, rendered_template, require_settings
import requests
import json
class DefinitionPlugin(WillPlugin):
@respond_to("^urban dictionary (?P<word>.*)$")
def definition(self, message, word):
r = requests.get("http://api.urbandictionary.com/v0/define?term={0}".format(word))
wordlist = r.json()
if wordlist['result_type'] == 'exact':
def1 = wordlist['list'][0]['definition']
ex1 = wordlist['list'][0]['example']
sayData = {"word": word.title(), "definition": self.stripchars(def1,"[]"), "example": self.stripchars(ex1,"[]") }
self.say(rendered_template("urban_define.html", sayData), message, html=True)
else:
self.say("No definition found for {0}.\nSorry homie.".format(word), message=message)
# Strips characters from a string.
    def stripchars(self, s, chars):
return "".join(c for c in s if c not in chars)
|
Ironykins/will
|
will/plugins/fun/definition.py
|
Python
|
mit
| 1,027
|
#!/bin/python3
import os
import sys
#
# Complete the gradingStudents function below.
#
def gradingStudents(grades):
    # An equally valid one-liner with map and a lambda:
    #   list(map(lambda x: 5 * (1 + x // 5) if (x > 37 and (x % 5) > 2) else x, grades))
    # but the explicit loop below is used for clarity.
result = []
for i in grades:
if i >= 38:
if i % 5 >= 3:
i += (5 - i % 5)
result.append(i)
return result
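# Worked examples of the rounding rule: 73 -> 75 (next multiple of 5 is 2 away),
# 67 -> 67 (3 away, so no rounding), 38 -> 40, and 33 -> 33 (below 38 is never rounded).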
if __name__ == '__main__':
f = open(os.environ['OUTPUT_PATH'], 'w')
n = int(input())
grades = []
for _ in range(n):
grades_item = int(input())
grades.append(grades_item)
result = gradingStudents(grades)
f.write('\n'.join(map(str, result)))
f.write('\n')
f.close()
|
bluewitch/Code-Blue-Python
|
HR_gradingStudents.py
|
Python
|
mit
| 772
|
import contextlib
import json
import logging
import pathlib
import time
import uuid
import bottle
from ..apps import BaseApp
from ..ccfile import CCFile
from ..nodes import Manager as NodesManager, AlreadyRegisteredError, Node, NotConnectedError
from ..plugin.session import SessionPlugin
from ..proxy import Proxy as TorProxy
from ..session import SessionManager, Session
from ..utils import AttributedDict
from ..version import VersionManager
# __package__ is either 'theonionbox.tob.apps' or 'tob.apps'.
# If there are more than two levels, we try to import RELATIVEly;
# if there are only two levels, we try ABSOLUTEly.
p = __package__.split('.')
if len(p) > 2:
from ... import stamp
else:
import stamp
class CCError(bottle.HTTPError):
def __init__(self, status=None, origin=None, body=None, exception=None, traceback=None,
**options):
self.origin = origin
super(CCError, self).__init__(status, body, exception, traceback, **options)
def apply(self, response):
super(CCError, self).apply(response)
if self.origin is not None:
response.set_header('Content-Location', self.origin)
class ControlCenter(BaseApp):
def __init__(self
, sessions: SessionManager
, nodes: NodesManager
, proxy: TorProxy
, version: VersionManager
, config: AttributedDict):
super(ControlCenter, self).__init__(sessions=sessions, nodes=nodes, proxy=proxy, version=version,
config=config)
self.base_path = self.config.box.base_path
self.cc = CCFile(self.config.cc)
self.fingerprints = {}
self.show_logout = None
self.cwd = pathlib.Path(self.config['cwd'])
config = {
'no_session_redirect': self.redirect.path('/'),
'valid_status': ['auto', 'prepared', 'frame']
}
self.app.route('/<session>/cc.html',
method='GET',
callback=self.get_cc,
apply=SessionPlugin(sessions),
**config)
config = {
'valid_status': ['frame']
}
self.app.route('/<session>/cc/ping',
method='POST',
callback=self.post_cc_ping,
apply=SessionPlugin(sessions),
**config)
self.app.route('/<session>/cc/license',
method='POST',
callback=self.post_cc_license,
apply=SessionPlugin(sessions),
**config)
self.app.route('/<session>/cc/about',
method='POST',
callback=self.post_cc_about,
apply=SessionPlugin(sessions),
**config)
self.app.route('/<session>/cc/logout',
method='GET',
callback=self.get_cc_logout,
apply=SessionPlugin(sessions),
**config)
config = {
'valid_status': ['cc_new', 'cc_login', 'cc_ok']
}
self.app.route('/<session>/cc/data',
method='POST',
callback=self.post_cc_data,
apply=SessionPlugin(sessions),
**config)
# self.app.route('/<session>/cc/control.html',
# method='POST',
# callback=self.post_cc_control,
# apply=SessionPlugin(sessions),
# **config)
self.app.route('/<session>/cc/ciao.html',
method='POST',
callback=self.post_cc_ciao,
apply=SessionPlugin(sessions),
**config)
self.app.route('/<session>/cc/login',
method='POST',
callback=self.post_cc_login,
apply=SessionPlugin(sessions),
**config)
self.app.route('/<session>/cc/position',
method='POST',
callback=self.post_position,
apply=SessionPlugin(sessions),
**config)
config = {
'valid_status': ['cc_new']
}
self.app.route('/<session>/cc/md5.js',
method='GET',
callback=self.get_md5,
apply=SessionPlugin(sessions),
**config)
# To show the detail dashboard for a node
config = {
'valid_status': ['cc_ok']
}
self.app.route('/<session>/cc/details',
method='GET',
callback=self.get_details,
apply=SessionPlugin(sessions),
**config)
# Node property management
config = {
'valid_status': ['frame', 'cc_new', 'cc_ok']
}
self.app.route('/<session>/cc/check',
method='POST',
callback=self.post_check_node,
apply=SessionPlugin(sessions),
**config)
self.app.route('/<session>/cc/save',
method='POST',
callback=self.post_save_node,
apply=SessionPlugin(sessions),
**config)
config = {
'valid_status': ['cc_new', 'cc_ok']
}
self.app.route('/<session>/cc/edit',
method='POST',
callback=self.post_edit_node,
apply=SessionPlugin(sessions),
**config)
self.app.route('/<session>/cc/remove',
method='POST',
callback=self.post_remove_node,
apply=SessionPlugin(sessions),
**config)
# def debug_request():
# self.log.debug(bottle.request.environ['PATH_INFO'])
#
# # Log connection requests...
# self.app.add_hook('before_request', debug_request)
# The CC frame
def get_cc(self, session):
log = logging.getLogger('theonionbox')
if session is None or 'status' not in session:
raise bottle.HTTPError(404)
status = session['status']
# check only at first run, not when re-loaded...
if self.show_logout is None:
self.show_logout = (session['status'] != 'auto')
if status == 'prepared':
delay = time.time() - session['prep_time']
if delay > 2.0: # seconds
session['status'] = 'toolate' # ;)
log.info('{}@{}: Login to Session delay expired. Session canceled.'
.format(session.id_short(), self.sessions.get_remote_address(session.id)))
else:
session['status'] = 'ok'
                # we have a successful connection! Celebrate this!
log.notice('{}@{}: Session established.'.format(session.id_short(),
self.sessions.get_remote_address(session.id)))
if session['status'] not in ['ok', 'auto', 'frame']:
self.sessions.delete_session(session)
self.redirect('/')
node = session['node']
if node is None:
self.sessions.delete_session(session)
self.redirect('/')
# This indicates the session is now the CC frame
session['status'] = 'frame'
session['stylesheets'] = ['bootstrap.css', 'latolatin/latolatinfonts.css', 'fontawesome/css/all.css', 'cc.css']
session['scripts'] = ['jquery.js', 'bootstrap.js', 'smoothie.js', 'scrollMonitor.js',
'chart.js', 'cc.js', 'md5.js', 'pep.js']
params = {
'session': session,
'virtual_basepath': self.base_path,
'show_logout': self.show_logout,
'icon': self.icon,
'stamp': stamp,
'launcher': 1 if 'cards' not in session else 0
, 'template_lookup': [str(self.cwd)]
}
session['cc.js'] = bottle.template('scripts/cc.js', **params)
session['cc.css'] = bottle.template('css/cc.css', **params)
return bottle.template('pages/cc.html', **params)
def post_check_node(self, session):
def verify_tor_control_port(protocol_info):
pi = protocol_info.splitlines()
# print(pi)
if len(pi) == 4:
if len(pi[0]) > 16 and pi[0][:16] == '250-PROTOCOLINFO':
if len(pi[3]) == 6 and pi[3] == '250 OK':
return True
return False
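        # For reference, a reachable Tor ControlPort typically answers 'PROTOCOLINFO 1'
        # with four lines shaped roughly like (values below are illustrative):
        #     250-PROTOCOLINFO 1
        #     250-AUTH METHODS=COOKIE,SAFECOOKIE COOKIEFILE="/run/tor/control.authcookie"
        #     250-VERSION Tor="0.4.5.7"
        #     250 OK
        # which is the shape verify_tor_control_port() checks for.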
def verify_port(port):
try:
p = int(port)
except ValueError:
raise ValueError('Wrong value for Port.')
if p < 0 or p > 65535:
raise ValueError('Wrong value for Port.')
return p
# def verify_tor_password(response: str) -> bool:
# if len(response) < 3:
# return False
# return response[:3] == '250'
# action = request.forms.get('action')
connect = bottle.request.forms.get('connect')
host = bottle.request.forms.get('host')
port = bottle.request.forms.get('port')
pwd = bottle.request.forms.get('password')
cookie = bottle.request.forms.get('cookie')
# translate unmod cookie indicator into cookie value from config
node = self.nodes[session['node']]
config = node.config
if cookie == session['edit_unmod']:
cookie = config.cookie
sc = None
piok = False
auth = False
# We're NOT going to test the password, as this could expose it to a hostile party.
try:
if connect == 'port':
from ..simplecontroller import SimplePort
p = verify_port(port)
sc = SimplePort(host, p)
elif connect == 'socket':
from ..simplecontroller import SimpleSocket
sc = SimpleSocket(host)
elif connect == 'proxy':
from ..simplecontroller import SimpleProxy
p = verify_port(port)
if cookie and len(cookie) > 0:
self.proxy.assure_cookie(host, cookie)
sc = SimpleProxy(host, p, self.proxy.host, self.proxy.port)
if sc is not None:
piok = verify_tor_control_port(sc.msg('PROTOCOLINFO 1'))
# if piok is True and len(pwd) > 0:
# print(pwd)
# auth = verify_tor_password(sc.msg(f'AUTHENTICATE "{pwd}"'))
sc.shutdown()
del sc
except Exception as exc:
return f'500 NOK\n{exc}'
# if len(pwd) > 0:
# return '1' if auth is True else '0'
return '250 OK' if piok is True else '500 NOK\nNot a Tor ControlPort or ControlSocket.'
def post_save_node(self, session):
label = bottle.request.forms.get('label', '').strip()
control = bottle.request.forms.get('connect', '').strip()
host = bottle.request.forms.get('host', '').strip()
port = bottle.request.forms.get('port', '').strip()
password = bottle.request.forms.get('password', '').strip()
cookie = bottle.request.forms.get('cookie', '').strip()
# Basic data integrity check
if control not in ['port', 'socket', 'proxy']:
raise bottle.HTTPError(400)
if control in ['port', 'proxy']:
if host is None or len(host) < 1:
raise bottle.HTTPError(400)
if port is None or len(port) < 1:
raise bottle.HTTPError(400)
if control in ['socket']:
if host is None or len(host) < 1:
raise bottle.HTTPError(400)
status = session['status']
if status in ['frame']:
config = self.cc.add_node() # This returns None if readonly!
if config is None:
raise bottle.HTTPError(403)
elif status in ['cc_new', 'cc_ok']:
node = self.nodes[session['node']]
if node is None:
raise bottle.HTTPError(500)
config = node.config
if config.file.readonly is True:
raise bottle.HTTPError(403)
# translate unmod cookie & password indicator into cookie / password value from config
if password == session['edit_unmod']:
password = config.password or ''
if cookie == session['edit_unmod']:
cookie = config.cookie or ''
else:
raise bottle.HTTPError(403)
config.label = label if len(label) > 0 else None
config.control = control
config.host = host
config.port = port
config.password = password if len(password) > 0 else None
config.cookie = cookie if len(cookie) > 0 else None
config.tick()
return bottle.HTTPResponse(status=200)
def post_edit_node(self, session):
unmod = uuid.uuid4().hex
session['edit_unmod'] = unmod
node = self.nodes.get(session['node'])
config = node.config
address = config.host
if config.port is not None:
address = f'{address}:{config.port}'
data = {
'unmod': unmod,
'label': config.label,
'connect': config.control,
'address': address
}
if config.password is not None:
data['pwd'] = unmod
cookie = config.cookie
if cookie is not None:
if len(cookie) == 22:
data['cookie'] = f'{cookie[:6]}...'
else:
# cookie length shall (always) be 22 characters!
# All other lengths are invalid cookie values!
data['cookie'] = cookie
return json.JSONEncoder().encode({'config': data})
def post_remove_node(self, session):
node_id = session['node']
node = self.nodes.get(node_id)
config = node.config
if config.readonly is True:
return bottle.HTTPError(403)
# get the name of the section that shall be removed
section_name = config.name
# here we collect all sessions representing this configuration section
sessions = []
# check all sessions, if they are connected to this section
for s in self.sessions:
n = self.nodes.get(s['node'])
if n.config.name == section_name:
sessions.append(s.id)
print(f"ID'd sessions: {len(sessions)}")
# delete all sessions identified!
for s in sessions:
self.sessions.delete_session_by_id(s)
# now remove the node!
self.nodes.node_remove(node_id)
# and finally delete the section from the config file!
if config.remove() is True:
config.tick()
return bottle.HTTPResponse(status=200)
return bottle.HTTPError(500)
def post_cc_ping(self, session):
headers = {
'Last-Modified': time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime(self.cc.last_modified))
, 'Date': time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime())
}
# ping shall always send 'If-Modified-Since' header ...
ims = bottle.request.environ.get('HTTP_IF_MODIFIED_SINCE', None)
if ims is None:
# As said: ping shall always send I-M-S
return bottle.HTTPError(400)
ims = bottle.parse_date(ims.split(";")[0].strip())
if ims >= int(self.cc.last_modified):
return bottle.HTTPResponse(status=304, **headers)
# set the headers
for header, value in headers.items():
# print(header, value)
bottle.response.set_header(header, value)
# ['cards'] holds the session.id's for the cards of this session
if session['cards'] is None:
# First Card = DefaultNode
card = self.sessions.create_session(bottle.request, 'cc_new')
card['node'] = 'theonionbox'
session['cards'] = [card.id]
# if the frame (cc) session knows a password, this is the one for the default node!
card['password'] = session.get('password', None)
cards = [session['cards'][0]]
# walk through the sections defined in the cc configuration file
for section in self.cc:
# The session = card representing this section
card = None
# the card's node
node = None
# check if there's a card = session, that holds a node with the name of this configuration section
for card_id in session['cards']:
s = self.sessions.get_session(card_id, bottle.request)
if s is not None:
# If s IS None, the session is expired!
node = self.nodes[s['node']]
if section.name == node.config.name:
card = s
break
if card is not None:
# if the config changed...
if int(section.last_modified) > ims:
# disco the node
node = self.nodes[card['node']]
node.disconnect()
# and delete the session = card
self.sessions.delete_session(card)
card = None
# card will now be recreated & node reconnected
# If there's None, create a new session = a new card
if card is None:
# get the node representing this section
node = self.nodes.get_name(section.name)
# if node is None: create it from section!
if node is None:
id = self.nodes.node_add(section)
node = self.nodes.get(id)
# then create a new session & connect both
if node is not None:
card = self.sessions.create_session(bottle.request, 'cc_new')
if card is not None:
card['node'] = node.id
card['controlled_by'] = session.id
card['password'] = section.password
# session['cards'].append(card.id)
if card is not None:
cards.append(card.id)
# This eliminates all expired sessions from ['cards']
session['cards'] = cards
# Now json everything... and return it!
return json.JSONEncoder().encode({'cards': cards})
def post_cc_ciao(self, session):
log = logging.getLogger('theonionbox')
log.debug("Card {} sent a 'Ciao'!".format(session.id_short))
self.sessions.delete_session(session)
def post_cc_data(self, session):
headers = dict()
# data shall always send 'If-Modified-Since' header ...
ims = bottle.request.environ.get('HTTP_IF_MODIFIED_SINCE')
headers['Last-Modified'] = time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime(time.time()))
headers['Date'] = time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime())
if ims:
ims = bottle.parse_date(ims.split(";")[0].strip())
#print(session['node'])
# print(session.id + ' / position -> ')
try:
node = self.nodes[session['node']]
except KeyError:
self.sessions.delete_session(session)
raise bottle.HTTPError(400)
status = session['status']
# print(status)
if status != 'cc_ok':
# if node.config.connect is False:
# return bottle.HTTPResponse(202) # Accepted
# This raises in case of issues
self.connect_card_to_node(session, node)
# Being here there's a valid connection!
session['status'] = 'cc_ok'
if node.controller and not node.controller.is_alive():
# Try to reconnect - once!
node.controller.reconnect()
# This is an issue...
if not node.controller or not node.controller.is_alive():
raise bottle.HTTPError(404)
# ...
ret = {}
# create an uuid unique to a fp
# to allow the cc client to distinguish cards for cumulation
fp = None
with contextlib.suppress(Exception):
fp = node.controller.fingerprint
if fp is not None and len(fp) == 40:
if fp not in self.fingerprints:
self.fingerprints[fp] = uuid.uuid4().hex
ret['representing'] = self.fingerprints[fp]
# ret['style'] = 'readonly'
ret['dd'] = ''
ret['label'] = node.nickname
ret['version'] = node.controller.version_short
# if ims and ims < self.vm.Tor.last_modified:
ret['latest'] = self.version.Tor.stable
# ret['latest'] = '0.4.0.6'
ret['versionflag'] = node.controller.version_current
node_mode = 'Client'
if node.controller.get_conf('BridgeRelay', None) == '1':
node_mode = 'Bridge'
elif node.controller.get_conf('ORPort', None):
node_mode = 'Relay'
ret['mode'] = node_mode
if True == True:
# the first flag is always the placeholder for the nodes mode data
# ret['flags'] = ['mode']
ret['flags'] = []
if node_mode == 'Bridge':
# node.controller.flags fails for a bridge!
try:
oo = node.onionoo
d = oo.details('flags')
ret['flags'].extend(d)
except:
pass
else:
f = node.controller.flags
if f is not None and len(f) > 0 and f[0] != 'unknown':
ret['flags'].extend(f)
# We add an icon in case of Hibernation!
try:
accs = node.controller.get_accounting_stats()
except:
pass
else:
if accs.status != "awake":
ret['flags'].append(accs.status)
else:
ret['flags'] = [
'mode',
'Authority',
'BadExit',
'BadDirectory',
'Exit',
'Fast',
'Guard',
'HSDir',
'Named',
'Stable',
'Running',
'Unnamed',
'Valid',
'V2Dir',
'V3Dir',
'soft',
'hard',
'unknown'
]
ret['details'] = True
last_ts = session['lastTS']
rv = node.bandwidth.get_data(interval='1s', since_timestamp=last_ts)
# print(len(rv))
if len(rv) > 0:
ret['bw'] = rv
session['lastTS'] = time.time() * 1000
# Connection
conn = ''
conn += 'h' if node.config.is_default_node else ''
conn += 'p' if node.controller.auth_password else ''
conn += 'c' if node.controller.with_cookie else ''
conn += 'x' if node.controller.via_proxy else ''
ret['conn'] = conn
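        # For example, conn == 'cx' means the node is cookie-authenticated and reached
        # via the proxy, while 'hp' means the default node authenticated by password.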
# set the headers
for header, value in headers.items():
# print(header, value)
bottle.response.set_header(header, value)
return json.JSONEncoder().encode(ret)
def connect_card_to_node(self, session_of_card: Session, node: Node):
from stem.connection import MissingPassword, IncorrectPassword
try:
super().connect_session_to_node(session_of_card, node.id)
except (MissingPassword, IncorrectPassword):
session_of_card['auth'] = 'basic' if node.controller.password is None else 'digest'
label = node.label or ''
version = self.latest_pi.tor_version.version_str if self.latest_pi is not None else ''
origin = 'Tor/' + version + '/' + label
raise CCError(401, origin=origin) # Unauthorized
except bottle.HTTPError:
raise
except Exception as exc:
raise bottle.HTTPError(404)
def post_cc_connect(self, session):
try:
node = self.nodes[session['node']]
except KeyError:
self.sessions.delete_session(session)
raise bottle.HTTPError(400)
# This raises in case of issues
self.connect_card_to_node(session, node)
# Being here there's a valid connection!
session['status'] = 'cc_ok'
def post_cc_login(self, session):
from ..authenticate import authenticate
# print("post_cc_login")
if 'login' in session:
session['login'] += 1
if session['login'] > 1 or (time.time() - session['login_time']) > 1.5:
self.sessions.delete_session(session)
raise bottle.HTTPError(404)
else:
session['login'] = 0
session['login_time'] = time.time()
node = self.nodes[session['node']]
header = bottle.request.environ.get('HTTP_AUTHORIZATION', '')
# print(header)
# this raises HTTPError(401) to perform the authentication procedure
pwd = authenticate(session, node, header, method='POST')
# at this stage we have a successful login
# and switch to standard session management
        # Create a new session
authed_session = self.sessions.create_session(bottle.request, 'cc_new')
if authed_session is not None:
# Fill it with initial data
authed_session['node'] = node.id
authed_session['password'] = pwd
# Replace current session against authed_session in the controlcenter's session['cards']:
# Find the cc's session
cc = None
with contextlib.suppress(Exception):
cc = self.sessions.get_session(session['controlled_by'], bottle.request)
# if found, get the ['cards'] and exchange the .id's
if cc is not None:
cards = cc['cards']
cards.remove(session.id)
cards.append(authed_session.id)
# register the controlcenter's .id in the authed_session
# ... to enable exactly this procedure
authed_session['controlled_by'] = cc.id
# delete the unauthed session
self.sessions.delete_session(session)
# and finally tell the client the new id!
return authed_session.id
# This is the uuups case...
return bottle.HTTPError(404)
def get_md5(self, session):
self.redirect(f'/{session.id}/md5.js')
def get_cc_logout(self, session):
print('@logout')
self.sessions.delete_session(session)
self.redirect('/')
def get_details(self, session):
# We use the dashboards 'get_restart' ('/<session_id>/') function to show the detail page
# => Adequately fill the session, so that 'restart' likes it:
details_session = self.sessions.create_session(bottle.request, 'login')
details_session['password'] = session.get('password')
details_session['cached_node'] = session.get('node')
self.redirect(f'/{details_session.id}/')
def post_position(self, session):
# When the operator changes the position of a card via D & D,
# it sends the session id of the card located before itself in the DOM tree.
from ..ccfile import CCNode
before = bottle.request.forms.get('position', None)
if before is None:
return
b = self.sessions.get_session(before, bottle.request)
if b is None:
return
before_node = self.nodes[b['node']]
session_node = self.nodes[session['node']]
if before_node is not None and session_node is not None:
bnc = before_node.config
if not isinstance(bnc, CCNode):
bnc = None
session_node.config.move_after(bnc)
def post_cc_edit(self, session):
n = session['node']
c = n.config
data = {'control': c.control}
if c.host is not None and c.port is not None:
data['port'] = f'{c.host}:{c.port}'
if c.socket is not None:
data['socket'] = c.socket
data = {
'control': c.control,
'port': 'xxx'
}
def post_cc_license(self, session):
from ..license import License
l = License()
license = f"""
<div style='font-family: LatoLatinWebLight; font-size: 24px;'>The Onion Box</div>
<div style='font-family: LatoLatinWeb; font-size: 14px;'>{l.get('copyright')}</div>
<div style='font-family: LatoLatinWeb; font-size: 14px;'>{l.get('1')}</div>
<br>
<div style='font-family: LatoLatinWeb; font-size: 14px;'>{l.get('2')}</div>
<br>
<div style='font-family: LatoLatinWeb; font-size: 14px;'>{l.get('3')}</div>
<br>
<div style='font-family: LatoLatinWebLight; font-size: 24px;'>Statement of Independence</div>
<div style='font-family: LatoLatinWeb; font-size: 14px;'>{l.get('independence')}</div>
"""
return license
def post_cc_about(self, session):
from stamp import __version__, __stamp__
Credits = [
('Bootstrap', 'https://getbootstrap.com', 'The Bootstrap team', 'MIT'),
('Bottle', 'http://bottlepy.org', 'Marcel Hellkamp', 'MIT'),
('Cheroot', 'https://github.com/cherrypy/cheroot', 'CherryPy Team',
'BSD 3-Clause "New" or "Revised" License'),
('Click', 'https://github.com/pallets/click', 'Pallets', 'BSD 3-Clause "New" or "Revised" License'),
('ConfigUpdater', 'https://github.com/pyscaffold/configupdater', 'Florian Wilhelm', 'MIT'),
('Glide', 'https://github.com/glidejs/glide', '@jedrzejchalubek', 'MIT'),
('JQuery', 'https://jquery.com', 'The jQuery Foundation', 'MIT'),
('jquery.pep.js', 'http://pep.briangonzalez.org', '@briangonzalez', 'MIT'),
('js-md5', 'https://github.com/emn178/js-md5', '@emn178', 'MIT'),
('PySocks', 'https://github.com/Anorov/PySocks', '@Anorov', 'Custom DAN HAIM'),
('RapydScript-NG', 'https://github.com/kovidgoyal/rapydscript-ng', '@kovidgoyal',
'BSD 2-Clause "Simplified" License'),
('Requests', 'https://requests.kennethreitz.org', 'Kenneth Reitz', 'Apache License, Version 2.0'),
('scrollMonitor', 'https://github.com/stutrek/scrollmonitor', '@stutrek', 'MIT'),
('Smoothie Charts', 'https://github.com/joewalnes/smoothie', '@drewnoakes', 'MIT'),
('stem', 'https://stem.torproject.org', 'Damian Johnson and The Tor Project',
'GNU LESSER GENERAL PUBLIC LICENSE')
]
cdts = ''
for line in Credits:
(project, url, author, license) = line
cdts += f'<a href="{url}" target="_blank"><b>{project}</b></a> © {author} | {license}<br>'
cdts += '<br><b>... and a number of others more!</b>'
about = f"""
<div style='font-family: LatoLatinWeb; font-size: 24px;'>
                The Onion Box <span style='font-family: LatoLatinWeb; font-size: 18px;'> {__version__}</span>
</div>
<div style='font-family: LatoLatinWebLight; font-size: 18px;'>Dashboard to monitor Tor node operations</div>
<hr>
<div style='font-family: LatoLatinWebLight; font-size: 14px;'>
<a href="http://www.theonionbox.com/#readme" target="_blank">The Onion Box</a>
| Copyright © 2015 - 2020 Ralph Wetzel | License:
<a href="https://github.com/ralphwetzel/theonionbox/blob/master/LICENSE" target="_blank">MIT</a>
</div>
<div style='font-family: LatoLatinWebLight; font-size: 14px;'>
Tor and the Tor Onion Logo are registered trademarks ® of
<a href="https://torproject.org" target="_blank">The Tor Project</a>.
Onionoo is a service of The Tor Project.
</div>
<div style='font-family: LatoLatinWebLight; font-size: 14px;'>
The Onion Box uses the great <a href="https://latofonts.com" target="_blank">Lato</a> font. | Copyright
Łukasz Dziedzic | License:
<a href="http://scripts.sil.org/OFL" target="_blank">SIL Open Font License 1.1</a>.
</div>
<div style='font-family: LatoLatinWebLight; font-size: 14px;'>
Icons from <a href="https://fontawesome.com" target="_blank">Font Awesome Free</a> | Copyright
@fontawesome | <a href="https://fontawesome.com/license/free" target="_blank">
License</a> @ Icons: CC BY 4.0, Fonts: SIL OFL 1.1, Code: MIT.
</div>
<hr>
<div style='font-family: LatoLatinWeb; font-size: 18px;'>Credits</div>
<div style='font-family: LatoLatinWebLight; font-size: 14px;'>
The Onion Box benefits from the contribution of the following open source software projects.
Thank you so much!
<br>
</div>
<div style="margin: 5px">
<div class="crawl" style="overflow-y:auto; max-height: 116px;
font-family: LatoLatinWebLight; font-size: 14px;">
{cdts}
</div>
</div>
"""
return about
|
ralphwetzel/theonionbox
|
theonionbox/tob/apps/controlcenter.py
|
Python
|
mit
| 34,287
|
"""Take some of the labour out of building autoencoders/"""
|
rikkhill/four-letter-words
|
helpers/autoencoder.py
|
Python
|
mit
| 60
|
import logging
import importlib
_module_cache = {}
def get_module(module_type: str, module_id: str):
module_name = '{}.{}'.format(module_type.replace(' ', '_'), module_id.lower())
logging.debug('Searching for {} module {}...'.format(module_type, module_name))
try:
module = _module_cache[module_name]
logging.debug('Module cache hit')
except KeyError:
logging.debug('Module cache miss - importing {}...'.format(module_name))
        module = importlib.import_module(module_name)
        _module_cache[module_name] = module  # populate the cache so the next lookup hits
return module
def get_class(module_type: str, module_id: str):
module = get_module(module_type, module_id)
class_name = '{}{}'.format(
''.join([v.capitalize() for v in module_id.split('.')]),
''.join([v.capitalize() for v in module_type.split(' ')])
)
logging.debug('Trying to get {} class {}...'.format(module_type, class_name))
return getattr(module, class_name)
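# Worked example (the module and class names are purely illustrative):
#   get_module('action provider', 'Ftp.Upload') imports 'action_provider.ftp.upload',
#   and get_class('action provider', 'ftp.upload') then returns that module's
#   'FtpUploadActionProvider' attribute.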
|
einsfr/autoarchive
|
utils/module_import.py
|
Python
|
mit
| 930
|