| repo_name (string, 5-100 chars) | path (string, 4-231 chars) | language (1 class) | license (15 classes) | size (int64, 6-947k) | score (float64, 0-0.34) | prefix (string, 0-8.16k chars) | middle (string, 3-512 chars) | suffix (string, 0-8.17k chars) |
|---|---|---|---|---|---|---|---|---|
| emillon/glyr-debian | spec/provider/tests/artistphoto.py | Python | gpl-3.0 | 3,340 | 0.000599 |
#################################################################
# This file is part of glyr
# + a command-line tool and library to download various sorts of music-related metadata.
# + Copyright (C) [2011-2012] [Christopher Pahl]
# + Hosted at: https://github.com/sahib/glyr
#
# glyr is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# glyr is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with glyr. If not, see <http://www.gnu.org/licenses/>.
#################################################################
#!/usr/bin/env python
# encoding: utf-8
from tests.__common__ import *
not_found_options = {
'get_type': 'artistphoto',
'artist': 'HorseTheBand',
'album': 'Given, but not used.',
'title': 'Accidentally given'
}
TESTCASES = [{
# {{{
'name': 'bbcmusic',
'data': [{
'options': {
'get_type': 'artistphoto',
'artist': 'The Rolling Stones'
},
'expect': len_greater_0
}, {
'options': not_found_options,
'expect': len_equal_0
}],
}, {
# }}}
# {{{
'name': 'discogs',
'data': [{
'options': {
'get_type': 'artistphoto',
'artist': 'Nirvana'
},
'expect': len_greater_0
}, {
'options': not_found_options,
        'expect': len_equal_0
}],
}, {
# }}}
# {{{
'name': 'flickr',
'data': [{
'options': {
            'get_type': 'artistphoto',
'artist': 'Die Ärzte'
},
'expect': len_greater_0
}, {
'options': not_found_options,
'expect': len_equal_0
}],
}, {
# }}}
# {{{
'name': 'google',
'data': [{
'options': {
'get_type': 'artistphoto',
'artist': 'DeVildRiVeR'
},
'expect': len_greater_0
}, {
'options': not_found_options,
'expect': len_equal_0
}],
}, {
# }}}
# {{{
'name': 'lastfm',
'data': [{
'options': {
'get_type': 'artistphoto',
'artist': 'Alestorm'
},
'expect': len_greater_0
}, {
'options': not_found_options,
'expect': len_equal_0
}],
}, {
# }}}
# {{{
'name': 'singerpictures',
'data': [{
'options': {
'get_type': 'artistphoto',
'artist': 'Equilibrium'
},
'expect': len_greater_0
}, {
'options': not_found_options,
'expect': len_equal_0
}],
}, {
# }}}
# {{{
'name': 'rhapsody',
'data': [{
'options': {
'get_type': 'artistphoto',
'artist': 'In Flames'
},
'expect': len_greater_0
}, {
'options': not_found_options,
'expect': len_equal_0
}],
}
]
| MazaCoin/maza | test/functional/test_framework/messages.py | Python | mit | 38,295 | 0.002141 |
#!/usr/bin/env python3
# Copyright (c) 2010 ArtForz -- public domain half-a-node
# Copyright (c) 2012 Jeff Garzik
# Copyright (c) 2010-2017 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Bitcoin test framework primitive and message strcutures
CBlock, CTransaction, CBlockHeader, CTxIn, CTxOut, etc....:
data structures that should map to corresponding structures in
bitcoin/primitives
msg_block, msg_tx, msg_headers, etc.:
data structures that represent network messages
ser_*, deser_*: functions that handle serialization/deserialization."""
from codecs import encode
import copy
import hashlib
from io import BytesIO
import random
import socket
import struct
import time
import maza_scrypt
from test_framework.siphash import siphash256
from test_framework.util import hex_str_to_bytes, bytes_to_hex_str
MIN_VERSION_SUPPORTED = 60001
MY_VERSION = 80014 # past bip-31 for ping/pong
MY_SUBVERSION = b"/python-mininode-tester:0.0.3/"
MY_RELAY = 1 # from version 70001 onwards, fRelay should be appended to version messages (BIP37)
MAX_INV_SZ = 50000
MAX_BLOCK_BASE_SIZE = 1000000
COIN = 100000000 # 1 btc in satoshis
NODE_NETWORK = (1 << 0)
# NODE_GETUTXO = (1 << 1)
NODE_BLOOM = (1 << 2)
NODE_WITNESS = (1 << 3)
NODE_UNSUPPORTED_SERVICE_BIT_5 = (1 << 5)
NODE_UNSUPPORTED_SERVICE_BIT_7 = (1 << 7)
NODE_NETWORK_LIMITED = (1 << 10)
# Serialization/deserialization tools
def sha256(s):
return hashlib.new('sha256', s).digest()
def ripemd160(s):
return hashlib.new('ripemd160', s).digest()
def hash256(s):
return sha256(sha256(s))
def ser_compact_size(l):
r = b""
if l < 253:
r = struct.pack("B", l)
elif l < 0x10000:
r = struct.pack("<BH", 253, l)
elif l < 0x100000000:
r = struct.pack("<BI", 254, l)
else:
r = struct.pack("<BQ", 255, l)
return r
def deser_compact_size(f):
nit = struct.unpack("<B", f.read(1))[0]
if nit == 253:
nit = struct.unpack("<H", f.read(2))[0]
elif nit == 254:
nit = struct.unpack("<I", f.read(4))[0]
elif nit == 255:
nit = struct.unpack("<Q", f.read(8))[0]
return nit
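# --- Added sketch (not part of the original file): a minimal round-trip
# check for the CompactSize encoding above. Values below 253 take one byte;
# larger values use a 0xFD/0xFE/0xFF marker followed by a 2-, 4- or 8-byte
# little-endian integer.
def _compact_size_roundtrip_example():
    for n in (0, 252, 253, 0xFFFF, 0x10000, 0x100000000):
        assert deser_compact_size(BytesIO(ser_compact_size(n))) == n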
def deser_string(f):
nit = deser_compact_size(f)
return f.read(nit)
def ser_string(s):
return ser_compact_size(len(s)) + s
def deser_uint256(f):
r = 0
for i in range(8):
t = struct.unpack("<I", f.read(4))[0]
r += t << (i * 32)
return r
def ser_uint256(u):
rs = b""
for i in range(8):
rs += struct.pack("<I", u & 0xFFFFFFFF)
u >>= 32
return rs
def uint256_from_str(s):
r = 0
t = struct.unpack("<IIIIIIII", s[:32])
for i in range(8):
r += t[i] << (i * 32)
return r
def uint256_from_compact(c):
nbytes = (c >> 24) & 0xFF
v = (c & 0xFFFFFF) << (8 * (nbytes - 3))
return v
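# --- Added sketch (not part of the original file): uint256 values are stored
# as eight little-endian 32-bit words, and uint256_from_compact expands
# Bitcoin's "nBits" compact target encoding.
def _uint256_examples():
    n = 0x1234 << 200
    assert uint256_from_str(ser_uint256(n)) == n
    # The mainnet genesis nBits 0x1d00ffff expands to the familiar
    # 0x00000000ffff0000...0000 proof-of-work target.
    assert uint256_from_compact(0x1d00ffff) == 0xFFFF << 208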
def deser_vector(f, c):
nit = deser_compact_size(f)
r = []
for i in range(nit):
t = c()
t.deserialize(f)
r.append(t)
return r
# ser_function_name: Allow for an alternate serialization function on the
# entries in the vector (we use this for serializing the vector of transactions
# for a witness block).
def ser_vector(l, ser_function_name=None):
r = ser_compact_size(len(l))
for i in l:
if ser_function_name:
r += getattr(i, ser_function_name)()
else:
r += i.serialize()
return r
def deser_uint256_vector(f):
nit = deser_compact_size(f)
r = []
for i in range(nit):
t = deser_uint256(f)
r.append(t)
return r
def ser_uint256_vector(l):
r = ser_compact_size(len(l))
for i in l:
r += ser_uint256(i)
return r
def deser_string_vector(f):
nit = deser_compact_size(f)
r = []
for i in range(nit):
t = deser_string(f)
r.append(t)
return r
def ser_string_vector(l):
r = ser_compact_size(len(l))
for sv in l:
r += ser_string(sv)
return r
# Deserialize from a hex string representation (eg from RPC)
def FromHex(obj, hex_string):
obj.deserialize(BytesIO(hex_str_to_bytes(hex_string)))
return obj
# Convert a binary-serializable object to hex (eg for submission via RPC)
def ToHex(obj):
return bytes_to_hex_str(obj.serialize())
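# --- Added usage note: FromHex/ToHex give a hex round-trip for any object
# exposing serialize()/deserialize(), e.g. (sketch):
#     out = CTxOut(nValue=COIN, scriptPubKey=b"\x51")
#     assert ToHex(FromHex(CTxOut(), ToHex(out))) == ToHex(out)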
# Objects that map to bitcoind objects, which can be serialized/deserialized
class CAddress():
def __init__(self):
self.nServices = 1
self.pchReserved = b"\x00" * 10 + b"\xff" * 2
self.ip = "0.0.0.0"
self.port = 0
def deserialize(self, f):
self.nServices = struct.unpack("<Q", f.read(8))[0]
self.pchReserved = f.read(12)
self.ip = socket.inet_ntoa(f.read(4))
self.port = struct.unpack(">H", f.read(2))[0]
def serialize(self):
r = b""
r += struct.pack("<Q", self.nServices)
r += self.pchReserved
r += socket.inet_aton(self.ip)
r += struct.pack(">H", self.port)
return r
def __repr__(self):
return "CAddress(nServices=%i ip=%s port=%i)" % (self.nServices,
self.ip, self.port)
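# --- Added note: pchReserved above is the IPv4-mapped IPv6 prefix (::ffff),
# so the 16-byte address field on the wire is 12 prefix bytes followed by the
# 4 IPv4 bytes written by inet_aton.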
MSG_WITNESS_FLAG = 1<<30
class CInv():
typemap = {
0: "Error",
1: "TX",
2: "Block",
1|MSG_WITNESS_FLAG: "WitnessTx",
2|MSG_WITNESS_FLAG : "WitnessBlock",
4: "CompactBlock"
}
def __init__(self, t=0, h=0):
self.type = t
self.hash = h
def deserialize(self, f):
self.type = struct.unpack("<i", f.read(4))[0]
self.hash = deser_uint256(f)
def serialize(self):
r = b""
r += struct.pack("<i", self.type)
r += ser_uint256(self.hash)
return r
def __repr__(self):
return "CInv(type=%s hash=%064x)" \
% (self.typemap[self.type], self.hash)
class CBlockLocator():
def __init__(self):
self.nVersion = MY_VERSION
self.vHave = []
def deserialize(self, f):
self.nVersion = struct.unpack("<i", f.read(4))[0]
self.vHave = deser_uint256_vector(f)
def serialize(self):
r = b""
r += struct.pack("<i", self.nVersion)
r += ser_uint256_vector(self.vHave)
return r
def __repr__(self):
return "CBlockLocator(nVersion=%i vHave=%s)" \
% (self.nVersion, repr(self.vHave))
class COutPoint():
def __init__(self, hash=0, n=0):
self.hash = hash
self.n = n
def deserialize(self, f):
self.hash = deser_uint256(f)
self.n = struct.unpack("<I", f.read(4))[0]
def serialize(self):
r = b""
r += ser_uint256(self.hash)
r += struct.pack("<I", self.n)
return r
def __repr__(self):
return "COutPoint(hash=%064x n=%i)" % (self.hash, self.n)
class CTxIn():
def __init__(self, outpoint=None, scriptSig=b"", nSequence=0):
if outpoint is None:
self.prevout = COutPoint()
else:
self.prevout = outpoint
self.scriptSig = scriptSig
self.nSequence = nSequence
def deserialize(self, f):
self.prevout = COutPoint()
self.prevout.deserialize(f)
self.scriptSig = deser_string(f)
        self.nSequence = struct.unpack("<I", f.read(4))[0]
def serialize(self):
r = b""
r += self.prevout.serialize()
r += ser_string(self.scriptSig)
r += struct.pack("<I", self.nSequence)
return r
def __repr__(self):
return "CTxIn(prevout=%s scriptSig=%s nSequence=%i)" \
% (repr(self.prevout), bytes_to_hex_str(self.scriptSig),
self.nSequence)
class CTxOut():
def __init__(self, nValue=0, scriptPubKey=b""):
self.nValue = nValue
self.scriptPubKey = scriptPubKey
def deserialize(self, f):
self.nValue = struct.unpack("<q", f.read(8))[0]
self.scriptPubKey = deser_string(f)
def serialize(self):
r = b""
r += struct.pack("<q", self.nValue)
r += ser_string(self.scriptPubKey)
return r
def __repr__(self)
| RPCS3/ps3autotests | utils/convert-ps3-output.py | Python | gpl-2.0 | 868 | 0.008065 |
import os
import sys
def main():
if len(sys.argv) <= 2:
print("This script generates the .expected file from your PS3's de
|
bug logs.")
print("")
print("Usage: convert-ps3-output.py <input> <output>")
print("Example: convert-ps3-output.py
|
hello_world.log hello_world.expected")
return False
#Parse and check arguments
inputFile = sys.argv[1]
outputFile = sys.argv[2]
if not os.path.isfile(inputFile):
print("[!] Input file does not exist")
return False
f = open(inputFile, 'rb')
w = open(outputFile, 'wb')
data = f.read()
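    # --- Added comments: trim the raw PS3 debug log down to program output.
    # Keep everything after the "/app_home/..." launch line, drop the
    # trailing "END LOG" footer, and normalize CRLF line endings to LF.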
data = data[data.find(b"/app_home/"):]
data = data[data.find(b"\x0D\x0A")+2:]
data = data[:data.rindex(b"END LOG")-12]
data = data.replace(b"\x0D\x0A", b"\x0A")
w.write(data)
w.close()
if __name__ == "__main__":
main()
| mjlong/openmc | openmc/tallies.py | Python | mit | 100,934 | 0.000545 |
from __future__ import division
from collections import Iterable, defaultdict
import copy
import os
import pickle
import itertools
from numbers import Integral, Real
from xml.etree import ElementTree as ET
import sys
import numpy as np
from openmc import Mesh, Filter, Trigger, Nuclide
from openmc.cross import CrossScore, CrossNuclide, CrossFilter
from openmc.filter import _FILTER_TYPES
import openmc.checkvalue as cv
from openmc.clean_xml import *
if sys.version_info[0] >= 3:
basestring = str
# "Static" variable for auto-generated Tally IDs
AUTO_TALLY_ID = 10000
def reset_auto_tally_id():
global AUTO_TALLY_ID
AUTO_TALLY_ID = 10000
class Tally(object):
"""A tally defined by a set of scores that are accumulated for a list of
nuclides given a set of filters.
Parameters
----------
tally_id : Integral, optional
Unique identifier for the tally. If none is specified, an identifier
will automatically be assigned
name : str, optional
Name of the tally. If not specified, the name is the empty string.
Attributes
----------
id : Integral
Unique identifier for the tally
name : str
Name of the tally
filters : list of openmc.filter.Filter
List of specified filters for the tally
nuclides : list of openmc.nuclide.Nuclide
List of nuclides to score results for
scores : list of str
List of defined scores, e.g. 'flux', 'fission', etc.
estimator : {'analog', 'tracklength', 'collision'}
Type of estimator for the tally
triggers : list of openmc.trigger.Trigger
List of tally triggers
num_score_bins : Integral
Total number of scores, accounting for the fact that a single
user-specified score, e.g. scatter-P3 or flux-Y2,2, might have multiple
bins
num_scores : Integral
Total number of user-specified scores
num_filter_bins : Integral
Total number of filter bins accounting for all filters
num_bins : Integral
Total number of bins for the tally
num_realizations : Integral
Total number of realizations
with_summary : bool
Whether or not a Summary has been linked
sum : ndarray
An array containing the sum of each independent realization for each bin
sum_sq : ndarray
An array containing the sum of each independent realization squared for
each bin
mean : ndarray
An array containing the sample mean for each bin
std_dev : ndarray
An array containing the sample standard deviation for each bin
"""
def __init__(self, tally_id=None, name=''):
# Initialize Tally class attributes
self.id = tally_id
self.name = name
self._filters = []
self._nuclides = []
self._scores = []
self._estimator = None
self._triggers = []
self._num_score_bins = 0
self._num_realizations = 0
self._with_summary = False
self._sum = None
self._sum_sq = None
self._mean = None
self._std_dev = None
self._with_batch_statistics = False
self._derived = False
self._sp_filename = None
self._results_read = False
def __deepcopy__(self, memo):
        existing = memo.get(id(self))
# If this is the first time we have tried to copy this object, create a copy
if existing is None:
clone = type(self).__new__(type(self))
clone.id = self.id
clone.name = self.name
clone.estimator = self.estimator
clone.num_score_bins = self.num_score_bins
clone.num_realizations = self.num_realizations
clone._sum = copy.deepcopy(self._sum, memo)
clone._sum_sq = copy.deepcopy(self._sum_sq, memo)
clone._mean = copy.deepcopy(self._mean, memo)
clone._std_dev = copy.deepcopy(self._std_dev, memo)
clone._with_summary = self.with_summary
clone._with_batch_statistics = self.with_batch_statistics
clone._derived = self.derived
clone._sp_filename = self._sp_filename
clone._results_read = self._results_read
clone._filters = []
for filter in self.filters:
clone.add_filter(copy.deepcopy(filter, memo))
clone._nuclides = []
for nuclide in self.nuclides:
clone.add_nuclide(copy.deepcopy(nuclide, memo))
clone._scores = []
for score in self.scores:
clone.add_score(score)
clone._triggers = []
for trigger in self.triggers:
clone.add_trigger(trigger)
memo[id(self)] = clone
return clone
# If this object has been copied before, return the first copy made
else:
return existing
def __eq__(self, other):
if not isinstance(other, Tally):
return False
# Check all filters
if len(self.filters) != len(other.filters):
return False
for filter in self.filters:
if filter not in other.filters:
return False
# Check all nuclides
if len(self.nuclides) != len(other.nuclides):
return False
for nuclide in self.nuclides:
if nuclide not in other.nuclides:
return False
# Check all scores
if len(self.scores) != len(other.scores):
return False
for score in self.scores:
if score not in other.scores:
return False
if self.estimator != other.estimator:
return False
return True
def __ne__(self, other):
return not self == other
def __hash__(self):
return hash(repr(self))
def __repr__(self):
string = 'Tally\n'
string += '{0: <16}{1}{2}\n'.format('\tID', '=\t', self.id)
string += '{0: <16}{1}{2}\n'.format('\tName', '=\t', self.name)
string += '{0: <16}{1}\n'.format('\tFilters', '=\t')
for filter in self.filters:
string += '{0: <16}\t\t{1}\t{2}\n'.format('', filter.type,
filter.bins)
string += '{0: <16}{1}'.format('\tNuclides', '=\t')
for nuclide in self.nuclides:
if isinstance(nuclide, Nuclide):
string += '{0} '.format(nuclide.name)
else:
string += '{0} '.format(nuclide)
string += '\n'
string += '{0: <16}{1}{2}\n'.format('\tScores', '=\t', self.scores)
string += '{0: <16}{1}{2}\n'.format('\tEstimator', '=\t', self.estimator)
return string
@property
def id(self):
return self._id
@property
def name(self):
return self._name
@property
def filters(self):
return self._filters
@property
def nuclides(self):
return self._nuclides
@property
def num_nuclides(self):
return len(self._nuclides)
@property
def scores(self):
return self._scores
@property
def num_scores(self):
return len(self._scores)
@property
def num_score_bins(self):
return self._num_score_bins
@property
def num_filter_bins(self):
num_bins = 1
for filter in self.filters:
num_bins *= filter.num_bins
return num_bins
@property
def num_bins(self):
num_bins = self.num_filter_bins
num_bins *= self.num_nuclides
num_bins *= self.num_score_bins
return num_bins
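    # --- Added note: num_bins above is the product of filter bins, nuclides
    # and score bins; e.g. a 10-bin filter with 2 nuclides and 3 scores gives
    # 10 * 2 * 3 = 60 total bins.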
@property
def estimator(self):
return self._estimator
@property
def triggers(self):
return self._triggers
@property
def num_realizations(self):
return self._num_realizations
@property
def with_summary(self):
return self._with_summary
@property
def sum(self):
if not self._sp_filename:
return None
if not self._results_read:
import h5py
# Open the HDF5 statepoint file
| KeyWeeUsr/KrySA | krysa/tests/test_tasks_basic_small.py | Python | gpl-3.0 | 3,977 | 0.000503 |
import unittest
import os
import sys
import time
import sqlite3
import os.path as op
from shutil import rmtree
from functools import partial
from kivy.clock import Clock
main_path = op.dirname(op.dirname(op.abspath(__file__)))
sys.path.append(main_path)
from main import KrySA, ErrorPop
from tasks.basic import Basic
class Test(unittest.TestCase):
def pause(*args):
time.sleep(0.000001)
def run_test(self, app, *args):
        Clock.schedule_interval(self.pause, 0.000001)
# open New -> Project popup, set inputs
app.root._new_project()
app.root.savedlg.view.selection = [self.folder, ]
app.root.savedlg.ids.name.text = 'Test.krysa'
app.root.savedlg.run([self.folder, ], 'Test.krysa')
project_folder = op.join(self.path, 'test_folder', 'Test')
data = op.join(project_folder, 'data')
results = op.join(project_folder, 'results')
# open New -> Data popup, set inputs
app.root._new_data()
new_data = app.root.wiz_newdata.ids.container.children[0]
new_data.ids.table_name.text = 'NewData'
cols = new_data.ids.columns.children
# set columns for new data
range_vals = range(13)
for _ in range(2):
new_data.ids.columnadd.dispatch('on_release')
cols[0].ids.colname.text += str(len(cols))
cols[0].ids.coltype.text = 'INTEGER'
vals = cols[0].ids.vals.children
for i in range_vals:
cols[0].ids.valadd.dispatch('on_release')
vals[0].ids.value.text = str(i + 1)
new_data.ids.columnadd.dispatch('on_release')
cols[0].ids.colname.text += str(len(cols))
cols[0].ids.coltype.text = 'REAL'
vals = cols[0].ids.vals.children
for i in range_vals:
cols[0].ids.valadd.dispatch('on_release')
num = str(i + 1)
vals[0].ids.value.text = num + '.' + num
new_data = app.root.wiz_newdata.run()
# open Task's popup and get task
k = '3'
address = ['A1:D13', 'A1:B2', 'C1:D2',
'A12:B13', 'C12:D13', 'B3:C10']
for addr in address:
taskcls = Basic()
taskcls.basic_small()
children = app.root_window.children
for c in reversed(children):
if 'Task' in str(c):
index = children.index(c)
task = children[index]
# fill the task
body = task.children[0].children[0].children[0].children
body[-1].text = 'NewData'
body[-2].children[0].children[-1].children[0].text = addr
body[-2].children[0].children[-2].children[0].text = k
body[-3].children[0].dispatch('on_release')
# get results and test
expected = reversed([2, 2, 2, 13, 13, 4])
results = app.root.ids.results
skipone = False # if top padding with widget present
for c in results.children:
if 'Widget' in str(c):
skipone = True
break
for i, exp in enumerate(expected):
i = i + 1 if skipone else i
# Result -> Page -> container -> result
result = float(results.children[i].ids.page.children[1].text)
self.assertEqual(result, exp)
app.stop()
def test_tasks_basic_small(self):
self.path = op.dirname(op.abspath(__file__))
if not op.exists(op.join(self.path, 'test_folder')):
os.mkdir(op.join(self.path, 'test_folder'))
else:
rmtree(op.join(self.path, 'test_folder'))
os.mkdir(op.join(self.path, 'test_folder'))
self.folder = op.join(self.path, 'test_folder')
app = KrySA()
p = partial(self.run_test, app)
Clock.schedule_once(p, .000001)
app.run()
rmtree(self.folder)
if __name__ == '__main__':
unittest.main()
| google/sling | sling/nlp/parser/trainer/transition_generator.py | Python | apache-2.0 | 12,819 | 0.012637 |
# Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sling
from action import Action
# Outputs a list of transitions that represent a given document's frame graph.
class TransitionGenerator:
# Bookkeeping for one frame.
class FrameInfo:
def __init__(self, handle):
self.handle = handle
self.type = None
self.edges = []
self.from_mention = False
# Whether this frame has been evoked.
self.output = False
# Labeled edge between two frames. Each edge is used to issue a CONNECT,
# EMBED, ELABORATE, or ASSIGN action.
class Edge:
def __init__(self, incoming=None, role=None, value=None):
self.incoming = incoming
self.role = role
self.neighbor = value
self.inverse = None
self.used = False
# Simple action that will be translated into an Action object later on.
class SimpleAction:
def __init__(self, t=None):
self.action = Action(t)
self.info = None
self.other_info = None
def __init__(self, commons):
self.commons = commons
self._id = commons["id"]
self._isa = commons["isa"]
# Returns whether 'handle' refers to another frame.
def is_ref(self, handle):
return type(handle) == sling.Frame
# Creates a FrameInfo object for 'frame' and recursively for all frames
# pointed to by it.
def _init_info(self, frame, frame_info, initialized):
if frame in initialized:
return
initialized[frame] = True
info = frame_info.get(frame, None)
if info is None:
info = TransitionGenerator.FrameInfo(frame)
frame_info[frame] = info
pending = []
for role, value in frame:
if not self.is_ref(value) or role == self._id: continue
if role == self._isa and value.islocal(): continue
if role == self._isa and info.type is None:
info.type = value
else:
edge = TransitionGenerator.Edge(incoming=False, role=role, value=value)
info.edges.append(edge)
if value == frame:
edge.inverse = edge
continue
if value.islocal():
nb_info = frame_info.get(value, None)
if nb_info is None:
nb_info = TransitionGenerator.FrameInfo(value)
frame_info[value] = nb_info
nb_edge = TransitionGenerator.Edge(
incoming=True, role=role, value=frame)
nb_info.edges.append(nb_edge)
nb_edge.inverse = edge
edge.inverse = nb_edge
pending.append(value)
# Initialize bookkeeping for all frames pointed to by this frame.
for p in pending:
self._init_info(p, frame_info, initialized)
# Translates 'simple' action to an Action using indices from 'attention'.
def _translate(self, attention, simple):
action = Action(t=simple.action.type)
if simple.action.length is not None:
action.length = simple.action.length
if simple.action.role is not None:
action.role = simple.action.role
if action.type == Action.EVOKE:
action.label = simple.info.type
elif action.type == Action.REFER:
      action.target = attention.index(simple.info.handle)
elif action.type == Action.EMBED:
action.label = simple.info.type
action.target = attention.index(simple.other_info.handle)
    elif action.type == Action.ELABORATE:
action.label = simple.info.type
action.source = attention.index(simple.other_info.handle)
elif action.type == Action.CONNECT:
action.source = attention.index(simple.info.handle)
action.target = attention.index(simple.other_info.handle)
elif action.type == Action.ASSIGN:
action.source = attention.index(simple.info.handle)
action.label = simple.action.label
return action
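  # --- Added note: e.g. a CONNECT between the frame at attention index 0 and
  # the frame at attention index 2 is translated into an Action with source=0
  # and target=2; attention.index() supplies those indices.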
# Updates frame indices in 'attention' as a result of the action 'simple'.
def _update(self, attention, simple):
t = simple.action.type
if t in [Action.EVOKE, Action.EMBED, Action.ELABORATE]:
# Insert a new frame at the center of attention.
attention.insert(0, simple.info.handle)
elif t in [Action.REFER, Action.ASSIGN, Action.CONNECT]:
# Promote an existing frame to the center of attention.
attention.remove(simple.info.handle)
attention.insert(0, simple.info.handle)
# Builds and returns a simple action of type 'type'.
def _simple_action(self, type=None):
return TransitionGenerator.SimpleAction(type)
# Stores mentions starting or ending or both at a given token.
class TokenToMentions:
def __init__(self):
self.starting = []
self.ending = []
self.singletons = []
# Record 'mention' at starting at this token.
def start(self, mention):
if len(self.starting) > 0:
# Check that the mention respects nesting.
assert self.starting[-1].end >= mention.end
self.starting.append(mention)
# Record 'mention' as ending at this token.
def end(self, mention):
if len(self.ending) > 0:
# Check that the mention respects nesting.
assert self.ending[0].begin <= mention.begin
self.ending.insert(0, mention) # most-nested is at the front
# Record 'mention' as starting and ending at this token.
def singleton(self, mention):
self.singletons.append(mention)
# Returns if there are no mentions starting/ending at this token.
def empty(self):
return len(self.starting) + len(self.ending) + len(self.singletons) == 0
# Returns a string representation of the object.
def __repr__(self):
return "Starting:" + str(self.starting) + ", Ending:" + \
str(self.ending) + ", Singletons:" + str(self.singletons)
# Generates transition sequence for 'document' which should be an instance of
# AnnotatedDocument.
def generate(self, document):
    frame_info = {}  # frame -> FrameInfo (tracks, among other things, whether it is evoked from a span)
initialized = {} # frame -> whether the frame's book-keeping is done
# Initialize book-keeping for all evoked frames.
for m in document.mentions:
for evoked in m.evokes():
self._init_info(evoked, frame_info, initialized)
frame_info[evoked].from_mention = True
# Initialize book-keeping for all thematic frames.
for theme in document.themes:
self._init_info(theme, frame_info, initialized)
# Record start/end boundaries of all mentions.
token_to_mentions = []
for _ in range(len(document.tokens)):
token_to_mentions.append(TransitionGenerator.TokenToMentions())
for m in document.mentions:
if m.length == 1:
token_to_mentions[m.begin].singleton(m)
else:
token_to_mentions[m.begin].start(m)
token_to_mentions[m.end - 1].end(m)
# Single token mentions are handled via EVOKE(length=1), and others
# are handled via MARK at the beginning token and EVOKE(length=None)
# at the end token.
simple_actions = []
marked = {} # frames for which we have output a MARK
evoked = {} # frames for which we have output an EVOKE
for index in range(len(document.tokens)):
t2m = token_to_mentions[index]
# First evoke/refer the singleton mentions.
for singleton in t2m.singletons:
for frame in singleton.evokes():
# If the frame is already evoked, refer to it.
if frame in marked:
assert frame in evoked, "Referring to marked but not evoked frame"
if frame in evoked:
refer = self._simple_action(Action.REFER)
refer.info = frame_info[frame]
refer.action.length = singleton.length # should be 1
simple_actions.append(refer)
continue
# Otherwise evoke a
| skosukhin/spack | var/spack/repos/builtin/packages/bpp-suite/package.py | Python | lgpl-2.1 | 1,771 | 0.001129 |
##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class BppSuite(CMakePackage):
"""BppSuite is a suite of ready-to-use programs for phylogenetic and
sequence analysis."""
homepage = "http://biopp.univ-montp2.fr/wiki/index.php/BppSuite"
url = "http://biopp.univ-montp2.fr/repos/sources/bppsuite/bppsuite-2.2.0.tar.gz"
version('2.2.0', 'd8b29ad7ccf5bd3a7beb701350c9e2a4')
depends_on('cmake@2.6:', type='build')
depends_on('texinfo', type='build')
depends_on('bpp-core')
depends_on('bpp-seq')
depends_on('bpp-phyl')
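    # --- Added usage note: with this recipe in a Spack package repo, the
    # suite and its bpp-* dependencies build via `spack install bpp-suite`.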
| rodrigods/keystone | keystone/tests/test_backend.py | Python | apache-2.0 | 205,792 | 0.000005 |
# Copyright 2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
import datetime
import hashlib
import uuid
from keystoneclient.common import cms
import mock
from oslo.utils import timeutils
import six
from testtools import matchers
from keystone.common import driver_hints
from keystone import config
from keystone import exception
from keystone import tests
from keystone.tests import default_fixtures
from keystone.tests import filtering
from keystone.tests import test_utils
from keystone.token import provider
CONF = config.CONF
DEFAULT_DOMAIN_ID = CONF.identity.default_domain_id
TIME_FORMAT = '%Y-%m-%dT%H:%M:%S.%fZ'
NULL_OBJECT = object()
class IdentityTests(object):
def _get_domain_fixture(self):
domain = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
self.assignment_api.create_domain(domain['id'], domain)
return domain
def _set_domain_scope(self, domain_id):
# We only provide a domain scope if we have multiple drivers
if CONF.identity.domain_specific_drivers_enabled:
return domain_id
def test_project_add_and_remove_user_role(self):
user_ids = self.assignment_api.list_user_ids_for_project(
self.tenant_bar['id'])
self.assertNotIn(self.user_two['id'], user_ids)
self.assignment_api.add_role_to_user_and_project(
tenant_id=self.tenant_bar['id'],
user_id=self.user_two['id'],
role_id=self.role_other['id'])
user_ids = self.assignment_api.list_user_ids_for_project(
self.tenant_bar['id'])
self.assertIn(self.user_two['id'], user_ids)
self.assignment_api.remove_role_from_user_and_project(
tenant_id=self.tenant_bar['id'],
user_id=self.user_two['id'],
role_id=self.role_other['id'])
user_ids = self.assignment_api.list_user_ids_for_project(
self.tenant_bar['id'])
self.assertNotIn(self.user_two['id'], user_ids)
def test_remove_user_role_not_assigned(self):
# Expect failure if attempt to remove a role that was never assigned to
# the user.
self.assertRaises(exception.RoleNotFound,
self.assignment_api.
remove_role_from_user_and_project,
tenant_id=self.tenant_bar['id'],
user_id=self.user_two['id'],
role_id=self.role_other['id'])
def test_authenticate_bad_user(self):
self.assertRaises(AssertionError,
self.identity_api.authenticate,
context={},
user_id=uuid.uuid4().hex,
password=self.user_foo['password'])
def test_authenticate_bad_password(self):
self.assertRaises(AssertionError,
self.identity_api.authenticate,
context={},
user_id=self.user_foo['id'],
password=uuid.uuid4().hex)
def test_authenticate(self):
        user_ref = self.identity_api.authenticate(
            context={},
user_id=self.user_sna['id'],
password=self.user_sna['password'])
# NOTE(termie): the password field is left in user_sna to make
# it easier to authenticate in tests, but should
# not be returned by the api
self.user_sna.pop('password')
self.user_sna['enabled'] = True
self.assertDictEqual(user_ref, self.user_sna)
def test_authenticate_and_get_roles_no_metadata(self):
user = {
'name': 'NO_META',
'domain_id': DEFAULT_DOMAIN_ID,
'password': 'no_meta2',
}
new_user = self.identity_api.create_user(user)
self.assignment_api.add_user_to_project(self.tenant_baz['id'],
new_user['id'])
user_ref = self.identity_api.authenticate(
context={},
user_id=new_user['id'],
password=user['password'])
self.assertNotIn('password', user_ref)
# NOTE(termie): the password field is left in user_sna to make
# it easier to authenticate in tests, but should
# not be returned by the api
user.pop('password')
self.assertDictContainsSubset(user, user_ref)
role_list = self.assignment_api.get_roles_for_user_and_project(
new_user['id'], self.tenant_baz['id'])
self.assertEqual(1, len(role_list))
self.assertIn(CONF.member_role_id, role_list)
def test_authenticate_if_no_password_set(self):
id_ = uuid.uuid4().hex
user = {
'name': uuid.uuid4().hex,
'domain_id': DEFAULT_DOMAIN_ID,
}
self.identity_api.create_user(user)
self.assertRaises(AssertionError,
self.identity_api.authenticate,
context={},
user_id=id_,
password='password')
def test_create_unicode_user_name(self):
unicode_name = u'name \u540d\u5b57'
user = {'name': unicode_name,
'domain_id': DEFAULT_DOMAIN_ID,
'password': uuid.uuid4().hex}
ref = self.identity_api.create_user(user)
self.assertEqual(unicode_name, ref['name'])
def test_get_project(self):
tenant_ref = self.assignment_api.get_project(self.tenant_bar['id'])
self.assertDictEqual(tenant_ref, self.tenant_bar)
def test_get_project_404(self):
self.assertRaises(exception.ProjectNotFound,
self.assignment_api.get_project,
uuid.uuid4().hex)
def test_get_project_by_name(self):
tenant_ref = self.assignment_api.get_project_by_name(
self.tenant_bar['name'],
DEFAULT_DOMAIN_ID)
self.assertDictEqual(tenant_ref, self.tenant_bar)
def test_get_project_by_name_404(self):
self.assertRaises(exception.ProjectNotFound,
self.assignment_api.get_project_by_name,
uuid.uuid4().hex,
DEFAULT_DOMAIN_ID)
def test_list_user_ids_for_project(self):
user_ids = self.assignment_api.list_user_ids_for_project(
self.tenant_baz['id'])
self.assertEqual(2, len(user_ids))
self.assertIn(self.user_two['id'], user_ids)
self.assertIn(self.user_badguy['id'], user_ids)
def test_list_user_ids_for_project_no_duplicates(self):
# Create user
user_ref = {
'name': uuid.uuid4().hex,
'domain_id': DEFAULT_DOMAIN_ID,
'password': uuid.uuid4().hex,
'enabled': True}
user_ref = self.identity_api.create_user(user_ref)
# Create project
project_ref = {
'id': uuid.uuid4().hex,
'name': uuid.uuid4().hex,
'domain_id': DEFAULT_DOMAIN_ID}
self.assignment_api.create_project(
project_ref['id'], project_ref)
# Create 2 roles and give user each role in project
for i in range(2):
role_ref = {
'id': uuid.uuid4().hex,
'name': uuid.uuid4().hex}
self.assignment_api.create_role(role_ref['id'], role_ref)
self.assignment_api.add_role_to_user_and_project(
user_id=user_ref['id'],
tenant_id=project_ref['id'],
role_id=role_ref['id'])
# Get the list of user_ids in project
| xiangke/pycopia | core/pycopia/OS/Linux/firewall.py | Python | lgpl-2.1 | 1,458 | 0.006173 |
#!/usr/bin/python2.4
# vim:ts=4:sw=4:softtabstop=4:smarttab:expandtab
#
# $Id$
#
# Copyright (C) 1999-2006 Keith Dart <keith@kdart.com>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
"""
Module for managing Linux firewall feature from Python, using sudo to run
ipfw program.
NOTE: this only works on Linux with firewall option enabled in the kernel.
"""
import sudo
#import socketlib
def port_forward(srcport, destport, rule=None):
"""Use firewall rule to forward a TCP port to a different port. Useful for
redirecting privileged ports to non-privileged ports. """
return NotImplemented
def add(rule, action):
return NotImplemented
def delete(rule):
return NotImplemented
def flush():
return NotImplemented
# XXX some day make this complete... :-)
class Firewall(object):
def read(self):
"""Read current rule set."""
return NotImplemented
class IPChains(object):
pass
if __name__ == "__main__":
pass
| daverstephens/The-SOC-Shop | Threat_Intel/GScraper.py | Python | gpl-2.0 | 1,824 | 0.014254 |
#requirements: selenium wget python2.7 ChromeDriver (the script drives Chrome)
import time
import sys
import wget
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.common.keys import Keys
def googlescrape(url):
browser = webdriver.Chrome()
browser.get(url)
    time.sleep(3)  # sleep for 3 seconds so you can see the results
results = browser.find_elements_by_css_selector('div.g')
if len(results) == 0:
print "No results found"
browser.quit()
else:
        for x in range(0,len(results)):
link = results[x].find_element_by_tag_name("a")
href = link.get_attribute("href")
print href
wget.download(href)
browser.quit()
return
if len(sys.argv) == 3:
domain = sys.argv[1]
ftype = sys.argv[2]
url = "https://www.google.com/search?num=100&start=0&hl=em&meta=&q=site:"
url += domain
url += "+filetype:"
url += ftype
url += "&filter=0"
googlescrape(url)
elif len(sys.argv) == 2:
for i in range (0,3):
if i==0:
print "Checking for pdfs..."
ftype = "pdf"
elif i == 1:
print "Checking for docs..."
ftype = "doc"
elif i == 2:
print "Checking for xls..."
ftype = "xls"
domain = sys.argv[1]
url = "https://www.google.com/search?num=100&start=0&hl=em&meta=&q=site:"
url += domain
url += "+filetype:"
url += ftype
url += "&filter=0"
googlescrape(url)
else:
print "Error: Improper number of arguments. Usage: python search.py domain.com pdf"
sys.exit()
| cdecker/pycoin | tests/test_network.py | Python | bsd-3-clause | 8,572 | 0 |
from bitcoin import network
from bitcoin import messages
from io import BytesIO
from mock import patch
from gevent import socket
import mock
import os
import unittest
__author__ = 'cdecker'
BASENAME = os.path.dirname(__file__)
class TestNetworkClient(unittest.TestCase):
def test_parser(self):
"""Test parser selection.
Test to see whether we are selecting the correct parser.
"""
tx = BytesIO(open(
os.path.join(BASENAME, 'resources', 'tx-9c0f7b2.dmp'),
'r'
).read())
connection = network.Connection(None, ('host', 8333))
message = connection.parse_message('tx', tx)
self.assertEqual('tx', message.type)
self.assertIsInstance(message, messages.TxPacket)
def test_misc(self):
nc = network.NetworkClient()
self.assertRaises(NotImplementedError, nc.run_forever)
v = mock.Mock()
c = mock.Mock()
nc.handle_version(c, v)
self.assertEquals(v.version, c.version)
class TestConnection(unittest.TestCase):
def test_misc(self):
c = network.Connection(None, ('127.0.0.1', 8333))
self.assertRaises(NotImplementedError, c.disconnect)
def testConnectDisconnect(self):
nc = network.NetworkClient()
nc.connection_class = mock.MagicMock()
conn = nc.connect(('1.2.3.4', 8333))
nc.connect(('1.2.3.4', 8334))
nc.connect(('1.2.3.5', 8333))
self.assertRaises(ValueError, nc.connect, ('1.2.3.4', 8333))
self.assertTrue(('1.2.3.4', 8333) in nc.connections)
self.assertEqual(len(nc.connections), 3)
self.assertFalse(conn.disconnect.called)
nc.disconnect(('1.2.3.4', 8333))
self.assertTrue(conn.disconnect.called)
self.assertTrue(('1.2.3.4', 8333) not in nc.connections)
self.assertEqual(len(nc.connections), 2)
self.assertRaises(ValueError, nc.disconnect, ('1.2.3.4', 8333))
self.assertEqual(len(nc.connections), 2)
def test_roundtrip(self):
""" Do a full roundtrip of the network stack.
Use Connection to serialize a message and GeventConnection to
deserialize it again.
"""
network_client = mock.MagicMock()
connection = network.GeventConnection(network_client,
('127.0.0.1', 8333),
False)
connection.socket = mock.Mock()
p = messages.GetDataPacket()
connection.send(p.type, p)
wire = BytesIO(connection.socket.send.call_args[0][0])
def recv(n):
return wire.read(n)
connection.socket.recv = recv
message = connection.read_message()
self.assertTrue(isinstance(message, messages.GetDataPacket))
# This should produce a short read
wire = BytesIO(connection.socket.send.call_args[0][0][:-2])
        self.assertRaises(ValueError, connection.read_message)
# This will raise a non-matching magic error
wire = BytesIO("BEEF" + connection.socket.send.call_args[0][0][4:])
self.assertRaises(ValueError, connection.read_message)
class TestGeventNetworkClient(unittest.TestCase):
def test_init(self):
network.GeventNetworkClient()
@mock.patch('bitcoin.network.gevent')
def test_connect(self, mgevent):
nc = network.GeventNetworkClient()
conn = nc.connect(('10.0.0.1', 8333))
self.assertTrue(conn)
self.assertTrue(mgevent.spawn.called)
def test_run_forever(self):
nc = network.GeventNetworkClient()
nc.connection_group = mock.Mock()
nc.run_forever()
self.assertTrue(nc.connection_group.join.called)
@mock.patch('bitcoin.network.socket')
def test_listen(self, msocket):
nc = network.GeventNetworkClient()
group_size = len(nc.connection_group)
nc.listen()
self.assertTrue(nc.socket.bind.called)
self.assertTrue(nc.socket.listen.called)
self.assertEquals(len(nc.connection_group), group_size + 1)
@mock.patch('bitcoin.network.gevent.spawn_later')
@mock.patch('bitcoin.network.gevent.spawn')
def test_accept(self, mspawn, mspawn_later):
nc = network.GeventNetworkClient()
nc.socket = mock.Mock()
connection_handler = mock.Mock()
nc.register_handler(
network.ConnectionEstablishedEvent.type,
connection_handler
)
nc.socket.accept = mock.Mock(side_effect=[
(mock.Mock(), ('10.0.0.1', 8333)),
StopIteration()
])
self.assertRaises(StopIteration, nc.accept)
self.assertTrue(connection_handler.called)
@mock.patch('bitcoin.network.gevent.spawn_later')
@mock.patch('bitcoin.network.gevent.spawn')
def test_accept_idle_timeout(self, mspawn, mspawn_later):
nc = network.GeventNetworkClient()
nc.socket = mock.Mock()
connection_handler = mock.Mock()
nc.register_handler(
network.ConnectionLostEvent.type,
connection_handler
)
nc.socket.accept = mock.Mock(side_effect=[
(mock.Mock(), ('10.0.0.1', 8333)),
StopIteration()
])
def spawn_later(t, callable, *args, **kwargs):
callable(*args, **kwargs)
# Wire the idle timeout handler to be called immediately
mspawn_later.side_effect = spawn_later
self.assertRaises(StopIteration, nc.accept)
self.assertEquals(len(nc.connections), 0)
self.assertTrue(connection_handler.called)
class TestUtil(unittest.TestCase):
def test_bootstrap(self):
res = network.bootstrap()
self.assertTrue(res)
@patch('bitcoin.network.socket.getaddrinfo')
def test_bootstrap_fail(self, getaddrinfo):
""" socket.getaddrinfo may return None. """
def side_effect(a, b):
if a == network.DNS_SEEDS[0]:
raise socket.gaierror()
else:
return [(2, 2, 17, '', ('68.48.214.241', 0))]
getaddrinfo.side_effect = side_effect
res = network.bootstrap()
self.assertListEqual(res, [('68.48.214.241', 8333)])
class TestBehavior(unittest.TestCase):
def setUp(self):
self.network_client = mock.Mock()
self.network_client.bytes_sent = 0
self.connection = mock.Mock(incoming=False, host=('127.0.0.1', 8333))
def test_client_behavior_init(self):
network.ClientBehavior(self.network_client)
args = self.network_client.register_handler.call_args_list
types = [a[0][0] for a in args]
# Ensure we have at least handlers for the connection established event
# and an incoming version message
self.assertTrue(network.ConnectionEstablishedEvent.type in types)
self.assertTrue(messages.VersionPacket.type in types)
def test_client_behavior_on_connect(self):
b = network.ClientBehavior(self.network_client)
message = mock.Mock(type=network.ConnectionEstablishedEvent.type)
# We should not send anything on new incoming connections
self.connection.incoming = True
b.on_connect(self.connection, message)
self.assertFalse(self.connection.send.called)
# On outgoing connections we initiate the handshake
self.connection.incoming = False
b.on_connect(self.connection, message)
self.assertTrue(self.connection.send.called)
def test_client_behavior_send_verack(self):
b = network.ClientBehavior(self.network_client)
b.send_verack(self.connection)
self.connection.send.assert_called_with('verack', '')
self.assertEquals(self.connection.send.call_count, 1)
def test_client_behavior_on_version(self):
b = network.ClientBehavior(self.network_client)
b.send_version = mock.Mock()
b.send_verack = mock.Mock()
# This is an outgoing connection, so we should send just one verack
self.connection.incoming = False
b.on_version(self.connection, mock.Mock())
self.assertFalse(b.send_version.called)
self.
| micgro42/ReceiptEval | receipteval/receipt_collection.py | Python | gpl-3.0 | 3,399 | 0 |
# -*- coding: utf-8 -*-
'''
Created on Nov 30, 2014
@author: Michael Große <mic.grosse@posteo.de>
'''
from collections import defaultdict
from receipteval.item_cat_dict import ItemCategoryDict
class ReceiptCollection(object):
'''
Collection of purchases with evaluation and output options.
'''
    def __init__(self, purchases=None):
self.categories = defaultdict(lambda: [0.0, set(), 0.0])
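        # --- Added comment: each category maps to a triple of
        # [price sum, set of item names, totalized sum including
        # sub-categories]; the third slot is filled by totalize_categories().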
        self.purchases = purchases if purchases is not None else []
self.unsane_items = []
self.unsane_categories = []
self.category_dict = ItemCategoryDict()
self.total = 0.0
def collect_items(self):
'''
sort all positions in our stored receipts/purchases into their
respective categories
'''
for purchase in self.purchases:
self.unsane_items.extend(purchase.unsane_items)
for item in purchase.positions:
self.categories[item.category][1].add(item.name)
self.categories[item.category][0] += item.price
self.check_sanity()
self.calculate_total()
def totalize_categories(self):
self.initialize_super_categories()
for category in self.categories.keys():
catsum = 0
length = len(category)
for cat in self.categories.keys():
if (cat[0:length] == category):
catsum += self.categories[cat][0]
self.categories[category][2] = catsum
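    # --- Added note: category names are colon-separated paths, so with the
    # super-categories initialized below, a parent such as 'food' accumulates
    # the totals of 'food:fruit' and 'food:veg' via the prefix match above.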
def initialize_super_categories(self):
missing_super_categories = []
for category in self.categories.keys():
if (category[:category.rfind(':')] not in self.categories.keys()):
missing_super_categories.append(category[:category.rfind(':')])
for missing in missing_super_categories:
while True:
if missing not in self.categories:
self.categories[missing][0] = 0
if missing.rfind(':') == -1:
break
missing = missing[:missing.rfind(':')]
def check_category(self, category, item):
'''
make list of conflicting categories
'''
stored_category = self.category_dict.get_category(item)
if category != stored_category:
self.unsane_categories.append((item, category, stored_category))
def check_sanity(self):
'''
        make list of items belonging to more than one category
'''
all_items = set()
for category in self.categories:
            if category == '':
continue
for item in self.categories[category][1]:
if item in all_items:
self.unsane_items.append(item)
self.check_category(category, item)
all_items.add(item)
def calculate_total(self):
'''
calculate the grand total across all categories
'''
self.total = 0.0
for category in self.categories:
self.total += self.categories[category][0]
def get_ledger(self, date='1900-01-01'):
'''
create output in the format of the ledger application
'''
ledger_output = ""
for receipt in sorted(self.purchases, key=lambda t: t.date):
if receipt.date >= date:
ledger_output += receipt.get_ledger()
return ledger_output
| raptorbird/RAVN | tests/runtests.py | Python | gpl-2.0 | 227,702 | 0.00004 |
#! /usr/bin/env python
sources = """
eNrsvWt3HEl2IDa7fqy3bK0kW1577WOfnKKpzGwWkiCnJY2wXd3q4ZAjanqaPHxoWgcNFRNVCSAH
icpiZhYBaDR7/CP83T/CX/3df8v3Fc+MzCqwu2fkc9zSEEBVxI2IGzfuK+698b//69+9/1Hy9k82
t9miqs+zxaJcl91i8f5fvf276XQawWfn5fo8+vLl8yiJN0292i6Lpo2jfL2K4mW9brdX9Df8ui6W
XbGKPpR5dFncXtfNqk0jADKZvP/Xb/8NjtB2q/f/2Zv/81/96Efl1aZuuqi9bSeTZZW3bfS6WyX1
6W8ARno0ieA/HP4qvyzaqKs3B1XxoaiizW13Ua+jK5hGBV/kH/Kyyk+rIsrhj3WUd11Tnm67YkYQ
8D8eCJfQXRRXEXQ+K5u2i/LlsmjbTI00oV9WxVmkMJC0RXUmU8H/8E9Az6pcwpfRHKeeyTzszudF
h7OQ/rNonV8VFpSuuTV/4H9XAAqGpFlCJ2quGxQ3y2LTRc/p26dNUzdu5yYv2yL6Uq2aWiRTwDQg
+gi2ZFutonXdCRKi++00uh+5QzRFt20Ao5MJ9IG54Dakk/f/+ds/wg1b1qsiw3/e/xdv/u/f6G3b
3E6sDTxr6quoXLcb2Ds11JMXi7//8tWXr37xeia///LpP/z6xaufv55MTrdlBTuyaIpNAyPij8kE
/63KU/gbxpUW2QLQxQCTGBvEsyiWhnE6mZRntAsfgADLeg37dlYfH55En8+jnzCeaGZdky+L03x5
qeZ2VjdXebdg5GLHel3dToqqLaxeevWLze3jPUEIJT+Bbn1Svm7yzaZoorypt3B2XjIl4xARt22J
DoNkOIOdvsamFiXB4nFrL/IW6S2RBrNouqwXZ2VV4DZPU59eqBEjmVYH5CofKgjpMK3SEVCwcee4
R2aNGGoPx60q18W69ruYLw6iR/2e/VGcEeRwuNQfOh9vbjfqaCDGchvpR9H9Bg6FRl+augcePjdT
sM958V7vTQ2cpbEwLUfK9J9zE/zDBrEudoHA6WIDDYK7/w3wYSCl7lYD2+Tdhc+wkOgETk4NZMnR
pi7XzBHrqK23zbJgjCQwXAFsModT3GkwV+X5RUczoX7YCTntstvmVXULu1C2BAwpIM00DeN/GyY0
HDur6mVeJQonNskYjN8Dfn97WkSreh13SH4wmbKNlhfF8hKG8El/k9E3iUfk96JvvvlGICGMi7xZ
wbmryktcXBFdF2WzQsFWLr1+5ZoatB0ItxzbAD86Rgpd5jBStt2s8o5/P4E5Fu0XTn9cbWh9/qZu
hjbxbFtVvB/jWylH9zVvnWwqcCSaPAJRu4oziOoz+pzo14Knf3e4neJvDMC0AaCziKQefQGHer2y
popL7okU7GTI/TsvzDq0catWSGw3tKp7+hSahgIKyHuTwxoBMQ5S1PY4s7CWp5eCIr45b+Xofsib
+bMchMfQsrrtBrbhuoQDiOuArqAywUFC2mhDy5s4ZAXEHsMYcQQnoS266E2zBSBAKGoE7M2wZKuh
dclK0XrlgBKtTE+hja4vClhxU7Tw1wAeLwAKE9UFUMZyyzsCOKBTj4iYWOLFOgP6Y2gDqgismBgp
Hg31iX2iYdbuOdbdHuh+Z1V+3kZ/bmkXd+uhdRBvz6UxTIEQeXykIJ0omf6sgS96Qv3XrkzPlVQ/
w9bRRV2tiDMuiPm1pDOfLc6r+hT+IhjAca4vyuUFsFHcBVRjgN8BfwXeVXzIqy0wnFU2rJ/OeChf
TdXilr7NYAJ9McvCWc3GamvPz2ooa7Bg0gchcUkt3C88tYNUJAWItY4BpgjMuCuQWEOsQ3/JgoLR
Dr/AEbeJGPVENYnMcFls+XW9LjydIcgGptM0KN89kKhPuTOWvbDYB+6rbB5rbJ98AoTXektTu49G
1qqIlWxi1DoTRu6ABlkD/IOU0byK8tWqlF9pmzRPaCeBxbYEGuhvW3WKicj4ACMsNQw9OPQBCNnc
JmmvnQjPhFbqY5IwwrhwqXKm+9v4uymWiz0QCM0+Bnn/cQx5PxwqLKuHFziOj8hCCFpESo+0GVRP
EsVtfgbYAD1vfdAUyy2YTR9gDDgCB0ilKZynBhkWGWbI5WORt8F1m4NS1hlCpnnIDMzsyhasuG0x
NEGBYku+j5CxFSihRLkoa9uIzGjsVm1hVbgS0FWN2NtXwJbrZbVdFT2hqgSpL3z2FaowbZga0Mvx
iaEOnGRzjqRqGIvCAgzuKbk928zAzVAmrVdJAl1nLkkew0cnloljmVG/LG4DBhRRJso/1gVYHQfx
VC+Benih2xZJ5mV7u6x7YpXmo0ToG2VEP13D7PsGch4hJMBwgd8jInLLdtcykNwGi7a7rVCgIP+e
8DI2DToA1GcjhjTB73l21BespdCvA0JVfZ11p7ZgNSKr6KxJyrhXrvUOCAExR5/iSpMpaVdTMN+r
en0+Tf3J2Wu+0qZowIYgPcUTlZ5Ie6bbmFXjWliVGILcFBUudgC2jaEDIToS7ywgjalvbwytajEA
cfqZSzDR/fbo/upzNNZ98GhgzuwpPHj0cfrEDgtk2zSoaxitwz7UolPM+4vX2kEPaXvpDPtb+2Tk
g31ORqxl2Ae4dgiFDvdlhjG22wE9UAtCNeVEQ5rRsVT/TqUlsGxQnIumym9JU27IaWWLtnLdFQ0w
09B+vTLfsnzPywrBmA1CZq10nBwgdtCiWEXIKNCBZ2s3eChPazBvrtFCxOnT9y3pBvAX9hBd3Ncr
Ne8JKpSGMDoW05meX5qhxN0kLke+sRTlhYMBgjSz0D9Dq2xbrRa49DlKrtSXbeT/Bc6KXg1QZG9m
OI80IDx8X5nBLWOwQF/y+kB0BHKbRQDOEycuQubRTZB2VAOH5DSXCPsK7qHz/u/YwGJ133Igis10
8CgC6ZfjMbUcA8rHnd8kI5xpFh26Rr41jRkw7I48P3Pc4LAaosnPHL3MM6avC5CXrFCgWCVS1KDx
6OJuAVsE1RDvNLquKSyt8x55EaCH9jsTJUeIy852kLkebGXtsGvH5jZNvj4vFjDOR3GxUnl1Rg0p
LaHFAwGwYcA124POlwBPowIgIir6UBlCkH15hI8tB8GwDFbTUMMiK06gH7MpSwh36DCSYQOUOuIz
l0Fm0WIGig3esAQ3wOb7M0HrrDfjff6TAefys3eZ9Pp23eU3AV2PZ2cL8geWu8Bcj9wRxYTYY2h5
YnY+LAiPCc1HMI8TPoeaGG1xwh86BsZFuVoV6xHfIqn05ZkjxcVHgxeHqNyDOqKVTYBXLBYeMbd1
9UGc5gjOtSGuaqAHdjAS20TbEQ56UPvvkciwUD2Oe7OKTyZ7ae5DBkJvJDEtx4fay1AQ6GSo2Wpe
2wW0vN78zta2iMQD3SNd4GgFDp0FJkDd4y+++MIYq3J/5PMKxyffmwaS/oCsrnxhbTByWufN6jnu
fLPddIFLKK9PcMwpzL6nqE2j6Bm68e83oP2itLjffruO6F9Uhc/WnuLLN84zgmkdkvWwWeDjR5yh
Gk2CRn0GGb6jzUlzX51DcKjNefZfgta6ZfjpL/RFaLFe5pt2W6H/C1W4+uysaKKL8vwCL3IwEsAY
UnSPz6dSgUHGWhbW5T7+fCrGnWtVDJmJ3anHSfDbMq/KfypYup6XH9DKF3XEW0Hmux/VLW13Ooti
MLXWxU0Xe0oYmW8JsKeAcnZ9gTSANvcou8X/bsuiWvGmsqGNEIMtEdwc/81kRh5Rtl3mO5thAZa+
15cJoU7QxdDhctvJx3jC50w/TLvyh6WRySdwZNANozsMOYEMAShtly/dkRTVjQ8pjrqhy8RPb5HI
P5DPPl/fAvlenZZrsgKwK1uZIhrJlW/rjiBaXH5EUSYsZPCelTSIjpS8g9PiQKvUVmhBiwZK0VwB
xJU7M5p1XlX1dYsYVOEsMohaWxADoNNk7sTqRsIXOvb15S1aOUlTXNUfWH2FKW/XJMcKvts9LbuW
b85WRV454OheC++ISPVVzmOlnz7Uy0vDvlOYzI3yebmkJDcGNxZr6n1PBu88GlQBk4RaiHoawWCm
15w2NO1dkuF/iUVydm874EJBopCUqqvjFFr0zxl2UU0zamgDTwfGFyqzhr7R/qe50OBAV9sqcvqH
rR6EZ/2ZpoNy3fDvG+M3Ct6KePFQJQhQzQ1AEbSGYDdouwXJkmj4LNHSzO6M3WyGalm0pK93YLcm
bVXC34epvwgZhQO4SBgBRPiwN3nyVmpeXFZwAhTnW8+r/Op0lUc3R7SnN5nWO9O7MCQ8LkuQozkQ
Pa6tjejg+Sce1Bk88tHZdr0kBkSnD9Vf4ydVHueZPdRzgOkeAxl6RjyL3QWOXkxeXDy1OB3VAGzp
HBbn0pe4lqyNIl2PIQBSeuwU0JjjnRLxL14n8TEXzHNCA9+DorOF0erAgl0ojKKfoqfmQ5GOXUsY
apV9VJpS6hr5yyZvL4iUR+wHIJmOnB88AYtps8uYNqcqcoMuQZUx1Ik/637ZIDcskRuSWZgcgDJ3
UCnDhv56lPo2Gys12OK4PAn5fti9a3A3eL5dl7d1nI8PHp3YLjm6OKpBQKyKmxGkEUlhGyUViAE9
dLYdSacpDExnbnVTnqP8BZpB18AGNdCmhL9Z7+QFmr58KdFYNGvjlt0K8+i3v3PRPTPXDcUaY1nx
as5blEQHrZxgDbrrRm2sKFYox+voum4uJRTA68pRRbSr0VXR5bCSc0DGFYpMuZtcFcsaxq4bijoS
B86m9ADxITkv1jTP1g0fJCq8yD+QVXvxkG6/ouL9FrTW7tYFhBFSOHHkJgCnC7hYmG56XvZylfS+
wfAYwaNIKXc08k+BlSBxXIRGs205XnvAmK4iiWKdXDJt0QkbYU5/fNJzcVZ9mj5zV9D7HuxrDFTo
hzHYxEEhd9gS9qgKa9sw+lmmrjjPMrnJXhDWh/03ePUhy6dFyiQWj+bwy927PZ6rmYbEt3eiXZLS
t4X2pppL95Avb6LWR5rX1QYskyQeXBHqF4PzjoNrjb/AOF9EZayNx6eKkT5fn9Xh4NqWgoGB41Ig
MAgJdS60BWl2+aKoNrTF6/xDeZ5rhdpj0IqBLMjy78BEQidDPGg0bjfaZGH/tm+vlHRFHfaX4hdz
bw0+pfs3DbQ2SxcCGMePTmbRl3S9COgiT0mAKCwPvUSs677xVXse+x7QkTmEKc4aoNXAx+HhWtQf
GRlMLapLSSyXpfEAcbNi52yRu/6jKPbuUwHDMjmYmPGvH3lCm2jP7YqKJnc7PjwZ7ql2xO3MLJl7
PxrpjaJFk6I3/qn0fzzSnya57gVh4ce2Uwz/Bo0YP7LcnX1oWttJ1LVUX7P1bqxNH3OXZVQyWUl6
h8tghwFE90HanYJqNOcb4Shx1gdWumhNZh6pE0G0xHhQdWqbW7p8H4swcRFCPmO+B3NVXlKEYwUw
Fq9x0SqnMevgHqUAMDeOWF39uUQ84xhWCiGl4GsLhngAPJ+BQ1GFBKTkalDtgUjaOnBgoInnpCYr
RVIqAPZpAcoZKIHnYTWcbkdQxIYyJcx2zayDYd2iKE6b/aYu12QNt71v8UfW+D5Z5LCC/951BfWw
GIvHOALsxRrqWNOU3ePEp1T82KK0prG4M9MboGL3LYVLH4afoC4N34UsHm8gxikPZ50zTPgAWiCz
xz9sci4Ue1DHRB8Q937Ztj3MOctcm8oKcLPdZwH10ouaH9AnZa2vYAnoFv8KFAnEUmJDRye4zN21
9axAOVjQNWshfOzxVvy2KuYcfuOqJflpS75HadidskU55xONFjoGaY2xD5SAKUWPeedQeSTdC13X
YWemekS/qygLDDz2DFG3Hy7oKMIF/TPt3z+v639Gd+YHS8/hVi7jkPUdoSFeKGd6lLCp1rslQgLt
iDA8twPo7y3RK2N2zkMb/Cl/hCwO4ZTn6xoMuLB1XAok1ChjBhYHL9KQOFx9ET/R0udr6pr049wo
bc6iZJ91JcPaB7Gy4a/5KI50V2OCTmF8a/0OaSr80j3GqYX7sytkWM/YwVusnrKik1j0bn5VRE//
hmleflpUr36xKF/90k8BuQIOjXK9UNNAhrPzbjDMWcgN4dpvPTbAPINnbPMPvMqeuVwt7bsu4ZSi
DWXPb7sukTv9i5mjzEfmqZIT/N3u2UfCdNjfoGOfJOBJxa08E18jW0y/YCdD3bTmNusee0D8izbO
mqzq68VV3lwWeKs0/Zx7IGzr06fDiQw7OLKmSOa6+zJhvsM1TGZujeM1kuhKlyGKpioca67H9bIj
ZHjUd+RXt4FMHiMg+DfvaxX2Qv4pR1ijB22t7swk9sGOiTorz7cYvA77yE05PYduJ714nX4+p7ro
DsQhkrbDwx08Sr//O+9gfII7ITxMQ7HKY4P3JzA0CetgHfon7VM4j4yFNDpgg0JHAKSeRkXH2AkX
68UYq1AWPvFudJne/zQcWxW+8DWhWrx9q0JoJQ2H11hT1tH6Oig/GC3sxfV7KQcLK0DfX66KjFSn
oZfZ1brR71YEPCt/8Asby+XaURWtiHhWRwMxR/C9HQPPAEVzV8tR8FMvfNHWL2HuX+JYLNpsRXLh
7LUKy0Q2viCP9/yAVVDtAppFO03MM8XEifsSy1xF243aZrKBsqCJZeFxR0ieCajyspsw8iTtxaTw
YqD5oT2A9c1n0eHRUK8H88jiIeYgbEB4LEAenZUIeUpYcObfN9147bKlDoAHivipybEZ/kRF1o5F
K5yRt31NRObAObIA9QlsbDocdjPcmg6FJSkfEAamIwtJ95uy6fHg0dF3mjS55JbKwxfmPSJq5sAl
1ZYRfo12quSWt/1mKmAna4zgkVI6j4w9kzHm/GNGJyKvJMi5x+IIpntGXS+PD/ZTA9E/lKFzpdY7
hf/7RP60pPC5RKA3heOXsXUtkNCWQFRjyLQGbO2s3VRll8TfrmMrj4zUNZkPE5SlZD2QyR0/OnKT
ixTVyNgjJ8waIETQcq8o2Atcvdjz8zAV3i2SJlbCwUxM3IBAsYzh/gL6MuWyuKVPURUnJMhljlic
Z/gblgz5Mezs30z7fbMWa4/0jyB5UQEQtuljQLl5JYgTG5+EPN/siwVrdrGQ3L92sYjDTm5nh6Z2
BxjoM/XX59O+i73P9wzdvqE4ehMNxCVj8Ib+tOCoHhBCp7e96CYDgfy2SaoDFWZyRwlwyZck5Voy
lKiAsQEoq7I935ak9BPX+VA0GH+1Ju0WHSdZ2HgG61GqyHjy3XMoOqP
htqNkks4piLG/OlThPZYX
bcRqvzcWgU3xjTPORZxFWDBo6FrO3dT7B48OkVqpTI+EWepJDqxlbHP1bQflbSnw335LDnMCPwRV
l9EY/lq8JRu625UfgjGcdJFfzZUpiwzuuilBVx/Utb7iwy+OXpcxaHNzYcI0RON0layQuq3lka2i
e5FLP7jS1N+ke8YO6OUSmFyWmRWNc4jZL7+hcNPhIR13wgElvAyPY/JdBhiosogZhJWD2HfJhRGn
LGptXgc6BnIfj3z3J+deKN+fNPdIadOIVcF1CLR9wnuthJalqTipJ9r4VNqIUvB7qr2lSbCgpn/H
FJye9AxouJRDULRtfk6B4BTmjUyAUe9WyRnm6QaCUuD4SpVVDH37B5xu6qJPfBN8WLDmGmUYGkeV
JwDLqgDRJox3wBVvkyI65GVuHqLo2Aug3obYe
3YUiuAwfa1dFz1CxvfiGHwvMTEU2q6ZpqGZBXpm
L1Y8UiFN+eg7KLif9rXZ3tzsaHuxJ3kqxqbX22bCq/vVwmxn1B7+mHW/glOanRbI3ysaq08c4qJ5
8Xo8SSQUrYtCeb1BcSzimcGnoZR0otf1ZhICOyA+XEPASUUxd9X6sPRu35UGqq9HPPQZGNYNAoce
2vtrB88FEuC0PeBcHL7arrvyqggFc/QgmtG9SD0zCwpntFo6Xmt13+OIAqDlJpc0FWfKnPHEfLEf
R2du372wRQm+SHgmImJSm10hl+pxBLo9sx3vQQVBqQa0zb1wydJoXBpcj0ULMuceekNxNAYx0x//
+MdwdFVcGwbzU0HMpEWuKzbHn0ebuqXyIum0B+0U9KbLEDMwoROyhJkZW
| sqlalchemy/alembic | alembic/runtime/environment.py | Python | mit | 38,523 | 0 |
from typing import Callable
from typing import ContextManager
from typing import Dict
from typing import List
from typing import Optional
from typing import overload
from typing import TextIO
from typing import Tuple
from typing import TYPE_CHECKING
from typing import Union
from .migration import MigrationContext
from .. import util
from ..operations import Operations
if TYPE_CHECKING:
from typing import Literal
from sqlalchemy.engine.base import Connection
from sqlalchemy.sql.schema import MetaData
from .migration import _ProxyTransaction
from ..config import Config
from ..script.base import ScriptDirectory
_RevNumber = Optional[Union[str, Tuple[str, ...]]]
class EnvironmentContext(util.ModuleClsProxy):
"""A configurational facade made available in an ``env.py`` script.
The :class:`.EnvironmentContext` acts as a *facade* to the more
nuts-and-bolts objects of :class:`.MigrationContext` as well as certain
aspects of :class:`.Config`,
within the context of the ``env.py`` script that is invoked by
most Alembic commands.
:class:`.EnvironmentContext` is normally instantiated
when a command in :mod:`alembic.command` is run. It then makes
itself available in the ``alembic.context`` module for the scope
of the command. From within an ``env.py`` script, the current
:class:`.EnvironmentContext` is available by importing this module.
:class:`.EnvironmentContext` also supports programmatic usage.
At this level, it acts as a Python context manager, that is, it is
intended to be used with the
``with:`` statement. A typical use of :class:`.EnvironmentContext`::
from alembic.config import Config
from alembic.script import ScriptDirectory
config = Config()
config.set_main_option("script_location", "myapp:migrations")
script = ScriptDirectory.from_config(config)
def my_function(rev, context):
'''do something with revision "rev", which
will be the current database revision,
and "context", which is the MigrationContext
that the env.py will create'''
with EnvironmentContext(
config,
script,
fn = my_function,
as_sql = False,
starting_rev = 'base',
destination_rev = 'head',
tag = "sometag"
):
script.run_env()
The above script will invoke the ``env.py`` script
within the migration environment. If and when ``env.py``
calls :meth:`.MigrationContext.run_migrations`, the
``my_function()`` function above will be called
by the :class:`.MigrationContext`, given the context
itself as well as the current revision in the database.
.. note::
For most API usages other than full blown
invocation of migration scripts, the :class:`.MigrationContext`
and :class:`.ScriptDirectory` objects can be created and
used directly. The :class:`.EnvironmentContext` object
is *only* needed when you need to actually invoke the
``env.py`` module present in the migration environment.
"""
_migration_context: Optional["MigrationContext"] = None
config: "Config" = None # type:ignore[assignment]
"""An instance of :class:`.Config` representing the
configuration file contents as well as other variables
set programmatically within it."""
script: "ScriptDirectory" = None # type:ignore[assignment]
"""An instance of :class:`.ScriptDirectory` which provides
programmatic access to version files within the ``versions/``
directory.
"""
def __init__(
self, config: "Config", script: "ScriptDirectory", **kw
) -> None:
r"""Construct a new :class:`.EnvironmentContext`.
:param config: a :class:`.Config` instance.
:param script: a :class:`.ScriptDirectory` instance.
:param \**kw: keyword options that will be ultimately
passed along to the :class:`.MigrationContext` when
:meth:`.EnvironmentContext.configure` is called.
"""
self.config = config
self.script = script
self.context_opts = kw
def __enter__(self) -> "EnvironmentContext":
"""Establish a context which provides a
:class:`.EnvironmentContext` object to
env.py scripts.
The :class:`.EnvironmentContext` will
be made available as ``from alembic import context``.
"""
self._install_proxy()
return self
def __exit__(self, *arg, **kw) -> None:
self._remove_proxy()
def is_offline_mode(self) -> bool:
"""Return True if the current migrations environment
is running in "offline mode".
This is ``True`` or ``False`` depending
on the ``--sql`` flag passed.
This function does not require that the :class:`.MigrationContext`
has been configured.
"""
return self.context_opts.get("as_sql", False)
def is_transactional_ddl(self):
"""Return True if the context is configured to expect a
transactional DDL capable backend.
This defaults to the type of database in use, and
can be overridden by the ``transactional_ddl`` argument
to :meth:`.configure`
This function requires that a :class:`.MigrationContext`
has first been made available via :meth:`.configure`.
"""
return self.get_context().impl.transactional_ddl
def requires_connection(self) -> bool:
return not self.is_offline_mode()
def get_head_revision(self) -> _RevNumber:
"""Return the hex identifier of the 'head' script revision.
If the script directory has multiple heads, this
method raises a :class:`.CommandError`;
:meth:`.EnvironmentContext.get_head_revisions` should be preferred.
This function does not require that the :class:`.MigrationContext`
has been configured.
.. seealso:: :meth:`.EnvironmentContext.get_head_revisions`
"""
return self.script.as_revision_number("head")
def get_head_revisions(self) -> _RevNumber:
"""Return the hex identifier of the 'heads' script revision(s).
This returns a tuple containing the version number of all
heads in the script directory.
This function does not require that the :class:`.MigrationContext`
has been configured.
"""
return self.script.as_revision_number("heads")
def get_starting_revision_argument(self) -> _RevNumber:
"""Return the 'starting revision' argument,
if the revision was passed using ``start:end``.
This is only meaningful in "offline" mode.
Returns ``None`` if no value is available
or was configured.
This function does not require that the :class:`.MigrationContext`
has been configured.
"""
if self._migration_context is not None:
return self.script.as_revision_number(
self.get_context()._start_from_rev
)
elif "starting_rev" in self.context_opts:
return self.script.as_revision_number(
self.context_opts["starting_rev"]
)
else:
# this should raise only in the case that a command
# is being run where the "starting rev" is never applicable;
# this is to catch scripts which rely upon this in
# non-sql mode or similar
raise util.CommandError(
"No starting revision argument is available."
)
def get_revision_argument(self) -> _RevNumber:
"""Get the 'destination' revision argument.
This is typically the argument passed to the
``upgrade`` or ``downgrade`` command.
If it was specified as ``head``, the actual
version number is returned; if specified
as ``base``, ``None`` is returned.
This function does not require that the :class:`.MigrationContext`
has been configured.
"""
return self.script.as_revision_number(
|
yzl0083/orange
|
Orange/OrangeWidgets/Prototypes/OWPerformanceCurves.py
|
Python
|
gpl-3.0
| 7,520
| 0.007314
|
"""<name>Performance Curves</name>
<description>Model performance at different thresholds</description>
<icon>icons/PerformanceCurves.png</icon>
<priority>30</priority>
<contact>Janez Demsar (janez.demsar@fri.uni-lj.si)</contact>"""
from OWWidget import *
from OWGUI import *
from PyQt4.QtGui import *
from PyQt4.QtCore import *
from OWDlgs import OWChooseImageSizeDlg
import sip
import orngTest
from OWGraph import *
class PerformanceGraph(OWGraph):
def __init__(self, master, *arg):
OWGraph.__init__(self, *arg)
self.master = master
self.mousePressed = False
def mousePressEvent(self, e):
self.mousePressed = True
canvasPos = self.canvas().mapFrom(self, e.pos())
self.master.thresholdChanged(self.invTransform(QwtPlot.xBottom, canvasPos.x()))
def mouseReleaseEvent(self, e):
self.mousePressed = False
def mouseMoveEvent(self, e):
if self.mousePressed:
self.mousePressEvent(e)
# Remove if this widget ever goes multilingual!
_ = lambda x:x
class OWPerformanceCurves(OWWidget):
settingsList = ["selectedScores", "threshold"]
def __init__(self, parent=None, signalManager=None, name="Performance Curves"):
OWWidget.__init__(self, parent, signalManager, name)
self.inputs=[("Evaluation Results", orngTest.ExperimentResults, self.setTestResults, Default)]
self.outputs=[]
self.selectedScores = []
self.classifiers = []
self.selectedClassifier = []
self.targetClass = -1
self.threshold = 0.5
self.thresholdCurve = None
self.statistics = ""
self.resize(980, 420)
self.loadSettings()
self.scores = [_('Classification accuracy'), _('Sensitivity (Recall)'), _('Specificity'),
_('Positive predictive value (Precision)'), _('Negative predictive value'),
_('F-measure')]
self.colors = [Qt.black, Qt.green, Qt.darkRed,
Qt.blue, Qt.red,
QColor(255, 128, 0)]
self.res = None
self.allScores = None
OWGUI.listBox(self.controlArea, self, 'selectedClassifier', 'classifiers', box = "Models", callback=self.classifierChanged, selectionMode = QListWidget.SingleSelection)
self.comTarget = OWGUI.comboBox(self.controlArea, self, 'targetClass', box="Target Class", callback=self.classifierChanged, valueType=0)
OWGUI.listBox(self.controlArea, self, 'selectedScores', 'scores', box = _("Performance scores"), callback=self.selectionChanged, selectionMode = QListWidget.MultiSelection)
sip.delete(self.mainArea.layout())
self.layout = QHBoxLayout(self.mainArea)
self.dottedGrayPen = QPen(QBrush(Qt.gray), 1, Qt.DotLine)
self.graph = graph = PerformanceGraph(self, self.mainArea)
graph.state = NOTHING
graph.setAxisScale(QwtPlot.xBottom, 0.0, 1.0, 0.0)
graph.setAxisScale(QwtPlot.yLeft, 0.0, 1.0, 0.0)
graph.useAntialiasing = True
graph.insertLegend(QwtLegend(), QwtPlot.BottomLegend)
graph.gridCurve.enableY(True)
graph.gridCurve.setMajPen(self.dottedGrayPen)
graph.gridCurve.attach(graph)
self.mainArea.layout().addWidget(graph)
b1 = OWGUI.widgetBox(self.mainArea, "Statistics")
OWGUI.label(b1, self, "%(statistics)s").setTextFormat(Qt.RichText)
OWGUI.rubber(b1)
self.controlArea.setFixedWidth(220)
def setTestResults(self, res):
self.res = res
if res and res.classifierNames:
self.classifiers = res.classifierNames
self.selectedClassifier = [0]
self.comTarget.clear()
self.comTarget.addItems(self.res.classValues)
self.targetClass=min(1, len(self.res.classValues))
self.classifierChanged()
else:
self.graph.clear()
self.thresholdCurve = None
self.allScores = None
def classifierChanged(self):
self.allScores = []
self.probs = []
classNo = self.selectedClassifier[0]
probsClasses = sorted((tex.probabilities[classNo][self.targetClass], self.targetClass==tex.actualClass) for tex in self.res.results)
self.all = all = len(probsClasses)
TP = self.P = P = float(sum(x[1] for x in probsClasses))
FP = self.N = N = all-P
TN = FN = 0.
prevprob = probsClasses[0][0]
for Nc, (prob, kls) in enumerate(probsClasses):
if kls:
TP -= 1
FN += 1
else:
FP -= 1
TN += 1
if prevprob != prob:
self.allScores.append(((TP+TN)/all, TP/(P or 1), TN/(N or 1), TP/(all-Nc), TN/Nc, 2*TP/(P+all-Nc), TP, TN, FP, FN, Nc))
self.probs.append(prevprob)
prevprob = prob
self.allScores.append(((TP+TN)/all, TP/(P or 1), TN/(N or 1), TP/(all-Nc), TN/Nc, 2*TP/(P+all-Nc), TP, TN, FP, FN, Nc))
self.probs.append(prevprob)
self.allScores = zip(*self.allScores)
self.selectionChanged()
def selectionChanged(self):
self.graph.clear()
self.thresholdCurve = None
if not self.allScores:
return
for c in self.selectedScores:
self.graph.addCurve(self.scores[c], self.colors[c], self.colors[c], 1, xData=self.probs, yData=self.allScores[c], style = QwtPlotCurve.Lines, symbol = QwtSymbol.NoSymbol, lineWidth=3, enableLegend=1)
self.thresholdChanged()
# self.graph.replot is called in thresholdChanged
def thresholdChanged(self, threshold=None):
if threshold is not None:
self.threshold = threshold
if self.thresholdCurve:
self.thresholdCurve.detach()
self.thresholdCurve = self.graph.addCurve("threshold", Qt.black, Qt.black, 1, xData=[self.threshold]*2, yData=[0,1], style=QwtPlotCurve.Lines, symbol = QwtSymbol.NoSymbol, lineWidth=1)
self.graph.replot()
if not self.allScores:
self.statistics = ""
return
ind = 0
while self.probs[ind] < self.threshold and ind+1 < len(self.probs):
ind += 1
alls = self.allScores
stat = "<b>Sample size: %i instances</b><br/> Positive: %i<br/> Negative: %i<br/><br/>" % (self.all, self.P, self.N)
stat += "<b>Current threshold: %.2f</b><br/><br/>" % self.threshold
stat += "<b>Positive predictions: %i</b><br/> True positive: %i<br/> False positive: %i<br/><br/>" % (self.all-alls[-1][ind], alls[-5][ind], alls[-3][ind])
stat += "<b>Negative predictions: %i</b><br/> True negative: %i<br/> False negative: %i<br/><br/>" % (alls[-1][ind], alls[-4][ind], alls[-2][ind])
if self.selectedScores:
stat += "<b>Performance</b><br/>"
stat += "<br/>".join("%s: %.2f" % (self.scores[i], alls[i][ind]) for i in self.selectedScores)
self.statistics = stat
def sendReport(self):
if self.res:
self.reportSettings(_("Performance Curves"),
[(_("Model"), self.res.classifierNames[self.selectedClassifier[0]]),
(_("Target class"), self.res.classValues[self.targetClass])])
self.reportImage(self.graph.saveToFileDirect, QSize(790, 390))
self.reportSection("Performance")
self.reportRaw(self.statistics)
|
serge-sans-paille/pythran
|
pythran/tests/cases/monte_carlo.py
|
Python
|
bsd-3-clause
| 1,346
| 0.008915
|
# http://code.activestate.com/recipes/577263-numerical-integration-using-monte-carlo-method/
# Numerical Integration using Monte Carlo method
# FB - 201006137
#pythran export montecarlo_integration(float, float, int, float list, int)
#runas montecarlo_integration(1.,10.,100,[x/100. for x in range(100)],100)
#bench montecarlo_integration(1.,10.,650000,[x/100. for x in range(100)],100)
import math
def montecarlo_integration(xmin, xmax, numSteps,rand,randsize):
# define any function here!
def f(x):
return math.sin(x)
# find ymin-ymax
ymin = f(xmin)
ymax = ymin
for i in range(numSteps):
x = xmin + (xmax - xmin) * float(i) / numSteps
y = f(x)
if y < ymin: ymin = y
if y > ymax: ymax = y
# Monte Carlo
rectArea = (xmax - xmin) * (ymax - ymin)
numPoints = numSteps # bigger the better but slower!
ctr = 0
for j in range(numPoints):
x = xmin + (xmax - xmin) * rand[j%randsize]
y = ymin + (ymax - ymin) * rand[j%randsize]
if math.fabs(y) <= math.fabs(f(x)):
if f(x) > 0 and y > 0 and y <= f(x):
ctr += 1 # area over x-axis is positive
if f(x) < 0 and y < 0 and y >= f(x):
ctr -= 1 # area under x-axis is negative
fnArea = rectArea * float(ctr) / numPoints
return fnArea
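# Usage sketch (illustrative values only). Note that the function draws x and
# y from the same rand entry on each iteration, so the sample points are
# perfectly correlated; the result is a rough, biased estimate rather than a
# true Monte Carlo integral.
if __name__ == '__main__':
    import random
    rand = [random.random() for _ in range(100)]
    print(montecarlo_integration(0.0, math.pi, 10000, rand, 100))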
|
Telestream/telestream-cloud-python-sdk
|
telestream_cloud_qc_sdk/telestream_cloud_qc/models/active_format_test.py
|
Python
|
mit
| 4,684
| 0
|
# coding: utf-8
"""
Qc API
Qc API # noqa: E501
The version of the OpenAPI document: 3.0.0
Contact: cloudsupport@telestream.net
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from telestream_cloud_qc.configuration import Configuration
class ActiveFormatTest(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
'afd': 'int',
'reject_on_error': 'bool',
'checked': 'bool'
}
attribute_map = {
'afd': 'afd',
'reject_on_error': 'reject_on_error',
'checked': 'checked'
}
def __init__(self, afd=None, reject_on_error=None, checked=None, local_vars_configuration=None):  # noqa: E501
"""ActiveFormatTest - a model defined in OpenAPI""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration()
self.local_vars_configuration = local_vars_configuration
self._afd = None
self._reject_on_error = None
self._checked = None
self.discriminator = None
if afd is not None:
self.afd = afd
if reject_on_error is not None:
self.reject_on_error = reject_on_error
if checked is not None:
self.checked = checked
@property
def afd(self):
"""Gets the afd of this ActiveFormatTest. # noqa: E501
:return: The afd of this ActiveFormatTest. # noqa: E501
:rtype: int
"""
return self._afd
@afd.setter
def afd(self, afd):
"""Sets the afd of this ActiveFormatTest.
:param afd: The afd of this ActiveFormatTest. # noqa: E501
:type: int
"""
self._afd = afd
@property
def reject_on_error(self):
"""Gets the reject_on_error of this ActiveFormatTest. # noqa: E501
:return: The reject_on_error of this ActiveFormatTest. # noqa: E501
:rtype: bool
"""
return self._reject_on_error
@reject_on_error.setter
def reject_on_error(self, reject_on_error):
"""Sets the reject_on_error of this ActiveFormatTest.
:param reject_on_error: The reject_on_error of this ActiveFormatTest. # noqa: E501
:type: bool
"""
self._reject_on_error = reject_on_error
@property
def checked(self):
"""Gets the checked of this ActiveFormatTest. # noqa: E501
:return: The checked of this ActiveFormatTest. # noqa: E501
:rtype: bool
"""
return self._checked
@checked.setter
def checked(self, checked):
"""Sets the checked of this ActiveFormatTest.
:param checked: The checked of this ActiveFormatTest. # noqa: E501
:type: bool
"""
self._checked = checked
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, ActiveFormatTest):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, ActiveFormatTest):
return True
return self.to_dict() != other.to_dict()
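# Illustrative usage (the field values below are made up):
#   t = ActiveFormatTest(afd=10, reject_on_error=True, checked=True)
#   t.to_dict()  # -> {'afd': 10, 'reject_on_error': True, 'checked': True}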
|
altsen/diandiyun-platform
|
common/lib/xmodule/xmodule/modulestore/django.py
|
Python
|
agpl-3.0
| 7,244
| 0.001242
|
"""
Module that provides a connection to the ModuleStore specified in the django settings.
Passes settings.MODULESTORE as kwargs to MongoModuleStore
"""
from __future__ import absolute_import
from importlib import import_module
import re
from django.conf import settings
from django.core.cache import get_cache, InvalidCacheBackendError
from django.dispatch import Signal
import django.utils
from xmodule.modulestore.loc_mapper_store import LocMapperStore
from xmodule.util.django import get_current_request_hostname
# We may not always have the request_cache module available
try:
from request_cache.middleware import RequestCache
HAS_REQUEST_CACHE = True
except ImportError:
HAS_REQUEST_CACHE = False
_MODULESTORES = {}
FUNCTION_KEYS = ['render_template']
def load_function(path):
"""
Load a function by name.
path is a string of the form "path.to.module.function"
returns the imported python object `function` from `path.to.module`
"""
module_path, _, name = path.rpartition('.')
return getattr(import_module(module_path), name)
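# A minimal illustration (hypothetical path): load_function("os.path.join")
# imports the module "os.path" and returns its "join" attribute.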
def create_modulestore_instance(engine, doc_store_config, options, i18n_service=None):
"""
This will return a new instance of a modulestore given an engine and options
"""
class_ = load_function(engine)
_options = {}
_options.update(options)
for key in FUNCTION_KEYS:
if key in _options and isinstance(_options[key], basestring):
_options[key] = load_function(_options[key])
if HAS_REQUEST_CACHE:
request_cache = RequestCache.get_request_cache()
else:
request_cache = None
try:
metadata_inheritance_cache = get_cache('mongo_metadata_inheritance')
except InvalidCacheBackendError:
metadata_inheritance_cache = get_cache('default')
return class_(
metadata_inheritance_cache_subsystem=metadata_inheritance_cache,
request_cache=request_cache,
modulestore_update_signal=Signal(providing_args=['modulestore', 'course_id', 'location']),
xblock_mixins=getattr(settings, 'XBLOCK_MIXINS', ()),
xblock_select=getattr(settings, 'XBLOCK_SELECT_FUNCTION', None),
doc_store_config=doc_store_config,
i18n_service=i18n_service or ModuleI18nService(),
**_options
)
def get_default_store_name_for_current_request():
"""
This method will return the appropriate default store mapping for the current Django request,
else 'default' which is the system default
"""
store_name = 'default'
# see what request we are currently processing - if any at all - and get hostname for the request
hostname = get_current_request_hostname()
# get mapping information which is defined in configurations
mappings = getattr(settings, 'HOSTNAME_MODULESTORE_DEFAULT_MAPPINGS', None)
# compare hostname against the regex expressions set of mappings
# which will tell us which store name to use
if hostname and mappings:
for key in mappings.keys():
if re.match(key, hostname):
store_name = mappings[key]
return store_name
return store_name
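# Illustrative mapping (hypothetical hostname pattern and store name):
#   HOSTNAME_MODULESTORE_DEFAULT_MAPPINGS = {r'preview\..*': 'draft'}
# would route any request whose hostname starts with "preview." to the
# 'draft' modulestore.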
def modulestore(name=None):
"""
This returns an instance of a modulestore of given name. This will either return an existing
modulestore or create a new one.
"""
if not name:
# If caller did not specify name then we should
# determine what should be the default
name = get_default_store_name_for_current_request()
if name not in _MODULESTORES:
_MODULESTORES[name] = create_modulestore_instance(
settings.MODULESTORE[name]['ENGINE'],
settings.MODULESTORE[name].get('DOC_STORE_CONFIG', {}),
settings.MODULESTORE[name].get('OPTIONS', {})
)
# inject loc_mapper into newly created modulestore if it needs it
if name == 'split' and _loc_singleton is not None:
_MODULESTORES['split'].loc_mapper = _loc_singleton
return _MODULESTORES[name]
_loc_singleton = None
def loc_mapper():
"""
Get the loc mapper which bidirectionally maps Locations to Locators. Used like modulestore() as
a singleton accessor.
"""
# pylint: disable=W0603
global _loc_singleton
# pylint: disable=W0212
if _loc_singleton is None:
try:
loc_cache = get_cache('loc_cache')
except InvalidCacheBackendError:
loc_cache = get_cache('default')
# instantiate
_loc_singleton = LocMapperStore(loc_cache, **settings.DOC_STORE_CONFIG)
# inject into split mongo modulestore
if 'split' in _MODULESTORES:
_MODULESTORES['split'].loc_mapper = _loc_singleton
return _loc_singleton
def clear_existing_modulestores():
"""
Clear the existing modulestore instances, causing
them to be re-created when accessed again.
This is useful for flushing state between unit tests.
"""
_MODULESTORES.clear()
# pylint: disable=W0603
global _loc_singleton
cache = getattr(_loc_singleton, "cache", None)
if cache:
cache.clear()
_loc_singleton = None
def editable_modulestore(name='default'):
"""
Retrieve a modulestore that we can modify.
This is useful for tests that need to insert test
data into the modulestore.
Currently, only Mongo-backed modulestores can be modified.
Returns `None` if no editable modulestore is available.
"""
# Try to retrieve the ModuleStore
# Depending on the settings, this may or may not
# be editable.
store = modulestore(name)
# If this is a `MixedModuleStore`, then we will need
# to retrieve the actual Mongo instance.
# We assume that the default is Mongo.
if hasattr(store, 'modulestores'):
store = store.modulestores['default']
# At this point, we either have the ability to create
# items in the store, or we do not.
if hasattr(store, 'create_xmodule'):
return store
else:
return None
class ModuleI18nService(object):
"""
Implement the XBlock runtime "i18n" service.
Mostly a pass-through to Django's translation module.
django.utils.translation implements the gettext.Translations interface (it
has ugettext, ungettext, etc), so we can use it directly as the runtime
i18n service.
"""
def __getattr__(self, name):
return getattr(django.utils.translation, name)
def strftime(self, *args, **kwargs):
"""
A locale-aware implementation of strftime.
"""
# This is the wrong place to import this function. I'm putting it here
# because the xmodule test suite can't import this module, because
# Django is not available in that suite. This function isn't called in
# that suite, so this hides the import so the test won't fail.
#
# As I said, this is wrong. But Cale says this code will soon be
# refactored to a place that will be right, and the code can be made
# right there. If you are reading this comment after April 1, 2014,
# then Cale was a liar.
from util.date_utils import strftime_localized
return strftime_localized(*args, **kwargs)
|
henrywm/URI
|
src/beginner/1042.py
|
Python
|
apache-2.0
| 431
| 0.027842
|
a, b, c = input().split(" ")
numbers = [int(a),int(b),int(c)]
cnumbers = list(numbers)
for j in range(0,len(numbers)):
for i in range(0,len(numbers)-1):
if numbers[i] > numbers[i+1]:
aux = numbers[i+1]
numbers[i+1] = numbers[i]
numbers[i] = aux
for a in range(len(numbers)):
print(numbers[a])
print("")
for b in range(len(cnumbers)):
print(cnumbers[b])
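# The loops above hand-roll a bubble sort; an equivalent, more idiomatic
# version of the whole program would be:
#   for n in sorted(numbers): print(n)
#   print("")
#   for n in cnumbers: print(n)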
|
perkinslr/schemepy
|
scheme/syntax_rules.py
|
Python
|
lgpl-2.1
| 1,919
| 0.007817
|
from __future__ import unicode_literals
from zope.interface import implements, classProvides
from scheme.macro import Macro
from scheme.symbol import Symbol
from scheme.environment import Environment, SyntaxEnvironment
from scheme.syntax import SyntaxSymbol
from scheme.PatternMatcher import PatternMatcher
# from scheme.utils import syntax_copy_with_replacement
from scheme.utils import transformCode
import scheme.debug
class syntax_rules(object):
implements(Macro)
classProvides(Macro)
def __init__(self, processer, ast):
literals = ast[0]
patterns = ast[1:]
self.name = patterns[0][0][0]
self.env = processer.cenv.parent
self.literals = literals
self.patterns = patterns
def __call__(self, processer, params):
params=params[0].toObject(processer.cenv)
for pattern in self.patterns:
template = pattern[1:]
pattern = pattern[0]
bindings = PatternMatcher(pattern, self.literals).match(params)
if bindings is None:
continue
env = Environment(self.env)
l = {}
l.update(globals())
l.update(locals())
#import code
#code.InteractiveConsole(locals=l).interact()
transformedCode = transformCode(template, bindings, env, self)
#osp = processer.stackPointer
#processer.popStack(transformedCode)
##processer.ast = transformedCode
#processer.stackPointer = osp
if scheme.debug.getDebug('syntax'):
print 56, transformedCode
if len(transformedCode)==1:
return transformedCode[0]
return transformedCode
raise SyntaxError("syntax-rules no case matching %r for %s" % (params, self.name))
import scheme.Globals
scheme.Globals.Globals['syntax-rules'] = syntax_rules
|
blancltd/blanc-basic-pages
|
blanc_basic_pages/models.py
|
Python
|
bsd-3-clause
| 2,112
| 0.002367
|
from __future__ import unicode_literals
from blanc_basic_assets.fields import AssetForeignKey
from django.core.exceptions import ValidationError
from django.core.urlresolvers import get_script_prefix
from django.core.validators import RegexValidator
from django.db import models
from django.utils.encoding import iri_to_uri, python_2_unicode_compatible
from mptt.managers import TreeManager
from mptt.models import MPTTModel, TreeForeignKey
# Validator from flatpages
url_validator = RegexValidator(regex=r'^[-\w/\.~]+$',
message="This value must contain only letters, numbers, dots, "
"underscores, dashes, slashes or tildes.")
# Another validator to ensure the URL starts and ends with a slash
def slash_validator(url):
if not url.startswith('/'):
raise ValidationError("This value must start with a leading slash.")
elif not url.endswith('/'):
raise ValidationError("This value must end with a trailing slash.")
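# Illustrative behaviour: slash_validator('/about/contact/') passes silently,
# while slash_validator('about/') and slash_validator('/about') each raise
# ValidationError.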
@python_2_unicode_compatible
class Page(MPTTModel):
url = models.CharField(
'URL', max_length=100, unique=True,
help_text="Example: '/about/contact/'. Make sure to have leading and trailing slashes.",
validators=[url_validator, slash_validator])
title = models.CharField(max_length=200)
parent = TreeForeignKey('self', null=True, blank=True, related_name='children')
show_in_navigation = models.BooleanField(default=True, db_index=True)
hero_image = AssetForeignKey('assets.Image', blank=True, null=True, on_delete=models.SET_NULL)
content = models.TextField(blank=True)
template_name = models.CharField(max_length=100, blank=True)
published = models.BooleanField(default=True, db_index=True)
login_required = models.BooleanField(default=False, db_index=True)
objects = TreeManager()
def __str__(self):
return '%s -- %s' % (self.url, self.title)
def get_absolute_url(self):
# Handle script prefix manually because we bypass reverse()
return iri_to_uri(get_script_prefix().rstrip('/') + self.url)
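# For example (hypothetical deployment): with a script prefix of '/site/' and
# url='/about/contact/', get_absolute_url() returns '/site/about/contact/'.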
|
hamsterbacke23/wagtail
|
wagtail/wagtailsnippets/edit_handlers.py
|
Python
|
bsd-3-clause
| 2,653
| 0.001508
|
from __future__ import absolute_import, unicode_literals
import warnings
from django.core.exceptions import ImproperlyConfigured
from django.template.loader import render_to_string
from django.utils.safestring import mark_safe
from wagtail.utils.deprecation import RemovedInWagtail16Warning
from wagtail.wagtailadmin.edit_handlers import BaseChooserPanel
from wagtail.wagtailcore.utils import resolve_model_string
from .widgets import AdminSnippetChooser
class BaseSnippetChooserPanel(BaseChooserPanel):
object_type_name = 'item'
_target_model = None
@classmethod
def widget_overrides(cls):
return {cls.field_name: AdminSnippetChooser(model=cls.target_model())}
@classmethod
def target_model(cls):
if cls._target_model is None:
if cls.snippet_type:
# RemovedInWagtail16Warning: The target_model is automatically
# detected from the relation, so snippet_type is deprecated.
try:
cls._target_model = resolve_model_string(cls.snippet_type)
except LookupError:
raise ImproperlyConfigured(
"{0}.snippet_type must be of the form 'app_label.model_name', given {1!r}"
.format(cls.__name__, cls.snippet_type)
)
except ValueError:
raise ImproperlyConfigured(
"{0}.snippet_type refers to model {1!r} that has not been installed"
.format(cls.__name__, cls.snippet_type)
)
else:
cls._target_model = cls.model._meta.get_field(cls.field_name).rel.model
return cls._target_model
def render_as_field(self):
instance_obj = self.get_chosen_item()
return mark_safe(render_to_string(self.field_template, {
'field': self.bound_field,
self.object_type_name: instance_obj,
}))
class SnippetChooserPanel(object):
def __init__(self, field_name, snippet_type=None):
self.field_name = field_name
if snippet_type is not None:
warnings.warn(
'The snippet_type argument to SnippetChooserPanel is deprecated. '
'The related model is now automatically detected.',
RemovedInWagtail16Warning)
self.snippet_type = snippet_type
def bind_to_model(self, model):
return type(str('_SnippetChooserPanel'), (BaseSnippetChooserPanel,), {
'model': model,
'field_name': self.field_name,
'snippet_type': self.snippet_type,
})
|
VaysseB/id_generator
|
src/parser.py
|
Python
|
gpl-3.0
| 15,217
| 0.00092
|
import string
import ast
from state_machine import PSM, Source
class SpecialPattern:
individual_chars = ('t', 'n', 'v', 'f', 'r', '0')
range_chars = ('d', 'D', 'w', 'W', 's', 'S')
special_chars = ('^', '$', '[', ']', '(', ')', '{', '}', '\\', '.', '*',
'?', '+', '|', '.')
restrict_special_chars = ('\\', '[', ']')
posix_classes = ("alnum", "alpha", "blank", "cntrl", "digit", "graph",
"lower", "print", "punct", "space", "upper", "xdigit",
"d", "w", "s")
min_len_posix_class = 1
#-------------------------------------
# Group
class WrappedGroup:
def __init__(self):
self.group = ast.Group()
self.is_alt = False
def add(self, other):
if self.is_alt:
last_alt = self.alt.parts[-1] + (other,)
self.alt.parts = self.alt.parts[:-1] + (last_alt,)
else:
self.group.seq = self.group.seq + (other,)
@property
def alt(self) -> ast.Alternative:
assert self.is_alt
return self.group.seq[0]
def collapse_alt(self):
if self.is_alt:
self.alt.parts = self.alt.parts + ((),)
else:
self.is_alt = True
first_alt_elems = self.group.seq
self.group.seq = (ast.Alternative(),)
self.alt.parts = (first_alt_elems,())
class OpeningOfGroup:
def __init__(self, parent: None, initial: bool=False):
self.is_initial = initial
self.parent = parent # OpeningOfGroup or ContentOfGroup
self.g = WrappedGroup()
self.content_of_initial = None
# forward of function
self.add = self.g.add
# if this group is the initial, their is no parent but we must refer
# to itself as the returning state
# but if it is a nested group, it must be added into its global group
if self.is_initial:
self.content_of_initial = ContentOfGroup(self, initial)
else:
self.parent.add(self.g.group)
def next(self, psm: PSM):
if not self.is_initial and psm.char == "?":
return FirstOptionOfGroup(self)
elif psm.char == ")":
if self.is_initial:
psm.error = 'unexpected ")"'
else:
return self.parent
elif psm.char == "(":
return OpeningOfGroup(self)
elif self.is_initial:
return self.content_of_initial.next(psm)
else:
t = ContentOfGroup(self)
return t.next(psm)
class FirstOptionOfGroup:
def __init__(self, parent: OpeningOfGroup):
self.parent = parent
def next(self, psm: PSM):
if psm.char == ":":
self.parent.g.group.ignored = True
return ContentOfGroup(self.parent)
elif psm.char == "!":
self.parent.g.group.lookhead = ast.Group.NegativeLookhead
return ContentOfGroup(self.parent)
elif psm.char == "=":
self.parent.g.group.lookhead = ast.Group.PositiveLookhead
return ContentOfGroup(self.parent)
elif psm.char == "<":
self.parent.g.group.name = ""
return NameOfGroup(self.parent)
else:
psm.error = 'expected ":", "!", "<" or "="'
class NameOfGroup:
def __init__(self, parent: OpeningOfGroup):
self.parent = parent
def next(self, psm: PSM):
if psm.char.isalpha() or psm.char == "_":
self.parent.g.group.name += psm.char
return self
elif psm.char == ">":
return self.parent
else:
psm.error = 'expected a letter, "_" or ">"'
class ContentOfGroup:
NotQuantified = 0
Quantified = 1
UngreedyQuantified = 2
def __init__(self, parent: OpeningOfGroup, initial: bool=False):
self.parent = parent
self.is_initial = initial
self.limited_prev = parent if initial else self
self.quantified = ContentOfGroup.NotQuantified
# forward of function
self.add = self.parent.add
def next(self, psm: PSM):
quantified = self.quantified
self.quantified = ContentOfGroup.NotQuantified
if psm.char == ")":
if self.is_initial:
psm.error = "unbalanced parenthesis"
else:
return self.parent.parent
elif psm.char == "(":
return OpeningOfGroup(self.limited_prev)
elif psm.char == "^":
self.add(ast.MatchBegin())
return self.limited_prev
elif psm.char == "$":
self.add(ast.MatchEnd())
return self.limited_prev
elif psm.char == ".":
t = ast.PatternChar()
t.pattern = psm.char
self.add(t)
return self.limited_prev
elif psm.char == "\\":
return EscapedChar(self.limited_prev,
as_single_chars=SpecialPattern.special_chars)
elif psm.char == "[":
return CharClass(self.limited_prev)
elif psm.char == "|":
self.parent.g.collapse_alt()
return self.limited_prev
# >>> Quantifiers
elif psm.char == "?" and quantified == ContentOfGroup.NotQuantified:
self.quantified = ContentOfGroup.Quantified
last = self._last_or_fail(psm)
if last:
last.quantifier = ast.NoneOrOnce()
return self.limited_prev
elif psm.char == "*" and quantified == ContentOfGroup.NotQuantified:
self.quantified = ContentOfGroup.Quantified
last = self._last_or_fail(psm)
if last:
last.quantifier = ast.NoneOrMore()
return self.limited_prev
elif psm.char == "+" and quantified == ContentOfGroup.NotQuantified:
self.quantified = ContentOfGroup.Quantified
last = self._last_or_fail(psm)
if last:
last.quantifier = ast.OneOrMore()
return self.limited_prev
elif psm.char == "{" and quantified == ContentOfGroup.NotQuantified:
self.quantified = ContentOfGroup.Quantified
t = MinimumOfRepetition(self.limited_prev)
last = self._last_or_fail(psm)
if last:
last.quantifier = t.between
return t
elif psm.char == "?" and quantified == ContentOfGroup.Quantified:
self.quantified = ContentOfGroup.UngreedyQuantified
last = self._last_or_fail(psm)
if last:
last.quantifier.greedy = False
return self.limited_prev
elif quantified == ContentOfGroup.Quantified:
psm.error = "unexpected quantifier"
elif quantified == ContentOfGroup.UngreedyQuantified:
psm.error = "quantifier repeated"
# <<< Quantifier
else:
t = ast.SingleChar()
t.char = psm.char
self.add(t)
return self.limited_prev
def _last_or_fail(self, psm: PSM):
if self.parent.g.group.seq:
return self.parent.g.group.seq[-1]
else:
psm.error = "nothing to repeat"
class MinimumOfRepetition:
def __init__(self, parent: ContentOfGroup):
self.parent = parent
self.between = ast.Between()
self.min = []
def next(self, psm: PSM):
if psm.char.isdigit():
self.min.append(psm.char)
return self
elif psm.char == ",":
self._interpret()
return MaximumOfRepetition(self)
elif psm.char == "}":
self._interpret()
return self.parent
else:
psm.error = 'expected digit, "," or "}"'
def _interpret(self):
if not self.min:
return
try:
count = int("".join(self.min))
except ValueError:
assert False, "internal error: cannot convert to number minimum of repetition"
self.between.min = count
class MaximumOfRepetition:
def __init__(self, repeat: MinimumOfRepetition):
self.repeat = repeat
sel
|
radiasoft/sirepo
|
sirepo/template/flash_views.py
|
Python
|
apache-2.0
| 27,201
| 0.00114
|
# -*- coding: utf-8 -*-
u"""Flash Config parser.
:copyright: Copyright (c) 2021 RadiaSoft LLC. All Rights Reserved.
:license: http://www.apache.org/licenses/LICENSE-2.0.html
"""
from pykern import pkio
from pykern.pkcollections import PKDict
from pykern.pkdebug import pkdc, pkdp, pkdlog
import inspect
import re
def _fields(templates, values):
# template: [field template, label template]
# values: values to insert into the field/label templates
return {
t[0].format(v): t[1].format(v.upper()) for v in values for t in templates
}
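# For example, _fields([['{}min', '{} Minimum']], ['x', 'y']) yields
# {'xmin': 'X Minimum', 'ymin': 'Y Minimum'}.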
class SpecializedViews:
# POSIT: FLASH field names are unique so flat list is ok
_LABELS = PKDict(
LimitedSlopeBeta='Limited Slope Beta',
RiemannSolver='Riemann Solver',
UnitSystem='System of Units',
allowDtSTSDominate='allowDtSTSDominate',
cfl='Courant Factor',
charLimiting='Characteristic Limiting',
cvisc='Artificial Viscosity Constant',
diff_eleFlCoef='Flux Limiter Coefficient',
diff_eleXlBoundaryType='X Left Boundary',
diff_eleXrBoundaryType='X Right Boundary',
diff_eleYlBoundaryType='Y Left Boundary',
diff_eleYrBoundaryType='Y Right Boundary',
diff_eleZlBoundaryType='Z Left Boundary',
diff_eleZrBoundaryType='Z Right Boundary',
diff_thetaImplct='Implicitness Factor',
diff_useEleCond='Use Ele Conduction',
dt_diff_factor='Timestep Factor',
dtinit='Initial Timestep [s]',
dtmax='Maximum Timestep',
dtmin='Minimum Timestep',
ed_crossSectionFunctionType_1='Cross Section Function Type',
ed_gaussianCenterMajor_1='Major Gaussian Center',
ed_gaussianCenterMinor_1='Minor Gaussian Center',
ed_gaussianExponent_1='Gaussian Exponent',
ed_gaussianRadiusMajor_1='Major Gaussian Radius',
ed_gaussianRadiusMinor_1='Minor Gaussian Radius',
ed_gradOrder='Gradient Order',
ed_gridType_1='Type of Beam Grid',
ed_laser3Din2D='3D Ray Tracing',
ed_laser3Din2DwedgeAngle='Wedge Angle',
ed_laserIOMaxNumberOfPositions='Max Ray Positions',
ed_laserIOMaxNumberOfRays='Max Rays',
ed_lensSemiAxisMajor_1='Lens Semi Axis Major',
ed_lensX_1='Lens X',
ed_lensY_1='Lens Y',
ed_lensZ_1='Lens Z',
ed_maxRayCount='Max Ray Count',
ed_numberOfBeams='Number of Beams',
ed_numberOfPulses='Number of Pulses',
ed_numberOfRays_1='Number of Rays',
ed_numberOfSections_1='Number of Sections',
ed_power_1_1='Laser Pulse Section 1',
ed_power_1_2='Laser Pulse Section 2',
ed_power_1_3='Laser Pulse Section 3',
ed_power_1_4='Laser Pulse Section 4',
ed_pulseNumber_1='Pulse Number',
ed_semiAxisMajorTorsionAngle_1='Major Semiaxis Torsion Angle',
ed_semiAxisMajorTorsionAxis_1='Major Semiaxis Torsion Axis',
ed_targetSemiAxisMajor_1='Major Target Semiaxis',
ed_targetSemiAxisMinor_1='Minor Target Semiaxis',
ed_targetX_1='X Target',
ed_targetY_1='Y Target',
ed_targetZ_1='Z Target',
ed_time_1_1='Laser Pulse Section 1',
ed_time_1_2='Laser Pulse Section 2',
ed_time_1_3='Laser Pulse Section 3',
ed_time_1_4='Laser Pulse Section 4',
ed_useLaserIO='Use Laser IO',
ed_wavelength_1='Wavelength',
entropy='Entropy Fix',
eosMode='Eos Mode',
eosModeInit='Initial Eos Mode',
fl_b='Flame Width',
fl_epsilon_0='Lower Sharpening Factor',
fl_epsilon_1='Upper Sharpening Factor',
fl_fsConstFlameSpeed='Constant Flame Speed',
fl_kpp_fact='Prefactor Adjustment',
flame_deltae='Flame Delta e',
gconst='Acceleration Constant',
gdirec='Direction of Acceleration',
geometry='Grid Geometry',
grav_boundary_type='Boundary Condition',
lrefine_max='Maximum Refinement Level',
lrefine_min='Minimum Refinement Level',
order='Order',
plotFileIntervalTime='Plot File Interval Time [s]',
refine_var_count='Refine Variable Count',
rt_dtFactor='Time Step Coefficient',
rt_mgdBounds_1='Boundary 1',
rt_mgdBounds_2='Boundary 2',
rt_mgdBounds_3='Boundary 3',
rt_mgdBounds_4='Boundary 4',
rt_mgdBounds_5='Boundary 5',
rt_mgdBounds_6='Boundary 6',
rt_mgdBounds_7='Boundary 7',
rt_mgdFlCoef='MGD Flux Limiter Coefficient',
rt_mgdFlMode='MGD Flux Limiter Mode',
rt_mgdNumGroups='Number of Groups',
rt_mgdXlBoundaryType='X MGD Left Boundary',
rt_mgdXrBoundaryType='X MGD Right Boundary',
rt_mgdYlBoundaryType='Y MGD Left Boundary',
rt_mgdYrBoundaryType='Y MGD Right Boundary',
rt_mgdZlBoundaryType='Z MGD Left Boundary',
rt_mgdZrBoundaryType='Z MGD Right Boundary',
rt_useMGD='Use Multigroup Radiation Diffusion',
shockDetect='Use Strong Compressive Shock Detection',
slopeLimiter='Slope Limiter',
sumyi_burned='Burned sumyi',
sumyi_unburned='Unburned sumyi',
threadHydroBlockList='Block List Threading',
threadHydroWithinBlock='Within Block Threading',
tmax='Maximum Simulation Time [s]',
updateHydroFluxes='Update Hydro Fluxes',
useDiffuse='Use Diffusive Effects',
useEnergyDeposition='Use Energy Deposition',
useFlame='Use Flame',
useGravity='Use Gravity',
useHydro='Use Hydro Calculation',
useRadTrans='Use Radiative Transfer',
use_cma_advection='Use CMA Advection',
use_cma_flattening='Use CMA Flattening',
ye_burned='Burned ye',
ye_unburned='Unburned ye',
**_fields([
['{}l_boundary_type', '{} Lower Boundary Type'],
['{}r_boundary_type', '{} Upper Boundary Type'],
['{}min', '{} Minimum'],
['{}max', '{} Maximum'],
['nblock{}', 'Blocks in {}'],
], ['x', 'y', 'z']),
**_fields([
['refine_var_{}', 'Name Variable {}'],
['refine_cutoff_{}', 'Refine Variable {}'],
['derefine_cutoff_{}', 'Derefine Variable {}'],
], [str(v) for v in range(1, 7)]),
)
_VIEW_FUNC_PREFIX = '_view_'
def __init__(self):
self._view_fns = PKDict()
for n, o in inspect.getmembers(self):
if n.startswith(self._VIEW_FUNC_PREFIX) and inspect.ismethod(o):
self._view_fns[n[len(self._VIEW_FUNC_PREFIX):]] = o
def update_schema(self, schema):
self._update_labels(schema)
self._update_views(schema)
return schema
def _assert_model_view_fields_exist(self, name, view, schema):
"""Check that model fields in view exist in models"""
def flatten(to_flatten):
def flatten_column(to_flatten):
if isinstance(to_flatten[0], str):
return flatten(to_flatten[1])
res = []
for f in to_flatten:
res += flatten_column(f)
return res
res = []
for f in to_flatten:
if isinstance(f, str):
res.append(f)
continue
assert isinstance(f, list), \
f'unknown type f={f}'
res += flatten_column(f)
return res
for f in flatten(view.get('basic', []) + view.get('advanced', [])):
if '.' not in f:
f = f'{name}.{f}'
p = f.split('.')
assert p[0] in schema.model, \
f'model name={p[0]} does not exist in known models={schema.model.keys()}'
assert p[1] in schema.model[p[0]], \
f'field={p[1]} does not exist in model={schema.model[p[0]]} name={p[0]}'
def _get_species_list(self, schema):
res = []
for f in schema.model.Multispecies_MultispeciesMain:
m = re.search(r'eos_(.*)EosType', f)
if m:
res.append(m.group(1))
return res
def _update_labels(self, schema):
|
abel-von/commons
|
tests/python/twitter/common/zookeeper/serverset/test_serverset_unit.py
|
Python
|
apache-2.0
| 2,836
| 0.0067
|
# ==================================================================================================
# Copyright 2013 Twitter, Inc.
# --------------------------------------------------------------------------------------------------
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this work except in compliance with the License.
# You may obtain a copy of the License in the LICENSE file, or at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==================================================================================================
from twitter.common.zookeeper.serverset.endpoint import ServiceInstance
from twitter.common.zookeeper.serverset.serverset import ServerSet
from twitter.common.zookeeper.group.group_base import GroupInterface, Membership
from twitter.common.zookeeper.group.kazoo_group import ActiveKazooGroup
from kazoo.client import KazooClient
import mock
SERVICE_INSTANCE_JSON = '''{
"additionalEndpoints": {
"aurora": {
"host": "smfd-aki-15-sr1.devel.twitter.com",
"port": 31510
},
"health": {
"host": "smfd-aki-15-sr1.devel.twitter.com",
"port": 31510
}
},
"serviceEndpoint": {
"host": "smfd-aki-15-sr1.devel.twitter.com",
"port": 31510
},
"shard": 0,
"status": "ALIVE"
}'''
@mock.patch('twitter.common.zookeeper.serverset.serverset.ActiveKazooGroup')
@mock.patch('twitter.common.zookeeper.serverset.serverset.validate_group_implementation')
def test_internal_monitor(mock_group_impl_validator, MockActiveKazooGroup):
mock_zk = mock.Mock(spec=KazooClient)
mock_group = mock.MagicMock(spec=GroupInterface)
MockActiveKazooGroup.mock_add_spec(ActiveKazooGroup)
MockActiveKazooGroup.return_value = mock_group
# by default it tries to assert that the group impl is a subclass of GroupInterface
# since the group impl will be a mock, it doesn't pass that check, so we mock the validator
# as well.
mock_group_impl_validator.return_value = True
def devnull(*args, **kwargs): pass
serverset = ServerSet(
mock_zk,
'/some/path/to/group',
on_join=devnull,
on_leave=devnull)
members = [Membership(id) for id in range(2)]
print("Members are: %s" % members)
serverset._internal_monitor(frozenset(members))
for call in mock_group.info.mock_calls:
_, (_, callback), _ = call
callback(ServiceInstance.unpack(SERVICE_INSTANCE_JSON))
assert len(serverset._members) == 2
|
UnbDroid/robomagellan
|
Codigos/Raspberry/desenvolvimentoRos/devel/lib/python2.7/dist-packages/tf2_msgs/srv/_FrameGraph.py
|
Python
|
gpl-3.0
| 7,187
| 0.016558
|
# This Python file uses the following encoding: utf-8
"""autogenerated by genpy from tf2_msgs/FrameGraphRequest.msg. Do not edit."""
import sys
python3 = True if sys.hexversion > 0x03000000 else False
import genpy
import struct
class FrameGraphRequest(genpy.Message):
_md5sum = "d41d8cd98f00b204e9800998ecf8427e"
_type = "tf2_msgs/FrameGraphRequest"
_has_header = False #flag to mark the presence of a Header object
_full_text = """"""
__slots__ = []
_slot_types = []
def __init__(self, *args, **kwds):
"""
Constructor. Any message fields that are implicitly/explicitly
set to None will be assigned a default value. The recommended
use is keyword arguments as this is more robust to future message
changes. You cannot mix in-order arguments and keyword arguments.
The available fields are:
:param args: complete set of field values, in .msg order
:param kwds: use keyword arguments corresponding to message field names
to set specific fields.
"""
if args or kwds:
super(FrameGraphRequest, self).__init__(*args, **kwds)
def _get_types(self):
"""
internal API method
"""
return self._slot_types
def serialize(self, buff):
"""
serialize message into buffer
:param buff: buffer, ``StringIO``
"""
try:
pass
except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self)))))
except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self)))))
def deserialize(self, str):
"""
unpack serialized message in str into this message instance
:param str: byte array of serialized message, ``str``
"""
try:
end = 0
return self
except struct.error as e:
raise genpy.DeserializationError(e) #most likely buffer underfill
def serialize_numpy(self, buff, numpy):
"""
serialize message with numpy array types into buffer
:param buff: buffer, ``StringIO``
:param numpy: numpy python module
"""
try:
pass
except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self)))))
except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self)))))
def deserialize_numpy(self, str, numpy):
"""
unpack serialized message in str into this message instance using numpy for array types
:param str: byte array of serialized message, ``str``
:param numpy: numpy python module
"""
try:
end = 0
return self
except struct.error as e:
raise genpy.DeserializationError(e) #most likely buffer underfill
_struct_I = genpy.struct_I
# This Python file uses the following encoding: utf-8
"""autogenerated by genpy from tf2_msgs/FrameGraphResponse.msg. Do not edit."""
import sys
python3 = True if sys.hexversion > 0x03000000 else False
import genpy
import struct
class FrameGraphResponse(genpy.Message):
_md5sum = "437ea58e9463815a0d511c7326b686b0"
_type = "tf2_msgs/FrameGraphResponse"
_has_header = False #flag to mark the presence of a Header object
_full_text = """string frame_yaml
"""
__slots__ = ['frame_yaml']
_slot_types = ['string']
def __init__(self, *args, **kwds):
"""
Constructor. Any message fields that are implicitly/explicitly
set to None will be assigned a default value. The recommended
use is keyword arguments as this is more robust to future message
changes. You cannot mix in-order arguments and keyword arguments.
The available fields are:
frame_yaml
:param args: complete set of field values, in .msg order
:param kwds: use keyword arguments corresponding to message field names
to set specific fields.
"""
if args or kwds:
super(FrameGraphResponse, self).__init__(*args, **kwds)
#message fields cannot be None, assign default values for those that are
if self.frame_yaml is None:
self.frame_yaml = ''
else:
self.frame_yaml = ''
def _get_types(self):
"""
internal API method
"""
return self._slot_types
def serialize(self, buff):
"""
serialize message into buffer
:param buff: buffer, ``StringIO``
"""
try:
_x = self.frame_yaml
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
if python3:
buff.write(struct.pack('<I%sB'%length, length, *_x))
else:
buff.write(struct.pack('<I%ss'%length, length, _x))
except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self)))))
except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self)))))
def deserialize(self, str):
"""
unpack serialized message in str into this message instance
:param str: byte array of serialized message, ``str``
"""
try:
end = 0
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.frame_yaml = str[start:end].decode('utf-8')
else:
self.frame_yaml = str[start:end]
return self
except struct.error as e:
raise genpy.DeserializationError(e) #most likely buffer underfill
def serialize_numpy(self, buff, numpy):
"""
serialize message with numpy array types into buffer
:param buff: buffer, ``StringIO``
:param numpy: numpy python module
"""
try:
_x = self.frame_yaml
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
if python3:
buff.write(struct.pack('<I%sB'%length, length, *_x))
else:
buff.write(struct.pack('<I%ss'%length, length, _x))
except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self)))))
except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self)))))
def deserialize_numpy(self, str, numpy):
"""
unpack serialized message in str into this message instance using numpy for array types
:param str: byte array of serialized message, ``str``
:param numpy: numpy python module
"""
try:
end = 0
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.frame_yaml = str[start:end].decode('utf-8')
else:
self.frame_yaml = str[start:end]
return self
except struct.error as e:
raise genpy.DeserializationError(e) #most likely buffer underfill
_struct_I = genpy.struct_I
class FrameGraph(object):
_type = 'tf2_msgs/FrameGraph'
_md5sum = '437ea58e9463815a0d511c7326b686b0'
_request_class = FrameGraphRequest
_response_class = FrameGraphResponse
|
namhyung/uftrace
|
tests/t139_kernel_dynamic2.py
|
Python
|
gpl-2.0
| 1,228
| 0.000814
|
#!/usr/bin/env python
import os
from runtest import TestBase
class TestCase(TestBase):
def __init__(self):
TestBase.__init__(self, 'openclose', serial=True, result="""
# DURATION TID FUNCTION
[ 9875] | main() {
[ 9875] | fopen() {
14.416 us [ 9875] | sys_open();
19.099 us [ 9875] | } /* fopen */
9.720 us [ 9875] | fclose();
37.051 us [ 9875] | } /* main */
""")
def prerun(self, timeout):
if os.geteuid() != 0:
return TestBase.TEST_SKIP
if os.path.exists('/.dockerenv'):
return TestBase.TEST_SKIP
return TestBase.TEST_SUCCESS
# check that the syscall name is corrected (for the SyS_ prefix)
def setup(self):
self.option = "-k -P '_*sys_open@kernel'"
def fixup(self, cflags, result):
uname = os.uname()
# Linux v4.17 (x86_64) changed syscall routines
major, minor, release = uname[2].split('.')
if uname[0] == 'Linux' and uname[4] == 'x86_64' and \
   (int(major) >= 5 or (int(major) == 4 and int(minor) >= 17)):
    return result.replace(' sys_open', ' __x64_sys_openat')
else:
return result.replace(' sys_open', ' sys_openat')
|
neuropil/boltons
|
boltons/dictutils.py
|
Python
|
bsd-3-clause
| 24,228
| 0.000124
|
# -*- coding: utf-8 -*-
"""Python has a very powerful mapping type at its core: the :class:`dict`
type. While versatile and featureful, the :class:`dict` prioritizes
simplicity and performance. As a result, it does not retain the order
of item insertion [1]_, nor does it store multiple values per key. It
is a fast, unordered 1:1 mapping.
The :class:`OrderedMultiDict` contrasts to the built-in :class:`dict`,
as a relatively maximalist, ordered 1:n subtype of
:class:`dict`. Virtually every feature of :class:`dict` has been
retooled to be intuitive in the face of this added
complexity. Additional methods have been added, such as
:class:`collections.Counter`-like functionality.
A prime advantage of the :class:`OrderedMultiDict` (OMD) is its
non-destructive nature. Data can be added to an :class:`OMD` without being
rearranged or overwritten. The property can allow the developer to
work more freely with the data, as well as make more assumptions about
where input data will end up in the output, all without any extra
work.
One great example of this is the :meth:`OMD.inverted()` method, which
returns a new OMD with the values as keys and the keys as values. All
the data and the respective order is still represented in the inverted
form, all from an operation which would be outright wrong and reckless
with a built-in :class:`dict` or :class:`collections.OrderedDict`.
The OMD has been performance tuned to be suitable for a wide range of
usages, including as a basic unordered MultiDict. Special
thanks to `Mark Williams`_ for all his help.
.. [1] As of 2015, `basic dicts on PyPy are ordered
<http://morepypy.blogspot.com/2015/01/faster-more-memory-efficient-and-more.html>`_.
.. _Mark Williams: https://github.com/markrwilliams
"""
from collections import KeysView, ValuesView, ItemsView
try:
from itertools import izip_longest
except ImportError:
from itertools import zip_longest as izip_longest
try:
from typeutils import make_sentinel
_MISSING = make_sentinel(var_name='_MISSING')
except ImportError:
_MISSING = object()
PREV, NEXT, KEY, VALUE, SPREV, SNEXT = range(6)
__all__ = ['MultiDict', 'OMD', 'OrderedMultiDict']
try:
profile
except NameError:
profile = lambda x: x
class OrderedMultiDict(dict):
"""A MultiDict is a dictionary that can have multiple values per key
and the OrderedMultiDict (OMD) is a MultiDict that retains
original insertion order. Common use cases include:
* handling query strings parsed from URLs
* inverting a dictionary to create a reverse index (values to keys)
* stacking data from multiple dictionaries in a non-destructive way
The OrderedMultiDict constructor is identical to the built-in
    :class:`dict`, and overall the API constitutes an intuitive
superset of the built-in type:
>>> omd = OrderedMultiDict()
>>> omd['a'] = 1
>>> omd['b'] = 2
>>> omd.add('a', 3)
>>> omd.get('a')
3
>>> omd.getlist('a')
[1, 3]
Some non-:class:`dict`-like behaviors also make an appearance,
such as support for :func:`reversed`:
>>> list(reversed(omd))
['b', 'a']
Note that unlike some other MultiDicts, this OMD gives precedence
to the most recent value added. ``omd['a']`` refers to ``3``, not
``1``.
>>> omd
OrderedMultiDict([('a', 1), ('b', 2), ('a', 3)])
>>> omd.poplast('a')
3
>>> omd
OrderedMultiDict([('a', 1), ('b', 2)])
>>> omd.pop('a')
1
>>> omd
OrderedMultiDict([('b', 2)])
Note that calling :func:`dict` on an OMD results in a dict of keys
to *lists* of values:
>>> from pprint import pprint as pp # ensuring proper key ordering
>>> omd = OrderedMultiDict([('a', 1), ('b', 2), ('a', 3)])
>>> pp(dict(omd))
{'a': [1, 3], 'b': [2]}
Note that modifying those lists will modify the OMD. If you want a
    safe-to-modify or flat dictionary, use :meth:`OrderedMultiDict.todict()`.
>>> pp(omd.todict())
{'a': 3, 'b': 2}
>>> pp(omd.todict(multi=True))
{'a': [1, 3], 'b': [2]}
    With ``multi=False``, items appear with the keys in the original
insertion order, alongside the most-recently inserted value for
that key.
>>> OrderedMultiDict([('a', 1), ('b', 2), ('a', 3)]).items(multi=False)
[('a', 3), ('b', 2)]
"""
def __init__(self, *args, **kwargs):
if len(args) > 1:
raise TypeError('%s expected at most 1 argument, got %s'
% (self.__class__.__name__, len(args)))
super(OrderedMultiDict, self).__init__()
self._clear_ll()
if args:
self.update_extend(args[0])
if kwargs:
self.update(kwargs)
def _clear_ll(self):
try:
_map = self._map
except AttributeError:
_map = self._map = {}
self.root = []
_map.clear()
self.root[:] = [self.root, self.root, None]
def _insert(self, k, v):
root = self.root
cells = self._map.setdefault(k, [])
last = root[PREV]
cell = [last, root, k, v]
last[NEXT] = root[PREV] = cell
cells.append(cell)
def add(self, k, v):
"""Add a single value *v* under a key *k*. Existing values under *k*
are preserved.
"""
values = super(OrderedMultiDict, self).setdefault(k, [])
self._insert(k, v)
values.append(v)
def addlist(self, k, v):
"""Add an iterable of values underneath a specific key, preserving
any values already under that key.
>>> omd = OrderedMultiDict([('a', -1)])
>>> omd.addlist('a', range(3))
>>> omd
OrderedMultiDict([('a', -1), ('a', 0), ('a', 1), ('a', 2)])
Called ``addlist`` for consistency with :meth:`getlist`, but
tuples and other sequences and iterables work.
"""
self_insert = self._insert
values = super(OrderedMultiDict, self).setdefault(k, [])
for subv in v:
self_insert(k, subv)
values.extend(v)
def get(self, k, default=None):
"""Return the value for key *k* if present in the dictionary, else
*default*. If *default* is not given, ``None`` is returned.
This method never raises a :exc:`KeyError`.
To get all values under a key, use :meth:`OrderedMultiDict.getlist`.
"""
return super(OrderedMultiDict, self).get(k, [default])[-1]
def getlist(self, k, default=_MISSING):
"""Get all values for key *k* as a list, if *k* is in the
dictionary, else *default*. The list returned is a copy and
can be safely mutated. If *default* is not given, an empty
:class:`list` is returned.
"""
try:
return super(OrderedMultiDict, self).__getitem__(k)[:]
except KeyError:
if default is _MISSING:
return []
return default
def clear(self):
"Empty the dictionary."
super(OrderedMultiDict, self).clear()
self._clear_ll()
def setdefault(self, k, default=_MISSING):
"""If key *k* is in the dictionary, return its value. If not, insert
*k* with a value of *default* and return *default*. *default*
defaults to ``None``. See :meth:`dict.setdefault` for more
information.
"""
        if super(OrderedMultiDict, self).__contains__(k):
            return self.get(k)  # existing key: return its most recent value
        default = None if default is _MISSING else default
        self.add(k, default)
        return default
def copy(self):
"Return a shallow copy of the dictionary."
return self.__class__(self.iteritems(multi=True))
@classmethod
def fromkeys(cls, keys, default=None):
"""Create a dictionary from a list of keys, with all the values
set to *default*, or ``None`` if *default* is not set.
"""
return cls([(k, default) for k in keys])
def update(self, E, **F):
"""Add items from a dictionary or iterable (and/or keyword arguments),
overwriting values under an existing key. See
:meth:`dict.update` for more details.
"""
# E and F are throwback names to the dict() __doc__
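# Usage sketch of the OMD behavior described in the docstrings above
# (illustrative; assumes the published boltons package is installed):
#   >>> omd = OrderedMultiDict()
#   >>> omd.add('a', 1); omd.add('a', 2); omd.add('b', 3)
#   >>> omd['a'], omd.getlist('a')
#   (2, [1, 2])
#   >>> list(omd.inverted().keys())   # values become keys, order preserved
#   [1, 2, 3]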
|
AutorestCI/azure-sdk-for-python
|
azure-mgmt-network/azure/mgmt/network/v2017_09_01/models/security_group_view_result.py
|
Python
|
mit
| 1,116
| 0.000896
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class SecurityGroupViewResult(Model):
"""The information about security rules applied to the spe
|
cified VM.
:param network_interfaces: List of network interfaces on the specified VM.
:type network_interfaces:
list[~azure.mgmt.network.v2017_09_01.models.SecurityGroupNetworkInterface]
"""
_attribute_map = {
'network_interfaces': {'key': 'networkInterfaces', 'type': '[SecurityGroupNetworkInterface]'},
}
    def __init__(self, network_interfaces=None):
super(SecurityGroupViewResult, self).__init__()
self.network_interfaces = network_interfaces
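# Usage sketch (illustrative, not part of the generated code): msrest models
# are plain attribute containers, and _attribute_map supplies the wire names,
# so `network_interfaces` (de)serializes under the JSON key "networkInterfaces".
#   view = SecurityGroupViewResult(network_interfaces=[])
#   view.network_interfaces  # -> []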
|
j5shi/Thruster
|
pylibs/test/test_structmembers.py
|
Python
|
gpl-2.0
| 3,646
| 0.001646
|
import unittest
from test import test_support
# Skip this test if the _testcapi module isn't available.
test_support.import_module('_testcapi')
from _testcapi import _test_structmembersType, \
CHAR_MAX, CHAR_MIN, UCHAR_MAX, \
SHRT_MAX, SHRT_MIN, USHRT_MAX, \
INT_MAX, INT_MIN, UINT_MAX, \
LONG_MAX, LONG_MIN, ULONG_MAX, \
LLONG_MAX, LLONG_MIN, ULLONG_MAX
ts=_test_structmembersType(False, 1, 2, 3, 4, 5, 6, 7, 8,
9.99999, 10.1010101010, "hi")
class ReadWriteTests(unittest.TestCase):
def test_bool(self):
ts.T_BOOL = True
self.assertEqual(ts.T_BOOL, True)
        ts.T_BOOL = False
self.assertEqual(ts.T_BOOL, False)
self.assertRaises(TypeError, setattr, ts, 'T_BOOL', 1)
def test_byte(self):
ts.T_BYTE = CHAR_MAX
self.assertEqual(ts.T_BYTE, CHAR_MAX)
ts.T_BYTE = CHAR_MIN
self.assertEqual(ts.T_BYTE, CHAR_MIN)
ts.T_UBYTE = UCHAR_MAX
self.assertEqual(ts.T_UBYTE, UCHAR_MAX)
def test_short(self):
ts.T_SHORT = SHRT_MAX
self.assertEqual(ts.T_SHORT, SHRT_MAX)
ts.T_SHORT = SHRT_MIN
self.assertEqual(ts.T_SHORT, SHRT_MIN)
ts.T_USHORT = USHRT_MAX
self.assertEqual(ts.T_USHORT, USHRT_MAX)
def test_int(self):
ts.T_INT = INT_MAX
self.assertEqual(ts.T_INT, INT_MAX)
ts.T_INT = INT_MIN
self.assertEqual(ts.T_INT, INT_MIN)
ts.T_UINT = UINT_MAX
self.assertEqual(ts.T_UINT, UINT_MAX)
def test_long(self):
ts.T_LONG = LONG_MAX
self.assertEqual(ts.T_LONG, LONG_MAX)
ts.T_LONG = LONG_MIN
self.assertEqual(ts.T_LONG, LONG_MIN)
ts.T_ULONG = ULONG_MAX
self.assertEqual(ts.T_ULONG, ULONG_MAX)
@unittest.skipUnless(hasattr(ts, "T_LONGLONG"), "long long not present")
def test_longlong(self):
ts.T_LONGLONG = LLONG_MAX
self.assertEqual(ts.T_LONGLONG, LLONG_MAX)
ts.T_LONGLONG = LLONG_MIN
self.assertEqual(ts.T_LONGLONG, LLONG_MIN)
ts.T_ULONGLONG = ULLONG_MAX
self.assertEqual(ts.T_ULONGLONG, ULLONG_MAX)
## make sure these will accept a plain int as well as a long
ts.T_LONGLONG = 3
self.assertEqual(ts.T_LONGLONG, 3)
ts.T_ULONGLONG = 4
self.assertEqual(ts.T_ULONGLONG, 4)
def test_inplace_string(self):
self.assertEqual(ts.T_STRING_INPLACE, "hi")
self.assertRaises(TypeError, setattr, ts, "T_STRING_INPLACE", "s")
self.assertRaises(TypeError, delattr, ts, "T_STRING_INPLACE")
class TestWarnings(unittest.TestCase):
def test_byte_max(self):
with test_support.check_warnings(('', RuntimeWarning)):
ts.T_BYTE = CHAR_MAX+1
def test_byte_min(self):
with test_support.check_warnings(('', RuntimeWarning)):
ts.T_BYTE = CHAR_MIN-1
def test_ubyte_max(self):
with test_support.check_warnings(('', RuntimeWarning)):
ts.T_UBYTE = UCHAR_MAX+1
def test_short_max(self):
with test_support.check_warnings(('', RuntimeWarning)):
ts.T_SHORT = SHRT_MAX+1
def test_short_min(self):
with test_support.check_warnings(('', RuntimeWarning)):
ts.T_SHORT = SHRT_MIN-1
def test_ushort_max(self):
with test_support.check_warnings(('', RuntimeWarning)):
ts.T_USHORT = USHRT_MAX+1
def test_main(verbose=None):
test_support.run_unittest(__name__)
if __name__ == "__main__":
test_main(verbose=True)
|
bjpop/berp
|
test/regression/features/assignment/assign_unpack.py
|
Python
|
bsd-3-clause
| 796
| 0.04397
|
# singleton tuple <- singleton tuple
x, = 0,
print(x)
# singleton tuple <- singleton list
x, = [-1]
print(x)
# binary tuple <- binary tuple
x,y = 1,2
print(x,y)
# binary tuple swap
x,y = y,x
print(x,y)
# ternary tuple <- ternary tuple
x,y,z = 3,4,5
print(x,y,z)
# singleton list <- singleton list
[x] = [42]
print(x)
# singleton list <- singleton tuple
[x] = 43,
print(x)
# binary list <- binary list
[x,y] = [6,7]
# binary list <- binary tuple
[x,y] = [44,45]
print(x,y)
# binary tuple (parens) <- binary list
(x,y) = [7,8]
print(x,y)
# binary tuple <- result of function call
(x,y) = (lambda: (9,10))()
print(x,y)
# nested binary tuple (parens) <- nested binary tuple (parens)
((x,y),z) = ((11,12),13)
print(x,y,z)
# nested binary tuple <- nested binary tuple
(x,y),z = (14,15),16
print(x,y,z)
|
cloudera/hue
|
apps/sqoop/src/sqoop/sqoop_properties.py
|
Python
|
apache-2.0
| 1,837
| 0.009799
|
#!/usr/bin/env python
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import errno
import logging
import os
from sqoop.conf import SQOOP_CONF_DIR
LOG = logging.getLogger(__name__)
_PROPERTIES_DICT = None
_CONF_SQOOP_AUTHENTICATION_TYPE = 'org.apache.sqoop.security.authentication.type'
def reset():
global _PROPERTIES_DICT
_PROPERTIES_DICT = None
def get_props():
if _PROPERTIES_DICT is None:
_parse_properties()
return _PROPERTIES_DICT
def has_sqoop_has_security():
  return get_props().get(_CONF_SQOOP_AUTHENTICATION_TYPE, 'SIMPLE').upper() == 'KERBEROS'
def _parse_properties():
global _PROPERTIES_DICT
properties_file = os.path.join(SQOOP_CONF_DIR.get(), 'sqoop.properties')
_PROPERTIES_DICT = _parse_site(properties_file)
def _parse_site(site_path):
try:
with open(site_path, 'r') as f:
data = f.read()
except IOError as err:
if err.errno != errno.ENOENT:
LOG.error('Cannot read from "%s": %s' % (site_path, err))
return
data = ""
return dict([line.split('=', 1) for line in data.split('\n') if '=' in line and not line.startswith('#')])
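# Illustration of the "key=value" format _parse_site() accepts (standalone
# sketch; comment lines and lines without '=' are skipped):
#   >>> text = "org.apache.sqoop.security.authentication.type=KERBEROS\n# note\n"
#   >>> dict(l.split('=', 1) for l in text.split('\n')
#   ...      if '=' in l and not l.startswith('#'))
#   {'org.apache.sqoop.security.authentication.type': 'KERBEROS'}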
|
ovaistariq/mha-helper
|
mha_helper/__init__.py
|
Python
|
gpl-3.0
| 900
| 0
|
# (c) 2013, Ovais Tariq <me@ovaistariq.net>
#
# This file is part of mha_helper
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# -*- coding: utf-8 -*-
__name__ = 'mha_helper'
__author__ = 'Ovais Tariq'
__email__ = 'me@ovaistariq.net'
__version__ = '0.4.2'
__url__ = 'https://github.com/ovaistariq/mha-helper'
|
meganbkratz/acq4
|
acq4/analysis/old/StdpCtrlTemplate.py
|
Python
|
mit
| 3,654
| 0.00301
|
# -*- coding: utf-8 -*-
from __future__ import print_function
# Form implementation generated from reading ui file './acq4/analysis/old/StdpCtrlTemplate.ui'
#
# Created: Tue Dec 24 01:49:15 2013
# by: PyQt4 UI code generator 4.10
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
try:
_fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
def _fromUtf8(s):
return s
try:
_encoding = QtGui.QApplication.UnicodeUTF8
    def _translate(context, text, disambig):
        return QtGui.QApplication.translate(context, text, disambig, _encoding)
except AttributeError:
def _translate(context, text, disambig):
        return QtGui.QApplication.translate(context, text, disambig)
class Ui_StdpCtrlWidget(object):
def setupUi(self, StdpCtrlWidget):
StdpCtrlWidget.setObjectName(_fromUtf8("StdpCtrlWidget"))
StdpCtrlWidget.resize(227, 321)
self.gridLayout = QtGui.QGridLayout(StdpCtrlWidget)
self.gridLayout.setMargin(0)
self.gridLayout.setSpacing(0)
self.gridLayout.setObjectName(_fromUtf8("gridLayout"))
self.label = QtGui.QLabel(StdpCtrlWidget)
self.label.setObjectName(_fromUtf8("label"))
self.gridLayout.addWidget(self.label, 0, 0, 1, 1)
self.thresholdSpin = QtGui.QDoubleSpinBox(StdpCtrlWidget)
self.thresholdSpin.setObjectName(_fromUtf8("thresholdSpin"))
self.gridLayout.addWidget(self.thresholdSpin, 0, 1, 1, 2)
self.label_2 = QtGui.QLabel(StdpCtrlWidget)
self.label_2.setObjectName(_fromUtf8("label_2"))
self.gridLayout.addWidget(self.label_2, 1, 0, 1, 1)
self.durationSpin = QtGui.QSpinBox(StdpCtrlWidget)
self.durationSpin.setObjectName(_fromUtf8("durationSpin"))
self.gridLayout.addWidget(self.durationSpin, 1, 1, 1, 2)
self.label_4 = QtGui.QLabel(StdpCtrlWidget)
self.label_4.setObjectName(_fromUtf8("label_4"))
self.gridLayout.addWidget(self.label_4, 2, 0, 1, 1)
self.slopeWidthSpin = SpinBox(StdpCtrlWidget)
self.slopeWidthSpin.setObjectName(_fromUtf8("slopeWidthSpin"))
self.gridLayout.addWidget(self.slopeWidthSpin, 2, 1, 1, 2)
self.apExclusionCheck = QtGui.QCheckBox(StdpCtrlWidget)
self.apExclusionCheck.setObjectName(_fromUtf8("apExclusionCheck"))
self.gridLayout.addWidget(self.apExclusionCheck, 3, 0, 1, 1)
self.label_3 = QtGui.QLabel(StdpCtrlWidget)
self.label_3.setObjectName(_fromUtf8("label_3"))
self.gridLayout.addWidget(self.label_3, 4, 0, 1, 2)
self.apthresholdSpin = QtGui.QDoubleSpinBox(StdpCtrlWidget)
self.apthresholdSpin.setObjectName(_fromUtf8("apthresholdSpin"))
self.gridLayout.addWidget(self.apthresholdSpin, 4, 2, 1, 1)
spacerItem = QtGui.QSpacerItem(20, 40, QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Expanding)
self.gridLayout.addItem(spacerItem, 5, 0, 1, 1)
self.retranslateUi(StdpCtrlWidget)
QtCore.QMetaObject.connectSlotsByName(StdpCtrlWidget)
def retranslateUi(self, StdpCtrlWidget):
StdpCtrlWidget.setWindowTitle(_translate("StdpCtrlWidget", "Form", None))
self.label.setText(_translate("StdpCtrlWidget", "PspThreshold:", None))
self.label_2.setText(_translate("StdpCtrlWidget", "Post-stim Duration (ms):", None))
self.label_4.setText(_translate("StdpCtrlWidget", "Slope width:", None))
self.apExclusionCheck.setText(_translate("StdpCtrlWidget", "Exclude APs", None))
self.label_3.setText(_translate("StdpCtrlWidget", "Exclusion Threshold (mV):", None))
from SpinBox import SpinBox
|
enfancemill/baobaocloud
|
baobaologic/config.py
|
Python
|
gpl-3.0
| 260
| 0
|
# coding: utf-8
class ActMail(object):
subject = '宝宝云帐号注册激活邮件'
content = '感谢您申请注册宝宝云账号! 请点击链接完成注册: '
source = 'enfancemill@gmail.com'
    secret = 'kwhr8xkq4eoq6bvvh6nvr6267lrdcqb2'
|
miteshvp/fabric8-analytics-worker
|
f8a_worker/workers/cvedbsync.py
|
Python
|
gpl-3.0
| 4,809
| 0.004367
|
"""Update vulnerability sources."""
from selinon import StoragePool
from f8a_worker.base import BaseTask
from f8a_worker.enums import EcosystemBackend
from f8a_worker.models import Ecosystem
from f8a_worker.solver import get_ecosystem_solver, OSSIndexDependencyParser
from f8a_worker.workers import CVEcheckerTask
class CVEDBSyncTask(BaseTask):
"""Update vulnerability sources."""
def components_to_scan(self, previous_sync_timestamp, only_already_scanned):
"""Get EPV that were recently updated in OSS Index, so they can contain new vulnerabilities.
Get components (e:p:v) that were recently (since previous_sync_timestamp) updated
in OSS Index, which means that they can contain new vulnerabilities.
:param previous_sync_timestamp: timestamp of previous check
:param only_already_scanned: include already scanned components only
:return: generator of e:p:v
"""
# TODO: reduce cyclomatic complexity
to_scan = []
rdb = StoragePool.get_connected_storage('BayesianPostgres')
for ecosystem in ['nuget']:
ecosystem_solver = get_ecosystem_solver(self.storage.get_ecosystem(ecosystem),
with_parser=OSSIndexDependencyParser())
self.log.debug("Retrieving new %s vulnerabilities from OSS Index", ecosystem)
ossindex_updated_packages = CVEcheckerTask.\
query_ossindex_vulnerability_fromtill(ecosystem=ecosystem,
from_time=previous_sync_timestamp)
for ossindex_updated_package in ossindex_updated_packages:
if Ecosystem.by_name(rdb.session, ecosystem).is_backed_by(EcosystemBackend.maven):
package_name = "{g}:{n}".format(g=ossindex_updated_package['group'],
n=ossindex_updated_package['name'])
else:
package_name = ossindex_updated_package['name']
package_affected_versions = set()
for vulnerability in ossindex_updated_package.get('vulnerabilities', []):
for version_string in vulnerability.get('versions', []):
try:
resolved_versions = ecosystem_solver.\
solve(["{} {}".format(package_name, version_string)],
all_versions=True)
except Exception:
self.log.exception("Failed to resolve %r for %s:%s", version_string,
ecosystem, package_name)
continue
resolved_versions = resolved_versions.get(package_name, [])
if only_already_scanned:
already_scanned_versions =\
[ver for ver in resolved_versions if
self.storage.get_analysis_count(ecosystem, package_name, ver) > 0]
package_affected_versions.update(already_scanned_versions)
else:
package_affected_versions.update(resolved_versions)
for version in package_affected_versions:
to_scan.append({
'ecosystem': ecosystem,
'name': package_name,
'version': version
})
msg = "Components to be {prefix}scanned for vulnerabilities: {components}".\
format(prefix="re-" if only_already_scanned else "",
components=to_scan)
self.log.info(msg)
return to_scan
def execute(self, arguments):
"""Start the task.
        :param arguments: optional argument 'only_already_scanned' to run only
on already analysed packages
:return: EPV dict describing which packages should be analysed
"""
only_already_scanned = arguments.pop('only_already_scanned', True) if arguments else True
        ignore_modification_time = (arguments.pop('ignore_modification_time', False)
                                    if arguments else False)
CVEcheckerTask.update_victims_cve_db_on_s3()
self.log.debug('Updating sync associated metadata')
s3 = StoragePool.get_connected_storage('S3VulnDB')
previous_sync_timestamp = s3.update_sync_date()
if ignore_modification_time:
previous_sync_timestamp = 0
# get components which might have new vulnerabilities since previous sync
to_scan = self.components_to_scan(previous_sync_timestamp, only_already_scanned)
return {'modified': to_scan}
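    # Sketch of the optional-arguments contract documented in execute() above
    # (hypothetical dispatch payload, not an actual Selinon invocation):
    #   arguments = {'only_already_scanned': False, 'ignore_modification_time': True}
    # would rescan every affected EPV rather than only already-analysed ones,
    # and would treat the previous sync timestamp as 0 (i.e. scan everything).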
|
cligu/gitdox
|
modules/gitdox_sql.py
|
Python
|
apache-2.0
| 8,292
| 0.032562
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Data access functions to read from and write to the SQLite backend.
"""
import sqlite3
import codecs
import os
import re
def setup_db():
dbpath = os.path.dirname(os.path.realpath(__file__)) + os.sep +".."+os.sep+"gitdox.db"
conn = sqlite3.connect(dbpath)
cur = conn.cursor()
# Drop tables if they exist
cur.execute("DROP TABLE IF EXISTS docs")
cur.execute("DROP TABLE IF EXISTS users")
cur.execute("DROP TABLE IF EXISTS metadata")
cur.execute("DROP TABLE IF EXISTS validate")
conn.commit()
# Create tables
#user table not used
#cur.execute('''CREATE TABLE IF NOT EXISTS users
# (id INTEGER PRIMARY KEY AUTOINCREMENT, username text)''')
#docs table
cur.execute('''CREATE TABLE IF NOT EXISTS docs
(id INTEGER PRIMARY KEY AUTOINCREMENT, name text, corpus text, status text,assignee_username text ,filename text, content text, mode text, schema text, validation text, timestamp text, cache text)''')
#metadata table
cur.execute('''CREATE TABLE IF NOT EXISTS metadata
(docid INTEGER, metaid INTEGER PRIMARY KEY AUTOINCREMENT, key text, value text, corpus_meta text, UNIQUE (docid, metaid) ON CONFLICT REPLACE, UNIQUE (docid, key) ON CONFLICT REPLACE)''')
#validation table
cur.execute('''CREATE TABLE IF NOT EXISTS validate
(doc text, corpus text, domain text, name text, operator text, argument text, id INTEGER PRIMARY KEY AUTOINCREMENT)''')
conn.commit()
conn.close()
def create_document(doc_id, name, corpus, status, assigned_username, filename, content,mode="xml", schema='--none--'):
generic_query("INSERT INTO docs(id, name,corpus,status,assignee_username,filename,content,mode,schema) VALUES(?,?,?,?,?,?,?,'xml',?)",
(int(doc_id), name, corpus, status, assigned_username, filename, content, schema))
def generic_query(sql, params, return_new_id=False):
# generic_query("DELETE FROM rst_nodes WHERE doc=? and project=?",(doc,project))
dbpath = os.path.dirname(os.path.realpath(__file__)) + os.sep + ".." + os.sep + "gitdox.db"
conn = sqlite3.connect(dbpath)
with conn:
cur = conn.cursor()
if params is not None:
cur.execute(sql,params)
else:
cur.execute(sql)
if return_new_id:
return cur.lastrowid
else:
rows = cur.fetchall()
return rows
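# Usage sketch for generic_query (illustrative; these rows are hypothetical).
# Parameterized queries keep values out of the SQL string:
#   rows = generic_query("SELECT id, name FROM docs WHERE corpus=?", ("demo",))
#   new_id = generic_query("INSERT INTO validate(doc, corpus) VALUES(?,?)",
#                          ("doc1", "demo"), return_new_id=True)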
def invalidate_doc_by_name(doc,corpus):
generic_query("UPDATE docs SET validation=NULL WHERE name like ? and corpus like ?", (doc, corpus))
def invalidate_ether_docs(doc,corpus):
generic_query("UPDATE docs SET validation=NULL WHERE name like ? and corpus like ? and mode = 'ether'", (doc, corpus))
def invalidate_doc_by_id(id):
generic_query("UPDATE docs SET validation=NULL WHERE id=?", (id,))
def doc_exists(doc,corpus):
res = generic_query("SELECT name from docs where name=? and corpus=?",(doc,corpus))
return len(res) > 0
def save_changes(id,content):
"""save change from the editor"""
generic_query("UPDATE docs SET content=? WHERE id=?",(content,id))
invalidate_doc_by_id(id)
def update_assignee(doc_id,user_name):
generic_query("UPDATE docs SET assignee_username=? WHERE id=?",(user_name,doc_id))
def update_status(id,status):
generic_query("UPDATE docs SET status=? WHERE id=?",(status,id))
def update_docname(id,docname):
generic_query("UPDATE docs SET name=? WHERE id=?",(docname,id))
invalidate_doc_by_id(id)
def update_filename(id,filename):
generic_query("UPDATE docs SET filename=? WHERE id=?",(filename,id))
def update_corpus(id,corpusname):
generic_query("UPDATE docs SET corpus=? WHERE id=?",(corpusname,id))
invalidate_doc_by_id(id)
def update_mode(id,mode):
generic_query("UPDATE docs SET mode=? WHERE id=?",(mode,id))
def update_schema(id, schema):
generic_query("UPDATE docs SET schema=? WHERE id=?", (schema, id))
def delete_doc(id):
generic_query("DELETE FROM docs WHERE id=?",(id,))
	generic_query("DELETE FROM metadata WHERE docid=?", (id,))
def cell(text):
if isinstance(text, int):
text = str(text)
return "\n <td>" + text + "</td>"
def update_meta(meta_id,doc_id,key,value,corpus=False):
if corpus:
_, corpus_name, _, _, _, _, _ = get_doc_info(doc_id)
generic_query("REPLACE INTO metadata(metaid,docid,key,value,corpus_meta) VALUES(?,?,?,?,?)", (meta_id, None, key, value,corpus_
|
name))
else:
generic_query("REPLACE INTO metadata(metaid,docid,key,value,corpus_meta) VALUES(?,?,?,?,?)",(meta_id,doc_id,key,value,None))
invalidate_doc_by_id(doc_id)
def save_meta(doc_id,key,value,corpus=False):
if corpus:
_, corpus_name, _, _, _, _, _ = get_doc_info(doc_id)
new_id = generic_query("REPLACE INTO metadata(docid,key,value,corpus_meta) VALUES(?,?,?,?)", (None, key, value,corpus_name), return_new_id = True)
else:
new_id = generic_query("INSERT OR REPLACE INTO metadata(docid,key,value,corpus_meta) VALUES(?,?,?,?)",(doc_id,key,value,None), return_new_id = True)
invalidate_doc_by_id(doc_id)
return new_id
def delete_meta(metaid, doc_id, corpus=False):
generic_query("DELETE FROM metadata WHERE metaid=?", (metaid,))
if not corpus:
invalidate_doc_by_id(doc_id)
def get_doc_info(doc_id):
res = generic_query("SELECT name,corpus,filename,status,assignee_username,mode,schema FROM docs WHERE id=?", (int(doc_id),))
if len(res) > 0:
return res[0]
else:
return res
def get_doc_content(doc_id):
res = generic_query("SELECT content FROM docs WHERE id=?", (int(doc_id),))
return res[0][0]
def get_all_doc_ids_for_corpus(corpus):
return map(lambda x: x[0],
generic_query("SELECT id FROM docs WHERE corpus=?", (corpus,)))
def get_all_docs(corpus=None, status=None):
if corpus is None:
if status is None:
return generic_query("SELECT id, name, corpus, mode, content FROM docs", None)
else:
return generic_query("SELECT id, name, corpus, mode, content FROM docs where status=?", (status,))
else:
if status is None:
return generic_query("SELECT id, name, corpus, mode, content FROM docs where corpus=?", (corpus,))
else:
return generic_query("SELECT id, name, corpus, mode, content FROM docs where corpus=? and status=?", (corpus, status))
def get_doc_meta(doc_id, corpus=False):
if corpus:
fields = get_doc_info(doc_id)
if len(fields) > 0:
_, corpus_name, _, _, _, _, _ = fields
return generic_query("SELECT * FROM metadata WHERE corpus_meta=? ORDER BY key COLLATE NOCASE",(corpus_name,))
else:
return []
else:
return generic_query("SELECT * FROM metadata WHERE docid=? ORDER BY key COLLATE NOCASE", (int(doc_id),))
def get_corpora():
return generic_query("SELECT DISTINCT corpus FROM docs ORDER BY corpus COLLATE NOCASE", None)
def get_validate_rules(sort=None, domain=None):
query = "SELECT corpus, doc, domain, name, operator, argument, id FROM validate"
args = []
if domain:
query += " WHERE domain=? "
args.append(domain)
if sort:
query += " ORDER BY " + sort
return generic_query(query, args)
def get_rule_domain(id):
return generic_query("SELECT domain FROM validate WHERE id=?", (id,))[0][0]
def get_xml_rules():
return get_validate_rules(domain='xml')
def get_meta_rules():
return get_validate_rules(domain='meta')
def get_ether_rules():
return get_validate_rules(domain='ether')
def get_export_rules():
return get_validate_rules(domain='export')
def create_validate_rule(doc, corpus, domain, name, operator, argument):
new_id = generic_query("INSERT INTO validate(doc,corpus,domain,name,operator,argument) VALUES(?,?,?,?,?,?)", (doc, corpus, domain, name, operator, argument), return_new_id = True)
if domain == "meta":
invalidate_doc_by_name("%","%")
else:
invalidate_ether_docs("%","%")
return new_id
def delete_validate_rule(id):
generic_query("DELETE FROM validate WHERE id=?", (int(id),))
invalidate_doc_by_name("%", "%")
def update_validate_rule(doc, corpus, domain, name, operator, argument, id):
generic_query("UPDATE validate SET doc = ?, corpus = ?, domain = ?, name = ?, operator = ?, argument = ? WHERE id = ?",(doc, corpus, domain, name, operator, argument, id))
if domain == "meta":
invalidate_doc_by_name("%", "%")
else:
invalidate_ether_docs("%", "%")
def update_validation(doc_id,validation):
generic_query("UPDATE docs SET validation=? where id=?",(validation,doc_id))
def upd
|
bwc126/MLND-Subvocal
|
simple_svr.py
|
Python
|
mit
| 290
| 0.003448
|
import pcf8591read
"""
This is a barebones script for controlling the work flow of recording EMG words and associating the data with a specific word, captured in the filename.
"""
reader = pcf8591read.adc_reader()
filename = input('Current word:')
reader.record = True
reader.run(filename)
|
plotly/python-api
|
packages/python/plotly/plotly/validators/volume/_reversescale.py
|
Python
|
mit
| 457
| 0.002188
|
import _plotly_utils.basevalidators
class ReversescaleValidator(_plotly_utils.basevalidators.BooleanValidator):
def __init__(self, plotly_name="reversescale", parent_name="volume", **kwargs):
super(ReversescaleValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
            edit_type=kwargs.pop("edit_type", "calc"),
role=kwargs.pop("role", "style"),
**kwargs
)
|
dims/nova
|
nova/cmd/dhcpbridge.py
|
Python
|
apache-2.0
| 5,189
| 0.000193
|
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Handle lease database updates from DHCP servers.
"""
from __future__ import print_function
import os
import sys
import traceback
from oslo_config import cfg
from oslo_log import log as logging
from oslo_serialization import jsonutils
from oslo_utils import importutils
from nova.conductor import rpcapi as conductor_rpcapi
import nova.conf
from nova import config
from nova import context
import nova.db.api
from nova import exception
from nova.i18n import _LE, _LW
from nova.network import rpcapi as network_rpcapi
from nova import objects
from nova.objects import base as objects_base
from nova import rpc
CONF = nova.conf.CONF
CONF.import_opt('host', 'nova.netconf')
CONF.import_opt('network_manager', 'nova.service')
LOG = logging.getLogger(__name__)
def add_lease(mac, ip_address):
"""Set the IP that was assigned by the DHCP server."""
api = network_rpcapi.NetworkAPI()
api.lease_fixed_ip(context.get_admin_context(), ip_address, CONF.host)
def old_lease(mac, ip_address):
"""Called when an old lease is recognized."""
# NOTE(vish): We assume we heard about this lease the first time.
# If not, we will get it the next time the lease is
# renewed.
pass
def del_lease(mac, ip_address):
"""Called when a lease expires."""
api = network_rpcapi.NetworkAPI()
api.release_fixed_ip(context.get_admin_context(), ip_address,
CONF.host, mac)
def init_leases(network_id):
"""Get the list of hosts for a network."""
ctxt = context.get_admin_context()
network = objects.Network.get_by_id(ctxt, network_id)
network_manager = importutils.import_object(CONF.network_manager)
return network_manager.get_dhcp_leases(ctxt, network)
def add_action_parsers(subparsers):
subparsers.add_parser('init')
# NOTE(cfb): dnsmasq always passes mac, and ip. hostname
# is passed if known. We don't care about
# hostname, but argparse will complain if we
# do not accept it.
for action in ['add', 'del', 'old']:
parser = subparsers.add_parser(action)
parser.add_argument('mac')
parser.add_argument('ip')
parser.add_argument('hostname', nargs='?', default='')
parser.set_defaults(func=globals()[action + '_lease'])
CONF.register_cli_opt(
cfg.SubCommandOpt('action',
title='Action options',
help='Available dhcpbridge options',
handler=add_action_parsers))
def block_db_access():
class NoDB(object):
def __getattr__(self, attr):
return self
def __call__(self, *args, **kwargs):
stacktrace = "".join(traceback.format_stack())
            LOG.error(_LE('No db access allowed in nova-dhcpbridge: %s'),
stacktrace)
raise exception.DBNotAllowed('nova-dhcpbridge')
nova.db.api.IMPL = NoDB()
def main():
"""Parse environment and arguments and call the appropriate action."""
config.parse_args(sys.argv,
default_config_files=jsonutils.loads(os.environ['CONFIG_FILE']))
logging.setup(CONF, "nova")
global LOG
LOG = logging.getLogger('nova.dhcpbridge')
if CONF.action.name == 'old':
        # NOTE(sdague): old is the most frequent message sent, and
# it's a noop. We should just exit immediately otherwise we
# can stack up a bunch of requests in dnsmasq. A SIGHUP seems
# to dump this list, so actions queued up get lost.
return
objects.register_all()
if not CONF.conductor.use_local:
block_db_access()
objects_base.NovaObject.indirection_api = \
conductor_rpcapi.ConductorAPI()
else:
LOG.warning(_LW('Conductor local mode is deprecated and will '
'be removed in a subsequent release'))
if CONF.action.name in ['add', 'del']:
LOG.debug("Called '%(action)s' for mac '%(mac)s' with IP '%(ip)s'",
{"action": CONF.action.name,
"mac": CONF.action.mac,
"ip": CONF.action.ip})
CONF.action.func(CONF.action.mac, CONF.action.ip)
else:
try:
network_id = int(os.environ.get('NETWORK_ID'))
except TypeError:
LOG.error(_LE("Environment variable 'NETWORK_ID' must be set."))
return(1)
print(init_leases(network_id))
rpc.cleanup()
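# How dnsmasq is expected to invoke this bridge, given the argparse wiring in
# add_action_parsers() above (illustrative command lines):
#   nova-dhcpbridge add <mac> <ip> [hostname]   -> add_lease()
#   nova-dhcpbridge del <mac> <ip> [hostname]   -> del_lease()
#   nova-dhcpbridge old <mac> <ip> [hostname]   -> no-op, main() exits early
#   nova-dhcpbridge init                        -> prints leases for $NETWORK_ID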
|
rahulunair/nova
|
nova/objects/vcpu_model.py
|
Python
|
apache-2.0
| 2,317
| 0
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_serialization import jsonutils
from nova.db import api as db
from nova.objects import base
from nova.objects import fields
@base.NovaObjectRegistry.register
class VirtCPUModel(base.NovaObject):
# Version 1.0: Initial version
VERSION = '1.0'
fields = {
'arch': fields.ArchitectureField(nullable=True),
'vendor': fields.StringField(nullable=True),
'topology': fields.ObjectField('VirtCPUTopology',
nullable=True),
'features': fields.ListOfObjectsField("VirtCPUFeature",
default=[]),
'mode': fields.CPUModeField(nullable=True),
'model': fields.StringField(nullable=True),
'match': fields.CPUMatchField(nullable=True),
}
def obj_load_attr(self, attrname):
setattr(self, attrname, None)
def to_json(self):
return jsonutils.dumps(self.obj_to_primitive())
@classmethod
def from_json(cls, jsonstr):
return cls.obj_from_primitive(jsonutils.loads(jsonstr))
@base.remotable_classmethod
def get_by_instance_uuid(cls, context, instance_uuid):
db_extra = db.instance_extra_get_by_instance_uuid(
context, instance_uuid, columns=['vcpu_model'])
if not db_extra or not db_extra['vcpu_model']:
            return None
        return cls.obj_from_primitive(jsonutils.loads(db_extra['vcpu_model']))
@base.NovaObjectRegistry.register
class VirtCPUFeature(base.NovaObject):
VERSION = '1.0'
fields = {
'policy': fields.CPUFeaturePolicyField(nullable=True),
'name': fields.StringField(nullable=False),
}
def obj_load_attr(self, attrname):
setattr(self, attrname, None)
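# Round-trip sketch for the JSON helpers above (illustrative; running it needs
# the nova object registry and oslo.serialization importable):
#   model = VirtCPUModel(vendor='GenuineIntel', model='Haswell')
#   restored = VirtCPUModel.from_json(model.to_json())
#   assert restored.model == 'Haswell'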
|
accpy/accpy
|
accpy/simulate/const.py
|
Python
|
gpl-3.0
| 1,881
| 0
|
# -*- coding: utf-8 -*-
'''
accpy.simulate.const
Felix Kramer (felix.kramer@physik.hu-berlin.de)
'''
# ratio of circumference to diameter of circle
pi = 3.141592653589793115997963468544185161590576171875
# speed of light----------------------------------------/ (m/s)
cl = 299792458.
# elementary charge-------------------------------------/ (e)=(As)=(C)
qe = 1.602176565e-19
# electron mass-----------------------------------------/ (kg)
me = 9.10938291e-31
# proton mass-------------------------------------------/ (kg)
mp = 1.672621777e-27
# muon mass---------------------------------------------/ (kg)
mu = 1.883531475e-28
# electron restenergy-----------------------------------/ (J)=(Nm)=(Ws)
Ee = me*cl**2
# proton restenergy-------------------------------------/ (J)=(Nm)=(Ws)
Ep = mp*cl**2
# muon restenergy---------------------------------------/ (J)=(Nm)=(Ws)
Eu = mu*cl**2
# classical radius of electron--------------------------/ (m)
re = qe**2/(me*1e7)
# classical radius of proton----------------------------/ (m)
rp = qe**2/(mp*1e7)
# classical radius of muon------------------------------/ (m)
ru = qe**2/(mu*1e7)
# vacuum permeability / magnetic field contant----------/ (N/A^2)=(Henry/m)
u0 = 4*pi*1E-7
# vacuum permittivity / elektrical field const----------/ (As/Vm)
e0 = 1/(u0*cl**2)
# Planck constant---------------------------------------/ (Js)
hp = 6.62606957e-34
# reduced Planck constant-------------------------------/ (Js)
hb = hp/2/pi
# Boltzmann constant------------------------------------/ (J/K)
kb = 1.3806488e-23
# Avogadro number---------------------------------------/ (1/mole)
NA = 6.02214129e23
# gas constant------------------------------------------/ (J/Kmole)
RG = kb*NA
# gravitational constant--------------------------------/ (Nm^2/kg^2)
Gr = 6.67384e-11
# gravitational acceleration Berlin---------------------/ (m/s^2)
ge = 9.812753
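if __name__ == '__main__':
    # Illustrative sanity check (not part of the original module): the electron
    # rest energy converted from joules to eV via qe should be ~0.511 MeV.
    print('electron rest energy: %.1f keV' % (Ee / qe / 1e3))  # ~511.0 keV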
|
aggendo/jDesigner
|
configuration.py
|
Python
|
gpl-2.0
| 5,488
| 0.013484
|
import visualizer as v
import os as os
import json
from shutil import rmtree
from os.path import expanduser
from os.path import join as OSpathJoin
global appLoc
appLoc = "C:\Users\john\Desktop\jDesigner"#C:\\Program Files\\aggendo\\jCode\\"
configFold = "C:\Users\john\Desktop\jDesigner\config"
recent = ""
canvas = ""
shortcuts = ""
prefs = ""
#jDe = json.JSONDecoder
recF = None   # placeholder; rebound to a real file object where used
prefF = None  # (the py2-only `file` builtin breaks on Python 3)
tempFold = "C:\\Users\\john\\Desktop\\jDesigner\\temp"
def genPaths():
    global appLoc, configFold, tempFold  # assignments below must rebind the module globals
    partialPath = v.__file__  # get the path to visualizer.pyc
    partialPath = partialPath[:-15]  # strip away the /visualizer.pyc
    appLoc = partialPath
    configFold = OSpathJoin(partialPath, "config")
    tempFold = OSpathJoin(partialPath, "temp")
    print(partialPath)
genPaths()
def getTemp():
return(tempFold) #TODO make tempFold configurable
def singleton(cls):
instances = {}
def getinstance():
if cls not in instances:
instances[cls] = cls()
return instances[cls]
return getinstance
@singleton
class MyClass:
appLoc = ""
    def __init__(self):
if not os.path.exists(appLoc):
os.makedirs(appLoc)
def getRecent():
recF = open(recent, "r")
jRec = json.load(recF)
recF.close()
return(jRec)
#TODO: add the path to identical filenames in the drop-down
def lastFile():
recF = open(recent, "r")
jRec = json.loads(recF.read())
recF.close()
if(len(jRec)!=0):
return(jRec[0]["name"])
else:
return(None)
def addRecent(name, path):
recF = open(recent, "r")
jDe = json.JSONDecoder()
jRec = jDe.decode(recF.read())
recF.close()
del jDe
for dic in jRec:
if(dic["path"]==path):
jRec.remove(dic)
break
#if(jRec.count({name: path})==1):
#jRec.remove({name: path})
if(len(jRec)==getSetting('recents')):
del jRec[getSetting('recents')-1]
jRec.insert(0, {"name": name, "path": path})
recF = open(recent, "w
|
+")
recF.write(json.dumps(jRec, sort_keys=True, indent=4, separators=(',', ': ')))
recF.close()
del jRec
def printRecents():
recF = open(recent, "r")
print(recF.read())
recF.close()
def getSetting(name):
global prefs
prefF = open(prefs, "r")
jPref = json.loads(prefF.read())
    prefF.close()
return(jPref[name])
def storeSetting(name, value):
prefF = open(prefs, "r")
jPref = json.loads(prefF.read())
prefF.close()
    prefF = open(prefs, "w+")
jPref[name] = value
prefF.write(json.dumps(jPref, sort_keys=True, indent=4, separators=(',', ': ')))
prefF.close()
def createDefaultPrefs():
jPref = {}
jPref['recents'] = 3
jPref['sizeX'] = 700
jPref['sizeY'] = 600
jPref['defaultBitSize'] = 3.3
jPref["defaultFolder"] = expanduser("~")
jPref['defaultHomingPos'] = 0 #0=bot left, 1 = bot right, 2 = top left 3=top right
return(json.dumps(jPref, sort_keys=True, indent=4, separators=(',', ': ')))
def createRecentsList():
ret = [{'file.j':'location'},]
return(ret)
def __init__():
global recent
global canvas
global shortcuts
global prefs
if not os.path.exists(appLoc):
print("creating files")
os.makedirs(appLoc)
if not os.path.exists(configFold):
os.mkdir(configFold)#{"filename": "filepath"},{"otherName": "filepath"}
recent = open(os.path.join(configFold, "recent.json"), "w+")
recent.write('[]')# indent=4))# separtors=(',', ': ')))
canvas = open(os.path.join(configFold, "canvas.json"), "w+")
shortcuts = open(os.path.join(configFold, "shortcuts.json"), "w+")
prefs = open(os.path.join(configFold, "prefs.json"), "w+")
prefs.write(createDefaultPrefs())
#files are created
recent.close()
canvas.close()
shortcuts.close()
prefs.close()
recent = os.path.join(configFold, "recent.json")
canvas = os.path.join(configFold, "canvas.json")
shortcuts = os.path.join(configFold, "shortcuts.json")
prefs = os.path.join(configFold, "prefs.json")
def __kill__():
global recF
global prefF
global recent
global canvas
global shortcuts
global prefs
try:
recF.close()
except:
pass
try:
canvas.close()
except:
pass
try:
shortcuts.close()
except:
pass
try:
prefF.close()
except:
pass
del recent
del canvas
del shortcuts
del prefs
del prefF
del recF
#del jDe
if(__name__=="__main__"):
#singleton();
try:
rmtree(configFold)
except:
pass
try:
__init__()
addRecent('test file', 'testLoc')
addRecent('test2', 'loc')
addRecent('test3', 'loc1')
addRecent('test3', 'loc1')
printRecents()
__kill__()
except Exception as inst:
# print type(inst) # the exception instance
# print inst.args # arguments stored in .args
# print inst # __str__ allows args to be printed directly
__kill__()
raise
if(prefs==""):
try:
__init__() #find a way to use with statements to close files
except Exception as inst:
# print type(inst) # the exception instance
# print inst.args # arguments stored in .args
# print inst # __str__ allows args to be printed directly
__kill__()
raise
|
showell/zulip
|
zerver/migrations/0301_fix_unread_messages_in_deactivated_streams.py
|
Python
|
apache-2.0
| 909
| 0.0033
|
from django.db import migrations
class Migration(migrations.Migration):
"""
    We're changing the stream deactivation process to make it mark all messages
in the stream as read. For things to be consistent with streams that have been
deactivated before this change, we need a migration to fix those old streams,
to have all messages marked as read.
"""
dependencies = [
('zerver', '0300_add_attachment_is_web_public'),
]
operations = [
migrations.RunSQL(
sql="""
UPDATE zerver_usermessage SET flags = flags | 1
FROM zerver_message
INNER JOIN zerver_stream ON zerver_stream.recipient_id = zerver_message.recipient_id
WHERE zerver_message.id = zerver_usermessage.message_id
AND zerver_stream.deactivated;
""",
reverse_sql="",
),
]
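# Note on the bitmask in the SQL above (illustrative): UserMessage.flags is a
# bit field whose lowest bit is the "read" flag, so `flags | 1` marks a message
# as read while leaving every other flag bit untouched.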
|
hiteshgarg14/openstates
|
scrapers/de/__init__.py
|
Python
|
gpl-3.0
| 3,685
| 0.000271
|
from utils import url_xpath, State
from .people import DEPersonScraper
from .bills import DEBillScraper
# from .events import DEEventScraper
# from .committees import DECommitteeScraper
class Delaware(State):
scrapers = {
"people": DEPersonScraper,
"bills": DEBillScraper,
# 'events': DEEventScraper,
# 'committees': DECommitteeScraper,
}
legislative_sessions = [
{
"_scraped_name": "1998 - 2000 (GA 140)",
"identifier": "140",
"name": "140th General Assembly (1999-2000)",
"start_date": "1999-01-05",
"end_date": "2001-01-01",
},
{
"_scraped_name": "2000 - 2002 (GA 141)",
"identifier": "141",
"name": "141st General Assembly (2001-2002)",
"start_date": "2001-01-02",
"end_date": "2003-01-01",
},
{
"_scraped_name": "2002 - 2004 (GA 142)",
"identifier": "142",
"name": "142nd General Assembly (2003-2004)",
"start_date": "2003-01-07",
"end_date": "2005-01-01",
},
{
"_scraped_name": "2004 - 2006 (GA 143)",
"identifier": "143",
"name": "143rd General Assembly (2005-2006)",
"start_date": "2005-01-04",
"end_date": "2007-01-01",
},
{
"_scraped_name": "2006 - 2008 (GA 144)",
"identifier": "144",
"name": "144th General Assembly (2007-2008)",
"start_date": "2007-01-09",
"end_date": "2009-01-01",
},
{
"_scraped_name": "2008 - 2010 (GA 145)",
"identifier": "145",
"name": "145th General Assembly (2009-2010)",
"start_date": "2009-01-06",
"end_date": "2010-05-05",
},
{
"_scraped_name": "2010 - 2012 (GA 146)",
"identifier": "146",
"name": "146th General Assembly (2011-2012)",
"start_date": "2011-01-05",
"end_date": "2012-05-09",
},
{
"_scraped_name": "2012 - 2014 (GA 147)",
"identifier": "147",
"name": "147th General Assembly (2013-2014)",
"start_date": "2013-01-09",
"end_date": "2014-05-07",
},
{
"_scraped_name": "2014 - 2016 (GA 148)",
"identifier": "148",
"name": "148th General Assembly (2015-2016)",
"start_date": "2015-01-07",
"end_date": "2016-05-04",
},
{
"_scraped_name": "2016 - 2018 (GA 149)",
"identifier": "149",
"name": "14
|
9th General Assembly (2017-2018)",
"start_date": "2017-01-10",
"end_date": "2018-05-09",
},
{
"_scraped_name": "2018 - 2020 (GA 150)",
"identifier": "150",
"name": "150th General Assembly (2019-2020)",
"start_date": "2019-01-08",
"end_date": "2020-05-06",
},
# {
# "_scraped_name": "2020 - 2022 (GA 151)",
        #     "identifier": "151",
# "name": "151st General Assembly (2020-2022)",
# "start_date": "2021-01-12",
# "end_date": "2022-05-06",
# },
]
ignored_scraped_sessions = [
"2020 - 2022 (GA 151)"
]
def get_session_list(self):
url = "https://legis.delaware.gov/"
sessions = url_xpath(url, '//select[@id="billSearchGARefiner"]/option/text()')
sessions = [session.strip() for session in sessions if session.strip()]
return sessions
|
Code4SA/umibukela
|
umibukela/forms.py
|
Python
|
mit
| 5,345
| 0.001123
|
from django import forms
from django.contrib.gis.geos import Point
from widgets import AddAnotherWidgetWrapper
from django.core.exceptions import ValidationError
from .models import (Site, CycleResultSet, Monitor, ProgrammeResources,
ProgrammeImage)
class SiteForm(forms.ModelForm):
latitude = forms.DecimalField(
min_value=-90,
max_value=90,
required=False,
)
longitude = forms.DecimalField(
min_value=-180,
max_value=180,
required=False,
)
class Meta(object):
model = Site
exclude = []
widgets = {'coordinates': forms.HiddenInput()}
def __init__(self, *args, **kwargs):
if args: # If args exist
            data = args[0]
if data['latitude'] and data['longitude']:
latitude = float(data['latitude'])
longitude = float(data['longitude'])
data['coordinates'] = Point(longitude, latitude)
if 'instance' in kwargs and kwargs['instance'] is not None and kwargs['instance'].coordinates:
            coordinates = kwargs['instance'].coordinates.tuple
initial = kwargs.get('initial', {})
initial['longitude'] = coordinates[0]
initial['latitude'] = coordinates[1]
kwargs['initial'] = initial
super(SiteForm, self).__init__(*args, **kwargs)
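# Usage sketch for the coordinate handling above (hypothetical field values;
# validating it requires a configured GeoDjango environment):
#   form = SiteForm(data={'latitude': '-33.92', 'longitude': '18.42', ...})
# The two decimals are packed into Point(longitude, latitude) before the
# hidden `coordinates` field is bound; editing an instance reverses this.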
class CycleResultSetForm(forms.ModelForm):
site_option_name = forms.CharField(widget=forms.TextInput)
class Meta(object):
model = CycleResultSet
exclude = []
def __init__(self, *args, **kwargs):
super(CycleResultSetForm, self).__init__(*args, **kwargs)
crs = kwargs.get('instance', None)
if crs:
partner = crs.partner
else:
partner = None
self.fields['monitors'].queryset = Monitor.objects.filter(
partner=partner)
self.fields[
'site_option_name'].help_text = "This is the name of the option for this site in the form, e.g. for 'Folweni clinic' it's probably 'folweni' (without the single quotes). You can find the names of options in the relevant Survey admin page."
class CRSFromKoboForm(forms.Form):
def __init__(self, *args, **kwargs):
facilities = kwargs.pop('facilities')
super(CRSFromKoboForm, self).__init__(*args, **kwargs)
for i, facility in enumerate(facilities):
crs_field = forms.ModelChoiceField(
queryset=CycleResultSet.objects.order_by('site__name').all(),
label=facility['label'])
crs_field.widget = AddAnotherWidgetWrapper(crs_field.widget,
CycleResultSet)
self.fields['crs_%d' % i] = crs_field
self.fields['facility_%d' % i] = forms.CharField(
widget=forms.HiddenInput(), initial=facility['name'])
self.fields['num_facilities'] = forms.CharField(
widget=forms.HiddenInput(), initial=len(facilities))
class ProgrammeResourcesForm(forms.ModelForm):
class Meta:
model = ProgrammeResources
exclude = ('document_extension', )
def clean(self):
link = self.cleaned_data.get('link')
document = self.cleaned_data.get('document')
order_no = self.cleaned_data.get('order')
resource = self.cleaned_data.get('resource')
programme = self.cleaned_data.get('programme')
if resource.name == 'Link' and link is None:
raise ValidationError('Enter a link')
if resource.name == 'Reports' and document is None:
raise ValidationError('Upload a document')
if resource.name == 'Survey Instrument' and document is None:
raise ValidationError('Upload a document')
if link and document:
raise ValidationError(
"You cant have an External link and a Document")
if ProgrammeResources.objects.filter(
order=order_no, resource=resource,
programme=programme).exists():
raise ValidationError(
'A Resource already exists for this order number')
        if resource.name == 'Link' and document:
raise ValidationError(
'A resource of type Link cannot have a document, expecting a link'
)
if resource.name == 'Reports' and link:
raise ValidationError(
'A resource of type Reports cannot have a link, expecting a document'
)
if resource.name == 'Survey Instrument' and link:
raise ValidationError(
'A resource of type Survey Instrument cannot have a link, expecting a document'
)
return self.cleaned_data
class ProgrammeImageForm(forms.ModelForm):
class Meta:
model = ProgrammeImage
fields = '__all__'
def clean(self):
featured = self.cleaned_data.get('featured')
programme = self.cleaned_data.get('programme')
if featured:
if ProgrammeImage\
.objects\
.filter(programme=programme, featured=True):
raise ValidationError(
"An image in this programme is already marked as a featured image"
)
return self.cleaned_data
|
thiblahute/meson
|
run_project_tests.py
|
Python
|
apache-2.0
| 29,483
| 0.003426
|
#!/usr/bin/env python3
# Copyright 2012-2016 The Meson development team
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import itertools
import os
import subprocess
import shutil
import sys
import signal
from io import StringIO
from ast import literal_eval
from enum import Enum
import tempfile
from pathlib import Path, PurePath
from mesonbuild import build
from mesonbuild import environment
from mesonbuild import mesonlib
from mesonbuild import mlog
from mesonbuild import mtest
from mesonbuild.mesonlib import stringlistify, Popen_safe
from mesonbuild.coredata import backendlist
import argparse
import xml.etree.ElementTree as ET
import time
import multiprocessing
from concurrent.futures import ProcessPoolExecutor
import re
from run_tests import get_fake_options, run_configure, get_meson_script
from run_tests import get_backend_commands, get_backend_args_for_dir, Backend
from run_tests import ensure_backend_detects_changes
class BuildStep(Enum):
configure = 1
build = 2
test = 3
install = 4
clean = 5
validate = 6
class TestResult:
def __init__(self, msg, step, stdo, stde, mlog, conftime=0, buildtime=0, testtime=0):
self.msg = msg
self.step = step
self.stdo = stdo
self.stde = stde
self.mlog = mlog
self.conftime = conftime
self.buildtime = buildtime
self.testtime = testtime
class AutoDeletedDir:
def __init__(self, d):
self.dir = d
def __enter__(self):
os.makedirs(self.dir, exist_ok=True)
return self.dir
def __exit__(self, _type, value, traceback):
# We don't use tempfile.TemporaryDirectory, but wrap the
# deletion in the AutoDeletedDir class because
# it fails on Windows due antivirus programs
# holding files open.
mesonlib.windows_proof_rmtree(self.dir)
failing_logs = []
print_debug = 'MESON_PRINT_TEST_OUTPUT' in os.environ
under_ci = not {'TRAVIS', 'APPVEYOR'}.isdisjoint(os.environ)
do_debug = under_ci or print_debug
no_meson_log_msg = 'No meson-log.txt found.'
system_compiler = None
class StopException(Exception):
def __init__(self):
super().__init__('Stopped by user')
stop = False
def stop_handler(signal, frame):
global stop
stop = True
signal.signal(signal.SIGINT, stop_handler)
signal.signal(signal.SIGTERM, stop_handler)
def setup_commands(optbackend):
global do_debug, backend, backend_flags
global compile_commands, clean_commands, test_commands, install_commands, uninstall_commands
backend = optbackend
msbuild_exe = shutil.which('msbuild')
# Auto-detect backend if unspecified
if backend is None:
if msbuild_exe is not None:
backend = 'vs' # Meson will auto-detect VS version to use
else:
backend = 'ninja'
# Set backend arguments for Meson
if backend.startswith('vs'):
backend_flags = ['--backend=' + backend]
backend = Backend.vs
elif backend == 'xcode':
backend_flags = ['--backend=xcode']
backend = Backend.xcode
elif backend == 'ninja':
backend_flags = ['--backend=ninja']
backend = Backend.ninja
else:
raise RuntimeError('Unknown backend: {!r}'.format(backend))
compile_commands, clean_commands, test_commands, install_commands, \
uninstall_commands = get_backend_commands(backend, do_debug)
def get_relative_files_list_from_dir(fromdir):
paths = []
for (root, _, files) in os.walk(fromdir):
reldir = os.path.relpath(root, start=fromdir)
for f in files:
path = os.path.join(reldir, f).replace('\\', '/')
if path.startswith('./'):
path = path[2:]
paths.append(path)
return paths
def platform_fix_name(fname, compiler, env):
if '?lib' in fname:
if mesonlib.for_cygwin(env.is_cross_build(), env):
fname = re.sub(r'lib/\?lib(.*)\.so$', r'bin/cyg\1.dll', fname)
fname = re.sub(r'\?lib(.*)\.dll$', r'cyg\1.dll', fname)
else:
fname = re.sub(r'\?lib', 'lib', fname)
if fname.endswith('?exe'):
fname = fname[:-4]
if mesonlib.for_windows(env.is_cross_build(), env) or mesonlib.for_cygwin(env.is_cross_build(), env):
return fname + '.exe'
if fname.startswith('?msvc:'):
fname = fname[6:]
if compiler != 'cl':
return None
if fname.startswith('?gcc:'):
fname = fname[5:]
if compiler == 'cl':
return None
return fname
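# Examples of the installed_files.txt markers handled above (illustrative):
#   'usr/lib/?libfoo.so' -> 'usr/lib/libfoo.so' (or 'usr/bin/cygfoo.dll' on Cygwin)
#   'usr/bin/prog?exe'   -> 'usr/bin/prog' on Unix, 'usr/bin/prog.exe' on Windows
#   '?msvc:foo.pdb'      -> kept only when the compiler is 'cl' (None otherwise)
#   '?gcc:libfoo.a'      -> dropped (None) when the compiler is 'cl'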
def validate_install(srcdir, installdir, compiler, env):
# List of installed files
info_file = os.path.join(srcdir, 'installed_files.txt')
    # If this exists, the test does not install any other files
noinst_file = 'usr/no-installed-files'
expected = {}
ret_msg = ''
# Generate list of expected files
if os.path.exists(os.path.join(installdir, noinst_file)):
        expected[noinst_file] = False
elif os.path.exists(info_file):
with open(info_file) as f:
for line in f:
line = platform_fix_name(line.strip(), compiler, env)
if line:
expected[line] = False
# Check if expected files were found
for fname in expected:
file_path = os.path.join(installdir, fname)
if os.path.exists(file_path) or os.path.islink(file_path):
expected[fname] = True
for (fname, found) in expected.items():
if not found:
# Ignore missing PDB files if we aren't using cl
if fname.endswith('.pdb') and compiler != 'cl':
continue
ret_msg += 'Expected file {0} missing.\n'.format(fname)
# Check if there are any unexpected files
found = get_relative_files_list_from_dir(installdir)
for fname in found:
# Windows-specific tests check for the existence of installed PDB
# files, but common tests do not, for obvious reasons. Ignore any
# extra PDB files found.
        if fname not in expected and not (fname.endswith('.pdb') and compiler == 'cl'):
ret_msg += 'Extra file {0} found.\n'.format(fname)
return ret_msg
def log_text_file(logfile, testdir, stdo, stde):
global stop, executor, futures
logfile.write('%s\nstdout\n\n---\n' % testdir.as_posix())
logfile.write(stdo)
logfile.write('\n\n---\n\nstderr\n\n---\n')
logfile.write(stde)
logfile.write('\n\n---\n\n')
if print_debug:
try:
print(stdo)
except UnicodeError:
sanitized_out = stdo.encode('ascii', errors='replace').decode()
print(sanitized_out)
try:
print(stde, file=sys.stderr)
except UnicodeError:
sanitized_err = stde.encode('ascii', errors='replace').decode()
print(sanitized_err, file=sys.stderr)
if stop:
print("Aborting..")
for f in futures:
f[2].cancel()
executor.shutdown()
raise StopException()
def bold(text):
return mlog.bold(text).get_text(mlog.colorize_console)
def green(text):
return mlog.green(text).get_text(mlog.colorize_console)
def red(text):
return mlog.red(text).get_text(mlog.colorize_console)
def yellow(text):
return mlog.yellow(text).get_text(mlog.colorize_console)
def run_test_inprocess(testdir):
old_stdout = sys.stdout
sys.stdout = mystdout = StringIO()
old_stderr = sys.stderr
sys.stderr = mystderr = StringIO()
old_cwd = os.getcwd()
os.chdir(testdir)
test_log_fname = Path('meson-logs', 'testlog.txt')
try:
returncode_test = mtest.run(['--no-rebuild'])
if test
|
TouK/vumi
|
vumi/transports/smpp/smpp_transport.py
|
Python
|
bsd-3-clause
| 25,023
| 0
|
# -*- test-case-name: vumi.transports.smpp.tests.test_smpp_transport -*-
import warnings
from uuid import uuid4
from twisted.internet import reactor
from twisted.internet.defer import (
inlineCallbacks, maybeDeferred, returnValue, Deferred, succeed)
from twisted.internet.task import LoopingCall
from vumi.reconnecting_client import ReconnectingClientService
from vumi.transports.base import Transport
from vumi.message import TransportUserMessage
from vumi.transports.smpp.config import SmppTransportConfig
from vumi.transports.smpp.deprecated.transport import (
SmppTransportConfig as OldSmppTransportConfig)
from vumi.transports.smpp.deprecated.utils import convert_to_new_config
from vumi.transports.smpp.protocol import EsmeTransceiverFactory
from vumi.transports.smpp.sequence import RedisSequence
from vumi.transports.failures import FailureMessage
from vumi.persist.txredis_manager import TxRedisManager
from smpp.pdu_builder import BindTransceiver, BindReceiver, BindTransmitter
from vumi import log
def sequence_number_key(seq_no):
return 'sequence_number:%s' % (seq_no,)
def multipart_info_key(seq_no):
return 'multipart_info:%s' % (seq_no,)
def message_key(message_id):
return 'message:%s' % (message_id,)
def remote_message_key(message_id):
return 'remote_message:%s' % (message_id,)
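# Quick illustration (not part of the transport): the helpers above define
# the flat Redis key layout used by the message stash below.
assert sequence_number_key(17) == 'sequence_number:17'
assert multipart_info_key('abc-123') == 'multipart_info:abc-123'
assert remote_message_key('ext-1') == 'remote_message:ext-1'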
class SmppTransceiverProtocol(EsmeTransceiverFactory.protocol):
bind_pdu = BindTransceiver
def connectionMade(self):
EsmeTransceiverFactory.protocol.connectionMade(self)
config = self.vumi_transport.get_static_config()
password = config.password
# Overly long passwords should be truncated.
if len(password) > 8:
password = password[:8]
log.warning("Password longer than 8 characters, truncating.")
self.bind(
system_id=config.system_id,
password=password,
system_type=config.system_type,
interface_version=config.interface_version,
address_range=config.address_range)
def connectionLost(self, reason):
d = maybeDeferred(self.vumi_transport.pause_connectors)
d.addCallback(
lambda _: EsmeTransceiverFactory.protocol.connectionLost(
self, reason))
return d
    def on_smpp_bind(self, sequence_number):
        d = maybeDeferred(EsmeTransceiverFactory.protocol.on_smpp_bind,
self, sequence_number)
d.addCallback(lambda _: self.vumi_transport.unpause_connectors())
return d
def on_submit_sm_resp(self, sequence_number, smpp_message_id,
command_status):
cb = {
'ESME_ROK': self.vumi_transport.handle_submit_sm_success,
'ESME_RTHROTTLED': self.vumi_transport.handle_submit_sm_throttled,
'ESME_RMSGQFUL': self.vumi_transport.handle_submit_sm_throttled,
}.get(command_status, self.vumi_transport.handle_submit_sm_failure)
message_stash = self.vumi_transport.message_stash
d = message_stash.get_sequence_number_message_id(sequence_number)
d.addCallback(
message_stash.set_remote_message_id, smpp_message_id)
d.addCallback(
self._handle_submit_sm_resp_callback, smpp_message_id,
command_status, cb)
return d
def _handle_submit_sm_resp_callback(self, message_id, smpp_message_id,
command_status, cb):
if message_id is None:
# We have no message_id, so log a warning instead of calling the
# callback.
log.warning("Failed to retrieve message id for deliver_sm_resp."
" ack/nack from %s discarded."
% self.vumi_transport.transport_name)
else:
return cb(message_id, smpp_message_id, command_status)
class SmppReceiverProtocol(SmppTransceiverProtocol):
bind_pdu = BindReceiver
class SmppTransmitterProtocol(SmppTransceiverProtocol):
bind_pdu = BindTransmitter
class SmppTransceiverClientFactory(EsmeTransceiverFactory):
protocol = SmppTransceiverProtocol
class SmppService(ReconnectingClientService):
def __init__(self, endpoint, factory):
ReconnectingClientService.__init__(self, endpoint, factory)
self.wait_on_protocol_deferreds = []
def clientConnected(self, protocol):
ReconnectingClientService.clientConnected(self, protocol)
while self.wait_on_protocol_deferreds:
deferred = self.wait_on_protocol_deferreds.pop()
deferred.callback(protocol)
def get_protocol(self):
if self._protocol is not None:
return succeed(self._protocol)
else:
d = Deferred()
self.wait_on_protocol_deferreds.append(d)
return d
def is_bound(self):
if self._protocol is not None:
return self._protocol.is_bound()
return False
def stopService(self):
if self._protocol is not None:
d = self._protocol.disconnect()
d.addCallback(
lambda _: ReconnectingClientService.stopService(self))
return d
return ReconnectingClientService.stopService(self)
class SmppMessageDataStash(object):
"""
Stash message data in Redis.
"""
def __init__(self, redis, config):
self.redis = redis
self.config = config
def init_multipart_info(self, message_id, part_count):
key = multipart_info_key(message_id)
expiry = self.config.third_party_id_expiry
d = self.redis.hmset(key, {
'parts': part_count,
})
d.addCallback(lambda _: self.redis.expire(key, expiry))
return d
def get_multipart_info(self, message_id):
key = multipart_info_key(message_id)
return self.redis.hgetall(key)
def _update_multipart_info_success_cb(self, mp_info, key, remote_id):
if not mp_info:
# No multipart data, so do nothing.
return
part_key = 'part:%s' % (remote_id,)
mp_info[part_key] = 'ack'
d = self.redis.hset(key, part_key, 'ack')
d.addCallback(lambda _: mp_info)
return d
def update_multipart_info_success(self, message_id, remote_id):
key = multipart_info_key(message_id)
d = self.get_multipart_info(message_id)
d.addCallback(self._update_multipart_info_success_cb, key, remote_id)
return d
def _update_multipart_info_failure_cb(self, mp_info, key, remote_id):
if not mp_info:
# No multipart data, so do nothing.
return
part_key = 'part:%s' % (remote_id,)
mp_info[part_key] = 'fail'
d = self.redis.hset(key, part_key, 'fail')
d.addCallback(lambda _: self.redis.hset(key, 'event_result', 'fail'))
d.addCallback(lambda _: mp_info)
return d
def update_multipart_info_failure(self, message_id, remote_id):
key = multipart_info_key(message_id)
d = self.get_multipart_info(message_id)
d.addCallback(self._update_multipart_info_failure_cb, key, remote_id)
return d
def _determine_multipart_event_cb(self, mp_info, message_id, event_type,
remote_id):
if not mp_info:
# We don't seem to have a multipart message, so just return the
# single-message data.
return (True, event_type, remote_id)
part_status_dict = dict(
(k[5:], v) for k, v in mp_info.items() if k.startswith('part:'))
remote_id = ','.join(sorted(part_status_dict.keys()))
event_result = mp_info.get('event_result', None)
if event_result is not None:
# We already have a result, even if we don't have all the parts.
event_type = event_result
elif len(part_status_dict) >= int(mp_info['parts']):
# We have all the parts, so we can determine the event type.
if all(pv == 'ack' for pv in part_status_dict.values()):
# All parts happy.
event_type = 'ack'
else:
|
b1-systems/kiwi
|
kiwi/utils/sync.py
|
Python
|
gpl-3.0
| 3,632
| 0
|
# Copyright (c) 2015 SUSE Linux GmbH. All rights reserved.
#
# This file is part of kiwi.
#
# kiwi is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# kiwi is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with kiwi. If not, see <http://www.gnu.org/licenses/>
#
import os
import logging
from stat import ST_MODE
import xattr
# project
from kiwi.command import Command
log = logging.getLogger('kiwi')
class DataSync:
"""
**Sync data from a source directory to a target directory
using the rsync protocol**
:param str source_dir: source directory path name
:param str target_dir: target directory path name
"""
def __init__(self, source_dir, target_dir):
self.source_dir = source_dir
self.target_dir = target_dir
def sync_data(self, options=None, exclude=None):
"""
Sync data from source to target using rsync
:param list options: rsync options
:param list exclude: file patterns to exclude
"""
target_entry_permissions = None
exclude_options = []
rsync_options = []
if options:
rsync_options = options
if not self.target_supports_extended_attributes():
warn_me = False
if '-X' in rsync_options:
rsync_options.remove('-X')
warn_me = True
if '-A' in rsync_options:
rsync_options.remove('-A')
warn_me = True
if warn_me:
log.warning(
'Extended attributes not supported for target: %s',
self.target_dir
)
if exclude:
for item in exclude:
exclude_options.append('--exclude')
exclude_options.append(
'/' + item
)
if os.path.exists(self.target_dir):
target_entry_permissions = os.stat(self.target_dir)[ST_MODE]
Command.run(
['rsync'] + rsync_options + exclude_options + [
self.source_dir, self.target_dir
]
)
if target_entry_permissions:
# rsync applies the permissions of the source directory
# also to the target directory which is unwanted because
# only permissions of the files and directories from the
# source directory and its contents should be transfered
# but not from the source directory itself. Therefore
# the permission bits of the target directory before the
# sync are applied back after sync to ensure they have
# not changed
os.chmod(self.target_dir, target_entry_permissions)
def target_supports_extended_attributes(self):
"""
Check if the target directory supports extended filesystem
attributes
:return: True or False
:rtype: bool
"""
try:
xattr.getxattr(self.target_dir, 'user.mime_type')
except Exception as e:
if format(e).startswith('[Errno 95]'):
# libc interface [Errno 95] Operation not supported:
return False
return True
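# Usage sketch (illustrative only; the paths are placeholders and an rsync
# binary must be available on PATH):
#
#     sync = DataSync('root/', '/tmp/target')
#     sync.sync_data(
#         options=['-a', '-H', '-X', '-A', '--one-file-system'],
#         exclude=['image', '.profile']
#     )
#
# The trailing slash on the source matters to rsync: 'root/' copies the
# directory contents, while 'root' would copy the directory itself.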
|
PressLabs/django-bnr
|
django_bnr/utils.py
|
Python
|
mit
| 1,519
| 0.000658
|
# -*- coding: utf-8 -*-
# vim: ft=python:sw=4:ts=4:sts=4:et:
import requests
import decimal
from datetime import timedelta
from xml.etree import ElementTree
from django_bnr.models import Rate
def get_bnr_rate(date, currency='USD'):
try:
rate = Rate.objects.get(date=date, currency=currency)
return rate.rate
except Rate.DoesNotExist:
d = date.strftime('%Y-%m-%d')
r = requests.get('https://www.bnr.ro/nbrfxrates10days.xml')
r.raise_for_status()
rate = None
days = 0
xpath_fmt = ("./{xsd}Body/{xsd}Cube[@date='{date}']/"
"{xsd}Rate[@currency='{currency}']")
while rate is None:
rate = ElementTree.fromstring(r.text).find(xpath_fmt.format(
xsd='{http://www.bnr.ro/xsd}',
date=d,
currency=currency
))
if rate is None:
days += 1
                if days == 7:
                    raise RuntimeError('Cannot get exchange rate for '
'%(currency)s from %(date)s' % {
'currency': currency,
'date': date
})
d = (date - timedelta(days=days)).strftime('%Y-%m-%d')
rate = decimal.Decimal(rate.text)
try:
Rate.objects.create(date=date, currency=currency, rate=rate)
        except Exception:
            # e.g. another request already stored this rate; ignore it
pass
return rate
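# Usage sketch (hypothetical date; needs network access and a configured
# Django environment for the Rate model):
#
#     import datetime
#     rate = get_bnr_rate(datetime.date(2015, 3, 2), currency='EUR')
#     # -> Decimal rate for that day, cached in the Rate table afterwards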
|
gyurisc/DigitsRest
|
use_archive.py
|
Python
|
mit
| 8,394
| 0.005957
|
#!/usr/bin/env python2
# Based on example.py from DIGITS
# https://github.com/NVIDIA/DIGITS
"""
Classify an image using individual model files imported from DIGITS
This tool is for testing your deployed model with images from the test folder.
All the configuration params are removed or commented out as compared to the original example.py
- Copy your digits trained model to the model folder
- Copy your test images to the images folder
"""
import argparse
import os
import time
from google.protobuf import text_format
import numpy as np
import PIL.Image
import scipy.misc
os.environ['GLOG_minloglevel'] = '2' # Suppress most caffe output
import caffe
from caffe.proto import caffe_pb2
def get_net(caffemodel, deploy_file, use_gpu=True):
"""
Returns an instance of caffe.Net
Arguments:
caffemodel -- path to a .caffemodel file
deploy_file -- path to a .prototxt file
Keyword arguments:
use_gpu -- if True, use the GPU for inference
"""
if use_gpu:
caffe.set_mode_gpu()
# load a new model
return caffe.Net(deploy_file, caffemodel, caffe.TEST)
def get_transformer(deploy_file, mean_file=None):
"""
Returns an instance of caffe.io.Transformer
Arguments:
deploy_file -- path to a .prototxt file
Keyword arguments:
mean_file -- path to a .binaryproto file (optional)
"""
network = caffe_pb2.NetParameter()
with open(deploy_file) as infile:
text_format.Merge(infile.read(), network)
if network.input_shape:
dims = network.input_shape[0].dim
else:
dims = network.input_dim[:4]
t = caffe.io.Transformer(
inputs = {'data': dims}
)
t.set_transpose('data', (2,0,1)) # transpose to (channels, height, width)
# color images
if dims[1] == 3:
# channel swap
t.set_channel_swap('data', (2,1,0))
if mean_file:
# set mean pixel
with open(mean_file,'rb') as infile:
blob = caffe_pb2.BlobProto()
blob.MergeFromString(infile.read())
if blob.HasField('shape'):
blob_dims = blob.shape
assert len(blob_dims) == 4, 'Shape should have 4 dimensions - shape is "%s"' % blob.shape
elif blob.HasField('num') and blob.HasField('channels') and \
blob.HasField('height') and blob.HasField('width'):
blob_dims = (blob.num, blob.channels, blob.height, blob.width)
else:
raise ValueError('blob does not provide shape or 4d dimensions')
pixel = np.reshape(blob.data, blob_dims[1:]).mean(1).mean(1)
t.set_mean('data', pixel)
return t
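# Standalone illustration of the mean-pixel reduction above: a (channels,
# height, width) array collapses to one mean per channel. (numpy is already
# imported as np at the top of this script.)
_demo = np.arange(2 * 2 * 2, dtype=float).reshape(2, 2, 2)  # C=2, H=2, W=2
assert list(_demo.mean(1).mean(1)) == [1.5, 5.5]  # per-channel means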
def load_image(path, height, width, mode='RGB'):
"""
Load an image from disk
Returns an np.ndarray (channels x width x height)
Arguments:
path -- path to an image on disk
width -- resize dimension
height -- resize dimension
Keyword arguments:
mode -- the PIL mode that the image should be converted to
(RGB for color or L for grayscale)
"""
image = PIL.Image.open(path)
image = image.convert(mode)
image = np.array(image)
# squash
image = scipy.misc.imresize(image, (height, width), 'bilinear')
return image
def forward_pass(images, net, transformer, batch_size=None):
"""
Returns scores for each image as an np.ndarray (nImages x nClasses)
Arguments:
images -- a list of np.ndarrays
net -- a caffe.Net
transformer -- a caffe.io.Transformer
Keyword arguments:
batch_size -- how many images can be processed at once
(a high value may result in out-of-memory errors)
"""
if batch_size is None:
batch_size = 1
caffe_images = []
for image in images:
if image.ndim == 2:
caffe_images.append(image[:,:,np.newaxis])
else:
caffe_images.append(image)
caffe_images = np.array(caffe_images)
dims = transformer.inputs['data'][1:]
scores = None
for chunk in [caffe_images[x:x+batch_size] for x in xrange(0, len(caffe_images), batch_size)]:
new_shape = (len(chunk),) + tuple(dims)
if net.blobs['data'].data.shape != new_shape:
net.blobs['data'].reshape(*new_shape)
for index, image in enumerate(chunk):
image_data = transformer.preprocess('data', image)
net.blobs['data'].data[index] = image_data
output = net.forward()[net.outputs[-1]]
if scores is None:
scores = np.copy(output)
else:
scores = np.vstack((scores, output))
return scores
def read_labels(labels_file):
"""
Returns a list of strings
Arguments:
labels_file -- path to a .txt file
"""
if not labels_file:
print 'WARNING: No labels file provided. Results will be difficult to interpret.'
return None
labels = []
with open(labels_file) as infile:
for line in infile:
label = line.strip()
if label:
labels.append(label)
assert len(labels), 'No labels found'
return labels
def classify(caffemodel, deploy_file, image_files,
mean_file=None, labels_file=None, batch_size=None, use_gpu=True):
"""
Classify some images against a Caffe model and print the results
Arguments:
caffemodel -- path to a .caffemodel
deploy_file -- path to a .prototxt
image_files -- list of paths to images
Keyword arguments:
mean_file -- path to a .binaryproto
    labels_file -- path to a .txt file
use_gpu -- if True, run inference on the GPU
"""
# Load the model and images
net = get_net(caffemodel, deploy_file, use_gpu)
transformer = get_transformer(deploy_file, mean_file)
_, channels, height, width = transformer.inputs['data']
if channels == 3:
mode = 'RGB'
elif channels == 1:
mode = 'L'
else:
raise ValueError('Invalid number for channels: %s' % channels)
images = [load_image(image_file, height, width, mode) for image_file in image_files]
labels = read_labels(labels_file)
# Classify the image
classify_start_time = time.time()
scores = forward_pass(images, net, transformer, batch_size=batch_size)
print 'Classification took %s seconds.' % (time.time() - classify_start_time,)
### Process the results
indices = (-scores).argsort()[:, :5] # take top 5 results
classifications = []
for image_index, index_list in enumerate(indices):
result = []
for i in index_list:
# 'i' is a category in labels and also an index into scores
if labels is None:
label = 'Class #%s' % i
else:
label = labels[i]
result.append((label, round(100.0*scores[image_index, i],4)))
classifications.append(result)
return classifications
def print_classification_results(results, image_files):
for index, classification in enumerate(results):
print '{:-^80}'.format(' Prediction for %s ' % image_files[index])
for label, confidence in classification:
print '{:9.4%} - "{}"'.format(confidence/100.0, label)
print
if __name__ == '__main__':
script_start_time = time.time()
    parser = argparse.ArgumentParser(description='Classification example - DIGITS')
### Positional arguments
    parser.add_argument('caffemodel',   help='Path to a .caffemodel')
parser.add_argument('deploy_file', help='Path to the deploy file')
parser.add_argument('image_file',
nargs='+',
help='Path[s] to an image')
### Optional arguments
parser.add_argument('-m', '--mean',
help='Path to a mean file (*.npy)')
parser.add_argument('-l', '--labels',
help='Path to a labels file')
parser.add_argument('--batch-size',
type=int)
parser.add_argument('--nogpu',
action='store_true',
help="Don't use the GPU")
args = vars(parser.parse_args())
results = classify(args['caffemodel'], args['deploy_file'], args['image
|
kidmaple/CoolWall
|
user/python/Lib/xml/dom/minidom.py
|
Python
|
gpl-2.0
| 14,813
| 0.004186
|
"""\
minidom.py -- a lightweight DOM implementation based on SAX.
parse( "foo.xml" )
parseString( "<foo><bar/></foo>" )
Todo:
=====
* convenience methods for getting elements and text.
* more testing
* bring some of the writer and linearizer code into conformance with this
interface
* SAX 2 namespaces
"""
import pulldom
import string
from StringIO import StringIO
import types
class Node:
ELEMENT_NODE = 1
ATTRIBUTE_NODE = 2
TEXT_NODE = 3
CDATA_SECTION_NODE = 4
ENTITY_REFERENCE_NODE = 5
ENTITY_NODE = 6
PROCESSING_INSTRUCTION_NODE = 7
COMMENT_NODE = 8
DOCUMENT_NODE = 9
DOCUMENT_TYPE_NODE = 10
DOCUMENT_FRAGMENT_NODE = 11
NOTATION_NODE = 12
allnodes = {}
_debug = 0
_makeParentNodes = 1
debug = None
def __init__(self):
self.childNodes = []
if Node._debug:
index = repr(id(self)) + repr(self.__class__)
Node.allnodes[index] = repr(self.__dict__)
if Node.debug is None:
Node.debug = StringIO()
#open( "debug4.out", "w" )
Node.debug.write("create %s\n" % index)
def __getattr__(self, key):
if key[0:2] == "__":
raise AttributeError
# getattr should never call getattr!
if self.__dict__.has_key("inGetAttr"):
del self.inGetAttr
raise AttributeError, key
        prefix, attrname = key[:5], key[5:]
if prefix == "_get_":
self.inGetAttr = 1
if hasattr(self, attrname):
del self.inGetAttr
return (lambda self=self, attrname=attrname:
getattr(self, attrname))
else:
del self.inGetAttr
raise AttributeError, key
else:
self.inGetAttr = 1
try:
func = getattr(self, "_get_" + key)
            except AttributeError:
raise AttributeError, key
del self.inGetAttr
return func()
def __nonzero__(self):
return 1
def toxml(self):
writer = StringIO()
self.writexml(writer)
return writer.getvalue()
def hasChildNodes(self):
if self.childNodes:
return 1
else:
return 0
def _get_firstChild(self):
return self.childNodes[0]
def _get_lastChild(self):
return self.childNodes[-1]
def insertBefore(self, newChild, refChild):
index = self.childNodes.index(refChild)
self.childNodes.insert(index, newChild)
if self._makeParentNodes:
newChild.parentNode = self
def appendChild(self, node):
if self.childNodes:
last = self.lastChild
node.previousSibling = last
last.nextSibling = node
else:
node.previousSibling = None
node.nextSibling = None
self.childNodes.append(node)
return node
def replaceChild(self, newChild, oldChild):
index = self.childNodes.index(oldChild)
        self.childNodes[index] = newChild
def removeChild(self, oldChild):
index = self.childNodes.index(oldChild)
del self.childNodes[index]
def cloneNode(self, deep):
import new
clone = new.instance(self.__class__, self.__dict__)
clone.attributes = self.attributes.copy()
if not deep:
clone.childNodes = []
else:
            clone.childNodes = map(lambda x, deep=deep: x.cloneNode(deep), self.childNodes)
return clone
def unlink(self):
self.parentNode = None
while self.childNodes:
self.childNodes[-1].unlink()
del self.childNodes[-1] # probably not most efficient!
self.childNodes = None
self.previousSibling = None
self.nextSibling = None
if self.attributes:
for attr in self._attrs.values():
self.removeAttributeNode(attr)
assert not len(self._attrs)
assert not len(self._attrsNS)
if Node._debug:
index = repr(id(self)) + repr(self.__class__)
self.debug.write("Deleting: %s\n" % index)
del Node.allnodes[index]
def _write_data(writer, data):
"Writes datachars to writer."
data = string.replace(data, "&", "&")
data = string.replace(data, "<", "<")
data = string.replace(data, "\"", """)
data = string.replace(data, ">", ">")
writer.write(data)
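# Quick check (illustrative, not part of the DOM API): _write_data escapes
# markup characters before they reach the writer.
_buf = StringIO()
_write_data(_buf, '<a & "b">')
assert _buf.getvalue() == '&lt;a &amp; &quot;b&quot;&gt;'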
def _getElementsByTagNameHelper(parent, name, rc):
for node in parent.childNodes:
if node.nodeType == Node.ELEMENT_NODE and \
(name == "*" or node.tagName == name):
rc.append(node)
_getElementsByTagNameHelper(node, name, rc)
return rc
def _getElementsByTagNameNSHelper(parent, nsURI, localName, rc):
for node in parent.childNodes:
if node.nodeType == Node.ELEMENT_NODE:
if ((localName == "*" or node.tagName == localName) and
(nsURI == "*" or node.namespaceURI == nsURI)):
rc.append(node)
            _getElementsByTagNameNSHelper(node, nsURI, localName, rc)
class Attr(Node):
nodeType = Node.ATTRIBUTE_NODE
def __init__(self, qName, namespaceURI="", localName=None, prefix=None):
# skip setattr for performance
self.__dict__["localName"] = localName or qName
self.__dict__["nodeName"] = self.__dict__["name"] = qName
self.__dict__["namespaceURI"] = namespaceURI
self.__dict__["prefix"] = prefix
self.attributes = None
Node.__init__(self)
# nodeValue and value are set elsewhere
def __setattr__(self, name, value):
if name in ("value", "nodeValue"):
self.__dict__["value"] = self.__dict__["nodeValue"] = value
else:
self.__dict__[name] = value
class AttributeList:
"""the attribute list is a transient interface to the underlying
dictionaries. mutations here will change the underlying element's
dictionary"""
def __init__(self, attrs, attrsNS):
self._attrs = attrs
self._attrsNS = attrsNS
self.length = len(self._attrs.keys())
def item(self, index):
try:
return self[self.keys()[index]]
except IndexError:
return None
def items(self):
return map(lambda node: (node.tagName, node.value),
self._attrs.values())
def itemsNS(self):
return map(lambda node: ((node.URI, node.localName), node.value),
self._attrs.values())
def keys(self):
return self._attrs.keys()
def keysNS(self):
return self._attrsNS.keys()
def values(self):
return self._attrs.values()
def __len__(self):
return self.length
def __cmp__(self, other):
if self._attrs is getattr(other, "_attrs", None):
return 0
else:
return cmp(id(self), id(other))
#FIXME: is it appropriate to return .value?
def __getitem__(self, attname_or_tuple):
if type(attname_or_tuple) is types.TupleType:
return self._attrsNS[attname_or_tuple]
else:
return self._attrs[attname_or_tuple]
# same as set
def __setitem__(self, attname, value):
if type(value) is types.StringType:
node = Attr(attname)
node.value=value
else:
assert isinstance(value, Attr) or type(value) is types.StringType
node = value
old = self._attrs.get(attname, None)
if old:
old.unlink()
self._attrs[node.name] = node
self._attrsNS[(node.namespaceURI, node.localName)] = node
def __delitem__(self, attname_or_tuple):
node = self[attname_or_tuple]
node.unlink()
del self._attrs[node.name]
del self._attrsNS[(node.namespaceURI, node.localName)]
class Element(Node):
nodeType = Node.ELEMENT_NODE
def __init__(self, tagName, namespaceURI="", prefix=""
|
ToontownUprising/src
|
toontown/building/DistributedBossElevator.py
|
Python
|
mit
| 3,644
| 0.004665
|
from pandac.PandaModules import *
from direct.distributed.ClockDelta import *
from direct.interval.IntervalGlobal import *
from ElevatorConstants import *
from ElevatorUtils import *
import DistributedElevator
import DistributedElevatorExt
from toontown.toonbase import ToontownGlobals
from direct.directnotify import DirectNotifyGlobal
from direct.fsm import ClassicFSM
from direct.fsm import State
from toontown.hood import ZoneUtil
from toontown.toonbase import TTLocalizer
from toontown.toontowngui import TTDialog
class DistributedBossElevator(DistributedElevatorExt.DistributedElevatorExt):
def __init__(self, cr):
DistributedElevatorExt.DistributedElevatorExt.__init__(self, cr)
self.elevatorPoints = BigElevatorPoints
self.openSfx = base.loadSfx('phase_9/audio/sfx/CHQ_FACT_door_open_sliding.ogg')
self.finalOpenSfx = base.loadSfx('phase_9/audio/sfx/CHQ_FACT_door_open_final.ogg')
self.closeSfx = base.loadSfx('phase_9/audio/sfx/CHQ_FACT_door_open_sliding.ogg')
self.finalCloseSfx = base.loadSfx('phase_9/audio/sfx/CHQ_FACT_door_open_final.ogg')
self.type = ELEVATOR_VP
self.countdownTime = ElevatorData[self.type]['countdown']
def disable(self):
DistributedElevator.DistributedElevator.disable(self)
def generate(self):
DistributedElevatorExt.DistributedElevatorExt.generate(self)
def delete(self):
self.elevatorModel.removeNode()
del self.elevatorModel
DistributedElevatorExt.DistributedElevatorExt.delete(self)
def setupElevator(self):
self.elevatorModel = loader.loadModel('phase_9/models/cogHQ/cogHQ_elevator')
icon = self.elevatorModel.find('**/big_frame/')
icon.hide()
self.leftDoor = self.elevatorModel.find('**/left-door')
self.rightDoor = self.elevatorModel.find('**/right-door')
geom = base.cr.playGame.hood.loader.geom
locator = geom.find('**/elevator_locator')
self.elevatorModel.reparentTo(locator)
self.elevatorModel.setH(180)
DistributedElevator.DistributedElevator.setupElevator(self)
def getElevatorModel(self):
return self.elevatorModel
def gotBldg(self, buildingList):
return DistributedElevator.DistributedElevator.gotBldg(self, buildingList)
def getZoneId(self):
return 0
def __doorsClosed(self, zoneId):
pass
    def setBossOfficeZone(self, zoneId):
if self.localToonOnBoard:
hoodId = self.cr.playGame.hood.hoodId
doneStatus = {'loader': 'cogHQLoader',
'where': 'cogHQBossBattle',
'how': 'movie',
                          'zoneId': zoneId,
'hoodId': hoodId}
self.cr.playGame.getPlace().elevator.signalDone(doneStatus)
def setBossOfficeZoneForce(self, zoneId):
place = self.cr.playGame.getPlace()
if place:
place.fsm.request('elevator', [self, 1])
hoodId = self.cr.playGame.hood.hoodId
doneStatus = {'loader': 'cogHQLoader',
'where': 'cogHQBossBattle',
'how': 'movie',
'zoneId': zoneId,
'hoodId': hoodId}
if hasattr(place, 'elevator') and place.elevator:
place.elevator.signalDone(doneStatus)
else:
self.notify.warning("setMintInteriorZoneForce: Couldn't find playGame.getPlace().elevator, zoneId: %s" % zoneId)
else:
self.notify.warning("setBossOfficeZoneForce: Couldn't find playGame.getPlace(), zoneId: %s" % zoneId)
def getDestName(self):
return TTLocalizer.ElevatorSellBotBoss
|
alibaba/FlexGW
|
website/api/__init__.py
|
Python
|
bsd-3-clause
| 92
| 0
|
# -*- coding: utf-8 -*-
"""
website.api
~~~~~~~~~~~
    website api blueprint.
"""
|
olivierlemasle/keystone-playground
|
keystoneplayground/tests/base.py
|
Python
|
apache-2.0
| 661
| 0
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslotest import base
class TestCase(base.BaseTestCase):
"""Test case base class for all unit tests."""
|
staffanm/layeredconfig
|
tests/test_withFuture.py
|
Python
|
bsd-3-clause
| 984
| 0.005081
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
try:
from builtins import *
from future import standard_library
standard_library.install_aliases()
except:
# we might be on py3.2, which the future library doesn't support
pass
import os
import sys
if sys.version_info < (2, 7, 0): # pragma: no cover
import unittest2 as unittest
else:
import unittest
from layeredconfig import LayeredConfig, Defaults, Environment, INIFile
@unittest.skipIf(sys.version_info[0] == 3 and sys.version_info[1] < 3,
"Python 3.2 and lower doesn't support the future module")
class TestFuture(unittest.TestCase):
def test_newint(self):
os.environ['FERENDA_DOWNLOADMAX'] = '3'
        config = LayeredConfig(Defaults({'downloadmax': int}),
Environment(prefix="FERENDA_"))
        self.assertEqual(3, config.downloadmax)
self.assertIsInstance(config.downloadmax, int)
|
guduchango/pyafipws
|
wsbfev1.py
|
Python
|
gpl-3.0
| 25,642
| 0.006318
|
#!/usr/bin/python
# -*- coding: latin-1 -*-
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by the
# Free Software Foundation; either version 3, or (at your option) any later
# version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTIBILITY
# or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
# for more details.
"""Módulo para obten
|
er código de autorización electrónico (CAE) del web service
WSBFEv1 de AFIP (Bonos Fiscales electronicos v1.1 - Factu
|
ra Electrónica RG)
a fin de gestionar los Bonos en la Secretaría de Industria según RG 2557
"""
__author__ = "Mariano Reingart (reingart@gmail.com)"
__copyright__ = "Copyright (C) 2013-2015 Mariano Reingart"
__license__ = "GPL 3.0"
__version__ = "1.06c"
import datetime
import decimal
import os
import sys
from utils import inicializar_y_capturar_excepciones, BaseWS, get_install_dir
HOMO = False
LANZAR_EXCEPCIONES = True # default value: True
WSDL="https://wswhomo.afip.gov.ar/wsbfev1/service.asmx?WSDL"
class WSBFEv1(BaseWS):
"Interfaz para el WebService de Bono Fiscal Electrónico V1 (FE Bs. Capital)"
_public_methods_ = ['CrearFactura', 'AgregarItem', 'Authorize', 'GetCMP',
'GetParamMon', 'GetParamTipoCbte', 'GetParamUMed',
'GetParamTipoIVA', 'GetParamNCM', 'GetParamZonas',
'Dummy', 'Conectar', 'GetLastCMP', 'GetLastID',
'GetParamCtz', 'LoadTestXML',
'AnalizarXml', 'ObtenerTagXml', 'DebugLog',
'SetParametros', 'SetTicketAcceso', 'GetParametro',
'Dummy', 'Conectar', 'SetTicketAcceso']
_public_attrs_ = ['Token', 'Sign', 'Cuit',
'AppServerStatus', 'DbServerStatus', 'AuthServerStatus',
'XmlRequest', 'XmlResponse', 'Version',
'Resultado', 'Obs', 'Reproceso', 'FechaCAE',
'CAE','Vencimiento', 'Eventos', 'ErrCode', 'ErrMsg', 'FchVencCAE',
'Excepcion', 'LanzarExcepciones', 'Traceback', "InstallDir",
'PuntoVenta', 'CbteNro', 'FechaCbte', 'ImpTotal', 'ImpNeto', 'ImptoLiq',
]
_reg_progid_ = "WSBFEv1"
_reg_clsid_ = "{EE4ABEE2-76DD-450F-880B-66710AE464D6}"
    # Global variables for BaseWS:
HOMO = HOMO
WSDL = WSDL
Version = "%s %s" % (__version__, HOMO and 'Homologación' or '')
factura = None
def inicializar(self):
BaseWS.inicializar(self)
self.AppServerStatus = self.DbServerStatus = self.AuthServerStatus = None
self.Resultado = self.Motivo = self.Reproceso = ''
self.LastID = self.LastCMP = self.CAE = self.Vencimiento = ''
self.CbteNro = self.FechaCbte = self.PuntoVenta = self.ImpTotal = None
self.ImpNeto = self.ImptoLiq = None
self.LanzarExcepciones = LANZAR_EXCEPCIONES
self.InstallDir = INSTALL_DIR
        self.FechaCAE = self.FchVencCAE = "" # backwards compatibility
def __analizar_errores(self, ret):
"Comprueba y extrae errores si existen en la respuesta XML"
if 'BFEErr' in ret:
errores = [ret['BFEErr']]
for error in errores:
self.Errores.append("%s: %s" % (
error['ErrCode'],
error['ErrMsg'],
))
self.ErrCode = ' '.join([str(error['ErrCode']) for error in errores])
self.ErrMsg = '\n'.join(self.Errores)
if 'BFEEvents' in ret:
events = [ret['BFEEvents']]
self.Eventos = ['%s: %s' % (evt['EventCode'], evt.get('EventMsg',"")) for evt in events]
def CrearFactura(self, tipo_doc=80, nro_doc=23111111113,
zona=0, tipo_cbte=1, punto_vta=1, cbte_nro=0, fecha_cbte=None,
imp_total=0.0, imp_neto=0.0, impto_liq=0.0,
imp_tot_conc=0.0, impto_liq_rni=0.00, imp_op_ex=0.00,
imp_perc=0.00, imp_iibb=0.00, imp_perc_mun=0.00, imp_internos=0.00,
imp_moneda_id=0, imp_moneda_ctz=1.0, **kwargs):
"Creo un objeto factura (interna)"
# Creo una factura para bonos fiscales electrónicos
fact = {'tipo_cbte': tipo_cbte, 'punto_vta': punto_vta,
'cbte_nro': cbte_nro, 'fecha_cbte': fecha_cbte, 'zona': zona,
'tipo_doc': tipo_doc, 'nro_doc': nro_doc,
'imp_total': imp_total, 'imp_neto': imp_neto,
'impto_liq': impto_liq, 'impto_liq_rni': impto_liq_rni,
'imp_op_ex': imp_op_ex, 'imp_tot_conc': imp_tot_conc,
'imp_perc': imp_perc, 'imp_perc_mun': imp_perc_mun,
'imp_iibb': imp_iibb, 'imp_internos': imp_internos,
'imp_moneda_id': imp_moneda_id, 'imp_moneda_ctz': imp_moneda_ctz,
'cbtes_asoc': [],
'iva': [],
'detalles': [],
}
self.factura = fact
return True
def AgregarItem(self, ncm, sec, ds, qty, umed, precio, bonif, iva_id, imp_total, **kwargs):
"Agrego un item a una factura (interna)"
##ds = unicode(ds, "latin1") # convierto a latin1
# Nota: no se calcula neto, iva, etc (deben venir calculados!)
self.factura['detalles'].append({
'ncm': ncm, 'sec': sec,
'ds': ds,
'qty': qty,
'umed': umed,
'precio': precio,
'bonif': bonif,
'iva_id': iva_id,
'imp_total': imp_total,
})
return True
@inicializar_y_capturar_excepciones
def Authorize(self, id):
"Autoriza la factura cargada en memoria"
f = self.factura
ret = self.client.BFEAuthorize(
Auth={'Token': self.Token, 'Sign': self.Sign, 'Cuit': self.Cuit},
Cmp={
'Id': id,
'Zona': f['zona'],
'Fecha_cbte': f['fecha_cbte'],
'Tipo_cbte': f['tipo_cbte'],
'Punto_vta': f['punto_vta'],
'Cbte_nro': f['cbte_nro'],
'Tipo_doc': f['tipo_doc'], 'Nro_doc': f['nro_doc'],
'Imp_moneda_Id': f['imp_moneda_id'],
'Imp_moneda_ctz': f['imp_moneda_ctz'],
'Imp_total': f['imp_total'],
'Imp_tot_conc': f['imp_tot_conc'], 'Imp_op_ex': f['imp_op_ex'],
'Imp_neto': f['imp_neto'], 'Impto_liq': f['impto_liq'],
'Impto_liq_rni': f['impto_liq_rni'],
'Imp_perc': f['imp_perc'], 'Imp_perc_mun': f['imp_perc_mun'],
'Imp_iibb': f['imp_iibb'],
'Imp_internos': f['imp_internos'],
'Items': [
{'Item': {
'Pro_codigo_ncm': d['ncm'],
'Pro_codigo_sec': d['sec'],
'Pro_ds': d['ds'],
'Pro_qty': d['qty'],
'Pro_umed': d['umed'],
'Pro_precio_uni': d['precio'],
'Imp_bonif': d['bonif'],
'Imp_total': d['imp_total'],
'Iva_id': d['iva_id'],
}} for d in f['detalles']],
})
result = ret['BFEAuthorizeResult']
self.__analizar_errores(result)
if 'BFEResultAuth' in result:
auth = result['BFEResultAuth']
            # Resultado: A: Accepted, R: Rejected
self.Resultado = auth.get('Resultado', "")
# Obs:
self.Obs = auth.get('Obs', "")
self.Reproceso = auth.get('Reproceso', "")
self.CAE = auth.get('Cae', "")
self.CbteNro = auth.get('Fch_cbte', "")
self.ImpTotal = str(auth.get('Imp_total', ''))
self.ImptoLiq = s
|
YJoe/SpaceShips
|
Desktop/Python/space_scroll/Game.py
|
Python
|
gpl-3.0
| 11,279
| 0.002394
|
from Start_up import*
from Player import Player, HealthBar
from Bullet import Bullet
from Enemy import Enemy
from Stopper import Stopper
from Particle import Particle
from Stars import Star, create_stars
from Package import EnemyDrop, HealthPack
from Notes import NoteController
class Game:
def __init__(self, play, health_bar):
self.own_state = game_state
self.next_state = self.own_state
self.player = play
self.enemy_id_tracker = 0
self.left_stop = Stopper((-30, 0), True, False)
self.right_stop = Stopper((width, 0), False, True)
self.bullet_list = []
self.enemy_list = []
self.kill_list = []
self.particle_list = []
self.package_list = []
self.to_update = []
self.to_display = []
self.to_text = []
self.star_list = create_stars("game")
self.info_bar = pygame.Surface((width, 30))
self.info_bar.fill(main_theme)
self.info_bar.set_alpha(100)
self.health_bar = health_bar
self.note_controller = NoteController((width - 10, 40))
def reset(self, play):
self.own_state = game_state
self.next_state = self.own_state
self.player = play
self.health_bar = HealthBar(self.player)
self.enemy_id_tracker = 0
self.bullet_list = []
self.enemy_list = []
self.kill_list = []
self.particle_list = []
self.package_list = []
self.to_update = []
self.to_display = []
self.to_text = []
self.star_list = create_stars("game")
self.note_controller = NoteController((width - 10, 40))
def update_all(self):
# a check for all update elements, providing the
        # relevant information for each object's update
for x in range(0, len(self.to_update)):
if isinstance(self.to_update[x], Particle):
self.to_update[x].update()
elif isinstance(self.to_update[x], Star):
self.to_update[x].update()
elif isinstance(self.to_update[x], Enemy):
self.to_update[x].update(self.bullet_list)
elif isinstance(self.to_update[x], Player):
self.to_update[x].update(self.package_list, self.note_controller, self.bullet_list, self.health_bar)
elif isinstance(self.to_update[x], Bullet):
self.to_update[x].update()
elif isinstance(self.to_update[x], EnemyDrop):
self.to_update[x].update()
elif isinstance(self.to_update[x], HealthPack):
self.to_update[x].update()
elif isinstance(self.to_update[x], NoteController):
self.to_update[x].update()
            elif isinstance(self.to_update[x], Stopper):
self.to_update[x].update(self.bullet_list, self.enemy_list, self.player, self.note_controller)
elif isinstance(self.to_update[x], HealthBar):
self.to_update[x].update()
    def display_all(self):
# fill screen with black and display all game information
main_s.fill((20, 20, 20))
for x in range(0, len(self.to_display)):
if isinstance(self.to_display[x], Player):
if self.to_display[x].alive:
self.to_display[x].display()
else:
self.to_display[x].display()
main_s.blit(self.info_bar, (0, 0))
main_s.blit(font.render("ESC TO PAUSE", True, (255, 255, 255)), (width - 115, 5))
def text_all(self):
# display all text needed at the top of the screen
total_length = 0
for x in range(0, len(self.to_text)):
main_s.blit(font.render(str(self.to_text[x]), True, (255, 255, 255)), (5 + (15 * total_length), 5))
total_length += len(self.to_text[x])
def hit_particles(self, rect_hit, colour):
# create particles with random speeds, directions and sizes
numbers_z = range(-10, 10)
numbers_nz = range(-10, -1) + range(1, 10)
for x in range(0, settings.loaded_enemy_particles):
x_temp = random.choice(numbers_z)
y_temp = random.choice(numbers_z)
dy = y_temp
dx = x_temp
# make sure that dx and dy are not both 0 so that there
# are no particles static on the screen
if x_temp == 0 and y_temp != 0:
dy = y_temp
dx = x_temp
if y_temp == 0 and x_temp != 0:
dy = y_temp
dx = x_temp
if x_temp == y_temp == 0:
dy = random.choice(numbers_nz)
dx = random.choice(numbers_nz)
particle = Particle(random.randint(1, 3), (dx, dy), rect_hit, colour)
self.particle_list.append(particle)
def remove_particles(self):
# remove particles that are no longer colliding with the screen
        # removed from the end first so that the list does not affect
# later elements to remove
for x in range(0, len(self.particle_list)):
try:
if not pygame.sprite.collide_rect(screen_rect, self.particle_list[len(self.particle_list) - x - 1]):
del self.particle_list[len(self.particle_list) - x - 1]
            except IndexError:
# break in case [len(p_list) - x - 1] is out of range
break
def remove_stars(self):
# remove stars that are no longer colliding with the screen
        # removed from the end first so that the list does not affect
# later elements to remove
for x in range(0, len(self.star_list)):
try:
if not pygame.sprite.collide_rect(screen_rect, self.star_list[len(self.star_list) - x - 1]):
del self.star_list[len(self.star_list) - x - 1]
            except IndexError:
# break in case [len(p_list) - x - 1] is out of range
break
def remove_packages(self):
print(len(self.package_list))
for i in range(0, len(self.package_list)):
try:
if not pygame.sprite.collide_rect(screen_rect, self.package_list[len(self.package_list) - i - 1]):
del self.package_list[len(self.package_list) - i - 1]
except IndexError:
# break in case [len(p_list) - x - 1] is out of range
break
def check_enemy_alive(self):
# add enemies to a removal list if they are dead
for x in range(0, len(self.enemy_list)):
if self.enemy_list[x].dead:
self.kill_list.append(self.enemy_list[x])
def kill_enemies(self):
# remove enemies from enemy list that are on the kill list
# create a package and give the player the coins dropped
# create particles originating from the now dead enemy
# create a notification for the user saying they have found money
for x in range(0, len(self.kill_list)):
for y in range(0, len(self.enemy_list)):
try:
if self.kill_list[len(self.kill_list) - x - 1].id == self.enemy_list[len(self.enemy_list) - y - 1].id:
del self.kill_list[len(self.kill_list) - x - 1]
self.note_controller.add_note("+ " + str(self.enemy_list[len(self.enemy_list) - y - 1].money * self.player.money_collection) + " coins", main_theme)
self.player.get_coins(self.enemy_list[len(self.enemy_list) - y - 1].money)
self.hit_particles(self.enemy_list[len(self.enemy_list) - y - 1].rect, white)
self.random_enemy_drop(self.enemy_list[len(self.enemy_list) - y - 1].dx,
self.enemy_list[len(self.enemy_list) - y - 1].rect.center)
del self.enemy_list[len(self.enemy_list) - y - 1]
break
except:
break
def random_event_enemy(self):
# create an enemy if the random variable is 1
if random.randint(1, settings.loaded_enemy_chance) == 1:
enemy = E
|
bslatkin/8-bits
|
appengine-ndb/mttest.py
|
Python
|
apache-2.0
| 947
| 0.019007
|
"""A torture test to ferret out problems with multi-threading."""
import sys
import threading
from ndb import tasklets
from ndb import eventloop
def main():
##sys.stdout.write('_State.__bases__ = %r\n' % (eventloop._State.__bases__,))
num = 10
try:
num = int(sys.argv[1])
except Exception:
pass
threads = []
for i in range(num):
t = threading.Thread(target=one_thread, args=(i, num,))
t.start()
threads.append(t)
for t in threads:
t.join()
@tasklets.toplevel
def one_thread(i, num):
##sys.stdout.write('eventloop = 0x%x\n' % id(eventloop.get_event_loop()))
x = yield fibonacci(num)
sys.stdout.write('%d: %d --> %d\n' % (i, num, x))
@tasklets.tasklet
def fibonacci(n):
"""A recursive Fibonacci to exercise task switching."""
if n <= 1:
raise tasklets.Return(n)
a = yield fibonacci(n - 1)
b = yield fibonacci(n - 2)
  raise tasklets.Return(a + b)
if __name__ == '__main__':
main()
|
calvinmetcalf/arcsqlite
|
wkb.py
|
Python
|
mit
| 3,451
| 0.031585
|
from struct import pack
from sqlite3 import Binary
def pts(c):
return ["dd",[c.X,c.Y]]
def pt4mp(c):
return ["Bidd",[1,1,c.X,c.Y]]
def mp(coordinates):
partCount=coordinates.partCount
i=0
out = ["I",[0]]
while i<partCount:
pt = coordinates.getPart(i)
[ptrn,c]=pt4mp(pt)
out[0]+=ptrn
out[1][0]+=1
out[1].extend(c)
i+=1
return out
def lineSt(coordinates):
partCount=coordinates.count
i=0
out = ["I",[0]]
while i<partCount:
pt = coordinates[i]
[ptrn,c]=pts(pt)
out[0]+=ptrn
out[1][0]+=1
out[1].extend(c)
i+=1
return out
def multiLine(coordinates):
partCount=coordinates.partCount
i=0
out = ["I",[0]]
while i<partCount:
part = coordinates.getPart(i)
[ptrn,c]=lineSt(part)
out[0]+="BI"
out[0]+=ptrn
out[1][0]+=1
out[1].extend([1,2])
out[1].extend(c)
i+=1
    return out
def linearRing(coordinates):
partCount=coordinates.count
i=0
values =[0]
outnum = "I"
out = ["I",[0]]
while i<partCount:
pt = coordinates[i]
if pt:
[ptrn,c]=pts(pt)
            outnum+=ptrn
values[0]+=1
values.extend(c)
else:
if values[0]<4:
return False
out[0]+=outnum
out[1][0]+=1
out[1].extend(values)
values =[0]
outnum = "I"
i+=1
if values[0]<4:
return False
out[0]+=outnum
out[1][0]+=1
out[1].extend(values)
return out
def multiRing(coordinates):
partCount=coordinates.partCount
i=0
out = ["I",[0]]
while i<partCount:
part = coordinates.getPart(i)
[ptrn,c]=linearRing(part)
out[0]+="BI"
out[0]+=ptrn
out[1][0]+=1
out[1].extend([1,3])
out[1].extend(c)
i+=1
return out
def makePoint(c):
values = ["<BI",1,1]
[ptrn,coords] = pts(c.getPart(0))
values[0]+=ptrn
values.extend(coords)
return Binary(pack(*values))
def makeMultiPoint(c):
values = ["<BI",1,4]
[ptrn,coords]=mp(c)
values[0]+=ptrn
values.extend(coords)
return Binary(pack(*values))
def makeMultiLineString(c):
if c.partCount==1:
values = ["<BI",1,2]
[ptrn,coords]=lineSt(c.getPart(0))
elif c.partCount>1:
values = ["<BI",1,5]
[ptrn,coords]=multiLine(c)
else:
return False
values[0]+=ptrn
values.extend(coords)
return Binary(pack(*values))
def makeMultiPolygon(c):
if c.partCount==1:
values = ["<BI",1,3]
[ptrn,coords]=linearRing(c.getPart(0))
elif c.partCount>1:
values = ["<BI",1,6]
[ptrn,coords]=multiRing(c)
else:
return False
values[0]+=ptrn
values.extend(coords)
return Binary(pack(*values))
def getWKBFunc(type,field):
if type == "point":
return lambda row:makePoint(row.getValue(field))
elif type == "multipoint":
return lambda row: makeMultiPoint(row.getValue(field))
elif type == "polyline":
return lambda row: makeMultiLineString(row.getValue(field))
elif type == "polygon":
return lambda row: makeMultiPolygon(row.getValue(field))
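# Illustration (not used above): makePoint packs the '<BIdd' layout -- a
# little-endian flag (1), the geometry type (1 = Point), then X and Y as
# doubles -- giving a 21-byte WKB blob.
assert len(pack('<BIdd', 1, 1, 3.0, 4.0)) == 21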
|
pombredanne/project_index
|
index/migrations/0005_project_related_project.py
|
Python
|
gpl-3.0
| 471
| 0
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('index', '0004_repository_public'),
]
    operations = [
migrations.AddField(
model_name='project',
name='related_project',
field=models.ForeignKey(blank=True, to='index.Project', null=True),
preserve_default=True,
),
]
|
onitake/Uranium
|
UM/Qt/QtApplication.py
|
Python
|
agpl-3.0
| 13,585
| 0.005962
|
# Copyright (c) 2015 Ultimaker B.V.
# Uranium is released under the terms of the AGPLv3 or higher.
import sys
import os
import signal
import platform
from PyQt5.QtCore import Qt, QObject, QCoreApplication, QEvent, pyqtSlot, QLocale, QTranslator, QLibraryInfo, QT_VERSION_STR, PYQT_VERSION_STR
from PyQt5.QtQml import QQmlApplicationEngine, qmlRegisterType, qmlRegisterSingletonType
from PyQt5.QtWidgets import QApplication, QSplashScreen
from PyQt5.QtGui import QGuiApplication, QPixmap
from PyQt5.QtCore import QTimer
from UM.Application import Application
from UM.Qt.QtRenderer import QtRenderer
from UM.Qt.Bindings.Bindings import Bindings
from UM.Signal import Signal, signalemitter
from UM.Resources import Resources
from UM.Logger import Logger
from UM.Preferences import Preferences
from UM.i18n import i18nCatalog
import UM.Settings.InstanceContainer #For version upgrade to know the version number.
import UM.Settings.ContainerStack #For version upgrade to know the version number.
import UM.Preferences #For version upgrade to know the version number.
# Raised when we try to use an unsupported version of a dependency.
class UnsupportedVersionError(Exception):
pass
# Check PyQt version, we only support 5.4 or higher.
major, minor = PYQT_VERSION_STR.split(".")[0:2]
if int(major) < 5 or int(minor) < 4:
raise UnsupportedVersionError("This application requires at least PyQt 5.4.0")
## Application subclass that provides a Qt application object.
@signalemitter
class QtApplication(QApplication, Application):
def __init__(self, **kwargs):
plugin_path = ""
if sys.platform == "win32":
if hasattr(sys, "frozen"):
plugin_path = os.path.join(os.path.dirname(os.path.abspath(sys.executable)), "PyQt5", "plugins")
Logger.log("i", "Adding QT5 plugin path: %s" % (plugin_path))
QCoreApplication.addLibraryPath(plugin_path)
else:
import site
for dir in site.getsitepackages():
QCoreApplication.addLibraryPath(os.path.join(dir, "PyQt5", "plugins"))
elif sys.platform == "darwin":
plugin_path = os.path.join(Application.getInstallPrefix(), "Resources", "plugins")
if plugin_path:
Logger.log("i", "Adding QT5 plugin path: %s" % (plugin_path))
QCoreApplication.addLibraryPath(plugin_path)
os.environ["QSG_RENDER_LOOP"] = "basic"
super().__init__(sys.argv, **kwargs)
self._plugins_loaded = False #Used to determine when it's safe to use the plug-ins.
self._main_qml = "main.qml"
self._engine = None
self._renderer = None
self._main_window = None
self._shutting_down = False
self._qml_import_paths = []
self._qml_import_paths.append(os.path.join(os.path.dirname(sys.executable), "qml"))
self._qml_import_paths.append(os.path.join(Application.getInstallPrefix(), "Resources", "qml"))
self.setAttribute(Qt.AA_UseDesktopOpenGL)
try:
self._splash = self._createSplashScreen()
except FileNotFoundError:
self._splash = None
else:
self._splash.show()
self.processEvents()
signal.signal(signal.SIGINT, signal.SIG_DFL)
# This is done here as a lot of plugins require a correct gl context. If you want to change the framework,
# these checks need to be done in your <framework>Application.py class __init__().
i18n_catalog = i18nCatalog("uranium")
self.showSplashMessage(i18n_catalog.i18nc("@info:progress", "Loading plugins..."))
self._loadPlugins()
self.parseCommandLine()
Logger.log("i", "Command line arguments: %s", self._parsed_command_line)
self._plugin_registry.checkRequiredPlugins(self.getRequiredPlugins())
self.showSplashMessage(i18n_catalog.i18nc("@info:progress", "Updating configuration..."))
upgraded = UM.VersionUpgradeManager.VersionUpgradeManager.getInstance().upgrade()
if upgraded:
preferences = UM.Preferences.getInstance() #Preferences might have changed. Load them again.
#Note that the language can't be updated, so that will always revert to English.
try:
preferences.readFromFile(Resources.getPath(Resources.Preferences, self._application_name + ".cfg"))
except FileNotFoundError:
pass
self.showSplashMessage(i18n_catalog.i18nc("@info:progress", "Loading preferences..."))
try:
file = Resources.getPath(Resources.Preferences, self.getApplicationName() + ".cfg")
Preferences.getInstance().readFromFile(file)
except FileNotFoundError:
pass
def run(self):
pass
def hideMessage(self, message):
with self._message_lock:
if message in self._visible_messages:
self._visible_messages.remove(message)
self.visibleMessageRemoved.emit(message)
def showMessage(self, message):
with self._message_lock:
if message not in self._visible_messages:
self._visible_messages.append(message)
message.setTimer(QTimer())
self.visibleMessageAdded.emit(message)
def setMainQml(self, path):
self._main_qml = path
def initializeEngine(self):
# TODO: Document native/qml import trickery
Bindings.register()
self._engine = QQmlApplicationEngine()
for path in self._qml_import_paths:
self._engine.addImportPath(path)
if not hasattr(sys, "frozen"):
self._engine.addImportPath(os.path.join(os.path.dirname(__file__), "qml"))
self._engine.rootContext().setContextProperty("QT_VERSION_STR", QT_VERSION_STR)
self._engine.rootContext().setContextProperty("screenScaleFactor", self._screenScaleFactor())
self.registerObjects(self._engine)
self._engine.load(self._main_qml)
self.engineCreatedSignal.emit()
engineCreatedSignal = Signal()
def isShuttingDown(self):
return self._shutting_down
def registerObjects(self, engine):
pass
def getRenderer(self):
if not self._renderer:
self._renderer = QtRenderer()
return self._renderer
def addCommandLineOptions(self, parser):
parser.add_argument("--disable-textures",
dest="disable-textures",
action="store_true", default=False,
help="Disable Qt texture loading as a workaround for certain crashes.")
# Overridden from QApplication::setApplicationName to call our internal setApplicationName
def setApplicationName(self, name):
Application.setApplicationName(self, name)
mainWindowChanged = Signal()
def getMainWindow(self):
return self._main_window
def setMainWindow(self, window):
if window != self._main_window:
self._main_window = window
            self.mainWindowChanged.emit()
# Handle a function that should be called later.
def functionEvent(self, event):
e = _QtFunctionEvent(event)
QCoreApplication.postEvent(self, e)
# Handle Qt events
def event(self, event):
if event.type() == _QtFunctionEvent.QtFunctionEvent:
event._function_event.call()
return True
return super().event(event)
def windowClosed(self):
Logger.log("d", "Shu
|
tting down %s", self.getApplicationName())
self._shutting_down = True
try:
Preferences.getInstance().writeToFile(Resources.getStoragePath(Resources.Preferences, self.getApplicationName() + ".cfg"))
except Exception as e:
Logger.log("e", "Exception while saving preferences: %s", repr(e))
try:
self.applicationShuttingDown.emit()
except Exception as e:
Logger.log("e", "Exception while emitting shutdown signal: %s", repr(e))
try:
self.getBackend().close()
|
er1iang/hfut_stu_lib
|
hfut/util.py
|
Python
|
mit
| 8,546
| 0.000944
|
# -*- coding:utf-8 -*-
"""
Helper functions that can boost your productivity
"""
from __future__ import unicode_literals, division
from copy import deepcopy
from datetime import timedelta
from threading import Thread
import requests
import requests.exceptions
from six.moves import urllib
from .log import logger
from .value import ENV
__all__ = ['get_point', 'cal_gpa', 'cal_term_code', 'term_str2code', 'sort_hosts', 'filter_curriculum']
def get_point(grade_str):
"""
    Determine the grade point for a grade
    :param grade_str: a string, since the grade may be on a percentage or a letter scale
    :return: the grade point
:rtype: float
"""
try:
grade = float(grade_str)
assert 0 <= grade <= 100
if 95 <= grade <= 100:
return 4.3
elif 90 <= grade < 95:
return 4.0
elif 85 <= grade < 90:
return 3.7
elif 82 <= grade < 85:
return 3.3
elif 78 <= grade < 82:
return 3.0
elif 75 <= grade < 78:
return 2.7
elif 72 <= grade < 75:
return 2.3
elif 68 <= grade < 72:
return 2.0
elif 66 <= grade < 68:
return 1.7
elif 64 <= grade < 66:
return 1.3
elif 60 <= grade < 64:
return 1.0
else:
return 0.0
except ValueError:
if grade_str == '优':
return 3.9
elif grade_str == '良':
return 3.0
elif grade_str == '中':
return 2.0
elif grade_str == '及格':
return 1.2
elif grade_str in ('不及格', '免修', '未考'):
return 0.0
else:
            raise ValueError('{:s} is not a valid grade'.format(grade_str))
def cal_gpa(grades):
"""
    Calculate the average grade point and the GPA from a grade array; the
    algorithm does not necessarily match the school's, results are for reference only
    :param grades: the grade array returned by :meth:`models.StudentSession.get_my_achievements`
    :return: a tuple of the average grade point and the GPA
    """
    # total number of courses
    courses_sum = len(grades)
    # sum of course grade points
    points_sum = 0
    # sum of credits
    credit_sum = 0
    # sum of (course credit x course grade point)
gpa_points_sum = 0
for grade in grades:
point = get_point(grade.get('补考成绩') or grade['成绩'])
credit = float(grade['学分'])
points_sum += point
credit_sum += credit
gpa_points_sum += credit * point
ave_point = points_sum / courses_sum
gpa = gpa_points_sum / credit_sum
return round(ave_point, 5), round(gpa, 5)
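# Worked example (made-up records, illustrative only; the keys mirror the
# scraped achievement data this function expects): points 4.3 (2 credits)
# and 3.3 (3 credits) give an average point of (4.3 + 3.3) / 2 = 3.8 and a
# gpa of (2 * 4.3 + 3 * 3.3) / (2 + 3) = 18.5 / 5 = 3.7.
_demo_grades = [{'成绩': '95', '学分': '2.0'}, {'成绩': '82', '学分': '3.0'}]
assert cal_gpa(_demo_grades) == (3.8, 3.7)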
def cal_term_code(year, is_first_term=True):
"""
    Calculate the corresponding term code
    :param year: starting year of the academic year, e.g. 2012 for "2012-2013学年第二学期"
    :param is_first_term: whether it is the first term
    :type is_first_term: bool
    :return: a term code such as "022"
"""
if year <= 2001:
        msg = 'Year out of range: {}'.format(year)
raise ValueError(msg)
term_code = (year - 2001) * 2
if is_first_term:
term_code -= 1
return '%03d' % term_code
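# Example (illustrative): the 2012-2013 academic year starts in 2012, so its
# first term code is '021' and its second is '022'.
assert cal_term_code(2012, is_first_term=True) == '021'
assert cal_term_code(2012, is_first_term=False) == '022'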
def term_str2code(term_str):
"""
    Convert a term string into the corresponding term code
    :param term_str: a term string such as "2012-2013学年第二学期"
    :return: a term code such as "022"
"""
result = ENV['TERM_PATTERN'].match(term_str).groups()
year = int(result[0])
return cal_term_code(year, result[1] == '一')
def sort_hosts(hosts, method='GET', path='/', timeout=(5, 10), **kwargs):
"""
    Test the speed of each host and return a ranking; on error the elapsed cost is INFINITY = 10000000
    :param method: the request method
    :param path: the default path to request
    :param hosts: the list of host addresses to test, e.g. `['http://222.195.8.201/']`
    :param timeout: the timeout, either a float or a ``(connect timeout, read timeout)`` tuple
    :param kwargs: extra arguments passed through to ``requests.request``
    :return: ranking data of the form ``[(elapsed time, host)]``
"""
ranks = []
class HostCheckerThread(Thread):
def __init__(self, host):
super(HostCheckerThread, self).__init__()
self.host = host
def run(self):
INFINITY = 10000000
try:
url = urllib.parse.urljoin(self.host, path)
res = requests.request(method, url, timeout=timeout, **kwargs)
res.raise_for_status()
cost = res.elapsed.total_seconds() * 1000
except Exception as e:
                logger.warning('request failed: %s', e)
cost = INFINITY
# http://stackoverflow.com/questions/6319207/are-lists-thread-safe
ranks.append((cost, self.host))
threads = [HostCheckerThread(u) for u in hosts]
for t in threads:
t.start()
for t in threads:
t.join()
ranks.sort()
return ranks
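# Usage sketch (performs real HTTP requests; the host is the example from
# the docstring above):
#
#     ranks = sort_hosts(['http://222.195.8.201/'], path='/', timeout=(5, 10))
#     fastest_host = ranks[0][1]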
def filter_curriculum(curriculum, week, weekday=None):
"""
    Filter out the courses of a given week [and, optionally, a given weekday]
    :param curriculum: the curriculum data
    :param week: the week to filter on, a positive integer
    :param weekday: the weekday, an integer where 1-7 map to Monday through Sunday
    :return: if weekday is not given, the same format as the original curriculum but containing only the courses in the given week; otherwise the single day's courses for the given week and weekday
"""
if weekday:
c = [deepcopy(curriculum[weekday - 1])]
else:
c = deepcopy(curriculum)
for d in c:
l = len(d)
for t_idx in range(l):
t = d[t_idx]
if t is None:
continue
            # Courses normally do not repeat in the same slot; warn when they do
            t = list(filter(lambda k: week in k['上课周数'], t)) or None
            if t is not None and len(t) > 1:
                logger.warning('conflict in week %d, weekday %d, class %d: %s', week, weekday or c.index(d) + 1, t_idx + 1, t)
d[t_idx] = t
return c[0] if weekday else c
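# Example: with a 7-day curriculum where each day is a list of periods and each
# period is a list of course dicts (or None),
#   filter_curriculum(curriculum, week=3, weekday=1)
# returns Monday's periods for week 3, keeping only courses whose '上课周数'
# contains 3.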
def curriculum2schedule(curriculum, first_day, compress=False, time_table=None):
"""
    Convert a curriculum into a class schedule. With compress=False the result is
    unsorted; otherwise consecutive periods are merged and the result is sorted.
    :param curriculum: the curriculum
    :param first_day: Monday of the first week, e.g. datetime.datetime(2016, 8, 29)
    :param compress: merge consecutive periods of the same course into one entry
    :param time_table: daily class times, an 11 x 2 matrix of the form
        ``((start timedelta, end timedelta), ...)``
    :return: a list of (week, start datetime, end datetime, course) tuples
"""
schedule = []
time_table = time_table or (
(timedelta(hours=8), timedelta(hours=8, minutes=50)),
(timedelta(hours=9), timedelta(hours=9, minutes=50)),
        (timedelta(hours=10, minutes=10), timedelta(hours=11)),
(timedelta(hours=11, minutes=10), timedelta(hours=12)),
(timedelta(hours=14), timedelta(hours=14, minutes=50)),
(timedelta(hours=15), timedelta(hours=15, minutes=50)),
(timedelta(hours=16), timedelta(hours=16, minutes=50)),
        (timedelta(hours=17), timedelta(hours=17, minutes=50)),
(timedelta(hours=19), timedelta(hours=19, minutes=50)),
(timedelta(hours=19, minutes=50), timedelta(hours=20, minutes=40)),
(timedelta(hours=20, minutes=40), timedelta(hours=21, minutes=30))
)
for i, d in enumerate(curriculum):
for j, cs in enumerate(d):
for c in cs or []:
course = '{name}[{place}]'.format(name=c['课程名称'], place=c['课程地点'])
for week in c['上课周数']:
day = first_day + timedelta(weeks=week - 1, days=i)
start, end = time_table[j]
item = (week, day + start, day + end, course)
schedule.append(item)
schedule.sort()
if compress:
new_schedule = [schedule[0]]
for i in range(1, len(schedule)):
sch = schedule[i]
            # consecutive periods of the same course on the same day
            if new_schedule[-1][1].date() == sch[1].date() and new_schedule[-1][3] == sch[3]:
                # update the end time
                old_item = new_schedule.pop()
# week, start, end, course
new_item = (old_item[0], old_item[1], sch[2], old_item[3])
else:
new_item = sch
new_schedule.append(new_item)
return new_schedule
return schedule
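# Usage sketch (assuming the curriculum structure documented above):
#   schedule = curriculum2schedule(curriculum,
#                                  first_day=datetime.datetime(2016, 8, 29),
#                                  compress=True)
#   for week, start, end, course in schedule:
#       print(week, start, end, course)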
timlinux/qgis2web | olwriter.py | Python | gpl-2.0 | 18,993 | 0.002738
# qgis-ol3 Creates OpenLayers map from QGIS layers
# Copyright (C) 2014 Victor Olaya (volayaf@gmail.com)
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
import codecs
import os
import re
import math
import time
import shutil
from qgis.core import *
from utils import exportLayers, safeName
from qgis.utils import iface
from PyQt4.QtCore import *
from PyQt4.QtGui import *
from utils import ALL_ATTRIBUTES
from basemaps import basemapOL, basemapAttributions
baseLayers = basemapOL()
basemapAttributions = basemapAttributions()
baseLayerGroup = "var baseLayer = new ol.layer.Group({'title': 'Base maps',layers: [%s]});"
def writeOL(iface, layers, groups, popup, visible, json, cluster, labels, settings, folder):
QApplication.setOverrideCursor(QCursor(Qt.WaitCursor))
folder = os.path.join(folder, 'qgis2web_' + unicode(time.strftime("%Y_%m_%d-%H_%M_%S")))
# folder = os.path.join(os.getcwd(),folder)
try:
dst = os.path.join(folder, "resources")
if not os.path.exists(dst):
shutil.copytree(os.path.join(os.path.dirname(__file__), "resources"), dst)
precision = settings["Data export"]["Precision"]
optimize = settings["Data export"]["Minify GeoJSON files"]
cleanUnusedFields = settings["Data export"]["Delete unused fields"]
if not cleanUnusedFields:
usedFields = [ALL_ATTRIBUTES] * len(popup)
else:
usedFields = popup
exportLayers(layers, folder, precision, optimize, usedFields)
exportStyles(layers, folder)
writeLayersAndGroups(layers, groups, visible, folder, settings)
if settings["Data export"]["Mapping library location"] == "Local":
cssAddress = "./resources/ol.css"
jsAddress = "./resources/ol.js"
else:
cssAddress = "http://openlayers.org/en/v3.8.2/css/ol.css"
jsAddress = "http://openlayers.org/en/v3.8.2/build/ol.js"
geojsonVars = "\n".join(['<script src="layers/%s"></script>' % (safeName(layer.name()) + ".js")
for layer in layers if layer.type() == layer.VectorLayer])
styleVars = "\n".join(['<script src="styles/%s_style.js"></script>' % (safeName(layer.name()))
for layer in layers if layer.type() == layer.VectorLayer])
popupLayers = "popupLayers = [%s];" % ",".join(['"%s"' % field if isinstance(field, basestring) else unicode(field) for field in popup])
controls = []
if settings["Appearance"]["Add scale bar"]:
controls.append("new ol.control.ScaleLine({})")
if settings["Appearance"]["Add layers list"]:
controls.append('new ol.control.LayerSwitcher({tipLabel: "Layers"})')
mapbounds = bounds(iface, settings["Scale/Zoom"]["Extent"] == "Canvas extent", layers)
mapextent = "extent: %s," % mapbounds if settings["Scale/Zoom"]["Restrict to extent"] else ""
maxZoom = int(settings["Scale/Zoom"]["Max zoom level"])
minZoom = int(settings["Scale/Zoom"]["Min zoom level"])
onHover = unicode(settings["Appearance"]["Show popups on hover"]).lower()
highlight = unicode(settings["Appearance"]["Highlight features"]).lower()
view = "%s maxZoom: %d, minZoom: %d" % (mapextent, maxZoom, minZoom)
values = {"@CSSADDRESS@": cssAddress,
"@JSADDRESS@": jsAddress,
"@STYLEVARS@": styleVars,
"@GEOJSONVARS@": geojsonVars,
"@BOUNDS@": mapbounds,
"@CONTROLS@": ",".join(controls),
"@POPUPLAYERS@": popupLayers,
"@VIEW@": view,
"@ONHOVER@": onHover,
"@DOHIGHLIGHT@": highlight}
with open(os.path.join(folder, "index.html"), "w") as f:
f.write(replaceInTemplate(settings["Appearance"]["Template"] + ".html", values))
finally:
QApplication.restoreOverrideCursor()
return os.path.join(folder, "index.html")
def writeLayersAndGroups(layers, groups, visible, folder, settings):
baseLayer = baseLayerGroup % baseLayers[settings["Appearance"]["Base layer"]]
scaleVisibility = settings["Scale/Zoom"]["Use layer scale dependent visibility"]
layerVars = "\n".join([layerToJavascript(layer, scaleVisibility) for layer in layers])
groupVars = ""
groupedLayers = {}
for group, groupLayers in groups.iteritems():
groupVars += ('''var %s = new ol.layer.Group({
layers: [%s],
title: "%s"});\n''' %
("group_" + safeName(group), ",".join(["lyr_" + safeName(layer.name()) for layer in groupLayers]),
group))
for layer in groupLayers:
groupedLayers[layer.id()] = safeName(group)
mapLayers = []
if settings["Appearance"]["Base layer"] != "None":
mapLayers.append("baseLayer")
usedGroups = []
for layer in layers:
mapLayers.append("lyr_" + safeName(layer.name()))
visibility = "\n".join(["%s.setVisible(%s);" % (layer, unicode(v).lower()) for layer, v in zip(mapLayers[1:], visible)])
# ADD Group
group_list = ["baseLayer"]
no_group_list = []
for layer in layers:
if layer.id() in groupedLayers:
groupName = groupedLayers[layer.id()]
if groupName not in usedGroups:
group_list.append("group_" + safeName(groupName))
usedGroups.append(groupName)
else:
no_group_list.append("lyr_" + safeName(layer.name()))
layersList = "var layersList = [%s];" % ",".join([layer for layer in (group_list + no_group_list)])
path = os.path.join(folder, "layers", "layers.js")
with codecs.open(path, "w", "utf-8") as f:
f.write(baseLayer + "\n")
f.write(layerVars + "\n")
f.write(groupVars + "\n")
f.write(visibility + "\n")
f.write(layersList + "\n")
# f.write(write_group_list)
def replaceInTemplate(template, values):
path = os.path.join(os.path.dirname(__file__), "templates", template)
with open(path) as f:
lines = f.readlines()
s = "".join(lines)
for name, value in values.iteritems():
s = s.replace(name, value)
return s
def bounds(iface, useCanvas, layers):
if useCanvas:
canvas = iface.mapCanvas()
canvasCrs = canvas.mapRenderer().destinationCrs()
transform = QgsCoordinateTransform(canvasCrs, QgsCoordinateReferenceSystem("EPSG:3857"))
try:
extent = transform.transform(canvas.extent())
except QgsCsException:
extent = QgsRectangle(-20026376.39, -20048966.10, 20026376.39, 20048966.10)
else:
extent = None
for layer in layers:
transform = QgsCoordinateTransform(layer.crs(), QgsCoordinateReferenceSystem("EPSG:3857"))
try:
layerExtent = transform.transform(layer.extent())
except QgsCsException:
layerExtent = QgsRectangle(-20026376.39, -20048966.10, 20026376.39, 20048966.10)
if extent is None:
extent = layerExtent
else:
extent.combineExtentWith(layerExtent)
return "[%f, %f, %f, %f]" % (extent.xMinimum(), extent
|
.yMinimum(),
extent.xMaximum(), extent.yMaximum())
def layerToJavascript(layer, scaleVisibility):
# TODO: change scale to resolution
if scaleVisibility and layer.hasScaleBasedVi
ctogle/lattice | tests/test_lattice.py | Python | mit | 1,022 | 0.036204
import unittest
import modular_core.libfundamental as lfu
from modular_core.libsimcomponents import ensemble_manager as mng
import os, sys, pdb
#log = open(os.path.join(os.getcwd(), 'test_ensemble.log'), 'w')
#sys.stdout = log
import lattice as mod
class dummyTestCase(unittest.TestCase):
"""Tests for `dummy module`."""
simple_mcfg = os.path.join(os.getcwd(),
'lattice_dep_mcfgs',
'lattice_example.mcfg')
mn = mng()
en = mn.add_ensemble(module = mod.main.module_name)
def pause(self, *args, **kwargs):
sys.stdout = sys.__stdout__
pdb.set_trace()
sys.stdout = log
def test_can_make_ensemble(self):
"""module successfully imported?"""
self.assertFalse(mod.main == None)
mod_name = mod.main.module_name
ensem = self.mn.add_ensemble(module = mod_name)
self.assertFalse(ensem is None)
def test_can_run_mcfg(self):
ran = self.en.run_mcfg(self.simple_mcfg)
out = self.en.produce_output()
self.assertTrue(ran)
self.assertTrue(out)
if __name__ == '__main__':
unittest.main()
panoptes/POCS | src/panoptes/pocs/mount/ioptron/ieq30pro.py | Python | mit | 12,613 | 0.001904
import re
import time
from astropy import units as u
from astropy.coordinates import SkyCoord
from panoptes.utils.time import current_time
from panoptes.utils import error as error
from panoptes.pocs.mount.serial import AbstractSerialMount
class Mount(AbstractSerialMount):
"""
Mount class for iOptron mounts. Overrides the base `initialize` method
    and provides some helper methods to convert coordinates.
"""
def __init__(self, *args, **kwargs):
super(Mount, self).__init__(*args, **kwargs)
        self.logger.info('Creating iOptron mount')
# Regexp to match the iOptron RA/Dec format
self._ra_format = r'(?P<ra_millisecond>\d{8})'
self._dec_format = r'(?P<dec_sign>[\+\-])(?P<dec_arcsec>\d{8})'
        self._coords_format = re.compile(self._dec_format + self._ra_format)
self._raw_status = None
self._status_format = re.compile(
'(?P<gps>[0-2]{1})' +
'(?P<state>[0-7]{1})' +
'(?P<tracking>[0-4]{1})' +
'(?P<movement_speed>[1-9]{1})' +
'(?P<time_source>[1-3]{1})' +
'(?P<hemisphere>[01]{1})'
)
self._status_lookup = {
'gps': {
'0': 'Off',
'1': 'On',
'2': 'Data Extracted'
},
'state': {
'0': 'Stopped - Not at Zero Position',
'1': 'Tracking (PEC disabled)',
'2': 'Slewing',
'3': 'Guiding',
'4': 'Meridian Flipping',
'5': 'Tracking (PEC enabled)',
'6': 'Parked',
'7': 'Stopped - Zero Position'
},
'tracking': {
'0': 'Sidereal',
'1': 'Lunar',
'2': 'Solar',
'3': 'King',
'4': 'Custom'
},
'movement_speed': {
'1': '1x sidereal',
'2': '2x sidereal',
'3': '8x sidereal',
'4': '16x sidereal',
'5': '64x sidereal',
'6': '128x sidereal',
'7': '256x sidereal',
'8': '512x sidereal',
'9': 'Max sidereal',
},
'time_source': {
'1': 'RS-232',
'2': 'Hand Controller',
'3': 'GPS'
},
'hemisphere': {
'0': 'Southern',
'1': 'Northern'
}
}
self.logger.info('Mount created')
################################################################################################
# Properties
################################################################################################
@property
def is_home(self):
""" bool: Mount home status. """
self._is_home = 'Stopped - Zero Position' in self.status.get('state', '')
return self._is_home
@property
def is_tracking(self):
""" bool: Mount tracking status. """
self._is_tracking = 'Tracking' in self.status.get('state', '')
return self._is_tracking
@property
def is_slewing(self):
""" bool: Mount slewing status. """
self._is_slewing = 'Slewing' in self.status.get('state', '')
return self._is_slewing
################################################################################################
# Public Methods
################################################################################################
def initialize(self, set_rates=True, unpark=False, *arg, **kwargs):
""" Initialize the connection with the mount and setup for location.
iOptron mounts are initialized by sending the following two commands
to the mount:
* Version
* MountInfo
If the mount is successfully initialized, the `_setup_location_for_mount` method
is also called.
Returns:
bool: Returns the value from `self.is_initialized`.
"""
if not self.is_connected:
self.logger.info(f'Connecting to mount {__name__}')
self.connect()
if self.is_connected and not self.is_initialized:
self.logger.info(f'Initializing {__name__} mount')
            # We trick the mount into thinking it's initialized while we
            # initialize it; otherwise the `query` method would test whether
            # it is initialized and get stuck in a loop.
self._is_initialized = True
actual_version = self.query('version')
actual_mount_info = self.query('mount_info')
expected_version = self.commands.get('version').get('response')
expected_mount_info = self.commands.get('mount_info').get('response')
self._is_initialized = False
# Test our init procedure for iOptron
if actual_version != expected_version or actual_mount_info != expected_mount_info:
self.logger.debug(f'{actual_version} != {expected_version}')
self.logger.debug(f'{actual_mount_info} != {expected_mount_info}')
raise error.MountNotFound('Problem initializing mount')
else:
self._is_initialized = True
self._setup_location_for_mount()
if set_rates:
self._set_initial_rates()
self.logger.info(f'Mount initialized: {self.is_initialized}')
return self.is_initialized
def park(self,
ra_direction='west',
ra_seconds=11.,
dec_direction='south',
dec_seconds=15.,
*args, **kwargs):
"""Slews to the park position and parks the mount.
This will first move the mount to the home position, then move the RA axis
in the direction specified at 0.9x sidereal rate (the fastest) for the number
of seconds requested. Then move the Dec axis in a similar manner. This should
be adjusted for the particular parking position desired.
Note:
When mount is parked no movement commands will be accepted.
Args:
ra_direction (str, optional): The direction to move the RA axis from
the home position. Defaults to 'west' for northern hemisphere.
ra_seconds (float, optional): The number of seconds at fastest move
speed to move the RA axis from the home position.
dec_direction (str, optional): The direction to move the Dec axis
from the home position. Defaults to 'south' for northern hemisphere.
dec_seconds (float, optional): The number of seconds at the fastest
move speed to move the Dec axis from the home position.
Returns:
bool: indicating success
"""
if self.is_parked:
self.logger.info('Mount is parked')
return self._is_parked
if self.slew_to_home(blocking=True):
            # The mount does not currently park in the correct position, so we move it there manually.
self.query('set_button_moving_rate', 9)
self.move_direction(direction=ra_direction, seconds=ra_seconds)
while self.is_slewing:
self.logger.debug('Slewing RA axis to park position...')
time.sleep(3)
self.move_direction(direction=dec_direction, seconds=dec_seconds)
while self.is_slewing:
self.logger.debug('Slewing Dec axis to park position...')
time.sleep(3)
self._is_parked = True
self.logger.debug(f'Mount parked: {self.is_parked}')
return self._is_parked
################################################################################################
# Private Methods
################################################################################################
def _set_initial_rates(self):
# Make sure we start at sidereal
self.set_tracking_rate()
self.logger.debug('Setting manual moving rate to max')
self.q
hbbhbbh/TmallSingleCrawler | TmallSingleCrawler/spiders/tmall.py | Python | mit | 11,888 | 0.003456
# -*- coding: utf-8 -*-
import scrapy
from scrapy.selector import Selector
from scrapy.loader import ItemLoader
from scrapy.xlib.pydispatch import dispatcher
from scrapy import signals
from selenium import webdriver
from selenium.webdriver.support.wait import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.common.by import By
from selenium.webdriver.common.action_chains import ActionChains
from selenium.common.exceptions import UnexpectedAlertPresentException, ElementNotVisibleException
from TmallSingleCrawler.utils.TMallUtils import get_captcha_image, get_favorite_count, get_comment_count, \
get_comment_images
from TmallSingleCrawler.custom_items.TMallGoodsItem import TMallGoodsItem
import math
import time
import re
import os
class TmallSpider(scrapy.Spider):
handle_httpstatus_list = [302]
brower = None
cookies = ""
name = 'tmall'
start_urls = [
'https://list.tmall.com/search_product.htm?q=%C5%AE%D7%B0&type=p&vmarket=&spm=875.7931836%2FB.a2227oh.d100&from=mallfp..pc_1_searchbutton',
'https://list.tmall.com/search_product.htm?q=%C5%AE%D0%AC&type=p&spm=a220m.1000858.a2227oh.d100&from=.list.pc_1_searchbutton',
'https://list.tmall.com/search_product.htm?q=%C4%D0%D7%B0&type=p&spm=a220m.1000858.a2227oh.d100&from=.list.pc_1_searchbutton',
'https://list.tmall.com/search_product.htm?q=%C4%D0%D0%AC&type=p&spm=a220m.1000858.a2227oh.d100&from=.list.pc_1_searchbutton']
def __init__(self):
super(TmallSpider, self).__init__()
dispatcher.connect(self.spider_closed, signals.spider_closed)
    # Log in first; crawling can only start after a successful login
def start_requests(self):
if self.login():
for url in self.start_urls:
yield scrapy.Request(url, cookies=self.cookies, callback=self.parse)
else:
return []
def parse(self, response):
"""
        During crawling, various issues (such as the crawl rate) may cause a
        page to redirect to a captcha page. In that case the request is handed
        back to Chrome, a screenshot is taken there, and the user is asked to
        type in the captcha.
"""
if response.status == 302:
# if response.headers.has_key("location"):
# location = str(response.headers['Location'], encoding="utf-8")
# redirect_url = ""
#
# if location.find("sec.taobao") > 0:
# self.parse_code(url=location, origin_url=response.url)
# redirect_url = response.url
# else:
# redirect_url = location
redirect_url = response.request.url
yield scrapy.Request(url=redirect_url, cookies=self.cookies, dont_filter=True, callback=self.parse)
else:
            for item_block in response.css("#J_ItemList .product"):
try:
                    item_loader = ItemLoader(item=TMallGoodsItem(), selector=item_block)
id = item_block.css("::attr(data-id)").extract_first()
                    # if id is missing, this block is a Tmall recommendation; skip it
if id is None:
continue
item_loader.add_value("id", id)
item_loader.add_css("main_image", ".productImg-wrap img::attr(src)")
item_loader.add_css("thumb_image", ".proThumb-img img::attr(data-ks-lazyload)")
item_loader.add_css("price", ".productPrice em::attr(title)")
item_loader.add_css("title", ".productTitle a::attr(title)")
item_loader.add_css("shop", ".productShop a::text")
item_loader.add_css("moon_sell", ".productStatus span:nth-child(1) em::text")
item_loader.add_css("comment_number", ".productStatus span:nth-child(2) a::text")
goods_item = item_loader.load_item()
detail_url = "https:" + item_block.css(".productTitle a::attr(href)").extract_first()
yield scrapy.Request(url=detail_url, callback=self.parse_detail, meta={"item": goods_item})
except:
continue
        # go to the next page
next_page = "https://list.tmall.com/search_product.htm" + response.css(
".ui-page-next::attr(href)").extract_first("")
yield scrapy.Request(url=next_page, callback=self.parse)
def parse_detail(self, response):
        # check for a redirect, same reasoning as above
if response.status == 302:
# if response.headers.has_key("location"):
# location = str(response.headers['Location'], encoding="utf-8")
# redirect_url = ""
#
# if location.find("sec.taobao") > 0:
# self.parse_code(url=location, origin_url=response.url)
# redirect_url = response.url
# else:
# redirect_url = location
redirect_url = response.request.url
yield scrapy.Request(url=redirect_url, cookies=self.cookies, dont_filter=True,
callback=self.parse_detail, meta=response.meta)
else:
            # extract the id from the url to match items up
goods_item = response.meta['item']
item_loader = ItemLoader(item=goods_item, response=response)
item_loader.add_css("params", "#J_AttrUL li::text")
            # Here we must extract itemId, spuId and sellerId ourselves to build the requests
            # The built requests fetch: favorite count, comment count, and comments with images
            # sellerId and spuId
sellerId = re.findall(r"sellerId=(\d+)", response.text)[0]
spuId = re.findall(r"spuId=(\d+)", response.text)[0]
favorite_count_url = "https:" + re.findall(r"apiBeans\":\"(.*?)\"", response.text)[0] + "&callback=jsonp259"
comment_count_url = "https://dsr-rate.tmall.com/list_dsr_info.htm?itemId={itemId}&spuId={spuId}&sellerId={sellerId}&callback=jsonp206" \
"".format(itemId=goods_item['id'][0], spuId=spuId, sellerId=sellerId)
comment_imags_url = "https://rate.tmall.com/list_detail_rate.htm?itemId={itemId}&spuId={spuId}&sellerId={sellerId}&order=3¤tPage=1&append=0&content=1&tagId=&posi=&picture=1&ua=238UW5TcyMNYQwiAiwZTXFIdUh1SHJOe0BuOG4%3D%7CUm5Ockp3TntCekN2QnlBfSs%3D%7CU2xMHDJ7G2AHYg8hAS8XIw0tA18%2BWDRTLVd5L3k%3D%7CVGhXd1llXWBZbFVtVGFVblZqXWBCfUZzSHxAfUd5TXVBekZ4Rn9RBw%3D%3D%7CVWldfS0SMg4yDy8bOxVgS2wLcCBCIAQqfCo%3D%7CVmhIGCUFOBgkHSAaOgE6DjUVKRApFDQAPQAgHCUcIQE0DzJkMg%3D%3D%7CV25OHjAePgc%2BAyMfIh4jAzwDPwtdCw%3D%3D%7CWGFBET8RMQg2CysXKhYrCzIMMApcCg%3D%3D%7CWWFBET8RMWFYZlt7R3pGey0NMBA%2BEDAJNww2YDY%3D%7CWmNeY0N%2BXmFBfUR4WGZeZER9XWFcfEhoVG44&isg=AuDgX6vXrZIrfRECc7u5mTzxse5yQcXX4sPaQlrw4Pu6VYB_APmUQ7Ylm8qv&itemPropertyId=&itemPropertyIndex=&userPropertyId=&userPropertyIndex=&rateQuery=&location=&needFold=0&callback=jsonp981" \
"".format(itemId=goods_item['id'][0], spuId=spuId, sellerId=sellerId)
favorite_count = get_favorite_count(favorite_count_url)
comment_count = get_comment_count(comment_count_url)
comment_imags = get_comment_images(comment_imags_url)
item_loader.add_value("favorite_count", favorite_count)
item_loader.add_value("comment_count", comment_count)
item_loader.add_value("comment_images", comment_imags)
goods_item = item_loader.load_item()
yield goods_item
def spider_closed(self, spider):
self.log("closing spider")
self.brower.quit()
    # log in through Chrome
def login(self):
self.brower = webdriver.Chrome(
executable_path="C:/Program Files (x86)/Google/Chrome/Application/chromedriver.exe")
# self.brower.implicitly_wait(30)
self.brower.get(
"https://login.taobao.com/member/login.jhtml?tpl_redirect_url=https%3A%2F%2Fwww.tmall.com%2F&style=miniall&full_redirect=true")
self.brower.find_element_by_id("TPL_username_1").send_keys("天猫用户名")
self.brower.find_element_by_id("TPL_password_1").send_keys("密码")
        # check whether slider verification is required
self.drag_bar()
        # check whether login can proceed
self.check_success()
        # check whether an SMS verification code is required
self.type_safe_code()
        # extract Chrome's cookies
arsenovic/clifford | clifford/_conformal_layout.py | Python | bsd-3-clause | 2,999 | 0.001
import numpy as np
from ._layout import Layout
from ._multivector import MultiVector
class ConformalLayout(Layout):
r"""
    A layout for a conformal algebra, which adds extra constants and helpers.
Typically these should be constructed via :func:`clifford.conformalize`.
.. versionadded:: 1.2.0
Attributes
----------
ep : MultiVector
The first added basis element, :math:`e_{+}`, usually with :math:`e_{+}^2 = +1`
en : MultiVector
The first added basis element, :math:`e_{-}`, usually with :math:`e_{-}^2 = -1`
eo : MultiVector
The null basis vector at the origin, :math:`e_o = 0.5(e_{-} - e_{+})`
einf : MultiVector
The null vector at infinity, :math:`e_\infty = e_{-} + e_{+}`
E0 : MultiVector
The minkowski subspace bivector, :math:`e_\infty \wedge e_o`
I_base : MultiVector
The pseudoscalar of the base ga, in cga layout
"""
def __init__(self, *args, layout=None, **kwargs):
super().__init__(*args, **kwargs)
self._base_layout = layout
ep, en = self.basis_vectors_lst[-2:]
# setup null basis, and minkowski subspace bivector
eo = .5 ^ (en - ep)
einf = en + ep
E0 = einf ^ eo
I_base = self.pseudoScalar*E0
# helper properties
self.ep = ep
self.en = en
self.eo = eo
self.einf = einf
self.E0 = E0
self.I_base = I_base
@classmethod
def _from_base_layout(cls, layout, added_sig=[1, -1], **kwargs) -> 'ConformalLayout':
""" helper to implement :func:`clifford.conformalize` """
sig_c = list(layout.sig) + added_sig
return cls(
sig_c,
ids=layout._basis_vector_ids.augmented_with(len(added_sig)),
layout=layout, **kwargs)
# some convenience functions
def up(self, x: MultiVector) -> MultiVector:
""" up-project a vector from GA to CGA """
try:
if x.layout == self._base_layout:
# vector is in original space, map it into conformal space
old_val = x.value
new_val = np.zeros(self.gaDims)
new_val[:len(old_val)] = old_val
x = self.MultiVector(value=new_val)
except(AttributeError):
# if x is a scalar it doesnt have layout but following
# will still work
pass
# then up-project into a null vector
return x + (.5 ^ ((x**2)*self.einf)) + self.eo
def homo(self, x: MultiVector) -> MultiVector:
""" homogenize a CGA vector """
return x/(-x | self.einf)[()]
def down(self, x: MultiVector) -> MultiVector:
""" down-project a vector from CGA to GA """
x_down = (self.homo(x) ^ self.E0)*self.E0
# new_val = x_down.value[:self.base_layout.gaDims]
# create vector in self.base_layout (not cga)
# x_down = self.base_layout.MultiVector(value=new_val)
return x_down
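# Usage sketch: ConformalLayout instances are normally produced by
# clifford.conformalize (API names per the clifford package; illustrative only):
#   from clifford import Cl, conformalize
#   g3, g3_blades = Cl(3)
#   cga, cga_blades, stuff = conformalize(g3)
#   x = g3_blades['e1'] + 2 * g3_blades['e2']  # vector in the base GA
#   X = cga.up(x)    # null conformal point, X**2 == 0
#   cga.down(X)      # recovers x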
jamespcole/home-assistant | tests/components/deconz/test_climate.py | Python | apache-2.0 | 6,215 | 0
"""deCONZ climate platform tests."""
from unittest.mock import Mock, patch
import asynctest
from homeassistant import config_entries
from homeassistant.components import deconz
from homeassistant.helpers.dispatcher import async_dispatcher_send
from homeassistant.setup import async_setup_component
import homeassistant.components.climate as climate
from tests.common import mock_coro
SENSOR = {
"1": {
"id": "Climate 1 id",
"name": "Climate 1 name",
"type": "ZHAThermostat",
"state": {"on": True, "temperature": 2260},
"config": {"battery": 100, "heatsetpoint": 2200, "mode": "auto",
"offset": 10, "reachable": True, "valve": 30},
"uniqueid": "00:00:00:00:00:00:00:00-00"
},
"2": {
"id": "Sensor 2 id",
"name": "Sensor 2 name",
"type": "ZHAPresence",
"state": {"presence": False},
"config": {}
}
}
ENTRY_CONFIG = {
deconz.const.CONF_ALLOW_CLIP_SENSOR: True,
deconz.const.CONF_ALLOW_DECONZ_GROUPS: True,
deconz.config_flow.CONF_API_KEY: "ABCDEF",
deconz.config_flow.CONF_BRIDGEID: "0123456789",
deconz.config_flow.CONF_HOST: "1.2.3.4",
deconz.config_flow.CONF_PORT: 80
}
async def setup_gateway(hass, data, allow_clip_sensor=True):
"""Load the deCONZ sensor platform."""
from pydeconz import DeconzSession
response = Mock(
status=200, json=asynctest.CoroutineMock(),
text=asynctest.CoroutineMock())
response.content_type = 'application/json'
session = Mock(
put=asynctest.CoroutineMock(
return_value=response
)
)
ENTRY_CONFIG[deconz.const.CONF_ALLOW_CLIP_SENSOR] = allow_clip_sensor
config_entry = config_entries.ConfigEntry(
1, deconz.DOMAIN, 'Mock Title', ENTRY_CONFIG, 'test',
config_entries.CONN_CLASS_LOCAL_PUSH)
gateway = deconz.DeconzGateway(hass, config_entry)
gateway.api = DeconzSession(hass.loop, session, **config_entry.data)
gateway.api.config = Mock()
hass.data[deconz.DOMAIN] = gateway
with patch('pydeconz.DeconzSession.async_get_state',
return_value=mock_coro(data)):
await gateway.api.async_load_parameters()
await hass.config_entries.async_forward_entry_setup(
config_entry, 'climate')
# To flush out the service call to update the group
await hass.async_block_till_done()
async def test_platform_manually_configured(hass):
"""Test that we do not discover anything or try to set up a gateway."""
assert await async_setup_component(hass, climate.DOMAIN, {
'climate': {
'platform': deconz.DOMAIN
}
}) is True
assert deconz.DOMAIN not in hass.data
async def test_no_sensors(hass):
"""Test that no sensors in deconz results in no climate entities."""
await setup_gateway(hass, {})
assert not hass.data[deconz.DOMAIN].deconz_ids
assert not hass.states.async_all()
async def test_climate_devices(hass):
"""Test successful creation of sensor entities."""
await setup_gateway(hass, {"sensors": SENSOR})
assert "climate.climate_1_name" in hass.data[deconz.DOMAIN].deconz_ids
assert "sensor.sensor_2_name" not in hass.data[deconz.DOMAIN].deconz_ids
assert len(hass.states.async_all()) == 1
hass.data[deconz.DOMAIN].api.sensors['1'].async_update(
{'state': {'on': False}})
await hass.services.async_call(
'climate', 'turn_on', {'entity_id': 'climate.climate_1_name'},
blocking=True
)
hass.data[deconz.DOMAIN].api.session.put.assert_called_with(
'http://1.2.3.4:80/api/ABCDEF/sensors/1/config',
data='{"mode": "auto"}'
)
await hass.services.async_call(
'climate', 'turn_off', {'entity_id': 'climate.climate_1_name'},
blocking=True
)
hass.data[deconz.DOMAIN].api.session.put.assert_called_with(
'http://1.2.3.4:80/api/ABCDEF/sensors/1/config',
data='{"mode": "off"}'
)
await hass.services.async_call(
'climate', 'set_temperature',
{'entity_id': 'climate.climate_1_name', 'temperature': 20},
blocking=True
)
hass.data[deconz.DOMAIN].api.session.put.assert_called_with(
'http://1.2.3.4:80/api/ABCDEF/sensors/1/config',
data='{"heatsetpoint": 2000.0}'
)
assert len(hass.data[deconz.DOMAIN].api.session.put.mock_calls) == 3
async def test_verify_state_update(hass):
"""Test that state update properly."""
await setup_gateway(hass, {"sensors": SENSOR})
assert "climate.climate_1_name" in hass.data[deconz.DOMAIN].deconz_ids
thermostat = hass.states.get('climate.climate_1_name')
assert thermostat.state == 'on'
state_update = {
"t": "event",
"e": "changed",
"r": "sensors",
"id": "1",
"config": {"on": False}
}
hass.data[deconz.DOMAIN].api.async_event_handler(state_update)
await hass.async_block_till_done()
assert len(hass.states.async_all()) == 1
thermostat = hass.states.get('climate.climate_1_name')
assert thermostat.state == 'off'
async def test_add_new_climate_device(hass):
"""Test successful creation of climate entities."""
await setup_gateway(hass, {})
sensor = Mock()
sensor.name = 'name'
sensor.type = 'ZHAThermostat'
sensor.register_async_callback = Mock()
async_dispatcher_send(hass, 'deconz_new_sensor', [sensor])
await hass.async_block_till_done()
assert "climate.name" in hass.data[deconz.DOMAIN].deconz_ids
async def test_do_not_allow_clipsensor(hass):
"""Test that clip sensors can be ignored."""
await setup_gateway(hass, {}, allow_clip_sensor=False)
sensor = Mock()
sensor.name = 'name'
sensor.type = 'CLIPThermostat'
    sensor.register_async_callback = Mock()
async_dispatcher_send(hass, 'deconz_new_sensor', [sensor])
await hass.async_block_till_done()
assert len(hass.data[deconz.DOMAIN].deconz_ids) == 0
async def test_unload_sensor(hass):
"""Test that it works to unload sensor entities."""
await setup_gateway(hass, {"sensors": SENSOR})
await hass.data[deconz.DOMAIN].async_reset()
assert len(hass.states.async_all()) == 0
Anderson-Lab/anderson-lab.github.io | csc_466_2021_spring/MLCode/Ch18/gp.py | Python | mit | 13,209 | 0.059959
# Code from Chapter 18 of Machine Learning: An Algorithmic Perspective (2nd Edition)
# by Stephen Marsland (http://stephenmonika.net)
# You are free to use, change, or redistribute the code in any way you wish for
# non-commercial purposes, but please maintain the name of the original author.
# This code comes with no warranty of any kind.
# Stephen Marsland, 2014
import pylab as pl
import numpy as np
import scipy.optimize as so
def kernel4(data1,data2,theta,wantderiv=True,measnoise=1.):
theta = np.squeeze(theta)
# Periodic
if np.shape(data1)[0] == len(data1):
d1 = np.shape(data1)[0]
n = 1
else:
(d1,n) = np.shape(data1)
d2 = np.shape(data2)[0]
sumxy = np.zeros((d1,d2))
for d in range(n):
D1 = np.transpose([data1[:,d]]) * np.ones((d1,d2))
D2 = [data2[:,d]] * np.ones((d1,d2))
sumxy += (D1-D2)**2
k = theta[0]**2 * np.exp(- 2.0*np.sin(np.pi*sumxy)**2/(theta[1]**2))
if wantderiv:
K = np.zeros((d1,d2,len(theta)+1))
K[:,:,0] = k + measnoise*theta[2]**2*np.eye(d1,d2)
K[:,:,1] = 2.0 *k /theta[0]
K[:,:,2] = 4.0*k*np.sin(np.pi*sumxy)**2/(theta[2]**3)
K[:,:,3] = 2.0*theta[2]*np.eye(d1,d2)
return K
else:
return k + measnoise*theta[2]**2*np.eye(d1,d2)
def kernel3(data1,data2,theta,wantderiv=True,measnoise=1.):
theta = np.squeeze(theta)
# Periodic and a squared exponential
if np.shape(data1)[0] == len(data1):
d1 = np.shape(data1)[0]
n = 1
else:
(d1,n) = np.shape(data1)
d2 = np.shape(data2)[0]
sumxy = np.zeros((d1,d2))
for d in range(n):
D1 = np.transpose([data1[:,d]]) * np.ones((d1,d2))
D2 = [data2[:,d]] * np.ones((d1,d2))
sumxy += (D1-D2)**2
k = theta[0]**2 * np.exp(-sumxy/(2.0*theta[1]**2) - 2.0*np.sin(np.pi*sumxy)**2/(theta[2]**2))
#print k
#print measnoise*theta[2]**2*np.eye(d1,d2)
if wantderiv:
        K = np.zeros((d1,d2,len(theta)+1))
K[:,:,0] = k + measnoise*theta[2]**2*np.eye(d1,d2)
K[:,:,1] = 2.0 *k /theta[0]
K[:,:,2] = k*sumxy/(theta[1]**3)
        K[:,:,3] = -4.0*k*np.sin(np.pi*sumxy)**2/(theta[2]**3)
K[:,:,4] = 2.0*theta[3]*np.eye(d1,d2)
return K
else:
return k + measnoise*theta[2]**2*np.eye(d1,d2)
def kernel2(data1,data2,theta,wantderiv=True,measnoise=1.):
# Uses exp(theta) to ensure positive hyperparams
theta = np.squeeze(theta)
theta = np.exp(theta)
# Squared exponential
if np.ndim(data1) == 1:
d1 = np.shape(data1)[0]
n = 1
data1 = data1*np.ones((d1,1))
data2 = data2*np.ones((np.shape(data2)[0],1))
else:
(d1,n) = np.shape(data1)
d2 = np.shape(data2)[0]
sumxy = np.zeros((d1,d2))
for d in range(n):
D1 = np.transpose([data1[:,d]]) * np.ones((d1,d2))
D2 = [data2[:,d]] * np.ones((d1,d2))
sumxy += (D1-D2)**2*theta[d+1]
k = theta[0] * np.exp(-0.5*sumxy)
#k = theta[0]**2 * np.exp(-sumxy/(2.0*theta[1]**2))
#print k
#print measnoise*theta[2]**2*np.eye(d1,d2)
if wantderiv:
K = np.zeros((d1,d2,len(theta)+1))
K[:,:,0] = k + measnoise*theta[2]*np.eye(d1,d2)
K[:,:,1] = k
K[:,:,2] = -0.5*k*sumxy
K[:,:,3] = theta[2]*np.eye(d1,d2)
return K
else:
return k + measnoise*theta[2]*np.eye(d1,d2)
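# In math terms, kernel2 is a squared-exponential (RBF) kernel with per-dimension
# inverse length scales, parameterised through exp() so the optimiser can search
# an unconstrained space. For 1-D inputs theta = (log signal, log lengthscale, log noise):
#   k(x, x') = e^{theta_0} * exp(-0.5 * e^{theta_1} * (x - x')^2) + e^{theta_2} * I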
def kernel(data1,data2,theta,wantderiv=True,measnoise=1.):
theta = np.squeeze(theta)
# Squared exponential and periodic
if np.shape(data1)[0] == len(data1):
d1 = np.shape(data1)[0]
n = 1
else:
(d1,n) = np.shape(data1)
d2 = np.shape(data2)[0]
sumxy = np.zeros((d1,d2))
for d in range(n):
D1 = np.transpose([data1[:,d]]) * np.ones((d1,d2))
D2 = [data2[:,d]] * np.ones((d1,d2))
sumxy += (D1-D2)
k = theta[0]**2 * np.exp(-sumxy**2/(2.0*theta[1]**2)) + np.exp(-2.*np.sin(theta[2]*np.pi*(sumxy))**2/theta[3]**2)
if wantderiv:
K = np.zeros((d1,d2,len(theta)+1))
K[:,:,0] = k + measnoise*theta[4]**2*np.eye(d1,d2)
K[:,:,1] = 2.0 *k /theta[0]
K[:,:,2] = k*sumxy**2/(theta[1]**3)
K[:,:,3] = -4.0/(theta[3]**2)*np.pi*sumxy*np.sin(theta[2]*np.pi*sumxy)*np.cos(theta[2]*np.pi*sumxy)*np.exp(-2.*np.sin(theta[2]*np.pi*(sumxy))**2/theta[3]**2)
K[:,:,4] = 4.0*np.sin(theta[2]*np.pi*sumxy)**2/(theta[3]**3)*np.exp(-2.*np.sin(theta[2]*np.pi*(sumxy))**2)
K[:,:,5] = 2.0*theta[4]*np.eye(d1,d2)
return K
else:
return k + measnoise*theta[3]**2*np.eye(d1,d2)
def predict(xstar,data,k,t,theta,L=None,beta=None):
    if L is None:
L = np.linalg.cholesky(k)
beta = np.linalg.solve(L.transpose(), np.linalg.solve(L,t))
kstar = kernel2(data,xstar,theta,wantderiv=False,measnoise=0)
f = np.dot(kstar.transpose(), beta)
v = np.linalg.solve(L,kstar)
V = kernel2(xstar,xstar,theta,wantderiv=False,measnoise=0)-np.dot(v.transpose(),v)
#logp = -0.5*np.dot(t.transpose(),beta) - np.sum(np.log(np.diag(L))) - np.shape(data)[0] /2. * np.log(2*np.pi)
return (f,V)
def logPosterior(theta,args):
data,t = args
k = kernel2(data,data,theta,wantderiv=False)
L = np.linalg.cholesky(k)
beta = np.linalg.solve(L.transpose(), np.linalg.solve(L,t))
logp = -0.5*np.dot(t.transpose(),beta) - np.sum(np.log(np.diag(L))) - np.shape(data)[0] /2. * np.log(2*np.pi)
return -logp
def gradLogPosterior(theta,args):
data,t = args
theta = np.squeeze(theta)
d = len(theta)
K = kernel2(data,data,theta,wantderiv=True)
L = np.linalg.cholesky(np.squeeze(K[:,:,0]))
invk = np.linalg.solve(L.transpose(),np.linalg.solve(L,np.eye(np.shape(data)[0])))
dlogpdtheta = np.zeros(d)
for d in range(1,len(theta)+1):
dlogpdtheta[d-1] = 0.5*np.dot(t.transpose(), np.dot(invk, np.dot(np.squeeze(K[:,:,d]), np.dot(invk,t)))) - 0.5*np.trace(np.dot(invk,np.squeeze(K[:,:,d])))
return -dlogpdtheta
def testopt():
theta = np.array([0.5,0.25,0.1]) # GP4
x = np.array([[-3.5, -2.5, -.5, .4, 2.25]]).transpose()
t = 0.55*np.array([[-2., 0., 1., 2., -1.]]).transpose()
args = (x,t)
print theta, -logPosterior(theta,args)
newTheta = so.fmin_cg(logPosterior, theta, fprime=gradLogPosterior, args=[args], gtol=1e-4,maxiter=50,disp=1)
print newTheta, -logPosterior(newTheta,args)
#theta = newTheta
xstar = np.reshape(np.linspace(-5,5,100),(100,1))
k = kernel2(x,x,theta,wantderiv=False)
kstar = [kernel2(x,xs*np.ones((1,1)),theta,wantderiv=False) for xs in xstar]
kstar = np.squeeze(kstar)
kstarstar = [kernel2(xs*np.ones((1,1)),xs*np.ones((1,1)),theta,wantderiv=False) for xs in xstar]
kstarstar = np.squeeze(kstarstar)
#kstarstar = kernel2(xstar,xstar,theta,wantderiv=False)
L = np.linalg.cholesky(k)
invk = np.linalg.solve(L.transpose(),np.linalg.solve(L,np.eye(np.shape(x)[0])))
#invL = np.linalg.inv(L)
#invk = np.dot(invL.T,invL)
mean = np.dot(kstar,np.dot(invk,t))
#print np.shape(kstarstar), np.shape(kstar), np.shape(invk)
var = kstarstar - np.diag(np.dot(kstar,np.dot(invk,kstar.T)))
#print np.shape(var)
#var = kstarstar - np.dot(kstar.transpose(),np.dot(invk,kstar))
var = np.reshape(var,(100,1))
#print mean
pl.figure()
pl.plot(xstar,mean,'-k')
#pl.plot(xstar,mean+2*np.sqrt(var),'x-')
#pl.plot(xstar,mean-2*np.sqrt(var),'x-')
#print np.shape(xstar), np.shape(mean), np.shape(var)
pl.fill_between(np.squeeze(xstar),np.squeeze(mean-2*np.sqrt(var)),np.squeeze(mean+2*np.sqrt(var)),color='0.75')
pl.plot(x,t,'ko')
pl.axis('tight')
pl.xlabel('x')
pl.ylabel('f(x)')
def showpost():
#theta = np.array([0.5,1.,0.0]) # GP1
#theta = np.array([0.5,1.,0.2]) # GP2
#theta = np.array([1.0,1.,0.0]) # GP3
theta = np.array([0.5,0.5,0.0]) # GP4
x = np.array([[-3.5, -2.5, -.5, .4, 2.25]]).transpose()
t = 0.55*np.array([[-2., 0., 1., 2., -1.]]).transpose()
xstar = np.reshape(np.linspace(-5,5,100),(100,1))
k = kernel2(x,x,theta,wantderiv=False)
kstar = [kernel2(x,xs*np.ones((1,1)),theta,wantderiv=False) for xs in xstar]
kstar = np.squeeze(kstar)
kstarstar = [kernel2(xs*np.ones((1,1)),xs*np.ones((1,1)),theta,wantderiv=False) for xs in xstar]
kstarstar = np.squeeze(kstarstar)
#kstarstar = kernel(xstar,xstar,theta,wantderiv=False)
#invk = np.linalg.inv(k)
L = np.linalg.cholesky(k)
invk = np.linalg.solve(L.transpose(),np.linalg.solve(L,np.eye(np.shape(x)[0])))
mean = np.dot(kstar,np.dot(invk,t))
var = kstarstar - np.diag(np.dot(kstar,np.dot(invk,kstar.T)))
var = np.reshape(var,(100,1))
pl.figure()
pl.plot(xstar,mean,'-k')
#pl.plot(xstar,mean+2*np.sqrt(var),'x-')
#pl.plot(xstar,mean-2*np.sqrt(var),'x-')
#print np.shape(xstar), np.shape(
janpipek/chagallpy | setup.py | Python | mit | 782 | 0
#!/usr/bin/env python
from setuptools import setup, find_packages
import chagallpy
setup(
name='chagallpy',
version=chagallpy.__version__,
packages=find_packages(),
license='MIT',
description='CHArming GALLery in PYthon',
long_description_content_type="text/markdown",
long_description=open('README.md').read(),
author='Jan Pipek',
author_email='jan.pipek@gmail.com',
url='https://github.com/janpipek/chagallpy',
    install_requires=['wowp', 'pillow', "jinja2", "pyyaml", "click"],
python_requires="~=3.6",
entry_points={
'console_scripts': [
'chagall = chagallpy:generate'
]
},
include_package_data=True,
package_data={
'resources': ['*.*'],
'templates': ['*.html']
},
)
mallconnectionorg/openerp | DTE/mc_dte_guia/__init__.py | Python | agpl-3.0 | 1,114 | 0
#!/usr/bin/python
# -*- coding: utf-8 -*-
##############################################################################
#
# DTE Chile OpenERP 7
# Copyright (C) 2016 Cesar Lopez Aguillon Mall Connection
# <http://www.mallconnection.org>.
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import model
import report
import controllers
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
kmtoki/qmk_firmware | lib/python/qmk/cli/config.py | Python | gpl-2.0 | 3,720 | 0.002151
"""Read and write configuration settings
"""
from milc import cli
def print_config(section, key):
"""Print a single config setting to stdout.
"""
cli.echo('%s.%s{fg_cyan}={fg_reset}%s', section, key, cli.config[section][key])
def show_config():
"""Print the current configuration to stdout.
"""
for section in cli.config:
for key in cli.config[section]:
print_config(section, key)
def parse_config_token(config_token):
"""Split a user-supplied configuration-token into its components.
"""
section = option = value = None
if '=' in config_token and '.' not in config_token:
cli.log.error('Invalid configuration token, the key must be of the form <section>.<option>: %s', config_token)
return section, option, value
# Separate the key (<section>.<option>) from the value
if '=' in config_token:
key, value = config_token.split('=')
else:
key = config_token
# Extract the section and option from the key
if '.' in key:
section, option = key.split('.', 1)
else:
section = key
return section, option, value
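# For example (pure string handling, no config access):
#   parse_config_token('compile')                 -> ('compile', None, None)
#   parse_config_token('compile.keyboard')        -> ('compile', 'keyboard', None)
#   parse_config_token('compile.keyboard=planck') -> ('compile', 'keyboard', 'planck')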
def set_config(section, option, value):
"""Set a config key in the running config.
"""
log_string = '%s.%s{fg_cyan}:{fg_reset} %s {fg_cyan}->{fg_reset} %s'
if cli.args.read_only:
log_string += ' {fg_red}(change not written)'
cli.echo(log_string, section, option, cli.config[section][option], value)
if not cli.args.read_only:
if value == 'None':
del cli.config[section][option]
else:
cli.config[section][option] = value
@cli.argument('-ro', '--read-only', arg_only=True, action='store_true', help='Operate in read-only mode.')
@cli.argument('configs', nargs='*', arg_only=True, help='Configuration options to read or write.')
@cli.subcommand("Read and write configuration settings.")
def config(cli):
"""Read and write config settings.
This script iterates over the config_tokens supplied as argument. Each config_token has the following form:
section[.key][=value]
If only a section (EG 'compile') is supplied all keys for that section will be displayed.
If section.key is supplied the value for that single key will be displayed.
If section.key=value is supplied the value for that single key will be set.
If section.key=None is supplied the key will be deleted.
No validation is done to ensure that the supplied section.key is actually used by qmk scripts.
"""
if not cli.args.configs:
return show_config()
# Process config_tokens
save_config = False
for argument in cli.args.configs:
# Split on space in case they quoted multiple config tokens
for config_token in argument.split(' '):
section, option, value = parse_config_token(config_token)
# Validation
if option and '.' in option:
cli.log.error('Config keys may not have more than one period! "%s" is not valid.', config_token)
return False
# Do what the user wants
if section and option and value:
# Write a configuration option
set_config(section, option, value)
if not cli.args.read_only:
save_config = True
elif section and option:
# Display a single key
print_config(section, option)
elif section:
# Display an entire section
                for key in cli.config[section]:
print_config(section, key)
# Ending actions
    if save_config:
cli.save_config()
return True
PrestonMonteWest/compmart | commerce/admin.py | Python | apache-2.0 | 1,286 | 0.004666
from django.contrib import admin
from . import models
class OrderItemInline(admin.TabularInline):
fields = ('product', 'quantity')
raw_id_fields = ('product',)
model = models.OrderItem
extra = 1
class ProductAdmin(admin.ModelAdmin):
list_display = ('name', 'price', 'description', 'stock', 'discontinued')
search_fields = ('name', 'description')
list_filter = ('discontinued',)
    fields = ('name', 'description', 'price', 'stock', 'discontinued',
              'image')
class ReviewAdmin(admin.ModelAdmin):
    list_display = ('user', 'product', 'title', 'body', 'rating', 'pub_date')
search_fields = ('title', 'body')
list_filter = ('pub_date', 'rating')
date_hierarchy = 'pub_date'
fields = ('user', 'product', 'title', 'body', 'rating')
raw_id_fields = ('user', 'product')
class OrderAdmin(admin.ModelAdmin):
list_display = ('user', 'street', 'city', 'state', 'total', 'purchase_date')
search_fields = ('street', 'city', 'state')
list_filter = ('purchase_date',)
date_hierarchy = 'purchase_date'
fields = ('street', 'city', 'state', 'zip_code')
inlines = (OrderItemInline,)
admin.site.register(models.Product, ProductAdmin)
admin.site.register(models.Review, ReviewAdmin)
admin.site.register(models.Order, OrderAdmin)
arteria/django-shop-simplenotifications | shop_simplenotifications/__init__.py | Python | unlicense | 222 | 0.009009
# -*- coding: utf-8 -*-
VERSION = (0, 1, 6, 'final')
if VERSION[-1] != "final": # pragma: no cover
__version__ = '.'.join(map(str, VERSION))
else: # pragma: no cover
    __version__ = '.'.join(map(str, VERSION[:-1]))
josenavas/QiiTa | qiita_db/handlers/tests/oauthbase.py | Python | bsd-3-clause | 841 | 0
# -----------------------------------------------------------------------------
# Copyright (c) 2014--, The Qiita Development Team.
#
# Distributed under the terms of the BSD 3-clause License.
#
# The full license is in the file LICENSE, distributed with this software.
# -----------------------------------------------------------------------------
from qiita_core.qiita_settings import r_client
from qiita_pet.test.tornado_test_base import TestHandlerBase
class OauthTestingBase(TestHandlerBase):
def setUp(self):
self.token = 'TESTINGOAUTHSTUFF'
self.header = {'Authorization': 'Bearer ' + self.token}
r_client.hset(self.token, 'timestamp', '12/12/12 12:12:00')
r_client.hset(self.token, 'grant_type', 'client')
r_client.expire(self.token, 20)
super(OauthTestingBase, self).setUp()
barthess/mavlink | pymavlink/tools/magfit_motors.py | Python | lgpl-3.0 | 4,813 | 0.009765
#!/usr/bin/env python
'''
fit best estimate of magnetometer offsets, trying to take into account motor interference
'''
import sys, time, os, math
from argparse import ArgumentParser
parser = ArgumentParser(description=__doc__)
parser.add_argument("--no-timestamps",dest="notimestamps", action='store_true', help="Log doesn't have timestamps")
parser.add_argument("--condition",dest="condition", default=None, help="select packets by condition")
parser.add_argument("--noise", type=float, default=0, help="noise to add")
parser.add_argument("logs", metavar="LOG", nargs="+")
args = parser.parse_args()
from pymavlink import mavutil
from pymavlink.rotmat import Vector3
def noise():
'''a noise vector'''
from random import gauss
v = Vector3(gauss(0, 1), gauss(0, 1), gauss(0, 1))
v.normalize()
    return v * args.noise
def select_data(data):
ret = []
counts = {}
for d in data:
(mag,motor) = d
key = "%u:%u:%u" % (mag.x/20,mag.y/20,mag.z/20)
if key in counts:
counts[key] += 1
else:
counts[key] = 1
if counts[key] < 3:
ret.append(d)
print(len(data), len(ret))
return ret
def radius(d, offsets, motor_ofs):
'''return radius give data point and offsets'''
(mag, motor) = d
return (mag + offsets + motor*motor_ofs).length()
def radius_cmp(a, b, offsets, motor_ofs):
'''return radius give data point and offsets'''
diff = radius(a, offsets, motor_ofs) - radius(b, offsets, motor_ofs)
if diff > 0:
return 1
if diff < 0:
return -1
return 0
def sphere_error(p, data):
from scipy import sqrt
x,y,z,mx,my,mz,r = p
ofs = Vector3(x,y,z)
motor_ofs = Vector3(mx,my,mz)
ret = []
for d in data:
(mag,motor) = d
err = r - radius((mag,motor), ofs, motor_ofs)
ret.append(err)
return ret
def fit_data(data):
import numpy, scipy
from scipy import optimize
p0 = [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]
    p1, ier = optimize.leastsq(sphere_error, p0[:], args=(data,))
if not ier in [1, 2, 3, 4]:
raise RuntimeError("Unable to find solution")
return (Vector3(p1[0], p1[1], p1[2]), Vector3(p1[3], p1[4], p1[5]), p1[6])
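# The model being fitted: every sample should satisfy
#   | mag + offsets + motor * motor_ofs | = field_strength
# i.e. after correcting for fixed offsets and throttle-proportional motor
# interference, all magnetometer readings lie on a sphere of constant radius.
# leastsq minimises the per-sample radius errors returned by sphere_error.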
def magfit(logfile):
'''find best magnetometer offset fit to a log file'''
print("Processing log %s" % filename)
mlog = mavutil.mavlink_connection(filename, notimestamps=args.notimestamps)
data = []
last_t = 0
offsets = Vector3(0,0,0)
motor_ofs = Vector3(0,0,0)
motor = 0.0
# now gather all the data
while True:
m = mlog.recv_match(condition=args.condition)
if m is None:
break
if m.get_type() == "PARAM_VALUE" and m.param_id == "RC3_MIN":
rc3_min = float(m.param_value)
if m.get_type() == "SENSOR_OFFSETS":
# update current offsets
offsets = Vector3(m.mag_ofs_x, m.mag_ofs_y, m.mag_ofs_z)
if m.get_type() == "SERVO_OUTPUT_RAW":
motor_pwm = m.servo1_raw + m.servo2_raw + m.servo3_raw + m.servo4_raw
motor_pwm *= 0.25
rc3_min = mlog.param('RC3_MIN', 1100)
rc3_max = mlog.param('RC3_MAX', 1900)
motor = (motor_pwm - rc3_min) / (rc3_max - rc3_min)
if motor > 1.0:
motor = 1.0
if motor < 0.0:
motor = 0.0
if m.get_type() == "RAW_IMU":
mag = Vector3(m.xmag, m.ymag, m.zmag)
# add data point after subtracting the current offsets
data.append((mag - offsets + noise(), motor))
print("Extracted %u data points" % len(data))
print("Current offsets: %s" % offsets)
data = select_data(data)
# do an initial fit with all data
(offsets, motor_ofs, field_strength) = fit_data(data)
for count in range(3):
# sort the data by the radius
data.sort(lambda a,b : radius_cmp(a,b,offsets,motor_ofs))
print("Fit %u : %s %s field_strength=%6.1f to %6.1f" % (
count, offsets, motor_ofs,
radius(data[0], offsets, motor_ofs), radius(data[-1], offsets, motor_ofs)))
# discard outliers, keep the middle 3/4
data = data[len(data)/8:-len(data)/8]
# fit again
(offsets, motor_ofs, field_strength) = fit_data(data)
print("Final : %s %s field_strength=%6.1f to %6.1f" % (
offsets, motor_ofs,
radius(data[0], offsets, motor_ofs), radius(data[-1], offsets, motor_ofs)))
print "mavgraph.py '%s' 'mag_field(RAW_IMU)' 'mag_field_motors(RAW_IMU,SENSOR_OFFSETS,(%f,%f,%f),SERVO_OUTPUT_RAW,(%f,%f,%f))'" % (
filename,
offsets.x,offsets.y,offsets.z,
motor_ofs.x, motor_ofs.y, motor_ofs.z)
total = 0.0
for filename in args.logs:
magfit(filename)
RCIX/RogueP | logic/menu_main.py | Python | mit | 668 | 0.02994
import libtcodpy as libtcod
from menu import Menu
from menu_manager import MenuStatus
from menu_game import MenuGame
from ui.frame_main_menu import FrameMainMenu
class MenuMain(Menu):
def __init__(self, width, height):
Menu.__init__(self, width, height)
self.menu_frame = FrameMainMenu(width, height)
def update(self, delta):
self.menu_frame.update(delta)
        key = libtcod.console_check_for_keypress(True) #libtcod.console_check_for_keypress
if key.c == ord("a"):
return MenuGame(self.width, self.height)
if key.c == ord("b"):
return MenuStatus.Exit
return MenuStatus.Okay
def draw(self):
#print("drawing MenuMain")
self.menu_frame.draw()
cypreess/django-getpaid | example/paywall/admin.py | Python | mit | 195 | 0
from django.contrib import admin
from django.contrib.admin import ModelAdmin
from paywall.models import PaymentEntry
@admin.register(PaymentEntry)
class PaymentEntryAdmin(ModelAdmin):
pass
ashwinpilgaonkar/Flick | Bluetooth/Bluetooth.py | Python | apache-2.0 | 25,172 | 0.012871
import serial
from sys import platform as platform
import serial.tools.list_ports
import serial.threaded
from pymouse import PyMouse
from Voice.GoogleTTS import speak
import threading
import math
import copy
import time
import json
data_repository_right = {
"id" : [],
"name" : [],
"shortcuts" : [],
"time_period": [],
"0":[], # "max_acc_@R_x" : [],
"1":[], # "max_acc_^R_x": [],
"2":[], # "max_acc_#R_x": [],
"3":[], # "max_acc_$R_x": [],
"4":[], # "max_acc_%R_x": [],
"5":[], # "max_acc_@R_y" : [],
"6":[], # "max_acc_^R_y": [],
"7":[], # "max_acc_#R_y": [],
"8":[], # "max_acc_$R_y": [],
"9":[], # "max_acc_%R_y": [],
"10":[], # "max_acc_@R_z": [],
"11":[], # "max_acc_^R_z": [],
"12":[], # "max_acc_#R_z": [],
"13":[], # "max_acc_$R_z": [],
"14":[], # "max_acc_%R_z": [],
"15":[], # "min_acc_@R_x": [],
"16":[], # "min_acc_^R_x": [],
"17":[], # "min_acc_#R_x": [],
"18":[], # "min_acc_$R_x": [],
"19":[], # "min_acc_%R_x": [],
"20":[], # "min_acc_@R_y": [],
"21":[], # "min_acc_^R_y": [],
"22":[], # "min_acc_#R_y": [],
"23":[], # "min_acc_$R_y": [],
"24":[], #
|
"min_acc_%R_y": [],
"25":[]
|
, # "min_acc_@R_z": [],
"26":[], # "min_acc_^R_z": [],
"27":[], # "min_acc_#R_z": [],
"28":[], # "min_acc_$R_z": [],
"29":[], # "min_acc_%R_z": [],
"30":[], # "start_angle_@R_x":[],
"31":[], # "start_angle_^R_x": [],
"32":[], # "start_angle_#R_x": [],
"33":[], # "start_angle_$R_x": [],
"34":[], # "start_angle_%R_x": [],
"35":[], # "start_angle_@R_y": [],
"36":[], # "start_angle_^R_y": [],
"37":[], # "start_angle_#R_y": [],
"38":[], # "start_angle_$R_y": [],
"39":[], # "start_angle_%R_y": [],
"40":[], # "start_angle_@R_z": [],
"41":[], # "start_angle_^R_z": [],
"42":[], # "start_angle_#R_z": [],
"43":[], # "start_angle_$R_z": [],
"44":[], # "start_angle_%R_z": [],
"45":[], # "end_angle_@R_x": [],
"46":[], # "end_angle_^R_x": [],
"47":[], # "end_angle_#R_x": [],
"48":[], # "end_angle_$R_x": [],
"49":[], # "end_angle_%R_x": [],
"50":[], # "end_angle_@R_y": [],
"51":[], # "end_angle_^R_y": [],
"52":[], # "end_angle_#R_y": [],
"53":[], # "end_angle_$R_y": [],
"54":[], # "end_angle_%R_y": [],
"55":[], # "end_angle_@R_z": [],
"56":[], # "end_angle_^R_z": [],
"57":[], # "end_angle_#R_z": [],
"58":[], # "end_angle_$R_z": [],
"59":[], # "end_angle_%R_z": [],
}
data_repository_left = {
"id": [],
"name": [],
"shortcuts": [],
"time_period": [],
0: [], # "max_acc_@L_x" : [],
1: [], # "max_acc_^L_x": [],
2: [], # "max_acc_#L_x": [],
3: [], # "max_acc_$L_x": [],
4: [], # "max_acc_%L_x": [],
5: [], # "max_acc_@L_y" : [],
6: [], # "max_acc_^L_y": [],
7: [], # "max_acc_#L_y": [],
8: [], # "max_acc_$L_y": [],
9: [], # "max_acc_%L_y": [],
10: [], # "max_acc_@L_z": [],
11: [], # "max_acc_^L_z": [],
12: [], # "max_acc_#L_z": [],
13: [], # "max_acc_$L_z": [],
14: [], # "max_acc_%L_z": [],
15: [], # "min_acc_@L_x": [],
16: [], # "min_acc_^L_x": [],
17: [], # "min_acc_#L_x": [],
18: [], # "min_acc_$L_x": [],
19: [], # "min_acc_%L_x": [],
20: [], # "min_acc_@L_y": [],
21: [], # "min_acc_^L_y": [],
22: [], # "min_acc_#L_y": [],
23: [], # "min_acc_$L_y": [],
24: [], # "min_acc_%L_y": [],
25: [], # "min_acc_@L_z": [],
26: [], # "min_acc_^L_z": [],
27: [], # "min_acc_#L_z": [],
28: [], # "min_acc_$L_z": [],
29: [], # "min_acc_%L_z": [],
30: [], # "start_angle_@L_x":[],
31: [], # "start_angle_^L_x": [],
32: [], # "start_angle_#L_x": [],
33: [], # "start_angle_$L_x": [],
34: [], # "start_angle_%L_x": [],
35: [], # "start_angle_@L_y": [],
36: [], # "start_angle_^L_y": [],
37: [], # "start_angle_#L_y": [],
38: [], # "start_angle_$L_y": [],
39: [], # "start_angle_%L_y": [],
40: [], # "start_angle_@L_z": [],
41: [], # "start_angle_^L_z": [],
42: [], # "start_angle_#L_z": [],
43: [], # "start_angle_$L_z": [],
44: [], # "start_angle_%L_z": [],
45: [], # "end_angle_@L_x": [],
46: [], # "end_angle_^L_x": [],
47: [], # "end_angle_#L_x": [],
48: [], # "end_angle_$L_x": [],
49: [], # "end_angle_%L_x": [],
50: [], # "end_angle_@L_y": [],
51: [], # "end_angle_^L_y": [],
52: [], # "end_angle_#L_y": [],
53: [], # "end_angle_$L_y": [],
54: [], # "end_angle_%L_y": [],
55: [], # "end_angle_@L_z": [],
56: [], # "end_angle_^L_z": [],
57: [], # "end_angle_#L_z": [],
58: [], # "end_angle_$L_z": [],
59: [], # "end_angle_%L_z": [],
}
right_data = {
0: 0, # "acc_@R_x"
1: 0, # "acc_^R_x"
2: 0, # "acc_#R_x"
3: 0, # "acc_$R_x"
4: 0, # "acc_%R_x"
5: 0, # "acc_@R_y"
6: 0, # "acc_^R_y"
7: 0, # "acc_#R_y"
8: 0, # "acc_$R_y"
9: 0, # "acc_%R_y"
10: 0, # "acc_@R_z"
11: 0, # "acc_^R_z"
12: 0, # "acc_#R_z"
13: 0, # "acc_$R_z"
14: 0, # "acc_%R_z"
15: 0, # "angle_@R_x"
16: 0, # "angle_^R_x"
17: 0, # "angle_#R_x"
18: 0, # "angle_$R_x"
19: 0, # "angle_%R_x"
20: 0, # "angle_@R_y"
21: 0, # "angle_^R_y"
22: 0, # "angle_#R_y"
23: 0, # "angle_$R_y"
24: 0, # "angle_%R_y"
25: 0, # "angle_@R_z"
26: 0, # "angle_^R_z"
27: 0, # "angle_#R_z"
28: 0, # "angle_$R_z"
29: 0 # "angle_%R_z"
}
left_data = {
0: 0, # "acc_@L_x"
1: 0, # "acc_^L_x"
2: 0, # "acc_#L_x"
3: 0, # "acc_$L_x"
4: 0, # "acc_%L_x"
5: 0, # "acc_@L_y"
6: 0, # "acc_^L_y"
7: 0, # "acc_#L_y"
8: 0, # "acc_$L_y"
9: 0, # "acc_%L_y"
10: 0, # "acc_@L_z"
11: 0, # "acc_^L_z"
12: 0, # "acc_#L_z"
13: 0, # "acc_$L_z"
14: 0, # "acc_%L_z"
15: 0, # "angle_@L_x"
16: 0, # "angle_^L_x"
17: 0, # "angle_#L_x"
18: 0, # "angle_$L_x"
19: 0, # "angle_%L_x"
20: 0, # "angle_@L_y"
21: 0, # "angle_^L_y"
22: 0, # "angle_#L_y"
23: 0, # "angle_$L_y"
24: 0, # "angle_%L_y"
25: 0, # "angle_@L_z"
26: 0, # "angle_^L_z"
27: 0, # "angle_#L_z"
28: 0, # "angle_$L_z"
29: 0 # "angle_%L_z"
}
pre_right_data = copy.deepcopy(right_data)
pre_left_data = copy.deepcopy(left_data)
average_right_data = copy.deepcopy(right_data)
movement_Sensitivity_x = 2
movement_Sensitivity_y = 2
movement_Sensitivity_z = 2
threshold_movement_Sensitivity = 14000
recognition_Gap_Interval = 200
initial_Gap_Interval = 200
angle_tolerance = 5
acc_tolerance = 0.5
def get_OS_Right():
port = "/dev/tty.Right-DevB"
# LINUX
if platform == "linux" or platform == "linux2":
port = "/dev/tty.Right-DevB"
# MAC OS
elif platform == "darwin":
port = "/dev/tty.Right-DevB"
# WINDOWS
elif platform == "win32":
port = "COM4"
return port
def get_OS_Left():
port = "/dev/tty.LEFT-DevB"
# LINUX
if platform == "linux" or platform == "linux2":
port = "/dev/tty.LEFT-DevB"
# MAC OS
elif platform == "darwin":
port = "/dev/tty.LEFT-DevB"
# WINDOWS
elif platform == "win32":
port = "COM4"
return port
def bluetooth(serRight, serLeft, recognitionFlag=0):
global pre_right_data
    global pre_left_data
|
brechin/pyrollout
|
pyrollout/rollout.py
|
Python
|
mit
| 2,301
| 0.002608
|
import logging
# noinspection PyUnresolvedReferences
import feature #noqa
logging.basicConfig(level=logging.DEBUG)
class Rollout(object):
__version__ = '0.3.5'
def __init__(self, feature_storage=None, user_storage=None, undefined_feature_access=False):
"""
Manage feature flags for groups, users, or on a percentage basis. Use your own
user models and persistence with replaceable modules.
:param feature_storage: Object to manage storage of feature definitions
:type feature_storage: pyrollout.storage.FeatureStorageManager
:param user_storage: Object to manage storage of users
:type user_storage: pyrollout.storage.UserStorageManager
        :param undefined_feature_access: Should undefined features be allowed (True) or denied (False) access? Defaults to False.
:type undefined_feature_access: bool
"""
if feature_storage is None:
from storage.memory import MemoryFeatureStorage
self.feature_storage = MemoryFeatureStorage()
else:
self.feature_storage = feature_storage
if user_storage is None:
from storage.memory import MemoryUserStorage
self.user_storage = MemoryUserStorage()
else:
self.user_storage = user_storage
self.default_undefined_feature = undefined_feature_access
def add_feature(self, feature):
"""
Add a feature to be handled by this instance
:param feature: New feature to add
:type feature: pyrollout.feature.Feature
"""
self.feature_storage.set_feature_config(feature.name, feature_data=feature)
    def can(self, user, feature_name):
        """
        Check whether user has access to the given feature.
:param user: User object to check, must be compatible with user storage manager in use
:type user: dict or object
:param feature_name: Name of feature to check against
:type feature_name: basestring
:return: True if user has access, False otherwise
:rtype: bool
"""
feature = self.feature_storage.get_feature_config(feature_name)
if feature is None:
return self.default_undefined_feature
return feature.can(self.user_storage, user)
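# A minimal usage sketch (editor's illustration, not part of pyrollout). It
# uses only the methods shown above; that an unknown feature name makes the
# default memory backend return None is an assumption on my part.
if __name__ == '__main__':
    rollout = Rollout()  # falls back to in-memory feature/user storage
    # 'new_dashboard' was never added via add_feature(), so access falls back
    # to undefined_feature_access, which defaults to False:
    print(rollout.can({'id': 1}, 'new_dashboard'))  # expected: False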
|
logithr/djangocms-widgetbox
|
widgetbox/migrations/0011_auto_20150731_0958.py
|
Python
|
mit
| 447
| 0
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('widgetbox', '0010_html'),
]
operations = [
migrations.AddField(
model_name='button',
name='extra_css_classes',
field=models.CharField(max_length=200, blank=True),
preserve_default=True,
),
]
|
ushatil/wellness-tracker
|
ws/manage.py
|
Python
|
mit
| 253
| 0
|
#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "wellspring.settings")
from django.core.management import execute_from_command_line
    execute_from_command_line(sys.argv)
|
bkosciow/gfxlcd
|
gfxlcd/tests/test_ssd1306.py
|
Python
|
mit
| 464
| 0.006466
|
import sys
sys.path.append("../../")
from unittest.mock import patch, MagicMock
MockRPi = MagicMock()
MockSpidev = MagicMock()
modules = {
"RPi": MockRPi,
"RPi.GPIO": MockRPi.GPIO,
"spidev": MockSpidev
}
patcher = patch.dict("sys.modules", modules)
patcher.start()
from gfxlcd.driver.ssd1306.spi import SPI
from gfxlcd.driver.ssd1306.ssd1306 import SSD1306
class TestSSD1306(object):
def test_initialize(self):
SSD1306(128, 64, SPI())
|
NIASC/VirusMeta
|
diginorm_module/abundance-dist.py
|
Python
|
gpl-3.0
| 6,918
| 0
|
#! /usr/bin/env python
# This file is part of khmer, https://github.com/dib-lab/khmer/, and is
# Copyright (C) 2010-2015, Michigan State University.
# Copyright (C) 2015, The Regents of the University of California.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
#
# * Neither the name of the Michigan State University nor the names
# of its contributors may be used to endorse or promote products
# derived from this software without specific prior written
# permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Contact: khmer-project@idyll.org
# pylint: disable=missing-docstring
"""
Produce the k-mer abundance distribution for the given file.
% python scripts/abundance-dist.py [ -z -s ] <htname> <data> <histout>
Use '-h' for parameter help.
"""
from __future__ import print_function
import sys
import csv
import khmer
import argparse
import textwrap
import os
from khmer import __version__
from khmer.kfile import check_input_files
from khmer.khmer_args import (info, sanitize_help, ComboFormatter,
_VersionStdErrAction)
def get_parser():
epilog = """\
Example::
load-into-countgraph.py -x 1e7 -N 2 -k 17 counts \\
tests/test-data/test-abund-read-2.fa
abundance-dist.py counts tests/test-data/test-abund-read-2.fa test-dist
"""
parser = argparse.ArgumentParser(
description="Calculate abundance distribution of the k-mers in "
"the sequence file using a pre-made k-mer countgraph.",
formatter_class=ComboFormatter, epilog=textwrap.dedent(epilog))
parser.add_argument('input_count_graph_filename', help='The name of the'
' input k-mer countgraph file.')
parser.add_argument('input_sequence_filename', help='The name of the input'
' FAST[AQ] sequence file.')
parser.add_argument('output_histogram_filename', help='The columns are: '
'(1) k-mer abundance, (2) k-mer count, (3) cumulative '
'count, (4) fraction of total distinct k-mers.')
parser.add_argument('-z', '--no-zero', dest='output_zero', default=True,
action='store_false',
help='Do not output zero-count bins')
parser.add_argument('-s', '--squash', dest='squash_output', default=False,
action='store_true',
help='Overwrite existing output_histogram_filename')
parser.add_argument('-b', '--no-bigcount', dest='bigcount', default=True,
action='store_false',
help='Do not count k-mers past 255')
parser.add_argument('--version', action=_VersionStdErrAction,
version='khmer {v}'.format(v=__version__))
parser.add_argument('-f', '--force', default=False, action='store_true',
help='Continue even if specified input files '
'do not exist or are empty.')
return parser
def main():
info('abundance-dist.py', ['counting'])
args = sanitize_help(get_parser()).parse_args()
infiles = [args.input_count_graph_filename,
args.input_sequence_filename]
for infile in infiles:
check_input_files(infile, False)
print('Counting graph from', args.input_count_graph_filename,
file=sys.stderr)
    countgraph = khmer.load_countgraph(
        args.input_count_graph_filename)
    if not countgraph.get_use_bigcount() and args.bigcount:
print("WARNING: The loaded graph has bigcount DISABLED while bigcount"
" reporting is ENABLED--counts higher than 255 will not be "
"reported.",
file=sys.stderr)
countgraph.set_use_bigcount(args.bigcount)
kmer_size = countgraph.ksize()
hashsizes = countgraph.hashsizes()
tracking = khmer._Nodegraph( # pylint: disable=protected-access
kmer_size, hashsizes)
print('K:', kmer_size, file=sys.stderr)
print('outputting to', args.output_histogram_filename, file=sys.stderr)
if args.output_histogram_filename in ('-', '/dev/stdout'):
pass
elif os.path.exists(args.output_histogram_filename):
if not args.squash_output:
print('ERROR: %s exists; not squashing.' %
args.output_histogram_filename,
file=sys.stderr)
sys.exit(1)
print('** squashing existing file %s' %
args.output_histogram_filename, file=sys.stderr)
print('preparing hist...', file=sys.stderr)
abundances = countgraph.abundance_distribution(
args.input_sequence_filename, tracking)
total = sum(abundances)
if 0 == total:
print("ERROR: abundance distribution is uniformly zero; "
"nothing to report.", file=sys.stderr)
print("\tPlease verify that the input files are valid.",
file=sys.stderr)
sys.exit(1)
if args.output_histogram_filename in ('-', '/dev/stdout'):
countgraph_fp = sys.stdout
else:
countgraph_fp = open(args.output_histogram_filename, 'w')
countgraph_fp_csv = csv.writer(countgraph_fp)
# write headers:
countgraph_fp_csv.writerow(['abundance', 'count', 'cumulative',
'cumulative_fraction'])
sofar = 0
    for abundance, count in enumerate(abundances):
        if count == 0 and not args.output_zero:
            continue
        sofar += count
        frac = sofar / float(total)
        countgraph_fp_csv.writerow([abundance, count, sofar, round(frac, 3)])
if sofar == total:
break
if __name__ == '__main__':
main()
# vim: set ft=python ts=4 sts=4 sw=4 et tw=79:
|
nijel/weblate
|
weblate/trans/migrations/0094_project_language_aliases.py
|
Python
|
gpl-3.0
| 752
| 0.00266
|
# Generated by Django 3.0.7 on 2020-08-03 08:42
from django.db import migrations, models
import weblate.utils.validators
class Migration(migrations.Migration):
dependencies = [
("trans", "0093_auto_20200730_1432"),
]
operations = [
migrations.AddField(
model_name="project",
name="language_aliases",
field=models.CharField(
default="",
blank=True,
help_text="Comma-separated list of language code mappings, for example: en_GB:en,en_US:en",
max_length=200,
                validators=[weblate.utils.validators.validate_language_aliases],
verbose_name="Language aliases",
),
),
]
|
mozilla/moztrap
|
tests/runner.py
|
Python
|
bsd-2-clause
| 1,224
| 0.001634
|
from django.conf import settings
from django.test import TestCase, TransactionTestCase
from django.test.simple import DjangoTestSuiteRunner
from django.test.runner import reorder_suite
from django.utils.importlib import import_module
from django.utils.unittest.loader import defaultTestLoader
class DiscoveryDjangoTestSuiteRunner(DjangoTestSuiteRunner):
    def build_suite(self, test_labels, extra_tests=None, **kwargs):
suite = None
discovery_root = settings.TEST_DISCOVERY_ROOT
if test_labels:
suite = defaultTestLoader.loadTestsFromNames(test_labels)
# if single named module has no tests, do discovery within it
if not suite.countTestCases() and len(test_labels) == 1:
suite = None
discovery_root = import_module(test_labels[0]).__path__[0]
if suite is None:
suite = defaultTestLoader.discover(
discovery_root,
top_level_dir=settings.BASE_PATH,
)
if extra_tests:
for test in extra_tests:
suite.addTest(test)
return reorder_suite(suite, (
TestCase,
TransactionTestCase,
))
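# A minimal settings sketch for wiring this runner up (editor's illustration;
# the setting names are the ones referenced above, the values are assumptions):
#
#   import os
#   BASE_PATH = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
#   TEST_DISCOVERY_ROOT = os.path.join(BASE_PATH, "tests")
#   TEST_RUNNER = "tests.runner.DiscoveryDjangoTestSuiteRunner"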
|
AJBrodie/inflowProject
|
pyKratos/newton_raphson_strategy.py
|
Python
|
bsd-2-clause
| 6,317
| 0.015197
|
# ----------------------------------------------------------------------
# author : Martin Ruchti
# contact : martin.ruchti@tum.de
# ----------------------------------------------------------------------
from __future__ import print_function, absolute_import, division
from numpy import *
from .variables import *
'''
this scheme is an implementation of the newton raphson scheme
the scheme is created as a modified version of 'static_scheme.py' and follows the implementation of the
residual based newton raphson strategy in KratosMultiphysics
'''
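# Editor's sketch (not used by the class below): Solve() is the n-dimensional
# analogue of this scalar Newton iteration, where dividing by df(x) plays the
# role of solving the linear system A*dx = b for the update dx.
def _newton_scalar_demo(f, df, x, eps=1e-10, max_it=20):
    for _ in range(max_it):
        dx = -f(x) / df(x)   # 1x1 version of the linear solve
        x += dx
        if abs(dx) <= eps:   # same absolute criterion as self.epsilon below
            break
    return x
# e.g. _newton_scalar_demo(lambda x: x*x - 2.0, lambda x: 2.0*x, 1.0) ~ sqrt(2)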
class NewtonRaphsonStrategy:
def __init__(self, model_part, scheme, builder_and_solver, max_iteration, epsAbs, ratioRel = 1e-10):
self.model_part = model_part
self.scheme = scheme
self.builder_and_solver = builder_and_solver
#self.adjoint_builder_and_solver = adjointbuilder_and_solv
self.max_iteration_number = max_iteration
self.epsilon = epsAbs
self.relativeRatio = ratioRel
#allocate matrices
self.A = zeros((0, 0))
self.b = zeros((0))
self.dx = zeros((0))
#file
self.file = 0
def Initialize(self):
# find list of dofs
self.builder_and_solver.SetupDofSet()
# allocate memory for the system
self.A, self.x, self.b = self.builder_and_solver.SetupSystem(
self.A, self.dx, self.b)
# def Solve(self): Removed to allow for recording of number of iterations if desired
def Solve(self,RecordIteration = 0):
print("=================================================================")
print("start solving process...")
self.Initialize()
# do prediction once
self.scheme.Predict()
# initialize parameters for NR - strategy
iteration_number = 1
is_converged = False
error_L2_norm = 0.0
init_L2_norm = 0.0
ratio = 1.0
dragLift = [0,0]
# first solve
self.A, self.dx, self.b = self.builder_and_solver.BuildAndSolve(self.A, self.x, self.b)
# check for convergence
error_L2_norm = 0.0
for i in range(0,len(self.dx)):
error_L2_norm += (self.dx[i])**2
#scale error with number of nodes
error_L2_norm = sqrt(error_L2_norm)/len(self.dx)
init_L2_norm = error_L2_norm
if( error_L2_norm <= self.epsilon ):
is_converged = True
print("coverged after step: ",iteration_number)
print("error is: ",error_L2_norm)
#self.builder_and_solver.ComputeReactions(self.A, self.x, self.b, dragLift)
else:
print("not converged, error is: ",error_L2_norm)
print("ratio is: ", 1)
print("-----------------------------------------------------------------")
        # call scheme to do update
self.scheme.Update(self.builder_and_solver.dofset, self.dx)
#iterate if not converged
while(not is_converged and iteration_number < self.max_iteration_number):
# do build and solve
self.A, self.dx, self.b = self.builder_and_solver.BuildAndSolve(self.A, self.x, self.b)
            # call scheme to do update
self.scheme.Update(self.builder_and_solver.dofset, self.dx)
#check for convergence
error_L2_norm = 0.0
for i in range(0,len(self.dx)):
error_L2_norm += (self.dx[i])**2
#scale error with number of nodes
error_L2_norm = sqrt(error_L2_norm)/len(self.dx)
#compute relative error
ratio = error_L2_norm / init_L2_norm
if( error_L2_norm <= self.epsilon or ratio <= self.relativeRatio ):
is_converged = True
else:
print("not converged, error is: ",error_L2_norm)
print("ratio is: ",ratio)
print("-----------------------------------------------------------------")
iteration_number += 1
# New lines to return iteration number if set in input
if(RecordIteration==1):
return iteration_number
# if(iteration_number == self.max_iteration_number):
# print("*********maximum iterations reached*********")
# print("error is: ",error_L2_norm)
# print("ratio is: ",ratio)
# self.builder_and_solver.ComputeReactions(self.A, self.x, self.b, dragLift)
# print("solving process done!")
# print("=================================================================")
# elif(iteration_number > 1):
# print("coverged after step: ",iteration_number)
# print("error is: ",error_L2_norm)
# print("ratio is: ",ratio)
# self.builder_and_solver.ComputeReactions(self.A, self.x, self.b, dragLift)
# print("solving process done!")
# print("=================================================================")
#
# if(self.file != 0):
# self.WriteDragforceToFile(dragLift)
def SpyMatrix(self):
try:
import matplotlib.pylab as pl
pl.spy(self.A)
pl.show()
except:
raise Exception(
"error in function Spy. Probably matplotlib not installed")
def WriteDragforceToFile(self, dragLift):
#time = self.model_part.ProcessInfo[TIME]
#output 20 digits
#output = str(time) + " " + str.format("{0:.20f}", dragLift[0]) + " " + str.format("{0:.20f}", dragLift[1]) + "\n"
#self.WriteToFile(output)
return
#file operations
def OpenFile(self, filename):
self.file = open(filename, "w")
def WriteToFile(self, data):
self.file.write(data)
self.file.flush()
def CloseFile(self):
self.file.close()
|
gem/geonode
|
geonode/layers/views.py
|
Python
|
gpl-3.0
| 29,767
| 0.000571
|
# -*- coding: utf-8 -*-
#########################################################################
#
# Copyright (C) 2016 OSGeo
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#########################################################################
import os
import sys
import logging
import shutil
import traceback
import uuid
import decimal
from guardian.shortcuts import get_perms
from django.contrib import messages
from django.contrib.auth.decorators import login_required
from django.core.urlresolvers import reverse
from django.http import HttpResponse, HttpResponseRedirect
from django.shortcuts import render_to_response
from django.conf import settings
from django.template import RequestContext
from django.utils.translation import ugettext as _
try:
import json
except ImportError:
from django.utils import simplejson as json
from django.utils.html import escape
from django.template.defaultfilters import slugify
from django.forms.models import inlineformset_factory
from django.db import transaction
from django.db.models import F
from django.forms.util import ErrorList
from geonode.tasks.deletion import delete_layer
from geonode.services.models import Service
from geonode.layers.forms import LayerForm, LayerUploadForm, NewLayerUploadForm, LayerAttributeForm
from geonode.base.forms import CategoryForm
from geonode.layers.models import Layer, Attribute, UploadSession
from geonode.base.enumerations import CHARSETS
from geonode.base.models import TopicCategory
from geonode.utils import default_map_config
from geonode.utils import GXPLayer
from geonode.utils import GXPMap
from geonode.layers.utils import file_upload, is_raster, is_vector
from geonode.utils import resolve_object, llbbox_to_mercator
from geonode.people.forms import ProfileForm, PocForm
from geonode.security.views import _perms_info_json
from geonode.documents.models import get_related_documents
from geonode.utils import build_social_links
from geonode.geoserver.helpers import cascading_delete, gs_catalog
from geonode.geoserver.helpers import ogc_server_settings
if 'geonode.geoserver' in settings.INSTALLED_APPS:
from geonode.geoserver.helpers import _render_thumbnail
CONTEXT_LOG_FILE = ogc_server_settings.LOG_FILE
logger = logging.getLogger("geonode.layers.views")
DEFAULT_SEARCH_BATCH_SIZE = 10
MAX_SEARCH_BATCH_SIZE = 25
GENERIC_UPLOAD_ERROR = _("There was an error while attempting to upload your data. \
Please try again, or contact an administrator if the problem continues.")
METADATA_UPLOADED_PRESERVE_ERROR = _("Note: this layer's original metadata was \
populated and preserved by importing a metadata XML file. This metadata cannot be edited.")
_PERMISSION_MSG_DELETE = _("You are not permitted to delete this layer")
_PERMISSION_MSG_GENERIC = _('You do not have permissions for this layer.')
_PERMISSION_MSG_MODIFY = _("You are not permitted to modify this layer")
_PERMISSION_MSG_METADATA = _(
"You are not permitted to modify this layer's metadata")
_PERMISSION_MSG_VIEW = _("You are not permitted to view this layer")
def log_snippet(log_file):
if not os.path.isfile(log_file):
return "No log file at %s" % log_file
with open(log_file, "r") as f:
f.seek(0, 2) # Seek @ EOF
fsize = f.tell() # Get Size
f.seek(max(fsize - 10024, 0), 0) # Set pos @ last n chars
return f.read()
def _resolve_layer(request, typename, permission='base.view_resourcebase',
msg=_PERMISSION_MSG_GENERIC, **kwargs):
"""
Resolve the layer by the provided typename (which may include service name) and check the optional permission.
"""
service_typename = typename.split(":", 1)
if Service.objects.filter(name=service_typename[0]).exists():
service = Service.objects.filter(name=service_typename[0])
return resolve_object(request,
Layer,
{'typename': service_typename[1]
if service[0].method != "C" else typename},
permission=permission,
permission_msg=msg,
**kwargs)
else:
        return resolve_object(request,
Layer,
{'typename': typename},
permission=permission,
permission_msg=msg,
**kwargs)
# Basic Layer Views #
@login_required
def layer_upload(request, template='upload/layer_upload.html'):
if request.method == 'GET':
        mosaics = Layer.objects.filter(is_mosaic=True).order_by('name')
ctx = {
'mosaics': mosaics,
'charsets': CHARSETS,
'is_layer': True,
}
return render_to_response(template, RequestContext(request, ctx))
elif request.method == 'POST':
form = NewLayerUploadForm(request.POST, request.FILES)
tempdir = None
errormsgs = []
out = {'success': False}
if form.is_valid():
title = form.cleaned_data["layer_title"]
# Replace dots in filename - GeoServer REST API upload bug
# and avoid any other invalid characters.
# Use the title if possible, otherwise default to the filename
if title is not None and len(title) > 0:
name_base = title
else:
name_base, __ = os.path.splitext(
form.cleaned_data["base_file"].name)
name = slugify(name_base.replace(".", "_"))
try:
# Moved this inside the try/except block because it can raise
# exceptions when unicode characters are present.
# This should be followed up in upstream Django.
tempdir, base_file = form.write_files()
saved_layer = file_upload(
base_file,
name=name,
user=request.user,
overwrite=False,
charset=form.cleaned_data["charset"],
abstract=form.cleaned_data["abstract"],
title=form.cleaned_data["layer_title"],
metadata_uploaded_preserve=form.cleaned_data["metadata_uploaded_preserve"]
)
except Exception as e:
exception_type, error, tb = sys.exc_info()
logger.exception(e)
out['success'] = False
out['errors'] = str(error)
# Assign the error message to the latest UploadSession from
# that user.
latest_uploads = UploadSession.objects.filter(
user=request.user).order_by('-date')
if latest_uploads.count() > 0:
upload_session = latest_uploads[0]
upload_session.error = str(error)
upload_session.traceback = traceback.format_exc(tb)
upload_session.context = log_snippet(CONTEXT_LOG_FILE)
upload_session.save()
out['traceback'] = upload_session.traceback
out['context'] = upload_session.context
out['upload_session'] = upload_session.id
else:
out['success'] = True
if hasattr(saved_layer, 'info'):
out['info'] = saved_layer.info
out['url'] = reverse(
'layer_detail', args=[
saved_layer.service_ty
|
sdh11/gnuradio
|
gr-filter/python/filter/design/filter_design.py
|
Python
|
gpl-3.0
| 91,770
| 0.004914
|
# Copyright 2012 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# SPDX-License-Identifier: GPL-3.0-or-later
#
#
import sys
import os
import re
import csv
import copy
import warnings
from optparse import OptionParser
from gnuradio import filter, fft
try:
import numpy as np
except ImportError:
    raise SystemExit('Please install NumPy to run this script (https://www.numpy.org/)')
try:
import numpy.fft as fft_detail
except ImportError:
raise SystemExit('Could not import fft implementation of numpy')
try:
from numpy import poly1d
except ImportError:
    raise SystemExit('Please install NumPy to run this script (https://www.numpy.org)')
try:
from scipy import signal
except ImportError:
raise SystemExit('Please install SciPy to run this script (https://www.scipy.org)')
try:
from PyQt5 import Qt, QtCore, QtWidgets
except ImportError:
raise SystemExit('Please install PyQt5 to run this script (https://www.riverbankcomputing.com/software/pyqt/download5)')
try:
import pyqtgraph as pg
except ImportError:
raise SystemExit('Please install pyqtgraph to run this script (http://www.pyqtgraph.org)')
try:
from gnuradio.filter.pyqt_filter_stacked import Ui_MainWindow
except ImportError:
raise SystemExit('Could not import from pyqt_filter_stacked. Please build with "pyuic5 pyqt_filter_stacked.ui -o pyqt_filter_stacked.py"')
try:
from gnuradio.filter.banditems import *
except ImportError:
raise SystemExit('Could not import from banditems. Please check whether banditems.py is in the library path')
try:
from gnuradio.filter.polezero_plot import *
except ImportError:
raise SystemExit('Could not import from polezero_plot. Please check whether polezero_plot.py is in the library path')
# Behavior is not quite working on 3.8 - TODO
# try:
# from gnuradio.filter.idealbanditems import *
# except ImportError:
# raise SystemExit('Could not import from idealbanditems. Please check whether idealbanditems.py is in the library path')
try:
from gnuradio.filter.api_object import *
except ImportError:
raise SystemExit('Could not import from api_object. Please check whether api_object.py is in the library path')
try:
from gnuradio.filter.fir_design import *
except ImportError:
raise SystemExit('Could not import from fir_design. Please check whether fir_design.py is in the library path')
try:
_fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
def _fromUtf8(s): return s
# Gnuradio Filter design tool main window
class gr_plot_filter(QtWidgets.QMainWindow):
def __init__(self, options, callback=None, restype=""):
        QtWidgets.QWidget.__init__(self, None)
self.gui = Ui_MainWindow()
self.callback = callback
# Set Global pyqtgraph options
pg.setConfigOption('foreground', 'k') # Default foreground color for text, lines, axes, etc.
pg.setConfigOption('background', None) # Default background for GraphicsView.
pg.setConfigOptions(antialias=True) # Draw lines with smooth edges at the cost of reduced performance.
self.gui.setupUi(self)
# Remove other filter combobox entry if some restriction is specified.
if restype == "iir":
ind = self.gui.fselectComboBox.findText("FIR")
if ind != -1:
self.gui.fselectComboBox.removeItem(ind)
elif restype == "fir":
ind = self.gui.fselectComboBox.findText("IIR(scipy)")
if ind != -1:
self.gui.fselectComboBox.removeItem(ind)
self.gui.action_save.triggered.connect(self.action_save_dialog)
self.gui.action_save.setEnabled(False)
self.gui.action_open.triggered.connect(self.action_open_dialog)
self.gui.filterTypeComboBox.currentIndexChanged['const QString&'].connect(self.changed_filter_type)
self.gui.iirfilterBandComboBox.currentIndexChanged['const QString&'].connect(self.changed_iirfilter_band)
self.gui.filterDesignTypeComboBox.currentIndexChanged['const QString&'].connect(self.changed_filter_design_type)
self.gui.fselectComboBox.currentIndexChanged['const QString&'].connect(self.changed_fselect)
self.gui.iirfilterTypeComboBox.currentIndexChanged['const QString&'].connect(self.set_order)
self.gui.designButton.released.connect(self.design)
# self.gui.tabGroup.currentChanged['int'].connect(self.tab_changed)
self.gui.nfftEdit.textEdited['QString'].connect(self.nfft_edit_changed)
self.gui.actionQuick_Access.triggered.connect(self.action_quick_access)
self.gui.actionSpec_Widget.triggered.connect(self.action_spec_widget)
self.gui.actionResponse_Widget.triggered.connect(self.action_response_widget)
self.gui.actionDesign_Widget.triggered.connect(self.action_design_widget)
self.gui.actionMagnitude_Response.triggered.connect(self.set_actmagresponse)
self.gui.actionGrid_2.triggered.connect(self.set_actgrid)
self.gui.actionPhase_Respone.triggered.connect(self.set_actphase)
self.gui.actionGroup_Delay.triggered.connect(self.set_actgdelay)
self.gui.actionFilter_Coefficients.triggered.connect(self.set_actfcoeff)
self.gui.actionBand_Diagram.triggered.connect(self.set_actband)
# self.gui.actionIdeal_Band.triggered.connect(self.set_drawideal)
self.gui.actionPole_Zero_Plot_2.triggered.connect(self.set_actpzplot)
self.gui.actionGridview.triggered.connect(self.set_switchview)
self.gui.actionPlot_select.triggered.connect(self.set_plotselect)
self.gui.actionPhase_Delay.triggered.connect(self.set_actpdelay)
self.gui.actionImpulse_Response.triggered.connect(self.set_actimpres)
self.gui.actionStep_Response.triggered.connect(self.set_actstepres)
self.gui.mfmagPush.released.connect(self.set_mfmagresponse)
self.gui.mfphasePush.released.connect(self.set_mfphaseresponse)
self.gui.mfgpdlyPush.released.connect(self.set_mfgroupdelay)
self.gui.mfphdlyPush.released.connect(self.set_mfphasedelay)
self.gui.mfoverlayPush.clicked.connect(self.set_mfoverlay)
self.gui.conjPush.clicked.connect(self.set_conj)
self.gui.mconjPush.clicked.connect(self.set_mconj)
self.gui.addzeroPush.clicked.connect(self.set_zeroadd)
self.gui.maddzeroPush.clicked.connect(self.set_mzeroadd)
self.gui.addpolePush.clicked.connect(self.set_poleadd)
self.gui.maddpolePush.clicked.connect(self.set_mpoleadd)
self.gui.delPush.clicked.connect(self.set_delpz)
self.gui.mdelPush.clicked.connect(self.set_mdelpz)
self.gui.mttapsPush.clicked.connect(self.set_mttaps)
self.gui.mtstepPush.clicked.connect(self.set_mtstep)
self.gui.mtimpPush.clicked.connect(self.set_mtimpulse)
self.gui.checkKeepcur.stateChanged['int'].connect(self.set_bufferplots)
self.gui.checkGrid.stateChanged['int'].connect(self.set_grid)
self.gui.checkMagres.stateChanged['int'].connect(self.set_magresponse)
self.gui.checkGdelay.stateChanged['int'].connect(self.set_gdelay)
self.gui.checkPhase.stateChanged['int'].connect(self.set_phase)
self.gui.checkFcoeff.stateChanged['int'].connect(self.set_fcoeff)
self.gui.checkBand.stateChanged['int'].connect(self.set_band)
self.gui.checkPzplot.stateChanged['int'].connect(self.set_pzplot)
self.gui.checkPdelay.stateChanged['int'].connect(self.set_pdelay)
self.gui.checkImpulse.stateChanged['int'].connect(self.set_impres)
self.gui.checkStep.stateChanged['int'].connect(self.set_stepres)
self.gridenable = False
self.mfoverlay = False
self.mtoverlay = False
self.iir = False
self.mfmagresponse = True
self.mfphaseresponse = False
self.mfgroupdelay = False
self.mfphasedelay = False
self.mttaps = True
self.mtstep = False
self.mtimpulse = False
self.gui.designButton.setShortcut(QtCore.Qt.Key_Return)
self.taps = []
self.a = []
|
gmimano/commcaretest
|
corehq/apps/accounting/urls.py
|
Python
|
bsd-3-clause
| 1,305
| 0.007663
|
from django.conf.urls.defaults import *
from corehq import AccountingAdminInterfaceDispatcher
from corehq.apps.accounting.views import *
urlpatterns = patterns('corehq.apps.accounting.views',
url(r'^$', 'accounting_default', name='accounting_default'),
url(r'^accounts/(\d+)/$', ManageBillingAccountView.as_view(), name=ManageBillingAccountView.urlname),
    url(r'^accounts/new/$', NewBillingAccountView.as_view(), name=NewBillingAccountView.urlname),
url(r'^subscriptions/(\d+)/$', EditSubscriptionView.as_view(), name=EditSubscriptionView.urlname),
    url(r'^accounts/new_subscription/$', NewSubscriptionViewNoDefaultDomain.as_view(),
name=NewSubscriptionViewNoDefaultDomain.urlname),
url(r'^accounts/new_subscription/(\d+)/$', NewSubscriptionView.as_view(), name=NewSubscriptionView.urlname),
url(r'^software_plans/new/$', NewSoftwarePlanView.as_view(), name=NewSoftwarePlanView.urlname),
url(r'^software_plans/(\d+)/$', EditSoftwarePlanView.as_view(), name=EditSoftwarePlanView.urlname),
url(AccountingAdminInterfaceDispatcher.pattern(), AccountingAdminInterfaceDispatcher.as_view(),
name=AccountingAdminInterfaceDispatcher.name()),
url(r'^pricing_table/(?P<product>[\w-]+)/(?P<locale>[\w-]+)/$', 'pricing_table_json', name='pricing_table_json'),
)
|
fanout/webhookinbox
|
api/views.py
|
Python
|
mit
| 12,930
| 0.030085
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from base64 import b64encode, b64decode
import datetime
import copy
import json
from django.conf import settings
from django.http import HttpResponse, HttpResponseBadRequest, HttpResponseNotFound, HttpResponseNotAllowed
from gripcontrol import Channel, HttpResponseFormat, HttpStreamFormat
from django_grip import set_hold_longpoll, set_hold_stream, publish
import redis_ops
def _setting(name, default):
v = getattr(settings, name, None)
if v is None:
return default
return v
db = redis_ops.RedisOps()
grip_prefix = _setting('WHINBOX_GRIP_PREFIX', 'wi-')
orig_headers = _setting('WHINBOX_ORIG_HEADERS', False)
# useful list derived from requestbin
ignore_headers = """
X-Varnish
X-Forwarded-For
X-Heroku-Dynos-In-Use
X-Request-Start
X-Heroku-Queue-Wait-Time
X-Heroku-Queue-Depth
X-Real-Ip
X-Forwarded-Proto
X-Via
X-Forwarded-Port
Grip-Sig
Grip-Feature
Grip-Last
""".split("\n")[1:-1]
def _ignore_header(name):
name = name.lower()
for h in ignore_headers:
if name == h.lower():
return True
return False
def _convert_header_name(name):
out = ''
word_start = True
for c in name:
if c == '_':
out += '-'
word_start = True
elif word_start:
out += c.upper()
word_start = False
else:
out += c.lower()
return out
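# e.g. (behavior of the helper above):
#   _convert_header_name('CONTENT_TYPE') -> 'Content-Type'
#   _convert_header_name('X_REAL_IP')    -> 'X-Real-Ip'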
def _req_to_item(req):
item = dict()
item['method'] = req.method
item['path'] = req.path
query = req.META.get('QUERY_STRING')
if query:
item['query'] = query
raw_headers = list()
content_length = req.META.get('CONTENT_LENGTH')
if content_length:
raw_headers.append(('CONTENT_LENGTH', content_length))
content_type = req.META.get('CONTENT_TYPE')
if content_type:
raw_headers.append(('CONTENT_TYPE', content_type))
for k, v in req.META.iteritems():
if k.startswith('HTTP_'):
raw_headers.append((k[5:], v))
# undjangoify the header names
headers = list()
for h in raw_headers:
headers.append((_convert_header_name(h[0]), h[1]))
if orig_headers:
# if this option is set, then we assume the exact headers are magic prefixed
tmp = list()
for h in headers:
if h[0].lower().startswith('eb9bf0f5-'):
tmp.append((h[0][9:], h[1]))
headers = tmp
else:
# otherwise, use the blacklist to clean things up
tmp = list()
for h in headers:
if not _ignore_header(h[0]):
tmp.append(h)
headers = tmp
item['headers'] = headers
if len(req.body) > 0:
try:
# if the body is valid utf-8, then store as text
item['body'] = req.body.decode('utf-8')
except:
# else, store as binary
item['body-bin'] = b64encode(req.body)
forwardedfor = req.META.get('HTTP_X_FORWARDED_FOR')
if forwardedfor:
ip_address = forwardedfor.split(',')[0].strip()
else:
ip_address = req.META['REMOTE_ADDR']
item['ip_address'] = ip_address
return item
def _convert_item(item, responded=False):
out = copy.deepcopy(item)
created = datetime.datetime.fromtimestamp(item['created']).isoformat()
if len(created) > 0 and created[-1] != 'Z':
created += 'Z'
out['created'] = created
if responded:
out['state'] = 'responded'
else:
out['state'] = 'response-pending'
return out
def root(req):
return HttpResponseNotFound('Not Found\n')
def create(req):
if req.method == 'POST':
host = req.META.get('HTTP_HOST')
if not host:
return HttpResponseBadRequest('Bad Request: No \'Host\' header\n')
inbox_id = req.POST.get('id')
if inbox_id is not None and len(inbox_id) > 64:
return HttpResponseBadRequest('Bad Request: Id length must not exceed 64\n')
ttl = req.POST.get('ttl')
if ttl is not None:
ttl = int(ttl)
if ttl is None:
ttl = 3600
response_mode = req.POST.get('response_mode')
if not response_mode:
response_mode = 'auto'
if response_mode not in ('auto', 'wait-verify', 'wait'):
return HttpResponseBadRequest('Bad Request: response_mode must be "auto", "wait-verify", or "wait"\n')
try:
inbox_id = db.inbox_create(inbox_id, ttl, response_mode)
except redis_ops.InvalidId:
return HttpResponseBadRequest('Bad Request: Invalid id\n')
except redis_ops.ObjectExists:
return HttpResponse('Conflict: Inbox already exists\n', status=409)
except:
return HttpResponse('Service Unavailable\n', status=503)
out = dict()
out['id'] = inbox_id
out['base_url'] = 'http://' + host + '/i/' + inbox_id + '/'
out['ttl'] = ttl
out['response_mode'] = response_mode
return HttpResponse(json.dumps(out) + '\n', content_type='application/json')
else:
return HttpResponseNotAllowed(['POST'])
def inbox(req, inbox_id):
if req.method == 'GET':
host = req.META.get('HTTP_HOST')
if not host:
return HttpResponseBadRequest('Bad Request: No \'Host\' header\n')
try:
inbox = db.inbox_get(inbox_id)
except redis_ops.InvalidId:
            return HttpResponseBadRequest('Bad Request: Invalid id\n')
except redis_ops.ObjectDoesNotExist:
return HttpResponseNotFound('Not Found\n')
except:
return HttpResponse('Service Unavailable\n', status=503)
out = dict()
out['id'] = inbox_id
out['base_url'] = 'http://' + host + '/i/' + inbox_id + '/'
out['ttl'] = inbox['ttl']
response_mode = inbox.get('response_mode')
if not response_mode:
response_mode = 'auto'
out['response_mode'] = response_mode
        return HttpResponse(json.dumps(out) + '\n', content_type='application/json')
elif req.method == 'DELETE':
try:
db.inbox_delete(inbox_id)
except redis_ops.InvalidId:
return HttpResponseBadRequest('Bad Request: Invalid id\n')
except redis_ops.ObjectDoesNotExist:
return HttpResponseNotFound('Not Found\n')
except:
return HttpResponse('Service Unavailable\n', status=503)
# we'll push a 404 to any long polls because we're that cool
publish(grip_prefix + 'inbox-%s' % inbox_id, HttpResponseFormat(code=404, headers={'Content-Type': 'text/html'}, body='Not Found\n'))
return HttpResponse('Deleted\n')
else:
return HttpResponseNotAllowed(['GET', 'DELETE'])
def refresh(req, inbox_id):
if req.method == 'POST':
ttl = req.POST.get('ttl')
if ttl is not None:
ttl = int(ttl)
try:
db.inbox_refresh(inbox_id, ttl)
except redis_ops.InvalidId:
return HttpResponseBadRequest('Bad Request: Invalid id\n')
except redis_ops.ObjectDoesNotExist:
return HttpResponseNotFound('Not Found\n')
except:
return HttpResponse('Service Unavailable\n', status=503)
return HttpResponse('Refreshed\n')
else:
return HttpResponseNotAllowed(['POST'])
def respond(req, inbox_id, item_id):
if req.method == 'POST':
try:
content = json.loads(req.body)
except:
return HttpResponseBadRequest('Bad Request: Body must be valid JSON\n')
try:
code = content.get('code')
if code is not None:
code = int(code)
else:
code = 200
reason = content.get('reason')
headers = content.get('headers')
if 'body-bin' in content:
body = b64decode(content['body-bin'])
elif 'body' in content:
body = content['body']
else:
body = ''
except:
return HttpResponseBadRequest('Bad Request: Bad format of response\n')
try:
db.request_remove_pending(inbox_id, item_id)
except redis_ops.InvalidId:
return HttpResponseBadRequest('Bad Request: Invalid id\n')
except redis_ops.ObjectDoesNotExist:
return HttpResponseNotFound('Not Found\n')
except:
return HttpResponse('Service Unavailable\n', status=503)
publish(grip_prefix + 'wait-%s-%s' % (inbox_id, item_id), HttpResponseFormat(code=code, reason=reason, headers=headers, body=body), id='1', prev_id='0')
return HttpResponse('Ok\n')
else:
return HttpResponseNotAllowed(['POST'])
def hit(req, inbox_id):
if len(req.grip.last) > 0:
for channel, last_id in req.grip.last.iteritems():
break
set_hold_longpoll(req, Channel(channel, last_id))
return HttpResponse('Service Unavailable\n', status=503, content_type='text/html')
try:
inbox = db.inbox_get(inbox_id)
except redis_ops.InvalidId:
return HttpResponseBadRequest('Bad Request: Invalid id\n')
except redis_ops.ObjectDoesNotExist:
return HttpResponseNotFound('Not Found\n')
except:
return HttpResponse('Service Unavailable\n', status=503)
response_mode = inbox.get('response_mode')
if not response_mode:
response_mode = 'auto'
# pub
|
emillynge/lasagne-caterer
|
lasagnecaterer/recipe.py
|
Python
|
gpl-3.0
| 50,854
| 0.001298
|
"""
Instructions on how to put together different kinds of lasagnas
"""
# builtins
from collections import (namedtuple, defaultdict, OrderedDict)
from functools import (lru_cache, partial)
from weakref import WeakKeyDictionary
# pip packages
from itertools import chain
import sys
from theano import tensor as T
import theano
import lasagne as L
import numpy as np
# github packages
from elymetaclasses.events import ChainedProps, args_from_opt
from .utils import ChainPropsABCMetaclass
from .fridge import ClassSaveLoadMixin, SaveLoadZipFilemixin
class ScalarParameter:
def opt_callback(self, instance, opt_name, value):
self.__set__(instance, value)
def __init__(self, opt_name, spec_type=L.init.Constant, shape=None,
default=0.0):
self.opt_name = opt_name
self.spec_type = spec_type
self.shape = shape
self.default = default
self.instances = WeakKeyDictionary()
def make_param(self, instance, set_val=None):
opt = instance.opt
callback = partial(self.opt_callback, instance)
init_val = set_val if set_val is not None else opt.get(self.opt_name,
self.default)
if not self.shape:
param = theano.shared(self.spec_type(init_val).sample((1,))[0])
else:
spec = self.spec_type()
param = L.utils.create_param(spec, shape=self.shape,
name=self.opt_name)
opt.set_callback(self.opt_name, callback)
return param, callback
def __get__(self, instance: ChainedProps, obj_type):
if instance not in self.instances:
self.instances[instance] = self.make_param(instance)
return self.instances[instance][0]
def __set__(self, instance, value) -> theano.Variable:
if instance not in self.instances:
self.instances[instance] = self.make_param(instance)
self.instances[instance][0].set_value(value)
class OneHotLayer(L.layers.Layer):
def __init__(self, incoming, axis=0, name=None):
self.axis = axis
super().__init__(incoming, name)
def get_output_for(self, input, **kwargs):
return L.utils.one_hot(T.argmax(input, axis=self.axis),
input.shape[self.axis])
class LasagneBase(ChainedProps, ClassSaveLoadMixin,
metaclass=ChainPropsABCMetaclass):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.saved_params = None
@property
def l_in(self, features, win_sz=1):
"""
Input layer into which x feeds
x is [batch_sz, seq_len, win_sz, features]
:param seq_len:
:param features:
:param win_sz:
:return:
"""
return L.layers.InputLayer((None, None, win_sz, features))
@property
def target_values(self):
"""
Shared variable to hold y
override if target values is *not* a 3D tensor
unlikely if using lasagne
:return:
"""
return T.tensor3('target_output')
@property
def l_bottom(self):
"""
Bottom most non-input layer
:return:
"""
return self.l_in
@property
def l_top(self):
"""
Topmost non-output layer
:return:
"""
return self.l_bottom
@property
def l_out_flat(self):
"""
Flattened output layer.
output of this layer should be transformed so it matches target values
(eg. softmax into classes)
:return:
"""
return self.out_transform(self.l_top)
@property
def l_out(self, seq_len, features):
"""
Reshaped output layer that matches the shape of y
:param seq_len:
:param features:
:return:
"""
return L.layers.ReshapeLayer(self.l_out_flat, (-1, seq_len, features))
def reset_params(self):
self.init_params(None)
def init_params(self, saved_params):
if saved_params:
self.set_all_params(saved_params)
@property
def all_params(self):
"""
list of shared variables which hold all parameters in the lasagne
:return:
"""
self.init_params(self.saved_params)
return L.layers.get_all_params(self.l_out_flat)
@property
def all_train_params(self):
self.init_params(self.saved_params)
return L.layers.get_all_params(self.l_out_flat, trainable=True)
def set_all_params(self, params):
params = [np.array(param, dtype=np.float32) for param in params]
L.layers.set_all_param_values(self.l_out_flat, params)
self.saved_params = params
def get_all_params_copy(self):
return L.layers.get_all_param_values(self.l_out_flat)
@property
def cost(self):
"""
Shared variable with the cost of target values vs predicted values
:param:
:return:
"""
flattened_output = L.layers.get_output(self.l_out_flat)
return self.cost_metric(flattened_output)
@property
def cost_det(self):
"""
Shared variable with the cost of target values vs predicted values
Computed deterministic such that any dropout is ignored
:param features:
:return:
"""
flattened_output = L.layers.get_output(self.l_out_flat,
deterministic=True)
return self.cost_metric(flattened_output)
def compiled_function(self, *args, givens=tuple(), **kwargs):
kwargs['givens'] = list(givens) # + [self.learning_rate]
return theano.function(*args, **kwargs)
learning_rate = ScalarParameter('learning_rate', default=.002)
decay_rate = ScalarParameter('decay_rate', default=.9)
final_grad_clip = ScalarParameter('final_grad_clip', default=5.0)
@property
def train_updates(self):
grads = theano.grad(self.cost, self.all_train_params)
if self.final_grad_clip.get_value():
grads = [T.clip(g, -self.final_grad_clip, self.final_grad_clip) for g in grads]
return L.updates.rmsprop(grads, self.all_train_params,
self.learning_rate, rho=self.decay_rate)
@property
def f_train(self):
"""
Compiled theano function that takes (x, y) and trains the lasagne
Updates use default cost (e.g with dropout)
But f_train returns the deterministic cost
:return:
"""
return self.compiled_function([self.l_in.input_var, self.target_values],
self.cost_det,
updates=self.train_updates,
allow_input_downcast=True)
@property
def f_train_no_return(self):
"""
Compiled theano function
|
that takes (x, y) and trains the lasagne
Updates use default cost (e.g with dropout)
Does *not* return a cost.
:return: None
"""
return self.compiled_function([self.l_in.input_var, self.target_values],
updates=self.train_updates,
allow_input_downcast=True)
@property
def f_cost(self):
"""
Compiled theano function that takes (x, y) and return cost.
No updates is made
:return:
"""
self.all_train_params
return self.compiled_function([self.l_in.input_var, self.target_values],
self.cost_det, allow_input_downcast=True)
@property
def f_predict(self, features):
"""
Compiled theano function that takes (x) and predicts y
Computed *deterministic*
:return:
"""
self.all_train_params
resh = L.layers.ReshapeLayer(self.l_out_flat, (-1, features))
output_transformed = self.predict_transform(resh)
prediction = L.layers.get_output(output_transformed, deterministic=True)
return self.compiled_function([self.l_in.input_
|
pinterest/wheeljack
|
wheeljack/exceptions.py
|
Python
|
mit
| 769
| 0
|
__author__ = 'davedash'
class WheeljackException(Exception):
pass
class RepoNotFoundException(WheeljackException):
"""Exception thrown when we interact with an undefined Repo."""
pass
class RepoAlreadyInstalledException(WheeljackException):
"""Exception thrown when we try to re-install a repo."""
pass
class ReposConfigException(WheeljackException):
"""Exception raised when there is a configuration error."""
pass
class WheeljackCodeDirectoryMissing(WheeljackException):
"""Raised if we are missing our base directory."""
pass
class GitNotRepoException(WheeljackException):
"""Raise if we interact with a non Git-dir in a Git-ish manner."""
pass
class GitNoOriginRemoteException(WheeljackException):
pass
|
fujicoin/fujicoin
|
test/functional/wallet_startup.py
|
Python
|
mit
| 2,543
| 0.00118
|
#!/usr/bin/env python3
# Copyright (c) 2017-2020 The Fujicoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test wallet load on startup.
Verify that a fujicoind node can maintain the list of wallets to load on startup.
"""
from test_framework.test_framework import FujicoinTestFramework
from test_framework.util import (
assert_equal,
)
class WalletStartupTest(FujicoinTestFramework):
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 1
self.supports_cli = True
def skip_test_if_missing_module(self):
self.skip_if_no_wallet()
def setup_nodes(self):
self.add_nodes(self.num_nodes)
self.start_nodes()
def run_test(self):
self.log.info('Should start without any wallets')
assert_equal(self.nodes[0].listwallets(), [])
assert_equal(self.nodes[0].listwalletdir(), {'wallets': []})
self.log.info('New default wallet should load by default when there are no other wallets')
self.nodes[0].createwallet(wallet_name='', load_on_startup=False)
self.restart_node(0)
assert_equal(self.nodes[0].listwallets(), [''])
self.log.info('Test load on startup behavior')
self.nodes[0].createwallet(wallet_name='w0', load_on_startup=True)
self.nodes[0].createwallet(wallet_name='w1', load_on_startup=False)
self.nodes[0].createwallet(wallet_name='w2', load_on_startup=True)
self.nodes[0].createwallet(wallet_name='w3', load_on_startup=False)
self.nodes[0].createwallet(wallet_name='w4', load_on_startup=False)
self.nodes[0].unloadwallet(wallet_name='w0', load_on_startup=False)
self.nodes[0].unloadwallet(wallet_name='w4', load_on_startup=False)
self.nodes[0].loadwallet(filename='w4', load_on_startup=True)
assert_equal(set(self.nodes[0].listwallets()), set(('', 'w1', 'w2', 'w3', 'w4')))
self.restart_node(0)
        assert_equal(set(self.nodes[0].listwallets()), set(('', 'w2', 'w4')))
self.nodes[0].unloadwallet(wallet_name='', load_on_startup=False)
        self.nodes[0].unloadwallet(wallet_name='w4', load_on_startup=False)
self.nodes[0].loadwallet(filename='w3', load_on_startup=True)
self.nodes[0].loadwallet(filename='')
self.restart_node(0)
assert_equal(set(self.nodes[0].listwallets()), set(('w2', 'w3')))
if __name__ == '__main__':
WalletStartupTest().main()
|
FreeJournal/freejournal
|
models/collection.py
|
Python
|
mit
| 6,736
| 0.007423
|
from sqlalchemy import Column, ForeignKey, Integer, String, Text, DateTime, Table
from sqlalchemy.orm import relationship, backref
from models import DecBase
from models.document import Document
from models.keyword import Keyword
from jsonschema import *
from json_schemas import *
from models.collection_version import CollectionVersion
from timestamp.timestampfile import TimestampFile
import time
import json
# Define foreign keys required for joining defined tables together
collection_keywords = Table('collection_keywords', DecBase.metadata,
Column('keyword_id', Integer, ForeignKey('keyword.id')),
Column('collection_address', String, ForeignKey('collection.address'))
)
collection_docs = Table('collection_docs', DecBase.metadata,
Column('document_address', String, ForeignKey('document.hash')),
Column('collection_address_docs', String, ForeignKey('collection.address'))
)
hash_association = Table('collection_hashes', DecBase.metadata,
Column('hash', String, ForeignKey('collection_version.root_hash')),
Column('collection_address', String, ForeignKey('collection.address'))
)
class Collection(DecBase):
""" A Collection is the fundamental unit of organization in the FreeJournal network.
A Collection is a uniquely identifiable set of documents. Each collection is associated
with and signed by a BitMessage broadcast channel address. Each collection contains
a list of documents, a Bitcoin address for ranking, and a version. Messages on the network
called DocIndex messages share the state of a collection at a given version.
This class stores the latest version of each collection the FreeJournal node decides to mirror.
It also stores old timestamps and Merkle trees for bookkeeping purposes (@todo).
Attributes:
title: Title of collection (as in message spec)
description: Collection description (as in message spec)
address: Bitmessage address uniquely ID'ing collection (as in message spec)
btc: Bitcoin address for rating documents (as in message spec)
keywords: Keywords as list of Keyword class for searching (as in message spec)
documents: List of document classes included in the collection (as in message spec)
latest_broadcast_date: The date that this collection was last seen broadcasted in the Main Channel
creation_date: Earliest known timestamp of collection, or if none earliest approximation of creation date of
current version of collection
oldest_date: Earliest known timestamp of collection, or if none earliest approximation of creation date of
any version of collection
latest_btc_tx: Latest Bitcoin transaction timestamping merkle belonging to this collection
        oldest_btc_tx: Oldest Bitcoin transaction timestamping merkle belonging to this collection
accesses: Number of times this collection is accessed by a user of this node (for cache pruning)
votes: Latest vote count from the Bitcoin network, used to rank collection
votes_last_checked: Latest poll of Bitcoin network for collection votes, to coordinate internal repolling
"
|
""
__tablename__ = 'collection'
title = Column(Text, nullable=False)
description = Column(String)
address = Column(String, primary_key=True)
btc = Column(String)
keywords = relationship("Keyword", secondary=collection_keywords, backref='collection')
documents = relationship("Document", secondary=collection_docs, backref='collection')
latest_broadcast_date = Column(DateTime, nullable=False)
creation_date = Column(DateTime, nullable=False)
oldest_date = Column(DateTime, nullable=False)
latest_btc_tx = Column(String)
oldest_btc_tx = Column(String)
accesses = Column(Integer, nullable=False, default=0)
votes = Column(Integer, nullable=False, default=0)
votes_last_checked = Column(DateTime)
version_list = relationship(CollectionVersion, backref="collection", lazy='dynamic', secondary=hash_association)
def to_json(self):
"""
Encodes a Collection as a json representation so it can be sent through the bitmessage network
:return: the json representation of the given Collection
"""
json_docs = []
for doc in self.documents:
json_docs.append({"address": doc.collection_address, "description": doc.description, "title": doc.title,
"hash": doc.hash, "filename": doc.filename, "accesses": doc.accesses})
json_keywords = []
for key in self.keywords:
json_keywords.append({"id": key.id, "name": key.name})
json_representation = {"type_id": 1,
"title": self.title,
"description": self.description,
"keywords": json_keywords,
"address": self.address,
"documents": json_docs,
"btc": self.btc,
"latest_broadcast_date": self.latest_broadcast_date.strftime("%A, %d. %B %Y %I:%M%p"),
"creation_date": self.creation_date.strftime("%A, %d. %B %Y %I:%M%p"),
"oldest_date": self.oldest_date.strftime("%A, %d. %B %Y %I:%M%p"),
"latest_btc_tx": self.latest_btc_tx,
"oldest_btc_tx": self.oldest_btc_tx,
"accesses": self.accesses,
"votes": self.votes,
"votes_last_checked": self.votes_last_checked.strftime("%A, %d. %B %Y %I:%M%p")}
try:
validate(json_representation, coll_schema)
return json.dumps(json_representation, sort_keys=True)
except ValidationError as m:
return None
def get_latest_version(self):
latest_version = self.version_list.order_by(CollectionVersion.collection_version.desc()).first()
if latest_version is None:
return 0
else:
return latest_version.collection_version
def get_latest_collection_version(self):
latest_version = self.version_list.order_by(CollectionVersion.collection_version.desc()).first()
return latest_version
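# A minimal construction sketch (editor's illustration; the field values and
# the surrounding SQLAlchemy session setup are assumed, not from this project):
#
#   from datetime import datetime
#   now = datetime.utcnow()
#   coll = Collection(title='Example collection', address='BM-2cExampleAddr',
#                     latest_broadcast_date=now, creation_date=now,
#                     oldest_date=now, accesses=0, votes=0,
#                     votes_last_checked=now)
#   coll.to_json()  # a JSON string, or None if coll_schema validation fails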
|
pknight007/electrum-vtc
|
plugins/labels/labels.py
|
Python
|
mit
| 5,710
| 0.001576
|
import hashlib
import requests
import threading
import json
import sys
import traceback
import base64
import electrum_vtc as electrum
from electrum_vtc.plugins import BasePlugin, hook
from electrum_vtc.i18n import _
class LabelsPlugin(BasePlugin):
def __init__(self, parent, config, name):
BasePlugin.__init__(self, parent, config, name)
self.target_host = 'labels.bauerj.eu'
self.wallets = {}
def encode(self, wallet, msg):
password, iv, wallet_id = self.wallets[wallet]
encrypted = electrum.bitcoin.aes_encrypt_with_iv(password, iv,
msg.encode('utf8'))
return base64.b64encode(encrypted)
def decode(self, wallet, message):
password, iv, wallet_id = self.wallets[wallet]
decoded = base64.b64decode(message)
decrypted = electrum.bitcoin.aes_decrypt_with_iv(password, iv, decoded)
return decrypted.decode('utf8')
def get_nonce(self, wallet):
# nonce is the nonce to be used with the next change
nonce = wallet.storage.get('wallet_nonce')
if nonce is None:
nonce = 1
self.set_nonce(wallet, nonce)
return nonce
def set_nonce(self, wallet, nonce):
self.print_error("set", wallet.basename(), "nonce to", nonce)
wallet.storage.put("wallet_nonce", nonce)
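# Illustrative note on the nonce protocol (inferred from the methods above
# and below, stated as an assumption rather than documented behavior):
#   - get_nonce() returns the nonce for the *next* local change, starting at 1
#   - set_label() sends the current nonce with each change, then stores nonce + 1
#   - pull_thread(force=False) asks the server for labels since nonce - 1,
#     i.e. everything at or after the last change this client pushed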
@hook
def set_label(self, wallet, item, label):
if wallet not in self.wallets:
return
nonce = self.get_nonce(wallet)
wallet_id = self.wallets[wallet][2]
bundle = {"walletId": wallet_id,
"walletNonce": nonce,
"externalId": self.encode(wallet, item),
"encryptedLabel": self.encode(wallet, label)}
t = threading.Thread(target=self.do_request,
args=["POST", "/label", False, bundle])
t.setDaemon(True)
t.start()
# Caller will write the wallet
self.set_nonce(wallet, nonce + 1)
def do_request(self, method, url="/labels", is_batch=False, data=None):
url = 'https://' + self.target_host + url
kwargs = {'headers': {}}
if method == 'GET' and data:
kwargs['params'] = data
elif method == 'POST' and data:
kwargs['data'] = json.dumps(data)
kwargs['headers']['Content-Type'] = 'application/json'
response = requests.request(method, url, **kwargs)
if response.status_code != 200:
raise BaseException(response.status_code, response.text)
response = response.json()
if "error" in response:
raise BaseException(response["error"])
return response
def push_thread(self, wallet):
wallet_id = self.wallets[wallet][2]
bundle = {"labels": [],
"walletId": wallet_id,
"walletNonce": self.get_nonce(wallet)}
for key, value in wallet.labels.iteritems():
try:
encoded_key = self.encode(wallet, key)
encoded_value = self.encode(wallet, value)
except Exception:
self.print_error('cannot encode', repr(key), repr(value))
continue
bundle["labels"].append({'encryptedLabel': encoded_value,
                         'externalId': encoded_key})
self.do_request("POST", "/labels", True, bundle)
def pull_thread(self, wallet, force):
wallet_id = self.wallets[wallet][2]
nonce = 1 if force else self.get_nonce(wallet) - 1
self.print_error("asking for labels since nonce", nonce)
try:
response = self.do_request("GET", "/labels/since/%d/for/%s" % (nonce, wallet_id))
if response["labels"] is None:
self.print_error('no new labels')
return
result = {}
for label in response["labels"]:
try:
key = self.decode(wallet, label["externalId"])
value = self.decode(wallet, label["encryptedLabel"])
except Exception:
continue
try:
json.dumps(key)
json.dumps(value)
except Exception:
self.print_error('error: no json', key)
continue
result[key] = value
for key, value in result.items():
if force or not wallet.labels.get(key):
wallet.labels[key] = value
self.print_error("received %d labels" % len(response["labels"]))
# do not write to disk because we're in a daemon thread
wallet.storage.put('labels', wallet.labels)
self.set_nonce(wallet, response["nonce"] + 1)
self.on_pulled(wallet)
except Exception as e:
traceback.print_exc(file=sys.stderr)
self.print_error("could not retrieve labels")
def start_wallet(self, wallet):
nonce = self.get_nonce(wallet)
self.print_error("wallet", wallet.basename(), "nonce is", nonce)
mpk = wallet.get_fingerprint()
if not mpk:
return
password = hashlib.sha1(mpk).digest().encode('hex')[:32]
iv = hashlib.sha256(password).digest()[:16]
wallet_id = hashlib.sha256(mpk).digest().encode('hex')
self.wallets[wallet] = (password, iv, wallet_id)
# If there is an auth token we can try to actually start syncing
t = threading.Thread(target=self.pull_thread, args=(wallet, False))
t.setDaemon(True)
t.start()
def stop_wallet(self, wallet):
self.wallets.pop(wallet, None)
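# --- Illustrative sketch (not part of the plugin) of the key-material
# derivation performed in start_wallet() above. The plugin itself is
# Python 2 (bytes.encode('hex'), iteritems); this standalone version is
# written for Python 3 and uses a made-up mpk placeholder.
import hashlib

mpk = b"xpub-example-master-public-key"                # placeholder fingerprint bytes
password = hashlib.sha1(mpk).hexdigest()[:32]          # 32 hex chars of AES key material
iv = hashlib.sha256(password.encode()).digest()[:16]   # 16-byte AES-CBC IV
wallet_id = hashlib.sha256(mpk).hexdigest()            # stable, non-reversible wallet id
print(wallet_id)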
|
saltstack/salt
|
tests/integration/modules/test_mac_timezone.py
|
Python
|
apache-2.0
| 7,775
| 0.00373
|
"""
Integration tests for mac_timezone
If using parallels, make sure Time sync is turned off. Otherwise, parallels will
keep changing your date/time settings while the tests are running. To turn off
Time sync do the following:
- Go to actions -> configure
- Select options at the top and 'More Options' on the left
- Set time to 'Do not sync'
"""
import datetime
import pytest
from tests.support.case import ModuleCase
from tests.support.unit import skipIf
@pytest.mark.flaky(max_runs=4)
@pytest.mark.skip_unless_on_darwin
@pytest.mark.skip_if_binaries_missing("systemsetup")
@pytest.mark.skip_if_not_root
class MacTimezoneModuleTest(ModuleCase):
"""
Validate the mac_timezone module
"""
USE_NETWORK_TIME = False
TIME_SERVER = "time.apple.com"
TIME_ZONE = ""
CURRENT_DATE = ""
CURRENT_TIME = ""
def setUp(self):
"""
Get current settings
"""
self.USE_NETWORK_TIME = self.run_function("timezone.get_using_network_time")
self.TIME_SERVER = self.run_function("timezone.get_time_server")
self.TIME_ZONE = self.run_function("timezone.get_zone")
self.CURRENT_DATE = self.run_function("timezone.get_date")
self.CURRENT_TIME = self.run_function("timezone.get_time")
self.run_function("timezone.set_using_network_time", [False])
self.run_function("timezone.set_zone", ["America/Denver"])
def tearDown(self):
"""
Reset to original settings
"""
self.run_function("timezone.set_time_server", [self.TIME_SERVER])
self.run_function("timezone.set_using_network_time", [self.USE_NETWORK_TIME])
self.run_function("timezone.set_zone", [self.TIME_ZONE])
if not self.USE_NETWORK_TIME:
self.run_function("timezone.set_date", [self.CURRENT_DATE])
self.run_function("timezone.set_time", [self.CURRENT_TIME])
@skipIf(
True,
"Skip until we can figure out why modifying the system clock causes ZMQ errors",
)
@pytest.mark.destructive_test
def test_get_set_date(self):
"""
Test timezone.get_date
Test timezone.set_date
"""
# Correct Functionality
self.assertTrue(self.run_function("timezone.set_date", ["2/20/2011"]))
self.assertEqual(self.run_function("timezone.get_date"), "2/20/2011")
# Test bad date format
self.assertEqual(
self.run_function("timezone.set_date", ["13/12/2014"]),
"ERROR executing 'timezone.set_date': Invalid Date/Time Format: 13/12/2014",
)
@pytest.mark.slow_test
def test_get_time(self):
"""
Test timezone.get_time
"""
text_time = self.run_function("timezone.get_time")
self.assertNotEqual(text_time, "Invalid Timestamp")
obj_date = datetime.datetime.strptime(text_time, "%H:%M:%S")
self.assertIsInstance(obj_date, datetime.date)
@skipIf(
True,
"Skip until we can figure out why modifying the system clock causes ZMQ errors",
)
@pytest.mark.destructive_test
def test_set_time(self):
"""
Test timezone.set_time
"""
# Correct Functionality
self.assertTrue(self.run_function("timezone.set_time", ["3:14"]))
# Test bad time format
self.assertEqual(
self.run_function("timezone.set_time", ["3:71"]),
"ERROR executing 'timezone.set_time': Invalid Date/Time Format: 3:71",
)
@skipIf(
True,
"Skip until we can figure out why modifying the system clock causes ZMQ errors",
)
@pytest.mark.destructive_test
def test_get_set_zone(self):
"""
Test timezone.get_zone
Test timezone.set_zone
"""
# Correct Functionality
self.assertTrue(self.run_function("timezone.set_zone", ["Pacific/Wake"]))
self.assertEqual(self.run_function("timezone.get_zone"), "Pacific/Wake")
# Test bad timezone
self.assertEqual(
self.run_function("timezone.set_zone", ["spongebob"]),
"ERROR executing 'timezone.set_zone': Invalid Timezone: spongebob",
)
@skipIf(
True,
"Skip until we can figure out why modifying the system clock causes ZMQ errors",
)
@pytest.mark.destructive_test
def test_get_offset(self):
"""
Test timezone.get_offset
"""
self.assertTrue(self.run_function("timezone.set_zone", ["Pacific/Wake"]))
self.assertIsInstance(self.run_function("timezone.get_offset"), (str,))
self.assertEqual(self.run_function("timezone.get_offset"), "+1200")
self.assertTrue(self.run_function("timezone.set_zone", ["America/Los_Angeles"]))
self.assertIsInstance(self.run_function("timezone.get_offset"), (str,))
self.assertEqual(self.run_function("timezone.get_offset"), "-0700")
@skipIf(
True,
"Skip until we can figure out why modifying the system clock causes ZMQ errors",
)
@pytest.mark.destructive_test
def test_get_set_zonecode(self):
"""
Test timezone.get_zonecode
Test timezone.set_zonecode
"""
self.assertTrue(self.run_function("timezone.set_zone", ["America/Los_Angeles"]))
self.assertIsInstance(self.run_function("timezone.get_zonecode"), (str,))
self.assertEqual(self.run_function("timezone.get_zonecode"), "PDT")
self.assertTrue(self.run_function("timezone.set_zone", ["Pacific/Wake"]))
self.assertIsInstance(self.run_function("timezone.get_zonecode"), (str,))
self.assertEqual(self.run_function("timezone.get_zonecode"), "WAKT")
@pytest.mark.slow_test
def test_list_zones(self):
"""
Test timezone.list_zones
"""
zones = self.run_function("timezone.list_zones")
self.assertIsInstance(zones, list)
self.assertIn("America/Denver", zones)
self.assertIn("America/Los_Angeles", zones)
@skipIf(
True,
"Skip until we can figure out why modifying the system clock causes ZMQ errors",
)
@pytest.mark.destructive_test
def test_zone_compare(self):
"""
Test timezone.zone_compare
"""
self.assertTrue(self.run_function("timezone.set_zone", ["America/Denver"]))
self.assertTrue(self.run_function("timezone.zone_compare", ["America/Denver"]))
self.assertFalse(self.run_function("timezone.zone_compare", ["Pacific/Wake"]))
@skipIf(
True,
"Skip until we can figure out why modifying the system clock causes ZMQ errors",
)
@pytest.mark.destructive_test
def test_get_set_using_network_time(self):
"""
Test timezone.get_using_network_time
Test timezone.set_using_network_time
"""
self.assertTrue(self.run_function("timezone.set_using_network_time", [True]))
self.assertTrue(self.run_function("timezone.get_using_network_time"))
self.assertTrue(self.run_function("timezone.set_using_network_time", [False]))
self.assertFalse(self.run_function("timezone.get_using_network_time"))
@skipIf(
True,
"Skip until we can figure out why modifying the system clock causes ZMQ errors",
)
@pytest.mark.destructive_test
def test_get_set_time_server(self):
"""
Test timezone.get_time_server
Test timezone.set_time_server
"""
self.assertTrue(
self.run_function("timezone.set_time_server", ["spongebob.com"])
)
self.assertEqual(self.run_function("timezone.get_time_server"), "spongebob.com")
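# --- Standalone illustration (not part of the test suite) of the timestamp
# check used in test_get_time above, with an assumed sample value in place
# of the live call to timezone.get_time:
import datetime

text_time = "13:37:05"  # assumed sample output of timezone.get_time
obj_date = datetime.datetime.strptime(text_time, "%H:%M:%S")
assert isinstance(obj_date, datetime.date)  # passes: datetime is a subclass of date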
|
brucexiejiaming/App_store
|
part1_crawler/appstore/appstore/settings.py
|
Python
|
mit
| 579
| 0.005181
|
# -*- coding: utf-8 -*-
# Scrapy settings for appstore project
#
# For simplicity, this file contains only the most important settings by
# default. All the other settings are documented here:
#
# http://doc.scrapy.org/en/latest/topics/settings.html
#
BOT_NAME = 'appstore'
SPIDER_MODULES = ['appstore.spiders']
NEWSPIDER_MODULE = 'appstore.spiders'
ITEM_PIPELINES = {
    'appstore.pipelines.AppstorePipeline': 300,
}
DOWNLOAD_DELAY = 5
# Crawl responsibly by identifying yourself (and your website) on the user-agent
#USER_AGENT = 'appstore (+http://www.yourdomain.com)'
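# --- Hypothetical sketch only: the real appstore.pipelines.AppstorePipeline
# is not included in this dump. A pipeline registered via ITEM_PIPELINES as
# above has this general shape in Scrapy:
class AppstorePipeline(object):
    def process_item(self, item, spider):
        # Called once per scraped item; return the item to pass it on,
        # or raise scrapy.exceptions.DropItem to discard it.
        return item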
|
meduz/scikit-learn
|
sklearn/ensemble/tests/test_gradient_boosting.py
|
Python
|
bsd-3-clause
| 40,613
| 0.000074
|
"""
Testing for the gradient boosting module (sklearn.ensemble.gradient_boosting).
"""
import warnings
import numpy as np
from itertools import product
from scipy.sparse import csr_matrix
from scipy.sparse import csc_matrix
from scipy.sparse import coo_matrix
from sklearn import datasets
from sklearn.base import clone
from sklearn.ensemble import GradientBoostingClassifier
from sklearn.ensemble import GradientBoostingRegressor
from sklearn.ensemble.gradient_boosting import ZeroEstimator
from sklearn.metrics import mean_squared_error
from sklearn.utils import check_random_state, tosequence
from sklearn.utils.testing import assert_almost_equal
from sklearn.utils.testing import assert_array_almost_equal
from sklearn.utils.testing import assert_array_equal
from sklearn.utils.testing import assert_equal
from sklearn.utils.testing import assert_greater
from sklearn.utils.testing import assert_less
from sklearn.utils.testing import assert_raises
from sklearn.utils.testing import assert_true
from sklearn.utils.testing import assert_warns
from sklearn.utils.testing import skip_if_32bit
from sklearn.exceptions import DataConversionWarning
from sklearn.exceptions import NotFittedError
# toy sample
X = [[-2, -1], [-1, -1], [-1, -2], [1, 1], [1, 2], [2, 1]]
y = [-1, -1, -1, 1, 1, 1]
T = [[-1, -1], [2, 2], [3, 2]]
true_result = [-1, 1, 1]
rng = np.random.RandomState(0)
# also load the boston dataset
# and randomly permute it
boston = datasets.load_boston()
perm = rng.permutation(boston.target.size)
boston.data = boston.data[perm]
boston.target = boston.target[perm]
# also load the iris dataset
# and randomly permute it
iris = datasets.load_iris()
perm = rng.permutation(iris.target.size)
iris.data = iris.data[perm]
iris.target = iris.target[perm]
def check_classification_toy(presort, loss):
# Check classification on a toy dataset.
clf = GradientBoostingClassifier(loss=loss, n_estimators=10,
random_state=1, presort=presort)
assert_raises(ValueError, clf.predict, T)
clf.fit(X, y)
assert_array_equal(clf.predict(T), true_result)
assert_equal(10, len(clf.estimators_))
deviance_decrease = (clf.train_score_[:-1] - clf.train_score_[1:])
assert_true(np.any(deviance_decrease >= 0.0))
leaves = clf.apply(X)
assert_equal(leaves.shape, (6, 10, 1))
def test_classification_toy():
for presort, loss in product(('auto', True, False),
('deviance', 'exponential')):
yield check_classification_toy, presort, loss
def test_parameter_checks():
# Check input parameter validation.
assert_raises(ValueError,
GradientBoostingClassifier(n_estimators=0).fit, X, y)
assert_raises(ValueError,
GradientBoostingClassifier(n_estimators=-1).fit, X, y)
assert_raises(ValueError,
GradientBoostingClassifier(learning_rate=0.0).fit, X, y)
assert_raises(ValueError,
GradientBoostingClassifier(learning_rate=-1.0).fit, X, y)
assert_raises(ValueError,
GradientBoostingClassifier(loss='foobar').fit, X, y)
assert_raises(ValueError,
GradientBoostingClassifier(min_samples_split=0.0).fit, X, y)
assert_raises(ValueError,
GradientBoostingClassifier(min_samples_split=-1.0).fit, X, y)
assert_raises(ValueError,
GradientBoostingClassifier(min_samples_split=1.1).fit, X, y)
assert_raises(ValueError,
GradientBoostingClassifier(min_samples_leaf=0).fit, X, y)
assert_raises(ValueError,
GradientBoostingClassifier(min_samples_leaf=-1.0).fit, X, y)
assert_raises(ValueError,
GradientBoostingClassifier(min_weight_fraction_leaf=-1.).fit,
X, y)
assert_raises(ValueError,
GradientBoostingClassifier(min_weight_fraction_leaf=0.6).fit,
X, y)
assert_raises(ValueError,
GradientBoostingClassifier(subsample=0.0).fit, X, y)
assert_raises(ValueError,
GradientBoostingClassifier(subsample=1.1).fit, X, y)
assert_raises(ValueError,
GradientBoostingClassifier(subsample=-0.1).fit, X, y)
assert_raises(ValueError,
GradientBoostingClassifier(max_depth=-0.1).fit, X, y)
assert_raises(ValueError,
GradientBoostingClassifier(max_depth=0).fit, X, y)
assert_raises(ValueError,
GradientBoostingClassifier(init={}).fit, X, y)
# test fit before feature importance
assert_raises(ValueError,
lambda: GradientBoostingClassifier().feature_importances_)
# deviance requires ``n_classes >= 2``.
assert_raises(ValueError,
lambda X, y: GradientBoostingClassifier(
loss='deviance').fit(X, y),
X, [0, 0, 0, 0])
def test_loss_function():
assert_raises(ValueError,
GradientBoostingClassifier(loss='ls').fit, X, y)
assert_raises(ValueError,
GradientBoostingClassifier(loss='lad').fit, X, y)
assert_raises(ValueError,
GradientBoostingClassifier(loss='quantile').fit, X, y)
assert_raises(ValueError,
GradientBoostingClassifier(loss='huber').fit, X, y)
assert_raises(ValueError,
GradientBoostingRegressor(loss='deviance').fit, X, y)
assert_raises(ValueError,
GradientBoostingRegressor(loss='exponential').fit, X, y)
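# --- Hedged counterpart to the assertions above (illustration only, assuming
# the same pre-0.19 scikit-learn API these tests target): regression losses
# belong to the regressor and classification losses to the classifier, so the
# valid pairings look like this, using the module-level toy data:
clf = GradientBoostingClassifier(loss='deviance', n_estimators=5).fit(X, y)
reg = GradientBoostingRegressor(loss='ls', n_estimators=5).fit(X, y)
print(clf.predict(T), reg.predict(T))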
def check_classification_synthetic(presort, loss):
# Test GradientBoostingClassifier on synthetic dataset used by
# Hastie et al. in ESLII Example 12.7.
X, y = datasets.make_hastie_10_2(n_samples=12000, random_state=1)
X_train, X_test = X[:2000], X[2000:]
y_train, y_test = y[:2000], y[2000:]
gbrt = GradientBoostingClassifier(n_estimators=100, min_samples_split=2,
max_depth=1, loss=loss,
learning_rate=1.0, random_state=0)
gbrt.fit(X_train, y_train)
error_rate = (1.0 - gbrt.score(X_test, y_test))
assert_less(error_rate, 0.09)
gbrt = GradientBoostingClassifier(n_estimators=200, min_samples_split=2,
max_depth=1, loss=loss,
learning_rate=1.0, subsample=0.5,
random_state=0,
presort=presort)
gbrt.fit(X_train, y_train)
error_rate = (1.0 - gbrt.score(X_test, y_test))
assert_less(error_rate, 0.08)
def test_classification_synthetic():
for presort, loss in product(('auto', True, False), ('deviance', 'exponential')):
yield check_classification_synthetic, presort, loss
def check_boston(presort, loss, subsample):
# Check consistency on dataset boston house prices with least squares
# and least absolute deviation.
ones = np.ones(len(boston.target))
last_y_pred = None
for sample_weight in None, ones, 2 * ones:
clf = GradientBoostingRegressor(n_estimators=100,
                                loss=loss,
max_depth=4,
subsample=subsample,
min_samples_split=2,
random_state=1,
presort=presort)
assert_raises(ValueError, clf.predict, boston.data)
clf.fit(boston.data, boston.target,
sample_weight=sample_weight)
leaves = clf.apply(boston.data)
assert_equal(leaves.shape, (506, 100))
y_pred = clf.predict(boston.data)
mse = mean_squared_error(boston.target, y_pred)
assert_less(mse, 6.0)
if last_y_pred is not None:
assert_array_almost_equal(last_y_pred, y_pred)
last_y_pred = y_pred
def test_boston():
for presort, loss, subsample in product(('auto', True, False),
('ls', 'lad', 'huber'),
|